_id
stringlengths 2
6
| title
stringlengths 0
58
| partition
stringclasses 3
values | text
stringlengths 52
373k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q11700
|
copyNewerSingle
|
train
|
async function copyNewerSingle(srcpath, destpath, opts) {
  // Copy srcpath to destpath only when the source is more than
  // `interval` ms newer than the destination. Returns 'dir' for
  // directories, false when the copy is skipped, true after copying.
  // NOTE(review): FS, mkdirp and waitForStreamEnd come from elsewhere
  // in the module; FS appears to be a promisified fs facade.
  let { interval = 1000, verbose = false } = opts
  let stat = await FS.stat(srcpath)
  // Stat and check the filesystem entry type.
  if (stat.isDirectory()) {
    // Directory, ensure destination exists and return.
    let made = await mkdirp(destpath)
    if (verbose && made) { console.log(`${made} - directory created`)}
    return 'dir'
  }
  else if (!stat.isFile()) {
    // Not a regular file (socket, fifo, device...): refuse to copy.
    throw new Error("Not supported.")
  }
  let srcmtime = stat.mtime
  let destmtime
  try {
    // Stat destpath and get the mtime.
    destmtime = (await FS.stat(destpath)).mtime
  }
  catch (err) {
    // destpath does not exist: destmtime stays undefined and the copy
    // proceeds. NOTE(review): non-ENOENT stat errors are swallowed here
    // too — confirm that is intended.
  }
  // Destination exists and is at most `interval` ms older than the
  // source: treat it as up to date and skip.
  if (destmtime !== undefined && srcmtime - destmtime <= interval) {
    if (verbose) { console.log(`${srcpath} == ${destpath}`) }
    return false
  }
  // Ensure parent directory exists.
  await mkdirp(`${destpath}/..`)
  // Commence copying.
  let rs = FS.createReadStream(srcpath)
  let ws = FS.createWriteStream(destpath)
  rs.pipe(ws)
  await waitForStreamEnd(ws)
  // Set mtime to be equal to the source file (atime set to "now").
  await FS.utimes(destpath, new Date(), stat.mtime)
  if (verbose) { console.log(`${srcpath} -> ${destpath}`) }
  return true
}
|
javascript
|
{
"resource": ""
}
|
q11701
|
train
|
function(database, callback){
  // Register a feathers service exposing the collection listing of one
  // Mongo database under "<dbName>/_collections".
  // NOTE(review): `database` may be a driver db object (databaseName)
  // or a plain descriptor (name) — confirm both shapes against callers.
  var dbName = database.databaseName || database.name;
  var options = {
    name:dbName + '/_collections',
    service:feathersMongoColls(amityMongo.db.db(dbName))
  };
  amityMongo.amity_collManager.push(options);
  callback();
}
|
javascript
|
{
"resource": ""
}
|
|
q11702
|
train
|
function(collection, callback){
  // Register a feathers service for a single Mongo collection. The
  // namespaced name "db.coll[.sub]" is split into the database name
  // (first segment) and the collection name (remaining segments).
  // Prep the collection name.
  var colName = collection.name.split('.');
  var dbName = colName.shift();
  colName = colName.join('.');
  var database = amityMongo.db.db(dbName);
  var options = {
    name:dbName + '/' + colName,
    // NOTE(review): the full namespaced name (including the db prefix)
    // is passed to database.collection() — confirm this is intended.
    service:feathersMongo({collection:database.collection(collection.name)})
  };
  amityMongo.amity_collections.push(options);
  callback();
}
|
javascript
|
{
"resource": ""
}
|
|
q11703
|
loadFileContents
|
train
|
function loadFileContents(filePath, preloaded)
{
  // Resolve file contents as a string, preferring a pre-supplied value
  // over reading filePath from disk. Always returns a promise.
  //use pre-supplied contents?
  if (typeof preloaded !== 'undefined' && preloaded !== null)
  {
    //return a promise which will get resolved with the preloaded value
    if (preloaded instanceof Buffer)
    {
      return promiseValue(preloaded.toString());
    }
    else
    {
      // Coerce non-Buffer preloaded values (numbers, objects...) to string.
      return promiseValue(String(preloaded));
    }
  }
  else
  {
    // Read from disk via Q's node-callback adapter, then stringify the
    // resulting Buffer with invoke('toString').
    return Q.nfcall(fs.readFile, filePath).invoke('toString');
  }
}
|
javascript
|
{
"resource": ""
}
|
q11704
|
returnClient
|
train
|
function returnClient(options, ignoreCache) {
  // Return a (possibly cached) Swagger client for the given options.
  // A string argument is shorthand for { url: options, usePromise: true }.
  if (typeof options === 'string') {
    options = {
      url: options,
      usePromise: true
    };
  }
  // Bypass the cache entirely when explicitly requested.
  if (ignoreCache === true) {
    return generateClient(options);
  }
  // Cache key is the serialized options object. NOTE(review): key
  // order inside `options` affects the cache key, so logically equal
  // option objects can miss the cache.
  const optionsJSON = JSON.stringify(options);
  if (cachedClients[optionsJSON] instanceof Swagger) {
    return cachedClients[optionsJSON];
  } else {
    cachedClients[optionsJSON] = generateClient(options);
    return cachedClients[optionsJSON];
  }
}
|
javascript
|
{
"resource": ""
}
|
q11705
|
parse
|
train
|
function parse(data, callback) {
  // Decode `data` as JSON and report the outcome through the
  // node-style callback (err, result).
  let result;
  try {
    result = JSON.parse(data);
  } catch (err) {
    // Malformed JSON: hand the parse error to the caller.
    return callback(err);
  }
  return callback(null, result);
}
|
javascript
|
{
"resource": ""
}
|
q11706
|
BindingIdentifier
|
train
|
function BindingIdentifier(node, parent, scope, state) {
  // Visitor: when a binding identifier shares the tracked name, flag
  // the traversal state as de-optimized so the caller can bail out.
  const matches = node.name === state.name;
  if (matches) {
    state.deopted = true;
  }
}
|
javascript
|
{
"resource": ""
}
|
q11707
|
train
|
function(name, zoneLetter, cf, vpc, routeTable) {
  // CloudFormation EC2::Subnet resource wrapper: wires up the VPC and
  // availability zone, and registers the matching route-table
  // association alongside the subnet resource.
  var self = this;
  self.isPublic = false;
  base.Base.call(this, cf, 'EC2::Subnet', name);
  self.addProperty('VpcId', vpc.ref());
  self.zone = self.addProperty('AvailabilityZone', vpc.az(zoneLetter));
  // Deferred CIDR assignment: callers choose the block after construction.
  self.cidrBlock = function(cidr) {
    self.cidr = vpc.mkSubnetCidrBlock(cidr);
    return self.addProperty('CidrBlock', self.cidr);
  };
  // Calling with no arguments enables MapPublicIpOnLaunch.
  self.mapPublicIpOnLaunch = function(value) {
    if (arguments.length === 0) { value = true; }
    return self.addProperty('MapPublicIpOnLaunch', !!value);
  };
  var assocName = name+'RouteTableAssociation';
  cf.data.Resources[assocName] = subnet.makeSubnetRouteTableAssociation(assocName, cf, self, routeTable);
  //self.output(name+"CidrBlock", self.getAtt("CidrBlock"), "Subnet "+name+"CIDR block"); /* Do not know why CidrBlock is not available */
}
|
javascript
|
{
"resource": ""
}
|
|
q11708
|
stringifyStackTrace
|
train
|
function stringifyStackTrace (stackTrace) {
  // Render each truthy frame as "method::class (file:line)" and join
  // all frames with newlines, skipping holes and null entries.
  var lines = [];
  var count = stackTrace.length;
  for (var idx = 0; idx < count; idx++) {
    var frame = stackTrace[idx];
    if (!frame) {
      continue;
    }
    lines.push(frame.methodName + "::" + frame.className + " (" + frame.fileName + ":" + frame.lineNumber + ")");
  }
  return lines.join("\n");
}
|
javascript
|
{
"resource": ""
}
|
q11709
|
createInternalConnection
|
train
|
function createInternalConnection (mode) {
  // Map the requested mode onto the matching HTTP client module;
  // anything other than 'async'/'sync' is a caller error.
  switch (mode) {
    case 'async':
      return require('then-request');
    case 'sync':
      return require('sync-req');
    default:
      throw new Error('Expected options.mode to be "async" or "sync" but got ' + JSON.stringify(mode));
  }
}
|
javascript
|
{
"resource": ""
}
|
q11710
|
branch
|
train
|
function branch () {
  // Read .git/HEAD in the current working directory to determine the
  // checked-out branch name; returns null when no HEAD file exists,
  // or null-ish when HEAD is detached (no ref line to match).
  const headPath = path.join(process.cwd(), '.git/HEAD')
  if (!fs.existsSync(headPath)) {
    console.warn('No HEAD found, aborting.')
    return null
  }
  const head = fs.readFileSync(headPath, 'utf8')
  const match = /ref: refs\/heads\/(\S+)/.exec(head)
  return match && match[1]
}
|
javascript
|
{
"resource": ""
}
|
q11711
|
train
|
function(topic, message) {
  // Run every registered filter against the incoming topic/message.
  // NOTE(review): `myFilters` comes from an enclosing scope not visible
  // in this chunk; despite the design notes below, ALL matching filters
  // currently run and no match count is returned.
  /*
   * so should a filter manage the last message it has? and give it an ID and store it...
   * to prevent duplicates...
   * or should this filter do it...
   * this shold just execute 1 filter -- NO more than 1. But if a topic matches
   * multiple filters then we need to call one filter and mark it dirty or something
   *
   * maybe save the lastMessage w/ a timestamp and if it matches, then skip it...
   *
   * given topic A that matches 2 filters so there will be 2 messages...
   *
   *
   * return number of matches...
   *
   *
   */
  for( var key in myFilters) {
    if (myFilters.hasOwnProperty(key)) {
      myFilters[key].doFilter(topic,message) ;
    }
  }
  // count is how many times filter ran...
}
|
javascript
|
{
"resource": ""
}
|
|
q11712
|
train
|
function(config) {
  // Create (or reuse) an RtcConnector keyed by its connection
  // parameters. Unique connectors are always registered fresh; for
  // non-unique requests an existing connector with the same key is
  // returned when available.
  var em = new RtcConnector(config);
  var unique = (config && config.unique) ? true : false;
  l('DEBUG') && console.log('createRtcConnector() config is: ',config);
  // Key derives from user/password/server/port/eventPath.
  var key = em.key = createKey(em.config.user||null, em.config.password || null, em.config.server || null, em.config.port || null, em.config.eventPath || 'rtcomm');
  var mon = find(key);
  // Monitors not requesting to be UNIQUE.
  var umon = [];
  mon.forEach(function(m){ !m.unique && umon.push(m); });
  if (unique || umon.length === 0) {
    rtcConnectors[em.id] = em;
    return em;
  }
  // return first nonunique that matched the key
  return umon[0] || null;
}
|
javascript
|
{
"resource": ""
}
|
|
q11713
|
stringify
|
train
|
function stringify() {
  // Streaming transform: JSON-serialize each chunk, forwarding
  // serialization failures (e.g. circular structures) as stream errors.
  // NOTE(review): `map` is an external stream-map helper imported
  // elsewhere in this file.
  return map(function (chunk, next) {
    var stringified;
    try {
      stringified = JSON.stringify(chunk);
    } catch (e) {
      return next(e);
    }
    return next(null, stringified);
  });
}
|
javascript
|
{
"resource": ""
}
|
q11714
|
deriveSecret
|
train
|
function deriveSecret(parent_secret, child_name, hash_algorithm) {
  // Deterministically derive a hex-encoded child secret by HMAC-ing
  // the child name with the parent secret. Defaults to SHA-512.
  const algorithm = hash_algorithm || 'sha512';
  const hmac = crypto.createHmac(algorithm, parent_secret);
  hmac.update(child_name);
  return hmac.digest('hex');
}
|
javascript
|
{
"resource": ""
}
|
q11715
|
deriveKeypair
|
train
|
function deriveKeypair(parent_secret, child_name, signature_scheme) {
  // Derive a secp256k1 keypair (hex private and public) from a parent
  // secret and child name. Only 'ec_secp256k1' is supported.
  if (!signature_scheme) {
    signature_scheme = 'ec_secp256k1';
  }
  var pair = {};
  if (signature_scheme === 'ec_secp256k1') {
    pair.private = deriveSecret(parent_secret, child_name, 'sha256');
    // If the private key is greater than the curve modulus we
    // use a counter to get a new random secret
    var modulus = new BigInteger(ecurve.getCurveByName('secp256k1').n, 16);
    var counter = 1;
    // NOTE(review): pair.private is a hex *string* here, while
    // BigInteger.compareTo presumably expects a BigInteger — confirm
    // that modulus.compareTo(pair.private) behaves as intended; it may
    // need `new BigInteger(pair.private, 16)`.
    while (!pair.private || modulus.compareTo(pair.private) < 0) {
      pair.private = deriveSecret(parent_secret, child_name + '_' + counter, 'sha256');
      counter += 1;
    }
    // Uncompressed (second arg false) public key, hex-encoded.
    pair.public = new bitcoinjs.ECKey(new BigInteger(pair.private, 16), false).pub.toHex();
  } else {
    throw new Error('Signature scheme: ' + signature_scheme + ' not currently supported');
  }
  return pair;
}
|
javascript
|
{
"resource": ""
}
|
q11716
|
sign
|
train
|
function sign(private_key, data) {
  // Sign the double-SHA256 of `data` (hex string) with the secp256k1
  // private key and return the DER-encoded signature as hex.
  // Buffer.from replaces the deprecated, unsafe `new Buffer(...)`
  // constructor (same semantics for a (string, encoding) call).
  var key = new bitcoinjs.ECKey(new BigInteger(private_key, 16), false);
  var hash = bitcoinjs.crypto.hash256(Buffer.from(data, 'hex'));
  return key.sign(hash).toDER().toString('hex');
}
|
javascript
|
{
"resource": ""
}
|
q11717
|
verify
|
train
|
function verify(public_key, data, signature) {
  // Verify a DER-encoded hex signature over the double-SHA256 of
  // `data` (hex string) against the given hex public key.
  // Buffer.from replaces the deprecated, unsafe `new Buffer(...)`
  // constructor (same semantics for a (string, encoding) call).
  var pubkey = bitcoinjs.ECPubKey.fromHex(public_key);
  var hash = bitcoinjs.crypto.hash256(Buffer.from(data, 'hex'));
  var ecsignature = bitcoinjs.ECSignature.fromDER(Buffer.from(signature, 'hex'));
  return pubkey.verify(hash, ecsignature);
}
|
javascript
|
{
"resource": ""
}
|
q11718
|
getSpecificity
|
train
|
function getSpecificity(text, parsed) {
  // Compute CSS selector specificity as [0, ids, classes/attrs, tags/pseudos].
  // `parsed` lets callers pass an already-parsed selector; :not(...)
  // arguments contribute their own specificity recursively but the
  // :not pseudo itself does not score.
  var expressions = parsed || parse(text),
    spec = [ 0, 0, 0, 0 ],
    nots = [],
    i,
    expression,
    pseudos,
    p,
    ii,
    not,
    jj;
  for (i = 0; i < expressions.length; i++) {
    expression = expressions[i];
    pseudos = expression.pseudos;
    // id awards a point in the second column
    if (expression.id) {
      spec[1]++;
    }
    // classes and attributes award a point each in the third column
    if (expression.attributes) {
      spec[2] += expression.attributes.length;
    }
    if (expression.classList) {
      spec[2] += expression.classList.length;
    }
    // tag awards a point in the fourth column
    if (expression.tag && expression.tag !== '*') {
      spec[3]++;
    }
    // pseudos award a point each in the fourth column
    if (pseudos) {
      spec[3] += pseudos.length;
      for (p = 0; p < pseudos.length; p++) {
        if (pseudos[p].key === 'not') {
          // Collect the :not argument for recursive scoring and undo
          // the point the pseudo itself just earned.
          nots.push(pseudos[p].value);
          spec[3]--;
        }
      }
    }
  }
  // Fold in the specificity of each :not(...) argument.
  for (ii = nots.length; ii--;) {
    not = getSpecificity(nots[ii]);
    for (jj = 4; jj--;) {
      spec[jj] += not[jj];
    }
  }
  return spec;
}
|
javascript
|
{
"resource": ""
}
|
q11719
|
selectVersion
|
train
|
function selectVersion(v1, v2) {
  // Dispatch between two API generations based on the first argument:
  // a Stream instance selects the legacy v1 handler (which accepted a
  // stream), anything else goes to the v2 handler (which returns one).
  return function (firstParam) {
    const useLegacy = firstParam instanceof stream.Stream;
    const impl = useLegacy ? v1 : v2;
    return impl.apply(impl, arguments);
  };
}
|
javascript
|
{
"resource": ""
}
|
q11720
|
evalPromise
|
train
|
function evalPromise(promise) {
  // Race `promise` against a timeout via exports.countdown and settle
  // the shared `deferred` with the outcome.
  // NOTE(review): `deferred` and `timeout` come from an enclosing scope
  // not visible in this chunk; `.fail` suggests a Q-style promise.
  // wrap in `countdown`
  exports.countdown(promise, timeout)
    .then(function(resolvedValue) {
      deferred.resolve(resolvedValue);
    }).fail(function(err) {
      deferred.reject(err);
    });
}
|
javascript
|
{
"resource": ""
}
|
q11721
|
train
|
function (rules) {
  // Normalize one rule or an array of rules into parsed rule objects.
  // Strings go through _parseString, plain objects through
  // _parseObject; any other type is rejected.
  if (!_.isArray(rules)) {
    rules = [ rules ];
  }
  var that = this;
  return _.map(rules, function (rule) {
    if (typeof rule === 'string') {
      return that._parseString(rule);
    }
    if (_.isPlainObject(rule)) {
      return that._parseObject(rule);
    }
    throw new Error('#V1: invalid rule type, must be string or plain object');
  });
}
|
javascript
|
{
"resource": ""
}
|
|
q11722
|
train
|
function (ruleStr) {
  // Parse a rule string into a Rule instance, using String.replace's
  // callback purely to capture the regex groups (name and params).
  var name;
  var params = '';
  ruleStr.replace(this._RULE_STRING_FORMAT, function ($0, _name, _params) {
    name = _name;
    params = _params.trim();
  });
  // An empty parameter list maps to null rather than ''.
  params = params.length ? this._parseParamsString(params) : null;
  return Rule.create(name, params);
}
|
javascript
|
{
"resource": ""
}
|
|
q11723
|
train
|
function (jsonSource) {
  // Best-effort JSON decode: invalid JSON falls back to returning the
  // raw input unchanged instead of throwing.
  var json = null;
  try {
    json = JSON.parse(jsonSource);
  } catch (err) {
    json = jsonSource; // string
  }
  return json;
}
|
javascript
|
{
"resource": ""
}
|
|
q11724
|
eventData
|
train
|
function eventData(emitter) {
  // Lazily attach — as a non-enumerable property — and return the
  // per-emitter event-data store keyed by
  // eventEmitter.EVENT_DATA_PROPERTY.
  return emitter[eventEmitter.EVENT_DATA_PROPERTY] || Object.defineProperty(emitter, eventEmitter.EVENT_DATA_PROPERTY, {
    value: {},
    enumerable: false
  })[eventEmitter.EVENT_DATA_PROPERTY];
}
|
javascript
|
{
"resource": ""
}
|
q11725
|
_createClient
|
train
|
function _createClient (name, options, callback) {
  // Create a named redis client, register it in the module-level
  // `_clients` map, and invoke `callback` exactly once: with the client
  // on 'ready', or with an error on 'error' / premature 'end'.
  const log = logger.child({ redis: name })
  log.debug('Redis creating client: ' + name)
  let isReady = false
  const config = {}
  deepAssign(config, _defaults, options)
  let client = redis.createClient(config)
  // _once guards against the callback firing for multiple events.
  callback = _once(callback)
  _clients[ name ] = client
  _clients[ name ].log = log
  log.debug({ clients: Object.keys(_clients) }, 'Redis clients')
  client.on('error', function (err) {
    log.error({ err: err }, 'Redis client error')
    callback(err)
  })
  client.on('warning', function (err) {
    log.warn({ err: err }, 'Redis client warning')
  })
  client.on('connect', function () {
    log.debug('Redis connected: ' + name)
  })
  client.on('ready', function () {
    log.debug('Redis client ready: ' + name)
    log.debug({ config: config }, 'Redis client config')
    log.debug(`Redis server version: ${safeGet(() => client.server_info.redis_version)}`)
    isReady = true
    callback(null, client)
  })
  client.on('reconnecting', function () {
    log.debug('Redis client reconnecting: ' + name)
  })
  client.on('end', function () {
    log.debug('Redis client end: ' + name)
    // Close the connection before removing reference.
    client.quit()
    client = null
    delete _clients[ name ]
    // NOTE(review): `_clients` is a plain object, so `_clients.length`
    // below is always undefined — probably meant Object.keys length.
    log.debug({ clients: Object.keys(_clients) }, 'Redis clients: ' + _clients.length)
    // 'end' before 'ready' means we never connected successfully.
    if (!isReady) {
      callback(new Error('Done - Failed to connect to Redis'))
    }
  })
}
|
javascript
|
{
"resource": ""
}
|
q11726
|
getFileMIME
|
train
|
function getFileMIME(file) {
  // Resolve a MIME type from the file name's extension, falling back
  // to the plain-text type when the extension is missing or unknown.
  var dot = file.lastIndexOf('.')
  var ext = null
  if (dot > -1) {
    // Drop the leading dot and normalize case before the lookup.
    ext = file.slice(dot).toLowerCase().substring(1)
  }
  return MIMES[ext] || MIMES.txt
}
|
javascript
|
{
"resource": ""
}
|
q11727
|
isCharEnglishPunctuation
|
train
|
function isCharEnglishPunctuation(char = '') {
  // True when `char` falls inside any configured English punctuation
  // codepoint range; empty input is never punctuation.
  if (isEmpty(char)) return false;
  return ENGLISH_PUNCTUATION_RANGES.some(([start, end]) => isCharInRange(char, start, end));
}
|
javascript
|
{
"resource": ""
}
|
q11728
|
expand
|
train
|
function expand(property, value, recurse) {
  // Expand a shorthand property. With two arguments where the second
  // is a boolean, it is treated as `recurse` (value stays undefined).
  // Undefined value selects the array expansion form; otherwise the
  // object form is used. `recurse` defaults to true.
  ASSERT(arguments.length, 'property argument is required');
  if (arguments.length < 3) {
    if (typeof value === 'boolean') {
      recurse = value;
      value = undefined;
    } else {
      recurse = true;
    }
  }
  var undefvalue = typeof value === 'undefined';
  return undefvalue?
    expandAsArray(property, recurse)
    : expandAsObject(property, value, recurse);
}
|
javascript
|
{
"resource": ""
}
|
q11729
|
normalizeEntity
|
train
|
function normalizeEntity(base, input) {
  // Normalize a Siren-style entity: class, properties, sub-entities,
  // links and actions are each normalized (URLs resolved against
  // `base`), then undefined values are stripped from the result.
  if (!input) return {};
  var result = Object.assign(clone(input), {
    class: normalizeClass(input.class),
    properties: normalizeProperties(input.properties),
    entities: normalizeEntities(base, input.entities),
    links: normalizeLinks(base, input.links),
    actions: normalizeActions(base, input.actions),
    title: input.title
  });
  // strip undefined values from the result
  return filter(result, function (v) {
    return typeof v !== 'undefined';
  });
}
|
javascript
|
{
"resource": ""
}
|
q11730
|
normalizeRel
|
train
|
function normalizeRel(base, input) {
  // Normalize one rel or a list of rels: registered IANA rel names are
  // kept verbatim, everything else is resolved as a URL against `base`.
  if (!input) return;
  if (!Array.isArray(input)) input = [ input ];
  return flatten(input).map(function (rel) {
    return rel in iana ? rel : url.resolve(base, rel);
  });
}
|
javascript
|
{
"resource": ""
}
|
q11731
|
normalizeProperties
|
train
|
function normalizeProperties(input) {
  // Normalize entity properties: an array of objects is merged into a
  // single object (later entries win); a plain object is shallow-cloned.
  if (!input) return;
  if (Array.isArray(input)) {
    return flatten(input).reduce(function (acc, o) {
      return Object.assign(acc, o);
    }, {});
  }
  return clone(input);
}
|
javascript
|
{
"resource": ""
}
|
q11732
|
normalizeEntities
|
train
|
function normalizeEntities(base, input) {
  // Normalize one sub-entity or a list: each must carry a rel, gets
  // fully normalized, and has its rel/href resolved against `base`.
  if (!input) return;
  if (!Array.isArray(input)) input = [ input ];
  return flatten(input).filter(Boolean).map(function (entity) {
    assert(entity.rel, 'sub-entities must have a rel');
    var ret = normalizeEntity(base, entity);
    ret.rel = normalizeRel(base, entity.rel);
    if (entity.href) ret.href = normalizeHref(base, entity.href);
    return ret;
  });
}
|
javascript
|
{
"resource": ""
}
|
q11733
|
normalizeLinks
|
train
|
function normalizeLinks(base, input) {
  // Normalize one link or a list of links, dropping falsy entries and
  // delegating each to normalizeLink with the resolution base.
  if (!input) return;
  if (!Array.isArray(input)) input = [ input ];
  return flatten(input).filter(Boolean).map(function (link) {
    return normalizeLink(base, link);
  });
}
|
javascript
|
{
"resource": ""
}
|
q11734
|
normalizeAction
|
train
|
function normalizeAction(base, input) {
  // Normalize a single action: name and href are mandatory; href is
  // resolved against `base`; method/class/fields are normalized only
  // when present so absent keys stay absent.
  assert(input.name, 'actions must have a name');
  assert(input.href, 'actions must have an href');
  var ret = clone(input);
  ret.href = normalizeHref(base, input.href);
  if (input.method) ret.method = normalizeMethod(input.method);
  var cls = normalizeClass(input.class);
  if (cls) ret.class = cls;
  var fields = normalizeFields(input.fields);
  if (fields) ret.fields = fields;
  return ret;
}
|
javascript
|
{
"resource": ""
}
|
q11735
|
normalizeActions
|
train
|
function normalizeActions(base, input) {
  // Normalize one action or a list of actions, dropping falsy entries
  // and delegating each to normalizeAction with the resolution base.
  if (!input) return;
  if (!Array.isArray(input)) input = [ input ];
  return flatten(input).filter(Boolean).map(function (action) {
    return normalizeAction(base, action);
  });
}
|
javascript
|
{
"resource": ""
}
|
q11736
|
normalizeFields
|
train
|
function normalizeFields(input) {
  // Normalize one field or a list of fields, dropping falsy entries
  // and delegating each to normalizeField.
  if (!input) return;
  if (!Array.isArray(input)) input = [ input ];
  return flatten(input).filter(Boolean).map(normalizeField);
}
|
javascript
|
{
"resource": ""
}
|
q11737
|
registerExtension
|
train
|
function registerExtension(extension) {
  // Register an extension with the core. An extension may be: an
  // object with buildCore(core), a factory function(core), an object
  // with a `core` property, or a plain mixin object. Successful
  // registrations are recorded in `extensions`; failures are logged,
  // not rethrown.
  try {
    var ext; // Actual extension
    if (is(extension, 'buildCore', 'function')) {
      // If extension has buildCore function then give it an instance of the core.
      extension.buildCore(self);
      addOne(extensions, extension);
      return; // No need to extend as that will be handled in buildCore
    }
    if (is(extension, 'function')) {
      // If extension is a function then give it an instance of the core.
      ext = extension(self);
    } else if (has(extension, 'core')) {
      // If extension has `core` property then extend core with it.
      ext = extension.core;
    } else {
      // Otherwise extend core with the extension itself.
      ext = extension;
    }
    if (ext) {
      extend(self, ext);
      addOne(extensions, extension);
    }
  } catch (ex) {
    // NOTE(review): the message parts leave an unbalanced '"' after
    // "extension" — likely a truncated format string; confirm intent.
    error('Fatal error during application initialization. ', 'Failed to build core with extension "', extension, 'See following exception for more details.', ex);
  }
  return extension;
}
|
javascript
|
{
"resource": ""
}
|
q11738
|
buildSandbox
|
train
|
function buildSandbox(id) {
  // Build a per-module sandbox: a fixed set of core facilities plus
  // whatever each registered extension contributes. Unlike
  // registerExtension, a failing extension here is fatal (rethrown).
  if (!has(id)) {
    throw new Error('Sandbox name is required to build a sandbox.');
  }
  // Create module instance specific sandbox
  var sandbox = {
    type: self.type,
    object: self.object,
    array: self.array,
    log: self.log
  };
  // Add extensions to sandbox
  extensions.forEach(function (extension) {
    try {
      // If extension has buildSandbox method use it to build sandbox
      if (is(extension, 'buildSandbox', 'function')) {
        extension.buildSandbox(sandbox);
      }
      // If extension has a sandbox object add it
      else if (has(extension, 'sandbox')) {
        extend(sandbox, extension.sandbox);
      }
      // Otherwise extend the sandbox with the extension
      else {
        extend(sandbox, extension);
      }
    } catch (ex) {
      error('Fatal error during application initialization. ', 'Failed to build sandbox with extension "', extension, 'See following exception for more details.', ex);
      throw ex;
    }
  });
  return sandbox;
}
|
javascript
|
{
"resource": ""
}
|
q11739
|
handleErrorRequest
|
train
|
function handleErrorRequest(result, req, res) {
  // Normalize an error-ish result into a response descriptor and send
  // it as HTML (when a view is configured) or JSON.
  const err = _.isError(result.msg) ? result.msg : result;
  // Copy the matched status template instead of mutating it in place:
  // the original assigned msg/ext directly onto the shared STATUSES
  // entry, so one request's message and debug payload leaked into
  // every later request that hit the same status.
  const finalResp = Object.assign(
    {},
    STATUSES[result.status] || STATUSES[err.message] || STATUSES['error']
  );
  finalResp.msg = err.message;
  // Attach the raw error only in debug mode.
  if (isDebug) finalResp.ext = err;
  logRequestError(err, req, finalResp);
  res.status(finalResp.statusCode);
  const view = result.view || errorView;
  if (view) {
    returnHTML(view, finalResp, res);
  } else {
    returnJSON(finalResp, res);
  }
}
|
javascript
|
{
"resource": ""
}
|
q11740
|
json
|
train
|
function json (opts) {
  // Produce a copy of `opts` suitable for a JSON request: the body is
  // serialized and a JSON content-type header is merged in (existing
  // headers are preserved, content-type is overridden).
  const headers = { ...opts.headers, 'content-type': 'application/json' }
  const body = JSON.stringify(opts.body)
  return { ...opts, headers, body }
}
|
javascript
|
{
"resource": ""
}
|
q11741
|
error
|
train
|
function error (response, status, message) {
  // Build an Error carrying the HTTP response and status code so
  // callers can inspect failures programmatically.
  const err = new Error(`${status}: ${message}`)
  err.response = response
  err.status = status
  return err
}
|
javascript
|
{
"resource": ""
}
|
q11742
|
getJSON
|
train
|
function getJSON (response) {
  // Decode the response's JSON body; reject non-'ok' payloads with a
  // rich error built from body.status and the first data element.
  return response.json().then((body) => {
    if (body.status !== 'ok') {
      throw error(response, body.status, body.data[0])
    }
    return body
  })
}
|
javascript
|
{
"resource": ""
}
|
q11743
|
getSessionCookie
|
train
|
function getSessionCookie (headers) {
  // Extract the `session` cookie value from a cookie header string;
  // returns undefined when the header or the cookie is absent.
  if (!headers) return
  const cookie = parse(headers)
  if (cookie.session) return cookie.session
}
|
javascript
|
{
"resource": ""
}
|
q11744
|
getCsrf
|
train
|
function getCsrf (opts) {
  // Fetch the mobile-init endpoint and resolve with { csrf, session }.
  // The _simulateMaintenance flag short-circuits with a delayed
  // rejection so tests can exercise the failure path.
  // for testing
  if (opts._simulateMaintenance) {
    return new Promise((resolve, reject) => {
      setTimeout(() => {
        reject(new Error('Could not find CSRF token'))
      }, 300)
    })
  }
  return fetch(`${opts.host}/_/mobile/init`, json(opts))
    .then((response) => props({
      csrf: getJSON(response).then((body) => body.data[0].c),
      session: getSessionCookie(response.headers.get('set-cookie'))
    }))
}
|
javascript
|
{
"resource": ""
}
|
q11745
|
doLogin
|
train
|
function doLogin (opts, csrf, email, password) {
  // POST credentials (with CSRF token) to the login endpoint and
  // resolve with { session, body } — the session cookie plus the
  // validated JSON body.
  return fetch(`${opts.host}/_/auth/login`, json({
    ...opts,
    method: 'post',
    body: { csrf, email, password }
  })).then((response) => props({
    session: getSessionCookie(response.headers.get('set-cookie')),
    body: getJSON(response)
  }))
}
|
javascript
|
{
"resource": ""
}
|
q11746
|
modifyProperties
|
train
|
function modifyProperties(schema, paths) {
  // Install mongoose middleware for enum-bearing paths: post-find
  // hooks wrap lean documents' enum values with their allowed values,
  // and pre-save/update hooks unwrap them again so validation sees the
  // plain value. All paths share the same options (taken from paths[0]).
  if (!paths.length) { return; }
  const options = paths[0].options;
  /**
   * Modify document enum (string) properties to an object, with (original) value and
   * values (enumValues)
   *
   */
  function populatePropertyFor (documents, next) {
    // Only lean queries are rewritten; hydrated documents are left alone.
    if (this._mongooseOptions.lean) {
      asArray(documents).forEach(function(doc) {
        paths.forEach(function(path) {
          try {
            const splitted = path.path.split('.');
            const key = splitted.shift();
            const insert = { values: path.enumValues };
            const value = doc[key];
            insert.value = determineValue(splitted, value);
            doc[key] = nest(splitted, insert);
          } catch (error) { return next(error); }
        });
      });
    }
    next();
  }
  /**
   * If a document is modified, this method will locate the value on updates and assign it to the
   * appropriate property, allowing for proper validations later.
   * @param <Function>: next - function that notifies mongoose this middleware is complete
   */
  function reformatUpdateProperty(next) {
    const document = this._update['$set'];
    if (document) {
      paths.forEach(function(path) {
        try {
          const splitted = path.path.split('.');
          const key = splitted.shift();
          if (document[key] === undefined) { return; }
          const value = determineValue(splitted, document[key]);
          document[key] = nest(splitted, value);
        } catch (error) { return next(error); }
      });
    }
    next();
  }
  /**
   * If a document is modified, this method will locate the value before save/validation and assign it to the
   * appropriate property, allowing for proper validations later.
   * @param <Function>: next - function that notifies mongoose this middleware is complete
   */
  function reformatProperty(next) {
    const self = this;
    paths.forEach(function(path) {
      try {
        const splitted = path.path.split('.');
        const key = splitted.shift();
        const value = determineValue(splitted, self[key]);
        self[key] = nest(splitted, value);
      } catch (error) { return next(error); }
    });
    next();
  }
  /**
   * Setup handlers for modifying properties -- ['find', 'findOne']
   */
  options.modify.on.forEach(function(on) {
    schema.post(on, populatePropertyFor);
  });
  // Unwrap before 'validate' when the schema validates on save,
  // otherwise before 'save' directly.
  schema.pre((options.validateBeforeSave ? 'validate' : 'save'), reformatProperty);
  /*
   * Setup handlers for updating documents (there may be more to consider)
   */
  ['update', 'findOneAndUpdate'].forEach(function(on) {
    schema.pre(on, reformatUpdateProperty);
  });
}
|
javascript
|
{
"resource": ""
}
|
q11747
|
reformatUpdateProperty
|
train
|
function reformatUpdateProperty(next) {
  // Mongoose pre-update hook: for each enum path present in the $set
  // payload, unwrap the { value, values } wrapper back to the plain
  // value so validation operates on the raw data.
  // NOTE(review): `paths`, `determineValue` and `nest` come from an
  // enclosing scope not visible in this chunk.
  const document = this._update['$set'];
  if (document) {
    paths.forEach(function(path) {
      try {
        const splitted = path.path.split('.');
        const key = splitted.shift();
        // Skip paths the update does not touch.
        if (document[key] === undefined) { return; }
        const value = determineValue(splitted, document[key]);
        document[key] = nest(splitted, value);
      } catch (error) { return next(error); }
    });
  }
  next();
}
|
javascript
|
{
"resource": ""
}
|
q11748
|
findPaths
|
train
|
function findPaths(schema, options) {
  // Walk every schema path and collect those declaring enum values,
  // bundling the path name, its enumValues and the shared options;
  // filtering (e.g. an `only` whitelist) is delegated to filterPaths.
  const paths = [];
  schema.eachPath(function(path, type) {
    if (type.enumValues && type.enumValues.length) {
      paths.push(
        {
          path: path,
          enumValues: type.enumValues,
          options: options
        }
      );
    }
  });
  return filterPaths(paths, options);
}
|
javascript
|
{
"resource": ""
}
|
q11749
|
setVirtuals
|
train
|
function setVirtuals(schema, paths) {
  // For each enum path that has a configured virtual property name,
  // define a schema virtual returning that path's allowed enum values.
  paths.forEach(path => {
    const props = path.options.virtual.properties;
    if (props[path.path]) {
      schema.virtual(props[path.path]).get(function() {
        return path.enumValues;
      });
    }
  });
}
|
javascript
|
{
"resource": ""
}
|
q11750
|
attachProperties
|
train
|
function attachProperties(schema, paths) {
  // For each enum path configured with attach.properties, install post
  // hooks that copy enum values onto returned documents under the
  // configured alias (`as`).
  paths.forEach(path => {
    const props = path.options.attach.properties;
    if (props[path.path]) {
      (props[path.path].on || []).forEach(on => {
        /**
         * Setup post callbacks
         */
        schema.post(on, function(documents, next) {
          asArray(documents).forEach(function(doc) {
            // NOTE(review): this inner forEach shadows the outer `path`
            // and re-attaches for EVERY enum path inside a hook that was
            // registered per-path — confirm the duplication is intended.
            paths.forEach(function(path) {
              doc[props[path.path].as] = path.enumValues;
            });
          });
          next();
        });
      });
    }
  });
}
|
javascript
|
{
"resource": ""
}
|
q11751
|
determineValue
|
train
|
function determineValue(keys, doc) {
  // Walk `doc` down the given key path. When the leaf is an object it
  // is assumed to be the enum wrapper ({ value, values }) and its
  // `value` is returned; otherwise the leaf itself is returned. Any
  // lookup failure falls back to whatever was reached so far.
  let current = doc;
  try {
    for (const key of keys) {
      current = current[key];
    }
    /**
     the keys array should transition through any nesting,
     so any object is assumed to be enumValues:
     { value: 'string', enumValues: ['strings'] }
     */
    return typeof current === 'object' ? current.value : current;
  } catch (error) {
    return current;
  }
}
|
javascript
|
{
"resource": ""
}
|
q11752
|
nest
|
train
|
function nest(array, insert) {
  // Wrap `insert` in nested single-key objects, outermost key first:
  // nest(['a', 'b'], v) -> { a: { b: v } }. An empty path returns
  // `insert` unchanged.
  // Iterates from the end instead of calling array.reverse(), which
  // mutated the caller's array in place as a side effect.
  let result = insert;
  for (let i = array.length - 1; i >= 0; i--) {
    result = { [array[i]]: result };
  }
  return result;
}
|
javascript
|
{
"resource": ""
}
|
q11753
|
setOptions
|
train
|
function setOptions(schema, options) {
  // Normalize the plugin options object: fill in defaults for `only`,
  // `validateBeforeSave`, and the virtual/attach/modify sub-configs,
  // including their trigger hook lists ('find'/'findOne').
  // When a hook list is empty, derive it from the top-level find/findOne
  // flags ('findOne' is the fallback default).
  function setDefaults(array) {
    if (!array.length) {
      if (options.find) { array.push('find'); }
      if (options.findOne || !options.find) { array.push('findOne'); }
    }
  }
  options = options || {};
  options.only = options.only || [];
  // Fall back to the schema-level validateBeforeSave setting.
  options.validateBeforeSave = options.validateBeforeSave === undefined
    ? schema.options.validateBeforeSave
    : Boolean(options.validateBeforeSave);
  // Each enabled sub-config is coerced to an object with `properties`
  // and an `only` list defaulting to the property names.
  ['virtual', 'attach', 'modify'].filter(prop => options[prop])
    .forEach(
      prop => {
        if (typeof options[prop] !== 'object') {
          options[prop] = {};
        }
        options[prop].properties = options[prop].properties || {};
        options[prop].only = options[prop].only || Object.keys(options[prop].properties);
      }
    );
  if (options.modify) {
    // `modify` keys off hooks, not per-property config.
    delete options.modify.properties;
    options.modify.on = options.modify.on || [];
    setDefaults(options.modify.on);
  }
  if (options.attach) {
    // Each attached property gets its own hook list.
    Object.keys(options.attach.properties).forEach(property => {
      options.attach.properties[property].on = options.attach.properties[property].on || [];
      setDefaults(options.attach.properties[property].on);
    });
  }
  return options;
}
|
javascript
|
{
"resource": ""
}
|
q11754
|
parse
|
train
|
function parse(options) {
  // Streaming JSON parser: emits a parsed object per chunk. With
  // settings.error true (the default) parse failures become stream
  // errors; otherwise the offending chunk is silently dropped.
  var settings = _.extend({
    error: true
  }, options);
  return map(function (chunk, next) {
    var parsed;
    try {
      parsed = JSON.parse(chunk);
    } catch (e) {
      if (settings.error === true) {
        return next(e);
      }
      // Swallow the bad chunk: advance the stream with no output.
      return next();
    }
    return next(null, parsed);
  });
}
|
javascript
|
{
"resource": ""
}
|
q11755
|
lerp
|
train
|
function lerp (vX, vY, x) {
  // Piecewise-linear interpolation of y = f(x) over the sample points
  // (vX[i], vY[i]), with vX sorted ascending. Values left of the first
  // sample clamp to vY[0]; values right of the last sample extrapolate
  // from the final segment.
  if (x < vX[0]) {
    // Don't interpolate leftward, Cuellar says this is done on purpose
    return vY[0];
  }
  var i = search(vX, x, function (a, b) {return a - b;});
  if (i >= 0) {
    // found exact match, no need to interpolate
    return vY[i];
  } else {
    // when not found, binary-search returns the -(index_x_should_be + 1),
    // see https://github.com/darkskyapp/binary-search/issues/1
    i = Math.abs(i + 1);
    if (i >= vX.length) {
      // extrapolate using last 2 values
      i = vX.length - 1;
    }
    // Standard two-point linear interpolation between samples i-1 and i.
    var y0 = vY[i - 1];
    var y1 = vY[i];
    var x0 = vX[i - 1];
    var x1 = vX[i];
    return y0 + (y1 - y0) * (x - x0) / (x1 - x0);
  }
}
|
javascript
|
{
"resource": ""
}
|
q11756
|
eliminate
|
train
|
async function eliminate(dir) {
  // Recursively delete a filesystem entry: files/symlinks are
  // unlinked, directories are emptied depth-first then removed.
  // NOTE(review): lstat/unlink/readdir/rmdir are promisified wrappers
  // defined elsewhere in this file.
  const stat = await lstat(dir);
  if (!stat.isDirectory()) {
    await unlink(dir);
    return;
  }
  const files = await readdir(dir);
  for (const file of files) {
    const path = join(dir, file);
    // NOTE(review): existsSync returns false for broken symlinks, which
    // then take the unlink branch; it is also inherently racy — confirm
    // this split is intentional.
    if (fs.existsSync(path)) {
      await eliminate(path);
    } else {
      await unlink(path);
    }
  }
  await rmdir(dir);
}
|
javascript
|
{
"resource": ""
}
|
q11757
|
JSXAttribute
|
train
|
function JSXAttribute(node, print) {
  // Code-generator visitor: emit `name` or `name=value` for a JSX
  // attribute (value is omitted for boolean shorthand attributes).
  print.plain(node.name);
  if (node.value) {
    this.push("=");
    print.plain(node.value);
  }
}
|
javascript
|
{
"resource": ""
}
|
q11758
|
JSXElement
|
train
|
function JSXElement(node, print) {
  // Code-generator visitor: emit the opening tag, then (unless
  // self-closing) the indented children and the closing tag. Literal
  // children are pushed raw; other nodes are printed recursively.
  var open = node.openingElement;
  print.plain(open);
  if (open.selfClosing) return;
  this.indent();
  var _arr = node.children;
  for (var _i = 0; _i < _arr.length; _i++) {
    var child = _arr[_i];
    if (t.isLiteral(child)) {
      // Second argument `true` appears to bypass formatting for raw
      // literal text — NOTE(review): confirm against the push() API.
      this.push(child.value, true);
    } else {
      print.plain(child);
    }
  }
  this.dedent();
  print.plain(node.closingElement);
}
|
javascript
|
{
"resource": ""
}
|
q11759
|
JSXOpeningElement
|
train
|
function JSXOpeningElement(node, print) {
  // Code-generator visitor: emit `<Name attr1 attr2>` or a
  // self-closing `<Name ... />`, joining attributes with spaces.
  this.push("<");
  print.plain(node.name);
  if (node.attributes.length > 0) {
    this.push(" ");
    print.join(node.attributes, { separator: " " });
  }
  this.push(node.selfClosing ? " />" : ">");
}
|
javascript
|
{
"resource": ""
}
|
q11760
|
BlockStatement
|
train
|
function BlockStatement(node, print) {
  // Code-generator visitor: emit a braced block. Non-empty bodies are
  // printed indented on fresh lines; empty blocks still get any inner
  // comments printed between the braces.
  this.push("{");
  if (node.body.length) {
    this.newline();
    print.sequence(node.body, { indent: true });
    // Avoid a doubled newline before the closing brace unless line
    // retention is requested.
    if (!this.format.retainLines) this.removeLast("\n");
    this.rightBrace();
  } else {
    print.printInnerComments();
    this.push("}");
  }
}
|
javascript
|
{
"resource": ""
}
|
q11761
|
getTransformer
|
train
|
function getTransformer(name) {
  // Memoized lookup of a jstransformer by name. Unresolvable names are
  // cached as `false` so repeated misses stay cheap.
  if (name in transformers) {
    return transformers[name]
  }
  const transformer = toTransformer(name)
  transformers[name] = transformer ? jstransformer(transformer) : false
  return transformers[name]
}
|
javascript
|
{
"resource": ""
}
|
q11762
|
renderPartial
|
train
|
function renderPartial(name) {
  // Invoke a registered partial by name, forwarding all remaining
  // call arguments, with the metalsmith metadata as `this`.
  // The name is a required input.
  if (!name) {
    throw new Error('When calling .partial(), name is required.')
  }
  // Ensure the partial is available in the metadata.
  if (!(name in metalsmith.metadata().partials)) {
    throw new Error('The partial "' + name + '" was not found.')
  }
  // Construct the partial function arguments (everything after `name`).
  const fnarray = []
  for (let i = 1; i < arguments.length; i++) {
    fnarray.push(arguments[i])
  }
  // Call the partial function with the given array arguments.
  return metalsmith.metadata().partials[name].apply(metalsmith.metadata(), fnarray)
}
|
javascript
|
{
"resource": ""
}
|
q11763
|
filterFile
|
train
|
function filterFile(file, done) {
  // Async predicate: decide whether `file` is a partial, either by an
  // explicit `partial` flag on the file or by matching opts.pattern.
  if (files[file].partial) {
    // Discover whether it is explicitly declared as a partial.
    return done(null, files[file].partial)
  } else if (opts.pattern) {
    // Check if it matches the partial pattern.
    return done(null, minimatch(file, opts.pattern))
  }
  // The file is not a partial.
  done(null, false)
}
|
javascript
|
{
"resource": ""
}
|
q11764
|
addPartial
|
train
|
function addPartial(filename, done) {
  // Compile a partial file with the transformer matching its extension
  // and register it (by basename) as a callable in metalsmith
  // metadata; the file is removed from the build output either way.
  // Create a copy of the file and delete it from the database.
  const file = clone(files[filename])
  delete files[filename]
  // Compile the partial.
  const info = path.parse(filename)
  const transform = info.ext ? info.ext.substring(1) : null
  const transformer = getTransformer(transform)
  if (transformer) {
    // Construct the options.
    const options = extend({}, metalsmith.metadata(), file, {
      filename: path.join(metalsmith.source(), filename)
    })
    // Compile the partial.
    transformer.compileAsync(file.contents.toString(), options).then(template => {
      /**
       * Define the partial as a function taking per-call locals that
       * are merged over the compile-time options.
       */
      metalsmith.metadata().partials[info.name] = locals => {
        const opt = extend({}, options, locals)
        return template.fn.apply(file, [opt])
      }
      metalsmith.metadata().partials[info.name].file = file
      done()
    }, done)
  } else {
    // Do not error out when the Transformer is not found.
    done()
  }
}
|
javascript
|
{
"resource": ""
}
|
q11765
|
publishRobotsTxt
|
train
|
function publishRobotsTxt () {
  // Ensure a robots.txt exists: leave an existing regular file alone;
  // otherwise write a default allowing all agents and pointing at the
  // sitemap. Write failures are logged, not propagated.
  var path = 'robots.txt';
  return fs
    .statAsync(path)
    .then(function (pStat) {
      if (!pStat.isFile()) {
        // go to our default course of action, that is create robots.txt file
        throw new Error ();
      }
    })
    // Bluebird-style typed catch: the stat ENOENT and the thrown Error
    // above both land here and trigger file creation.
    .catch(Error, function () {
      return fs
        .openAsync(path, 'w')
        .then(function () {
          return fs.writeFileAsync(path, 'User-agent: *\nSitemap: ' + config.siteUrl + '/sitemap.xml');
        })
        .catch(function (pErr) {
          log.error(pErr);
        });
    });
}
|
javascript
|
{
"resource": ""
}
|
q11766
|
train
|
function () {
var self = this;
return function () {
var status = self.statusCode;
if (status === 0 || (status >= 400 && status < 600)) {
return context.asRejectedPromise(self);
}
return context.asPromise(self);
};
}
|
javascript
|
{
"resource": ""
}
|
|
q11767
|
parseURL
|
train
|
function parseURL(s) {
  // Parse `s` into a URL instance; on invalid input log the error and
  // return null instead of throwing.
  try {
    return new URL(s);
  } catch (error) {
    console.error(error);
    return null;
  }
}
|
javascript
|
{
"resource": ""
}
|
q11768
|
firstObj
|
train
|
function firstObj(stream, onEnd) {
  var firstChunk;
  var finished = false;
  /**
   * Invoke onEnd exactly once, with either the error or the first chunk.
   *
   * @private
   * @param {Error} [err] - Optional error.
   * @returns {undefined}
   */
  function done(err) {
    if (finished) {
      return;
    }
    finished = true;
    if (err) {
      onEnd(err);
    } else {
      onEnd(null, firstChunk);
    }
  }
  // Remember only the very first chunk that flows through the stream.
  stream.once('data', function (chunk) {
    firstChunk = chunk;
  });
  stream.on('error', done);
  stream.on('end', done);
}
|
javascript
|
{
"resource": ""
}
|
q11769
|
first
|
train
|
/**
 * Read the first chunk from `stream` and hand it back as a Buffer.
 *
 * @param {stream.Readable} stream - source stream.
 * @param {Function} onEnd - callback(err, buffer).
 */
function first(stream, onEnd) {
  firstObj(stream, function (err, data) {
    if (err) {
      return onEnd(err);
    }
    // Buffer.from() replaces the deprecated and unsafe `new Buffer()`.
    return onEnd(null, Buffer.from(data));
  });
}
|
javascript
|
{
"resource": ""
}
|
q11770
|
firstJson
|
train
|
function firstJson(stream, onEnd) {
  // Grab the first chunk of the stream and run it through the JSON parser.
  first(stream, function (err, buf) {
    if (err) {
      onEnd(err);
      return;
    }
    parse(buf, onEnd);
  });
}
|
javascript
|
{
"resource": ""
}
|
q11771
|
parallel
|
train
|
function parallel (promiseFunctions, limit = Infinity, silenceErrors = false) {
  // Run the promise functions through contra's `concurrent`, capping the
  // number of simultaneously executing tasks at `limit`.
  return generatePromise(
    promiseFunctions,
    (tasks, callback) => concurrent(tasks, limit, callback),
    silenceErrors
  )
}
|
javascript
|
{
"resource": ""
}
|
q11772
|
generatePromise
|
train
|
/**
 * Adapt a collection (array or keyed object) of promise-returning functions
 * to contra's callback style and run them with `contraMethod`.
 *
 * @param {Function[]|Object} promiseFunctions - functions returning promises.
 * @param {Function} contraMethod - (tasks, callback) runner, e.g. contra.series.
 * @param {boolean} silenceErrors - when true, failed tasks resolve to null
 *   instead of rejecting the whole run.
 * @returns {Promise} resolves with the collected results, rejects on the
 *   first error (unless silenced) or when a non-function is supplied.
 */
function generatePromise (promiseFunctions, contraMethod, silenceErrors) {
  return new Promise((resolve, reject) => {
    // Build a fresh task collection instead of overwriting the caller's
    // entries in place (the previous implementation mutated the input).
    // Mirror the input's shape so contra's object-task support still works.
    const tasks = Array.isArray(promiseFunctions) ? [] : {}
    for (const key of Object.keys(promiseFunctions)) {
      const promiseFunction = promiseFunctions[key]
      if (!isFunction(promiseFunction)) {
        return reject(new Error('One of the supplied promise functions is not a function'))
      }
      // Wrap the promise function so it calls back the way contra expects.
      tasks[key] = (contraCallback) => {
        promiseFunction()
          .then(data => contraCallback(null, data))
          .catch(err => silenceErrors ? contraCallback(null, null) : contraCallback(err))
      }
    }
    contraMethod(tasks, (err, results) => {
      if (err) return reject(err)
      resolve(results)
    })
  })
}
|
javascript
|
{
"resource": ""
}
|
q11773
|
createSeekPathSrc
|
train
|
function createSeekPathSrc(target) {
  // Emit the source text of a compiled "seek path" function: one Buffer
  // constant per key, then a function that walks them in order via seekKey().
  var decls = target.map(function (key, i) {
    return ' var k' + i + ' = Buffer.from(' + JSON.stringify(key) + ');' // strings only!
  }).join('\n')
  var seeks = target.map(function (key, i) {
    return ' start = seekKey(buffer, start, k' + i + ')'
  }).join('\n')
  return (
    '"use strict";\n' + // go fast sauce!
    decls + '\n' +
    ' return function (buffer, start) {\n' +
    seeks + '\n' +
    ' return start;\n' +
    '}\n'
  )
}
|
javascript
|
{
"resource": ""
}
|
q11774
|
Module
|
train
|
/**
 * Represents a single module tracked by the compiler.
 *
 * @constructor
 * @param {string} moduleId - identifier of the module.
 * @param {Object} ctx - the compiler context.
 * @param {Object} [combineConfig] - combine configuration; when omitted it
 *   is looked up from the compiler via getCombineConfig(moduleId).
 */
function Module(moduleId, ctx, combineConfig) {
  this.moduleId = moduleId;
  this.compiler = ctx;
  // Fall back to the compiler-provided combine config when none was given.
  this.combineConfig = combineConfig || ctx.getCombineConfig(moduleId);
  this.definition = null;
  // NOTE(review): prepare() is defined elsewhere on the prototype —
  // presumably it populates `definition`; confirm before relying on it.
  this.prepare();
}
|
javascript
|
{
"resource": ""
}
|
q11775
|
train
|
function(files) {
var collection = {};
files.forEach(function(file) {
var filename = file.name;
if (!collection.hasOwnProperty(filename) || collection[filename].version < file.version) {
collection[filename] = file;
}
});
return Object.keys(collection).map(function(name) {
return collection[name];
});
}
|
javascript
|
{
"resource": ""
}
|
|
q11776
|
plugin
|
train
|
/**
 * Metalsmith plugin: rewrite every file's contents through `special`.
 *
 * @param {Object} [options] - currently unused.
 * @returns {Function} a metalsmith plugin (files, metalsmith, done).
 */
function plugin(options) {
  return function(files, metalsmith, done) {
    Object.keys(files).forEach(function(file) {
      var data = files[file];
      // Buffer.from() replaces the deprecated/unsafe `new Buffer()` ctor.
      data.contents = Buffer.from(special(data.contents.toString()));
    });
    done();
  }
}
|
javascript
|
{
"resource": ""
}
|
q11777
|
KevoreeCore
|
train
|
/**
 * Core runtime of a Kevoree node.
 *
 * @constructor
 * @param {Object} resolver - deploy-unit resolver.
 * @param {Object} kevscript - KevScript engine used to interpret scripts.
 * @param {Object} loggerFactory - factory used to create named loggers.
 * @throws {Error} when any of the three collaborators is missing.
 */
function KevoreeCore(resolver, kevscript, loggerFactory) {
  if (!resolver || !kevscript || !loggerFactory) {
    throw new Error('KevoreeCore constructor needs: Resolver, KevScript engine and a LoggerFactory');
  }
  this.resolver = resolver;
  this.loggerFactory = loggerFactory;
  this.log = loggerFactory.create('Core');
  this.kevs = kevscript;
  // Lifecycle state: set while the core is shutting down.
  this.stopping = false;
  // currentModel is the last successfully deployed model; deployModel is the
  // one currently being applied (null when idle).
  this.currentModel = null;
  this.deployModel = null;
  this.nodeName = null;
  this.nodeInstance = null;
  // True until the first deployment completes.
  this.firstBoot = true;
  // Pending KevScript submissions waiting to be processed.
  this.scriptQueue = [];
  this.emitter = new EventEmitter();
}
|
javascript
|
{
"resource": ""
}
|
q11778
|
filter
|
train
|
function filter(condition) {
  // Normalise the predicate to node-callback style: (chunk, done).
  var predicate = makeAsync(condition, 2);
  // Object-mode transform: forward chunks the predicate keeps, drop the rest.
  return through.obj(function (chunk, enc, next) {
    predicate(chunk, function (err, keep) {
      if (err) {
        next(err);
      } else if (keep) {
        next(null, chunk);
      } else {
        next();
      }
    });
  });
}
|
javascript
|
{
"resource": ""
}
|
q11779
|
fromArray
|
train
|
function fromArray(source) {
  if (!Array.isArray(source)) {
    throw new TypeError('Expected `source` to be an array.');
  }
  // Work on a private copy so the caller's array is never drained.
  var queue = source.slice();
  return new Readable({
    objectMode: true,
    read: function () {
      // Emit the next queued item, or end the stream when exhausted.
      this.push(queue.length > 0 ? queue.shift() : null);
    }
  });
}
|
javascript
|
{
"resource": ""
}
|
q11780
|
Delegate
|
train
|
/**
 * Create a multicast delegate: a callable used to subscribe handlers, with
 * an `invoke` method that fires all currently subscribed handlers.
 *
 * Calling the delegate with a handler subscribes it and returns an
 * unsubscribe function; `invoke(...)` forwards its arguments to every
 * subscribed handler in order.
 *
 * @param {Function} [subscribed] - optional hook called with each new handler.
 * @param {Function} [unsubscribed] - optional hook called with the removed
 *   handler and the value returned by `subscribed`.
 * @returns {Function} the delegate.
 */
function Delegate(subscribed, unsubscribed) {
  var handlers = [];
  function callable(handler) {
    // Enforce the single-function-argument contract explicitly.
    if (arguments.length !== 1) {
      throw new Error('Delegate takes exactly 1 argument (' + arguments.length + ' given)');
    } else if (typeof handler !== 'function') {
      throw new Error('Delegate argument must be a Function object (got ' + typeof handler + ')');
    }
    // Add the handler
    handlers.push(handler);
    // Allow custom logic on subscribe, passing in the handler
    var subscribedResult;
    if (subscribed) {
      subscribedResult = subscribed(handler);
    }
    // Return the unsubscribe function
    return function unsubscribe() {
      // removeLast drops the most recent occurrence, so a handler subscribed
      // twice must be unsubscribed twice.
      var initialHandler = util.removeLast(handlers, handler);
      // Allow custom logic on unsubscribe, passing in the original handler
      if (unsubscribed) {
        unsubscribed(initialHandler, subscribedResult);
      }
      // Return the original handler
      return initialHandler;
    };
  }
  // Fire every subscribed handler with the supplied arguments.
  callable.invoke = function invoke() {
    var args = arguments;
    util.forEach(handlers, function (handler) {
      handler.apply(null, args);
    });
  };
  // Expose handlers for inspection
  callable.handlers = handlers;
  return callable;
}
|
javascript
|
{
"resource": ""
}
|
q11781
|
_decompressName
|
train
|
/**
 * Decompress a DNS-style name starting at `offset` in `buf`.
 *
 * Handles both inline labels and compression pointers (RFC 1035 §4.1.4).
 *
 * @param {Buffer} buf - packet buffer; offsets must be packet-relative.
 * @param {number} offset - position of the first length octet.
 * @returns {{bytesRead: number, name: string}|{error: Error}} the parsed
 *   name and the number of bytes consumed at `offset`, or an error object.
 */
function _decompressName(buf, offset) {
  var name = '';
  var bytes = 0;
  var octet = buf.readUInt8(offset + bytes);
  bytes += 1;
  // The name is made up of a variable number of labels. Each label begins
  // with a length octet. The string of labels is ended by a zero length octet.
  while (octet) {
    var label = null;
    // The first 2-bits of the octet are flags indicating if this is a length
    // value or a label pointer to elsewhere in the packet.
    var flags = octet & 0xc0;
    // If both the top 2-bits of the octet are set, then this is an offset pointer.
    if (flags === 0xc0) {
      // NOTE: The number of bytes parsed was already incremented. We need
      // to re-read that first byte to incorporate it into our pointer
      // value. Therefore subtract one from the offset and only add
      // one additional byte to the parsed count.
      var pointer = buf.readUInt16BE(offset + bytes - 1) & 0x3fff;
      bytes += 1;
      // NOTE: This will only work if the start of the buffer corresponds to
      // the start of the packet. If the packet is embedded in a larger
      // buffer then we need to pass through the offset to the start of
      // the packet.
      var res = _decompressName(buf, pointer);
      if (res.error) {
        return res;
      }
      label = res.name;
      if (!label) {
        return {bytesRead: bytes, name: name}
      }
      // Once a pointer is used the name section is complete. We do not need
      // to keep looking for a zero length octet. Note there is some logic at
      // the end of the loop we still want to execute, so simply set octet to
      // zero to terminate the loop instead of breaking.
      octet = 0;
      // If neither of the bits are set then the name is stored inline in the
      // following bytes. The name length is defined by the lower 6-bits of the
      // octet.
    } else if (flags === 0) {
      var length = octet & 0x3f;
      // Bounds-check before reading the label out of the buffer.
      if (offset + bytes + length > buf.length) {
        return {
          error: new Error('Name label too large to fit in remaining packet ' +
                           'bytes.')
        };
      }
      label = buf.toString('ascii', offset + bytes, offset + bytes + length);
      bytes += length;
      // Look for the next label's length octet
      octet = buf.readUInt8(offset + bytes);
      bytes += 1;
      // Any other values are undefined, so throw an error.
    } else {
      return {
        error: new Error('Label length octet at offset [' +
                         (offset + bytes - 1) + '] has unexpected top 2-bits ' +
                         'of [' + flags + ']; should be [' + 0xc0 + '] or ' +
                         '[0].')
      };
    }
    // Append to the last parsed label to the name. Separate labels with
    // periods.
    if (name.length > 0) {
      name += '.';
    }
    name += label;
  }
  return {bytesRead: bytes, name: name};
}
|
javascript
|
{
"resource": ""
}
|
q11782
|
_decodeName
|
train
|
/**
 * Decode a first-level-encoded NetBIOS name (RFC 1001 §14.1).
 *
 * Each original byte is encoded as two characters: ('A' + high nibble)
 * followed by ('A' + low nibble). Of the 16 decoded bytes the first 15 are
 * the (space-padded) name and the last is the suffix/type byte. Any scope
 * identifier after the first '.' is carried through unchanged.
 *
 * @param {string} name - encoded name, optionally followed by ".scope".
 * @returns {{fqdn: string, suffix: number}} decoded name and suffix byte.
 */
function _decodeName (name) {
  var encoded = name;
  var periodIndex = name.indexOf('.');
  if (periodIndex > -1) {
    // Only the portion before the scope identifier is nibble-encoded.
    encoded = name.slice(0, periodIndex);
  }
  var decoded = '';
  var suffix = 0;
  var charValue = 0; // fixed: stray double semicolon removed
  for (var i = 0, n = encoded.length; i < n; ++i) {
    // decode char to first nibble
    if (i % 2 === 0) {
      charValue = (encoded.charCodeAt(i) - 'A'.charCodeAt(0)) << 4;
    // decode char to second nibble and then combine with first nibble
    } else {
      charValue += encoded.charCodeAt(i) - 'A'.charCodeAt(0);
      // Append the newly decoded character for the first 15 bytes
      if (i < (encoded.length - 1)) {
        decoded += String.fromCharCode(charValue);
      // The last byte is reserved by convention as the suffix or type
      } else {
        suffix = charValue;
      }
    }
  }
  // NetBIOS names are always space padded out to 15 characters
  decoded = decoded.trim();
  // If there was a scope identifier (domain name) after the NetBIOS name
  // then re-append it to the newly decoded name.
  if (periodIndex > -1) {
    decoded += name.slice(periodIndex);
  }
  return {fqdn: decoded, suffix: suffix};
}
|
javascript
|
{
"resource": ""
}
|
q11783
|
fromPromise
|
train
|
function fromPromise(source) {
  // Validate that we actually received a thenable.
  var isThenable =
    source !== null &&
    typeof source === 'object' &&
    typeof source.then === 'function';
  if (!isThenable) {
    throw new TypeError('Expected `source` to be a promise.');
  }
  var reads = 0;
  return new Readable({
    objectMode: true,
    read: function () {
      var stream = this;
      reads += 1;
      // After the resolved value has been pushed, the next read ends the
      // stream.
      if (reads > 1) {
        stream.push(null);
        return;
      }
      // First read: wait for the promise and emit its value; surface
      // rejections as stream errors on the next tick.
      source.then(
        function onResolve(value) {
          stream.push(value);
        },
        function onReject(err) {
          process.nextTick(function () {
            stream.emit('error', err);
          });
        }
      );
    }
  });
}
|
javascript
|
{
"resource": ""
}
|
q11784
|
wait
|
train
|
function wait(type) {
  var config = this.asyncConfig;
  // Anything other than 'series' is treated as parallel execution.
  config.async = type === 'series' ? 'series' : 'parallel';
  // Hand back the completion callback the listener must invoke when done.
  return function next(err) {
    config.err = err;
    config.ListenerIsDone = true;
    if (config.ListenerCallback) {
      config.ListenerCallback(err);
    }
  };
}
|
javascript
|
{
"resource": ""
}
|
q11785
|
parse
|
train
|
function parse (str) {
  // Accept Buffers transparently by converting them to strings first.
  if (Buffer.isBuffer(str)) {
    str = str.toString();
  }
  type('str', str, 'String');
  try {
    return JSON.parse(str);
  } catch (ex) {
    // Enrich the original error with the offending input before rethrowing.
    ex.message = 'Unable to parse JSON: ' + ex.message + '\nAttempted to parse: ' + str;
    throw ex;
  }
}
|
javascript
|
{
"resource": ""
}
|
q11786
|
train
|
function (obj) {
obj = obj || {};
var exts = Array.prototype.slice.call(arguments, 1);
for (var k=0; k<exts.length; k++) {
if (exts[k]) {
for (var i in exts[k]) {
if (exts[k].hasOwnProperty(i)) {
obj[i] = exts[k][i];
}
}
}
}
return obj;
}
|
javascript
|
{
"resource": ""
}
|
|
q11787
|
train
|
function (time, n) {
time = Number(time) || 0;
n = n || 2;
if (n === 2) {
time = time < 10 ? time + '0' : time;
} else if (n === 3) {
time = time < 10
? time + '00'
: time < 100
? time + '0'
: time;
}
return String(time);
}
|
javascript
|
{
"resource": ""
}
|
|
q11788
|
train
|
function (level) {
if (!level || typeof level !== 'object') {
throw new Error('The first parameter must be an object describing the level.');
}
if (typeof level.name !== 'string' || !level.name.length) {
throw new Error('The level object must have a name property.');
}
return extend({
name: null,
priority: 3,
color: null
}, level);
}
|
javascript
|
{
"resource": ""
}
|
|
q11789
|
IssueFinder
|
train
|
/**
 * Finds issues in a GitHub repository via an octokat client.
 *
 * @constructor
 * @param {Object} options
 * @param {Object} options.octo - octokat instance (required).
 * @param {string} [options.repo] - repository name.
 * @param {string} [options.owner] - repository owner.
 * @throws {AssertionError} when no octokat instance is provided.
 */
function IssueFinder(options) {
  this.octo = options.octo;
  this.repo = options.repo;
  this.owner = options.owner;
  // Fail fast: every lookup needs an octokat client.
  assert(this.octo, 'No octokat instance provided!');
}
|
javascript
|
{
"resource": ""
}
|
q11790
|
_setupSerialConnection
|
train
|
/**
 * Open a serial connection to a Davis VUE weather station on the first
 * candidate port, wake the device, and start polling it with LOOP packets.
 *
 * Emits on the module-level `constructor` emitter: 'connected' (port),
 * 'data' (parsed packet), 'error', and 'close'. On error before a working
 * port was found, the next candidate port is tried.
 */
function _setupSerialConnection() {
  var port = availablePorts[0];
  debug.log('Trying to connect to Davis VUE via port: ' + port);
  // Open serial port connection
  var sp = new serialPort(port, config.serialPort);
  // NOTE(review): `received` is never used in this function — candidate for
  // removal; confirm nothing else relies on it.
  var received = '';
  sp.on('open', function () {
    debug.log('Serial connection established, waking up device.');
    // A bare newline wakes the console (it answers with '\n\r').
    sp.write('\n', function(err) {
      if (err) {
        // NOTE(review): the event name here is a message string — this
        // looks like a mistaken emit signature; confirm intended behavior.
        return constructor.emit('Error on write: ', err.message);
      }
    });
    sp.on('data', function (data) {
      if (!deviceAwake){
        // The wake-up acknowledgement is exactly '\n\r'.
        if (data.toString() === '\n\r'){
          debug.log('Device is awake');
          serialPortUsed = port;
          constructor.emit('connected', port);
          // Request the first LOOP packet.
          sp.write('LOOP 1\n');
          return;
        }
      }
      debug.log("Received data, length:" + data.length);
      if (data.length == 100){
        // remove ack (the leading ACK byte precedes the 99-byte LOOP packet)
        data = data.slice(1);
      }
      var parsedData = parsePacket(data);
      constructor.emit('data', parsedData);
      // Poll the station again after a 2 second pause.
      setTimeout(function () {
        sp.write('LOOP 1\n');
      }, 2000);
    });
  });
  sp.on('error', function (error) {
    constructor.emit('error', error);
    // Reject this port if we haven't found the correct port yet
    if (!serialPortUsed) {
      _tryNextSerialPort();
    }
  });
  sp.on('close', function () {
    // Force a fresh wake-up handshake on reconnect.
    deviceAwake = false;
    constructor.emit('close');
  });
}
|
javascript
|
{
"resource": ""
}
|
q11791
|
doSign
|
train
|
/**
 * Produce a DSA-style signature (r, s) for each hash block of `message`.
 *
 * @param {{p: number, q: number, g: number}} params - domain parameters.
 * @param {number} xKey - private key x.
 * @param {*} message - message to sign.
 * @param {Function} generateHash - returns an array of per-block hash values.
 * @returns {Array<{r: number, s: number}>} one signature per hash block.
 */
function doSign(params, xKey, message, generateHash) {
  // Let H be the hashing function and m the message:
  // Generate a random per-message value k where 1<k<q
  // Calculate r = (g^k mod p) mod q
  // In the unlikely case that r=0, start again with a different random k
  // Calculate s = k^(-1) * ( H (m) + xr) mod q
  // In the unlikely case that s=0, start again with a different random k
  // The signature is (r,s)
  // The first two steps amount to creating a new per-message key. The modular exponentiation here is the most
  // computationally expensive part of the signing operation, and it may be computed before the message hash is known.
  // The modular inverse k^(-1) mod q is the second most expensive part, and it may also be computed before the
  // message hash is known. It may be computed using the extended Euclidean algorithm or using Fermat's little theorem
  // as k^(q-2) mod q .
  let hash = generateHash(message);
  return hash.map(h => {
    let done = false;
    let r = 0;
    let s = 0;
    do {
      let k = 0;
      let kInverse = 0;
      do {
        // Generate a random per-message value k where 1<k<q
        // NOTE(review): Math.random() is not cryptographically secure —
        // acceptable only for demonstration purposes; confirm intent.
        k = params.q - Math.round(Math.random() * params.q);
        // Calculate r = (g^k mod p) mod q
        r = myNumbers.modPow(params.g, k, params.p) % params.q;
        // Calculate s = (k^(-1) * ( H (m) + xKey * r)) mod q
        // (k^(-1) x) mod q -- if q is prime --> x^(q-2) mod q
        if (r) kInverse = myNumbers.mInverse(k, params.q);
      } while (r === 0 || kInverse === 0); // In the unlikely case that r=0, start again with a different random k
      // Calculate s = (k^(-1) * ( H (m) + xKey * r)) mod q
      // NOTE(review): (h + 1) is used instead of plain H(m) — the matching
      // verify() applies the same +1, so the scheme is self-consistent;
      // presumably this avoids a zero hash value — confirm.
      s = ( h + 1 + xKey * r ) * kInverse % params.q;
      if (s) done = true;
    } while (!done);
    return {r: r, s: s}
  });
}
|
javascript
|
{
"resource": ""
}
|
q11792
|
verify
|
train
|
/**
 * Verify a DSA-style signature produced by doSign().
 *
 * @param {{p: number, q: number, g: number}} params - domain parameters.
 * @param {number} yKey - public key y.
 * @param {Array<{r: number, s: number}>} sign - per-block signatures.
 * @param {*} message - the signed message.
 * @param {Function} generateHash - same hash function used when signing.
 * @returns {boolean} true when every block's signature checks out.
 */
function verify(params, yKey, sign, message, generateHash) {
  // Reject the signature if 0<r<q or 0<s<q is not satisfied.
  // Calculate w = s ^ (-1) mod q
  // Calculate u_1 = H(m) * w mod q
  // Calculate u_2 = r * w mod q
  // Calculate v = ( (g^u_1 * y^u_2) mod p) mod q
  // The signature is valid if and only if v = r
  const hash = generateHash(message);
  // Signature and hash must cover the same number of blocks.
  if (sign.length !== hash.length) return false;
  return sign.every(({r, s}, i) => {
    if (!(0 < r && r < params.q && 0 < s && s < params.q)) return false;
    let w = myNumbers.mInverse(s, params.q);
    let u2 = r * w % params.q;
    let keyU2Mod = myNumbers.modPow(yKey, u2, params.p);
    let h = hash[i];
    if (h === null) return false;
    // NOTE: (h + 1) mirrors the same offset applied in doSign().
    let u1 = (h + 1) * w % params.q;
    let v = ((myNumbers.modPow(params.g, u1, params.p) * keyU2Mod) % params.p) % params.q;
    return v === r;
  });
}
|
javascript
|
{
"resource": ""
}
|
q11793
|
createAggregator
|
train
|
function createAggregator(setter, initializer) {
  // Build an aggregator (e.g. groupBy/countBy) that folds a collection into
  // an accumulator, dispatching to the array- or generic-base implementation.
  return function(collection, iteratee) {
    var accumulator = initializer ? initializer() : {};
    var aggregate = isArray_1(collection) ? _arrayAggregator : _baseAggregator;
    return aggregate(collection, setter, _baseIteratee(iteratee, 2), accumulator);
  };
}
|
javascript
|
{
"resource": ""
}
|
q11794
|
stringJSON2YAML
|
train
|
function stringJSON2YAML (doc, compact = false) {
  // Accept either a JSON string or an already-parsed object.
  if (!doc || (typeof doc !== 'string' && typeof doc !== 'object')) {
    throw new TypeError('Argument must be a string or object, and not empty: ' + doc)
  }
  const obj = typeof doc === 'string' ? JSON.parse(doc) : doc
  // flowLevel 0 produces the compact single-line (flow) YAML style.
  return compact ? yaml.safeDump(obj, { 'flowLevel': 0 }) : yaml.safeDump(obj)
}
|
javascript
|
{
"resource": ""
}
|
q11795
|
stringYAML2JSON
|
train
|
/**
 * Convert a YAML document string into a JSON string.
 *
 * @param {string} doc - YAML source; must parse to a mapping/sequence.
 * @param {boolean} [compact=false] - when true, emit minified JSON;
 *   otherwise pretty-print with 2-space indentation.
 * @returns {string} the JSON representation.
 * @throws {TypeError} when doc is not a non-empty string or not valid YAML.
 */
function stringYAML2JSON (doc, compact = false) {
  if (!doc || typeof doc !== 'string') {
    // Fixed message: unlike the JSON-input converters, this function only
    // accepts strings (the previous message claimed objects were allowed).
    throw new TypeError('Argument must be a string, and not empty: ' + doc)
  }
  let obj = yaml.safeLoad(doc)
  if (!obj || typeof obj !== 'object') {
    throw new TypeError('Argument must be in yaml format: ' + doc)
  }
  return JSON.stringify(obj, undefined, compact ? undefined : 2)
}
|
javascript
|
{
"resource": ""
}
|
q11796
|
stringJSON2JSON
|
train
|
function stringJSON2JSON (doc, compact = false) {
  // Accept either a JSON string or an already-parsed object.
  if (!doc || (typeof doc !== 'string' && typeof doc !== 'object')) {
    throw new TypeError('Argument must be a string or object, and not empty: ' + doc)
  }
  const obj = typeof doc === 'string' ? JSON.parse(doc) : doc
  // Pretty-print with 2-space indent unless compact output was requested.
  return compact ? JSON.stringify(obj) : JSON.stringify(obj, undefined, 2)
}
|
javascript
|
{
"resource": ""
}
|
q11797
|
stringYAML2YAML
|
train
|
/**
 * Re-serialize a YAML document string (normalize / reformat it).
 *
 * @param {string} doc - YAML source; must parse to a mapping/sequence.
 * @param {boolean} [compact=false] - when true, emit flow-style (single
 *   line) YAML via flowLevel 0.
 * @returns {string} the reformatted YAML.
 * @throws {TypeError} when doc is not a non-empty string or not valid YAML.
 */
function stringYAML2YAML (doc, compact = false) {
  if (!doc || typeof doc !== 'string') {
    // Fixed message: this function only accepts strings (the previous
    // message claimed objects were allowed).
    throw new TypeError('Argument must be a string, and not empty: ' + doc)
  }
  let obj = yaml.safeLoad(doc)
  if (!obj || typeof obj !== 'object') {
    throw new TypeError('Argument must be in yaml format: ' + doc)
  }
  return yaml.safeDump(obj, compact ? { 'flowLevel': 0 } : undefined)
}
|
javascript
|
{
"resource": ""
}
|
q11798
|
fuzzyStringCompare
|
train
|
/**
 * Fuzzy-compare two strings: exact (loose) equality, or Levenshtein
 * distance between the noise-stripped, lower-cased forms (truncated to
 * 255 characters).
 *
 * @param {string} a - first string.
 * @param {string} b - second string.
 * @param {number} [tolerence] - maximum allowed edit distance (inclusive);
 *   when omitted, a distance strictly below 10 counts as a match.
 * @returns {boolean} true when the strings match closely enough.
 */
function fuzzyStringCompare(a, b, tolerence) {
  if (a == b) return true;
  var as = stripNoise(a).toLowerCase();
  if (as.length > 255) as = as.substr(0, 255);
  var bs = stripNoise(b).toLowerCase();
  if (bs.length > 255) bs = bs.substr(0, 255);
  var distance = levenshtein(as, bs);
  // Fixed: always return an explicit boolean (previously fell through to
  // `undefined`), and honor tolerence === 0 (previously skipped by a
  // truthiness check).
  if (tolerence == undefined) return distance < 10;
  return distance <= tolerence;
}
|
javascript
|
{
"resource": ""
}
|
q11799
|
splitAuthor
|
train
|
function splitAuthor(author) {
  // Split an author string on commas, periods and whitespace, dropping
  // empty fragments and ordinal suffixes such as '1st', '2nd', '23rd'
  // (descendant numbering).
  var parts = author.split(/\s*[,\.\s]\s*/);
  return parts.filter(function (part) {
    return part && !/^[0-9]+(st|nd|rd|th)$/.test(part);
  });
}
|
javascript
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.