_id stringlengths 2 6 | title stringlengths 0 58 | partition stringclasses 3 values | text stringlengths 52 373k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
/**
 * Convert an array of little-endian 32-bit words into a binary string,
 * emitting one character per byte (low byte of each word first).
 *
 * @param {number[]} input - little-endian word array.
 * @returns {string} raw byte string of length input.length * 4.
 */
function binl2rstr(input) {
  var totalBits = input.length * 32;
  var chars = [];
  for (var bit = 0; bit < totalBits; bit += 8) {
    // Select the word holding this byte, shift it into place, mask to 8 bits.
    chars.push(String.fromCharCode((input[bit >> 5] >>> (bit % 32)) & 0xFF));
  }
  return chars.join('');
}
/**
 * RIPEMD-160 compression function over an array of little-endian 32-bit
 * words. Pads the message in place, then processes it in 512-bit (16-word)
 * blocks running the two parallel round lines the algorithm defines.
 *
 * Relies on helpers defined elsewhere in this module: safe_add (32-bit
 * wrap-around add), bit_rol (left rotate), rmd160_f / rmd160_K1 / rmd160_K2
 * (round functions and constants) and the rmd160_r*/rmd160_s* index tables.
 *
 * @param {number[]} x - message words; MUTATED by the padding step.
 * @param {number} len - message length in bits.
 * @returns {number[]} the five 32-bit state words [h0..h4].
 */
function binl(x, len) {
  var T, j, i, l,
    // Standard RIPEMD-160 initial chaining values.
    h0 = 0x67452301,
    h1 = 0xefcdab89,
    h2 = 0x98badcfe,
    h3 = 0x10325476,
    h4 = 0xc3d2e1f0,
    A1, B1, C1, D1, E1,
    A2, B2, C2, D2, E2;
  /* append padding */
  x[len >> 5] |= 0x80 << (len % 32);
  x[(((len + 64) >>> 9) << 4) + 14] = len;
  l = x.length;
  for (i = 0; i < l; i += 16) {
    // Both lines start each block from the current chaining state.
    A1 = A2 = h0;
    B1 = B2 = h1;
    C1 = C2 = h2;
    D1 = D2 = h3;
    E1 = E2 = h4;
    for (j = 0; j <= 79; j += 1) {
      // Left line: round function selected by j.
      T = safe_add(A1, rmd160_f(j, B1, C1, D1));
      T = safe_add(T, x[i + rmd160_r1[j]]);
      T = safe_add(T, rmd160_K1(j));
      T = safe_add(bit_rol(T, rmd160_s1[j]), E1);
      A1 = E1;
      E1 = D1;
      D1 = bit_rol(C1, 10);
      C1 = B1;
      B1 = T;
      // Right line: mirrored round function selected by 79 - j.
      T = safe_add(A2, rmd160_f(79 - j, B2, C2, D2));
      T = safe_add(T, x[i + rmd160_r2[j]]);
      T = safe_add(T, rmd160_K2(j));
      T = safe_add(bit_rol(T, rmd160_s2[j]), E2);
      A2 = E2;
      E2 = D2;
      D2 = bit_rol(C2, 10);
      C2 = B2;
      B2 = T;
    }
    // Combine the two lines into the new chaining state (rotating mix).
    T = safe_add(h1, safe_add(C1, D2));
    h1 = safe_add(h2, safe_add(D1, E2));
    h2 = safe_add(h3, safe_add(E1, A2));
    h3 = safe_add(h4, safe_add(A1, B2));
    h4 = safe_add(h0, safe_add(B1, C2));
    h0 = T;
  }
  return [h0, h1, h2, h3, h4];
}
/**
 * RIPEMD-160 round function: selects one of the five boolean mixing
 * functions based on which 16-round group `j` falls into.
 *
 * @param {number} j - round index, must be 0..79.
 * @param {number} x
 * @param {number} y
 * @param {number} z
 * @returns {number} the mixed 32-bit value.
 * @throws {Error} if j is outside 0..79. (The original silently returned the
 *   string 'rmd160_f: j out of range', which would poison the hash state;
 *   callers in this file only ever pass 0..79, so throwing is safe.)
 */
function rmd160_f(j, x, y, z) {
  return (0 <= j && j <= 15) ? (x ^ y ^ z) :
    (16 <= j && j <= 31) ? (x & y) | (~x & z) :
    (32 <= j && j <= 47) ? (x | ~y) ^ z :
    (48 <= j && j <= 63) ? (x & z) | (y & ~z) :
    (64 <= j && j <= 79) ? x ^ (y | ~z) :
    (function () { throw new Error('rmd160_f: j out of range'); })();
}
/**
 * Purge an auto-delete queue, waiting for any in-flight messages to resolve
 * first so they are not lost mid-handling.
 *
 * @param {Object} channel - amqp channel exposing purgeQueue(name).
 * @param {string} connectionName - connection label, used for logging only.
 * @param {Object} options - queue options; uniqueName (preferred) or name.
 * @param {Object} messages - in-flight message tracker: `.messages` array
 *   plus an `'empty'` event fired when all have been resolved.
 * @returns {Promise<number>} resolves with the number of purged messages.
 */
function purgeADQueue (channel, connectionName, options, messages) {
  const name = options.uniqueName || options.name;
  return new Promise(function (resolve, reject) {
    // Single purge step, shared by both branches (previously duplicated).
    const purge = function () {
      channel.purgeQueue(name)
        .then(
          result => resolve(result.messageCount),
          reject
        );
    };
    const messageCount = messages.messages.length;
    if (messageCount > 0) {
      // Defer the purge until every pending message has been settled.
      log.info(`Purge operation for queue '${options.name}' on '${connectionName}' is waiting for resolution on ${messageCount} messages`);
      messages.once('empty', purge);
    } else {
      purge();
    }
  });
}
/**
 * Enrich a critical-CSS rule with every declaration the original stylesheet
 * holds for the same selector list, de-duplicated by "property:value".
 * Handles the case where the original CSS repeats a selector several times.
 *
 * @param {Object[]} originalRules - parsed rules from the full stylesheet.
 * @param {Object} criticalRule - rule to enrich; MUTATED and returned.
 * @returns {Object} the same criticalRule, with merged declarations.
 */
function replaceDecls(originalRules, criticalRule){
  // Rules in the full stylesheet whose selector list matches exactly.
  var matchingRules = originalRules.filter(function(rule){
    return _.isEqual(rule.selectors, criticalRule.selectors);
  });
  // All of their declarations, flattened into one list.
  var originalDecls = _.flatten(matchingRules.map(function(rule){
    return rule.declarations;
  }));
  // Critical declarations win on duplicates because they come first.
  var combined = criticalRule.declarations.concat(originalDecls);
  criticalRule.declarations = _.uniqBy(combined, function(decl){
    return decl.property + ":" + decl.value;
  });
  return criticalRule;
}
/**
 * Collect the union of column names across a list of row objects,
 * preserving first-seen order.
 *
 * @param {Object[]} rows - row objects keyed by column name.
 * @returns {string[]} unique column names in encounter order.
 */
function inferColumns(rows) {
  // Null-prototype map avoids collisions with Object.prototype keys.
  var seen = Object.create(null);
  var columns = [];
  rows.forEach(function(row) {
    for (var name in row) {
      if (!(name in seen)) {
        seen[name] = name;
        columns.push(name);
      }
    }
  });
  return columns;
}
/**
 * Extract and parse the YAML payload of every `@swagger` tag found in a
 * list of parsed JSDoc comments.
 *
 * @param {Object[]} jsDocComments - doctrine-parsed comments, each with .tags.
 * @returns {Object[]} parsed YAML documents, one per @swagger tag.
 */
function filterJsDocComments(jsDocComments) {
  const swaggerJsDocComments = [];
  jsDocComments.forEach(comment => {
    comment.tags
      .filter(tag => tag.title === 'swagger')
      .forEach(tag => {
        swaggerJsDocComments.push(jsYaml.safeLoad(tag.description));
      });
  });
  return swaggerJsDocComments;
}
/**
 * Expand a list of glob patterns into a single flat list of file paths.
 *
 * @param {string[]} globs - glob patterns.
 * @returns {string[]} matched paths, in pattern order.
 */
function convertGlobPaths(globs) {
  const files = [];
  globs.forEach(globString => {
    files.push(...glob.sync(globString));
  });
  return files;
}
/**
 * Build a swagger spec from a definition plus API doc paths and write it to
 * disk, serialising as YAML or JSON based on the target file's extension.
 *
 * @param {Object} swaggerDefinition - base swagger definition object.
 * @param {string[]} apis - paths/globs of files holding API annotations.
 * @param {string} fileName - output path; .yml/.yaml selects YAML output.
 * @returns {void} — the write happens asynchronously; completion is only
 *   signalled by the console message.
 */
function createSpecification(swaggerDefinition, apis, fileName) {
  // Options for the swagger docs
  const options = {
    // Import swaggerDefinitions
    swaggerDefinition,
    // Path to the API docs
    apis,
  };
  // Initialize swagger-jsdoc -> returns validated JSON or YAML swagger spec
  let swaggerSpec;
  const ext = path.extname(fileName);
  if (ext === '.yml' || ext === '.yaml') {
    swaggerSpec = jsYaml.dump(swaggerJSDoc(options), {
      schema: jsYaml.JSON_SCHEMA,
      noRefs: true,
    });
  } else {
    swaggerSpec = JSON.stringify(swaggerJSDoc(options), null, 2);
  }
  fs.writeFile(fileName, swaggerSpec, err => {
    if (err) {
      // NOTE(review): throwing inside an async callback cannot be caught by
      // the caller and will crash the process — consider surfacing the error
      // via a callback/Promise instead.
      throw err;
    }
    console.log('Swagger specification is ready.');
  });
}
/**
 * Load a swagger definition file, dispatching on extension to the matching
 * loader from the module-level LOADERS table.
 *
 * @param {string} defPath - path to the definition file.
 * @param {*} data - raw file contents passed through to the loader.
 * @returns {Object} the loaded swagger definition.
 * @throws {Error} when the extension has no registered loader.
 */
function loadSpecification(defPath, data) {
  const resolvedPath = path.resolve(defPath);
  const loader = LOADERS[path.extname(resolvedPath)];
  // Reject unsupported definition formats up front.
  if (loader === undefined) {
    throw new Error('Definition file should be .js, .json, .yml or .yaml');
  }
  return loader(data, resolvedPath);
}
/**
 * Check whether every property of `obj` is an object with no own
 * enumerable keys (e.g. { definitions: {}, parameters: {} }).
 *
 * @param {Object} obj - object whose values are inspected.
 * @returns {boolean} true when all values are empty objects (or obj has
 *   no keys at all — vacuous truth).
 */
function hasEmptyProperty(obj) {
  return Object.values(obj).every(
    value => typeof value === 'object' && Object.keys(value).length === 0
  );
}
/**
 * Read an API source file and extract swagger-relevant content: the whole
 * document for YAML files, or every parsed JSDoc block for anything else.
 *
 * @param {string} file - path to the file to scan.
 * @returns {{yaml: Object[], jsdoc: Object[]}} parsed YAML documents and
 *   doctrine-parsed JSDoc comments (one of the two arrays stays empty).
 */
function parseApiFile(file) {
  const jsDocRegex = /\/\*\*([\s\S]*?)\*\//gm;
  const fileContent = fs.readFileSync(file, { encoding: 'utf8' });
  const ext = path.extname(file);
  const yaml = [];
  const jsDocComments = [];
  if (ext === '.yaml' || ext === '.yml') {
    // YAML files are swagger fragments in their entirety.
    yaml.push(jsYaml.safeLoad(fileContent));
  } else {
    // Everything else: pull each /** ... */ block and parse it as JSDoc.
    const regexResults = fileContent.match(jsDocRegex) || [];
    regexResults.forEach(result => {
      jsDocComments.push(doctrine.parse(result, { unwrap: true }));
    });
  }
  return {
    yaml,
    jsdoc: jsDocComments,
  };
}
/**
 * Generic read-transform-write helper for patching a build file in place.
 * Skips the write entirely when `monkeyPatched(content)` reports the patch
 * is already applied (making repeated builds idempotent).
 *
 * @param {string} filePath - file to patch.
 * @param {Function} monkeyPatched - content => truthy when already patched.
 * @param {Function} processor - (content, next) => transformed content.
 * @param {Function} complete - final callback (err?).
 */
function _monkeypatch(filePath, monkeyPatched, processor, complete) {
  async.waterfall(
    [
      function read(next) {
        fs.readFile(filePath, 'utf8', next);
      },
      // TODO - need to parse gyp file - this is a bit hacker
      function monkeypatch(content, next) {
        // Already patched: short-circuit the waterfall straight to complete.
        if (monkeyPatched(content)) return complete();
        _log('monkey patch %s', filePath);
        processor(content, next);
      },
      function write(content, next) {
        fs.writeFile(filePath, content, 'utf8', function(err) {
          return next(err);
        });
      },
    ],
    complete,
  );
}
/**
 * Leveled console logger with per-level prefixes. The first argument may be
 * a level name ('log' | 'error' | 'warn'); anything else is treated as part
 * of the message and logged at 'log' level.
 */
function _log() {
  var args = Array.prototype.slice.call(arguments, 0);
  var level = args.shift();
  // Unknown level: put the value back and fall through to plain 'log'.
  if (['log', 'error', 'warn'].indexOf(level) === -1) {
    args.unshift(level);
    level = 'log';
  }
  if (level == 'log') {
    args[0] = '----> ' + args[0];
  } else if (level == 'error') {
    args[0] = '....> ' + colors.red('ERROR: ') + args[0];
  } else if (level == 'warn') {
    args[0] = '....> ' + colors.yellow('WARNING: ') + args[0];
  }
  console[level].apply(console, args);
}
/**
 * Generate the source text of a module that embeds resource files as
 * string data, exposing keys()/get() accessors.
 *
 * @param {string[]} resourceFiles - absolute paths of files to embed.
 * @param {string} resourceRoot - base dir; keys are paths relative to it.
 * @returns {string|undefined} the generated module source, or undefined
 *   when there are no files to embed (callers must handle that).
 */
function embed(resourceFiles, resourceRoot) {
  if (resourceFiles.length > 0) {
    let buffer = 'var embeddedFiles = {\n';
    for (let i = 0; i < resourceFiles.length; ++i) {
      // Key: root-relative path (JSON-quoted); value: encoded file contents.
      buffer +=
        JSON.stringify(path.relative(resourceRoot, resourceFiles[i])) + ': "';
      buffer += encode(resourceFiles[i]) + '",\n';
    }
    // `accessor` is a module-level function whose source is inlined verbatim
    // to become the generated module's `get` export.
    buffer +=
      '\n};\n\nmodule.exports.keys = function () { return Object.keys(embeddedFiles); }\n\nmodule.exports.get = ';
    buffer += accessor.toString();
    return buffer;
  }
}
/**
 * Validate and normalise compile options, populating the module-level
 * `isPy`, `framework` and `version` variables, then continue the build.
 * When framework is iojs and version is 'latest', resolves the concrete
 * version number from the published release index before calling next().
 *
 * @param {Function} next - async-series continuation.
 */
function checkOpts(next) {
  /* failsafe */
  if (options === undefined) {
    _log('error', 'no options given to .compile()');
    process.exit();
  }
  /**
   * Have we been given a custom flag for python executable?
   **/
  if (
    options.python !== 'python' &&
    options.python !== '' &&
    options.python !== undefined
  ) {
    if (isWin) {
      isPy = options.python.replace(/\//gm, '\\'); // use windows file paths, batch is sensitive.
    } else {
      isPy = options.python;
    }
    _log('set python as ' + isPy);
  } else {
    isPy = 'python';
  }
  // remove dots (e.g. "node.js" -> "nodejs")
  options.framework = options.framework.replace(/\./g, '');
  // set outter-scope framework variable.
  framework = options.framework;
  _log('framework => ' + framework);
  version = options.nodeVersion; // better framework vc
  // check iojs version
  if (framework === 'iojs' && version === 'latest') {
    _log('fetching iojs versions');
    mkdirp(options.nodeTempDir); // make temp dir, probably repetive.
    // create write stream so we have control over events
    var output = fs.createWriteStream(
      path.join(options.nodeTempDir, 'iojs-versions.json'),
    );
    request.get('https://iojs.org/dist/index.json').pipe(output);
    output.on('close', function() {
      _log('done');
      // Release index is newest-first: entry 0 is the latest version.
      var f = fs.readFileSync(
        path.join(options.nodeTempDir, 'iojs-versions.json'),
      );
      f = JSON.parse(f);
      version = f[0].version.replace('v', '');
      _log('iojs latest => ' + version);
      // continue down along the async road
      next();
    });
  } else {
    next();
  }
}
/**
 * Thin async-series step: download (and build) the requested node/iojs
 * version via the module-level `_downloadNode`, forwarding the configure,
 * make and vcbuild argument lists from the compile options.
 *
 * @param {Function} next - async-series continuation.
 */
function downloadNode(next) {
  _downloadNode(
    version,
    options.nodeTempDir,
    options.nodeConfigureArgs,
    options.nodeMakeArgs,
    options.nodeVCBuildArgs,
    next,
  );
}
/**
 * Async-series step: capture the node compiler handle and, when resource
 * files were requested, generate lib/nexeres.js inside the node source tree
 * so the resources get baked into the binary.
 *
 * @param {Object} nc - node compiler handle (stored in module-level state).
 * @param {Function} next - async-series continuation.
 */
function embedResources(nc, next) {
  nodeCompiler = nc;
  // Default to "no resources" rather than failing on missing options.
  options.resourceFiles = options.resourceFiles || [];
  options.resourceRoot = options.resourceRoot || '';
  if (!Array.isArray(options.resourceFiles)) {
    throw new Error('Bad Argument: resourceFiles is not an array');
  }
  const resourcesBuffer = embed(
    options.resourceFiles,
    options.resourceRoot,
  );
  // embed() returns undefined when there is nothing to embed.
  if (resourcesBuffer != null) {
    const resourcePath = path.join(nodeCompiler.dir, 'lib', 'nexeres.js');
    // write nexeres.js
    _log('embedResources %s', options.resourceFiles);
    _log('resource -> %s', resourcePath);
    fs.writeFile(resourcePath, resourcesBuffer, next);
  } else {
    next();
  }
}
/**
 * Async-series step: produce lib/_third_party_main.js inside the node
 * source tree — either by wrapping a pre-built bundle (noBundle mode) or by
 * bundling the project entry point.
 *
 * @param {Function} next - async-series continuation.
 */
function combineProject(next) {
  if (options.noBundle) {
    _log(
      'using provided bundle %s since noBundle is true',
      options.input,
    );
    const source = fs.readFileSync(options.input, 'utf8');
    // This template executes inside the built binary before user code. When
    // the binary is not an IPC child (no process.send), insert a fake script
    // name into argv so the embedded bundle sees the usual argv layout.
    // (A leftover `console.log('toine', global.process.argv)` debug line was
    // removed here — it printed on every run of every built executable.)
    const thirdPartyMain = `
    if (!process.send) {
      process.argv.splice(1, 0, 'nexe.js');
    }
    const Module = require('module');
    const initModule = new Module(process.execPath, null);
    initModule.paths = Module._nodeModulePaths(process.cwd());
    return initModule._compile(${JSON.stringify(source)}, process.execPath);
    `;
    fs.writeFileSync(
      path.join(nodeCompiler.dir, 'lib', '_third_party_main.js'),
      thirdPartyMain,
    );
    next();
  } else {
    _log('bundle %s', options.input);
    bundle(options.input, nodeCompiler.dir, options, next);
  }
}
/**
 * Async-series step: remove a previously built release binary so the new
 * build starts clean. A missing file is not an error — there is simply
 * nothing to clean up.
 *
 * @param {Function} next - async-series continuation.
 */
function cleanUpOldExecutable(next) {
  fs.unlink(nodeCompiler.releasePath, function(err) {
    if (!err || err.code === 'ENOENT') {
      next();
      return;
    }
    // Any failure other than "file does not exist" is fatal.
    throw err;
  });
}
/**
 * Async-series step: sanity-check that the node build actually produced a
 * release binary; abort the whole process with a helpful message if not.
 *
 * @param {Function} next - async-series continuation.
 */
function checkThatExecutableExists(next) {
  // NOTE(review): fs.exists is deprecated — fs.access would be the modern
  // replacement, but the callback signature differs (err vs boolean).
  fs.exists(nodeCompiler.releasePath, function(exists) {
    if (!exists) {
      _log(
        'error',
        'The release executable has not been generated. ' +
          'This indicates a failure in the build process. ' +
          'There is likely additional information above.',
      );
      process.exit(1);
    } else {
      next();
    }
  });
}
/**
 * Async-series step: copy the freshly built release binary to the
 * user-requested output path.
 *
 * @param {Function} next - async-series continuation.
 */
function copyBinaryToOutput(next) {
  _log('cp %s %s', nodeCompiler.releasePath, options.output);
  ncp(nodeCompiler.releasePath, options.output, function(err) {
    if (err) {
      _log('error', "Couldn't copy binary.");
      throw err; // dump raw error object
    }
    _log('copied');
    next();
  });
}
/**
 * Download the node/iojs source tarball for the requested version into
 * nodeFilePath, skipping the download when the file already exists.
 * Progress is rendered via _logProgress.
 *
 * @param {Function} next - continuation invoked once the tarball is on disk.
 */
function downloadNode(next) {
  // Cached tarball from a previous run: nothing to do.
  if (fs.existsSync(nodeFilePath)) return next();
  var uri = framework;
  if (framework === 'node') {
    uri = 'nodejs'; // if node, use nodejs uri
  } else if (framework === 'nodejs') {
    framework = 'node'; // support nodejs, and node, as framework.
  }
  var type = global.type;
  var url,
    prefix = 'https://' + uri + '.org/dist';
  // 'latest' uses the unversioned path; concrete versions are under /vX.Y.Z.
  if (version === 'latest') {
    url = prefix + '/' + framework + '-' + version + '.tar.gz';
  } else {
    url =
      prefix +
      '/v' +
      version +
      '/' +
      framework +
      '-v' +
      version +
      '.tar.gz';
  }
  _log('downloading %s', url);
  var output = fs.createWriteStream(nodeFilePath, {
    flags: 'w+',
  });
  // need to set user-agent to bypass some corporate firewalls
  var requestOptions = {
    url: url,
    headers: {
      'User-Agent': 'Node.js',
    },
  };
  _logProgress(request(requestOptions)).pipe(output);
  output.on('close', function() {
    next();
  });
}
/**
 * Extract the downloaded node source tarball into nodeFileDir. On Windows
 * (no native tar assumed) this streams through gunzip + tar-stream and
 * writes entries by hand; elsewhere it shells out to the system tar.
 *
 * @param {Function} next - continuation invoked after extraction completes.
 */
function unzipNodeTarball(next) {
  var onError = function(err) {
    console.log(err.stack);
    _log('error', 'failed to extract the node source');
    process.exit(1);
  };
  if (isWin) {
    _log('extracting the node source [node-tar.gz]');
    // tar-stream method w/ gunzip-maybe
    var read = fs.createReadStream(nodeFilePath);
    var extract = tarstream.extract();
    var basedir = nodeFileDir;
    if (!fs.existsSync(nodeFileDir)) {
      fs.mkdirSync(nodeFileDir);
    }
    extract.on('entry', function(header, stream, callback) {
      // header is the tar header
      // stream is the content body (might be an empty stream)
      // call next when you are done with this entry
      var absolutepath = path.join(basedir, header.name);
      if (header.type === 'directory') {
        // handle directories.
        // console.log('dir:', header.name);
        fs.mkdirSync(absolutepath);
        return callback();
      } else if (header.type === 'file') {
        // handle files
        // console.log('file:', header.name);
      } else {
        // symlinks, hardlinks etc. are skipped with a warning.
        console.log(header.type + ':', header.name);
        _log('warn', 'unhandled type in tar extraction, skipping');
        return callback();
      }
      var write = fs.createWriteStream(absolutepath);
      stream.pipe(write);
      write.on('close', function() {
        return callback();
      });
      stream.on('error', function(err) {
        return onError(err);
      });
      write.on('error', function(err) {
        return onError(err);
      });
      stream.resume(); // just auto drain the stream
    });
    extract.on('finish', function() {
      _log('extraction finished');
      return next();
    });
    read.pipe(gunzip()).pipe(extract);
  } else {
    _log('extracting the node source [native tar]');
    var cmd = ['tar', '-xf', nodeFilePath, '-C', nodeFileDir];
    _log(cmd.join(' '));
    var tar = spawn(cmd.shift(), cmd);
    tar.stdout.pipe(process.stdout);
    tar.stderr.pipe(process.stderr);
    tar.on('close', function() {
      return next();
    });
    tar.on('error', onError);
  }
}
/**
 * Recursively rewrite every Makefile / *.mk under `dir` so that references
 * to the default python executable point at the user-configured one
 * (module-level `isPy`). Skips `test` directories.
 *
 * @param {string} dir - directory to walk.
 */
function _loop(dir) {
  /* eventually try every python file */
  var pdir = fs.readdirSync(dir);
  pdir.forEach(function(v, i) {
    var stat = fs.statSync(dir + '/' + v);
    if (stat.isFile()) {
      // only process Makefiles and .mk targets.
      if (v !== 'Makefile' && path.extname(v) !== '.mk') {
        return;
      }
      _log('patching ' + v);
      /* patch the file */
      var py = fs.readFileSync(dir + '/' + v, {
        encoding: 'utf8',
      });
      // NOTE(review): this regex matches any "...python" token (including
      // paths) and replaces it with isPy — broad, but apparently intended.
      py = py.replace(/([a-z]|\/)*python(\w|)/gm, isPy); // this is definently needed
      fs.writeFileSync(dir + '/' + v, py, {
        encoding: 'utf8',
      }); // write to file
    } else if (stat.isDirectory()) {
      // must be dir?
      // skip tests because we don't need them here
      if (v !== 'test') {
        _loop(dir + '/' + v);
      }
    }
  });
}
/**
 * Patch node.gyp so the generated build includes _third_party_main.js (and
 * nexeres.js when resources are embedded) in the list of library sources.
 * Idempotent: skips files already containing the patch marker.
 *
 * @param {Object} compiler - node compiler handle (.dir is the source tree).
 * @param {Object} options - compile options (.resourceFiles decides nexeres).
 * @param {Function} complete - callback (err?).
 */
function _monkeyPatchGyp(compiler, options, complete) {
  const hasNexeres = options.resourceFiles.length > 0;
  const gypPath = path.join(compiler.dir, 'node.gyp');
  let replacementString = "'lib/fs.js', 'lib/_third_party_main.js', ";
  if (hasNexeres) {
    replacementString += "'lib/nexeres.js', ";
  }
  _monkeypatch(
    gypPath,
    function(content) {
      // Already patched when _third_party_main.js is referenced.
      return ~content.indexOf('lib/_third_party_main.js');
    },
    function(content, next) {
      content = content.replace("'lib/fs.js',", replacementString);
      content = content.replace(
        "'deps/node-inspect/lib/internal/inspect_repl.js',",
        `'deps/node-inspect/lib/internal/inspect_repl.js',
        'google-closure-compiler-js.js',`,
      );
      next(null, content);
    },
    complete,
  );
}
/**
 * Patch configure.py so V8 embeds (and warms up) the user-provided startup
 * snapshot script. No-op when options.startupSnapshot is not set.
 * Idempotent via the 'v8_embed_script' marker.
 *
 * @param {Object} compiler - node compiler handle (.dir is the source tree).
 * @param {Function} complete - callback (err?).
 * @param {Object} options - compile options (.startupSnapshot path).
 */
function _monkeyPatchConfigure(compiler, complete, options) {
  var configurePath = path.join(compiler.dir, 'configure.py');
  var snapshotPath = options.startupSnapshot;
  if (snapshotPath != null) {
    _log('monkey patching configure file');
    // Resolve relative to the invoking project, not the node source tree.
    snapshotPath = path.join(process.cwd(), snapshotPath);
    return _monkeypatch(
      configurePath,
      function(content) {
        return ~content.indexOf('v8_embed_script');
      },
      function(content, next) {
        next(
          null,
          content.replace(
            'def configure_v8(o):',
            `def configure_v8(o):
    o['variables']['v8_embed_script'] = r'${snapshotPath}'
    o['variables']['v8_warmup_script'] = r'${snapshotPath}'`,
          ),
        );
      },
      complete,
    );
  } else {
    _log('not patching configure file');
  }
  return complete();
}
/**
 * Patch src/node.cc so the built binary ignores command-line flags meant
 * for node itself (they belong to the embedded app). Tries several
 * strategies matching different node/iojs source layouts, marks the file
 * with a sentinel comment so re-runs are no-ops, and exits the process when
 * no known layout matches (except on node 11/12 where patching is skipped).
 *
 * @param {Object} compiler - node compiler handle (.dir, .version).
 * @param {Function} complete - callback invoked when patching is done.
 */
function _monkeyPatchMainCc(compiler, complete) {
  let finalContents;
  let mainPath = path.join(compiler.dir, 'src', 'node.cc');
  let mainC = fs.readFileSync(mainPath, {
    encoding: 'utf8',
  });
  // content split, and original start/end
  let constant_loc = 1;
  let lines = mainC.split('\n');
  let startLine = lines.indexOf('  // TODO use parse opts');
  let endLine = lines.indexOf('  option_end_index = i;'); // pre node 0.11.6 compat
  let isPatched = lines.indexOf('// NEXE_PATCH_IGNOREFLAGS');
  if (isPatched !== -1) {
    // Sentinel found: a previous run already patched this tree.
    _log('already patched node.cc');
    return complete();
  }
  /**
   * This is the new method of passing the args. Tested on node.js 0.12.5
   * and iojs 2.3.1
   **/
  if (endLine === -1 && startLine === -1) {
    // only if the pre-0.12.5 failed.
    _log('using the after 0.12.5 method of ignoring flags.');
    startLine = lines.indexOf(
      "  while (index < nargs && argv[index][0] == '-') {",
    ); // beginning of the function
    endLine = lines.indexOf('  // Copy remaining arguments.');
    endLine--; // space, then it's at the }
    constant_loc = lines.length + 1;
  } else {
    _log('using 0.10.x > method of ignoring flags');
    lines[endLine] = '  option_end_index = 1;';
  }
  /**
   * This is the method for 5.5.0
   **/
  if (endLine === -1 || startLine === -1) {
    _log('using the after 5.5.0 method of ignoring flags.');
    startLine = lines.indexOf(
      "  while (index < nargs && argv[index][0] == '-' && !short_circuit) {",
    ); // beginning of the function
    endLine = lines.indexOf('  // Copy remaining arguments.');
    endLine--; // space, then it's at the }
    constant_loc = lines.length + 1;
  }
  // other versions here.
  if (endLine === -1 || startLine === -1) {
    // failsafe.
    _log('error', 'Failed to find a way to patch node.cc to ignoreFlags');
    _log('startLine =', startLine, '| endLine =', endLine);
    // node 11.x / 12.x: proceed without patching instead of aborting.
    if (!/^1(1|2)\./.test(compiler.version)) {
      process.exit(1);
    }
  }
  // check if it's been done
  lines[constant_loc] = '// NEXE_PATCH_IGNOREFLAGS';
  // Blank out the flag-parsing region; undefined entries vanish in join().
  for (var i = startLine; i < endLine; i++) {
    lines[i] = undefined; // set the value to undefined so it's skipped by the join
  }
  _log('patched node.cc');
  finalContents = lines.join('\n');
  // write the file contents
  fs.writeFile(
    mainPath,
    finalContents,
    {
      encoding: 'utf8',
    },
    function(err) {
      if (err) {
        _log('error', 'failed to write to', mainPath);
        return process.exit(1);
      }
      return complete();
    },
  );
}
/**
 * Return the first subdirectory found when scanning the glob matches of
 * `dir/*` from the END of the list, or false when there is none.
 *
 * @param {string} dir - directory whose entries are scanned.
 * @returns {string|false} path of the directory found, or false.
 */
function _getFirstDirectory(dir) {
  var candidates = glob.sync(dir + '/*');
  // Walk backwards, matching the original search order.
  for (var idx = candidates.length - 1; idx >= 0; idx--) {
    if (fs.statSync(candidates[idx]).isDirectory()) {
      return candidates[idx];
    }
  }
  return false;
}
/**
 * Attach a console progress bar to a download request, sized from the
 * Content-Length response header; aborts the process on request errors.
 *
 * @param {Object} req - request stream (emits 'response', 'data', 'error').
 * @returns {Object} the same request, for piping.
 */
function _logProgress(req) {
  req.on('response', function(resp) {
    var len = parseInt(resp.headers['content-length'], 10),
      bar = new ProgressBar('[:bar]', {
        complete: '=',
        incomplete: ' ',
        total: len,
        width: 100, // just use 100
      });
    req.on('data', function(chunk) {
      bar.tick(chunk.length);
    });
  });
  req.on('error', function(err) {
    console.log(err);
    _log('error', 'failed to download node sources,');
    process.exit(1);
  });
  return req;
}
/**
 * Best-effort package loader: try resolving relative to the current working
 * directory first, then fall back to a plain require; returns undefined when
 * neither succeeds (callers must handle the absence).
 *
 * @param {string} packageName - module name to load.
 * @returns {*} the loaded module, or undefined.
 */
function (packageName) {
  let result;
  try {
    // NOTE(review): Node's require.resolve options use `paths`, not
    // `basedir` (`basedir` belongs to the npm `resolve` package) — confirm
    // this branch resolves relative to cwd as intended.
    result = require.resolve(packageName, { basedir: process.cwd() });
    result = require(result);
  } catch (e) {
    try {
      result = require(packageName);
    } catch (e) {
      // Swallowing is deliberate: absence is signalled via undefined.
      result = undefined;
    }
  }
  return result;
}
/**
 * Scan `dirName` (non-recursively) and invoke `callback(entryName, path)`
 * for every `.js` file found; entryName is the filename up to its first dot.
 *
 * @param {string} dirName - directory to scan.
 * @param {Function} callback - receives (entryName, fullPath).
 * @returns {Object} always an empty object — results are delivered through
 *   the callback only. NOTE(review): the `entries` accumulator is never
 *   populated; it looks like dead code left from a refactor.
 */
function getExamples(dirName, callback) {
  const example_files = fs.readdirSync(dirName);
  const entries = {};
  // iterate through the list of files in the directory.
  for (const filename of example_files) {
    // ooo, javascript file!
    if (filename.endsWith('.js')) {
      // trim the entry name down to the file without the extension.
      // NOTE(review): split('.')[0] truncates at the FIRST dot, so
      // "app.bundle.js" becomes "app" — confirm that is intended.
      const entry_name = filename.split('.')[0];
      callback(entry_name, path.join(dirName, filename));
    }
  }
  return entries;
}
/**
 * Build a webpack entries map for a directory of example files:
 * { entryName: filePath, ... }, via getExamples' callback.
 *
 * @param {string} dirName - directory to scan.
 * @returns {Object<string,string>} entry name -> file path.
 */
function getEntries(dirName) {
  const entries = {};
  getExamples(dirName, (entryName, filename) => {
    entries[entryName] = filename;
  });
  return entries;
}
/**
 * Create one HtmlWebpackPlugin per example file in `dirName`, all sharing
 * the directory's _template.html and each pulling only its own chunk plus
 * the shared 'common' chunk.
 *
 * @param {string} dirName - directory containing examples and _template.html.
 * @returns {Object[]} HtmlWebpackPlugin instances, one per example.
 */
function getHtmlTemplates(dirName) {
  const html_conf = [];
  // create the array of HTML plugins.
  const template = path.join(dirName, '_template.html');
  getExamples(dirName, (entryName, filename) => {
    html_conf.push(
      new HtmlWebpackPlugin({
        title: entryName,
        // ensure each output has a unique filename
        filename: entryName + '.html',
        template,
        // without specifying chunks, all chunks are
        // included with the file.
        chunks: ['common', entryName]
      })
    );
  });
  return html_conf;
}
/**
 * Build the attribute hash for a new todo model from the create-input
 * field: trimmed title, next display order, not completed.
 * Runs as a Backbone view method (`this.input` is the cached input element).
 *
 * @returns {{title: string, order: number, completed: boolean}}
 */
function() {
  return {
    title: this.input.val().trim(),
    order: window.app.Todos.nextOrder(),
    completed: false
  };
}
/**
 * Destroy every completed todo in the collection. Returns false so the
 * triggering DOM event's default action is suppressed (Backbone handler).
 *
 * @returns {boolean} always false.
 */
function() {
  _.each(window.app.Todos.completed(), function(todo){ todo.destroy(); });
  return false;
}
q28136 | train | function(feature, minTime, maxTime) {
var featureStringTimes = this._getFeatureTimes(feature);
if (featureStringTimes.length == 0) {
return feature;
}
var featureTimes = [];
for (var i = 0, l = featureStringTimes.length; i < l; i++) {
var time = featureStringTimes[i]
if (typeof time == 'string' || time instanceof String) {
time = Date.parse(time.trim());
}
featureTimes.push(time);
}
if (featureTimes[0] > maxTime || featureTimes[l - 1] < minTime) {
return null;
}
return feature;
} | javascript | {
"resource": ""
} | |
/**
 * Build the done-callback used after route matching: on error or when no
 * methods were collected it defers straight to `fn`, otherwise it attempts
 * to send an automatic OPTIONS response listing the allowed methods.
 *
 * @param {Object} res - HTTP response.
 * @param {string[]} methods - collected HTTP methods for the matched path.
 * @returns {Function} onDone(fn, err) continuation.
 */
function generateOptionsResponder(res, methods) {
  return function onDone(fn, err) {
    var nothingToSend = err || methods.length === 0
    if (nothingToSend) {
      return fn(err)
    }
    trySendOptionsResponse(res, methods, fn)
  }
}
/**
 * Merge route params with parent-router params. Plain named params merge
 * shallowly (child wins); when BOTH sides carry numeric capture-group params
 * (index 0 present), the child's numeric params are shifted past the
 * parent's so captures from nested routers concatenate instead of clobber.
 *
 * @param {Object} params - child route params; numeric keys may be MUTATED
 *   in place during the offset step.
 * @param {Object} parent - parent router params (or non-object to skip).
 * @returns {Object} merged params object.
 */
function mergeParams(params, parent) {
  if (typeof parent !== 'object' || !parent) {
    return params
  }
  // make copy of parent for base
  var obj = mixin({}, parent)
  // simple non-numeric merging
  if (!(0 in params) || !(0 in parent)) {
    return mixin(obj, params)
  }
  var i = 0
  var o = 0
  // determine numeric gap in params
  while (i in params) {
    i++
  }
  // determine numeric gap in parent
  while (o in parent) {
    o++
  }
  // offset numeric indices in params before merge
  for (i--; i >= 0; i--) {
    params[i + o] = params[i]
    // create holes for the merge when necessary
    if (i < o) {
      delete params[i]
    }
  }
  return mixin(obj, params)
}
/**
 * Wrap `fn` so that, when invoked, the listed properties of `obj` are first
 * reset to the values they held at wrap time; then `fn` runs with the
 * caller's `this` and arguments.
 *
 * @param {Function} fn - function to wrap.
 * @param {Object} obj - object whose properties are snapshotted/restored.
 * @param {...string} props - property names to snapshot.
 * @returns {Function} the restoring wrapper.
 */
function restore(fn, obj) {
  // Snapshot the requested property names and their current values.
  var names = Array.prototype.slice.call(arguments, 2)
  var saved = names.map(function (name) {
    return obj[name]
  })
  return function () {
    // Put the snapshotted values back before delegating.
    for (var idx = 0; idx < names.length; idx++) {
      obj[names[idx]] = saved[idx]
    }
    return fn.apply(this, arguments)
  }
}
/**
 * Send an automatic OPTIONS response: a sorted, de-duplicated,
 * comma-separated list of allowed methods as the Allow header and body.
 *
 * @param {Object} res - HTTP response.
 * @param {string[]} methods - HTTP methods collected for this path.
 */
function sendOptionsResponse(res, methods) {
  // De-duplicate via a null-prototype map keyed by method name.
  var seen = Object.create(null)
  methods.forEach(function (method) {
    seen[method] = true
  })
  var allow = Object.keys(seen).sort().join(', ')
  // send response
  res.setHeader('Allow', allow)
  res.setHeader('Content-Length', Buffer.byteLength(allow))
  res.setHeader('Content-Type', 'text/plain')
  res.setHeader('X-Content-Type-Options', 'nosniff')
  res.end(allow)
}
/**
 * Attempt the automatic OPTIONS response; any synchronous failure (e.g.
 * headers already sent) is routed to `next` instead of crashing.
 *
 * @param {Object} res - HTTP response.
 * @param {string[]} methods - allowed HTTP methods.
 * @param {Function} next - error continuation.
 */
function trySendOptionsResponse(res, methods, next) {
  try {
    sendOptionsResponse(res, methods)
  } catch (err) {
    next(err)
  }
}
/**
 * Reducer that concatenates each modifier's markup (field named by the
 * closure variable `property`) after substituting the closure-level
 * `modifierPlaceholder` with the modifier's class name.
 *
 * @param {string} previousValue - accumulated markup so far.
 * @param {Object} currentValue - modifier with [property] and .className.
 * @returns {string} the extended accumulator.
 */
function reduceModifiers (previousValue, currentValue) {
  return previousValue + currentValue[property].replace(modifierPlaceholder, currentValue.className);
}
/**
 * Serialise KSS sections into plain JSON objects. When a section's markup
 * is just a partial filename (.html/.hbs/.pug) the partial's contents are
 * read from disk (relative to the block's file) and substituted in.
 *
 * @param {Object[]} sections - KSS section objects (accessor-style API).
 * @param {Object} block - source block; .filePath anchors partial lookups.
 * @returns {Object[]} plain serialisable section records.
 */
function jsonSections(sections, block) {
  return sections.map(function(section) {
    // Temporary inserting of partial
    var partial = section;
    // Markup that is a bare template filename means "inline this partial".
    if (partial.markup() && partial.markup().toString().match(/^[^\n]+\.(html|hbs|pug)$/)) {
      partial.file = partial.markup().toString();
      partial.name = path.basename(partial.file, path.extname(partial.file));
      partial.file = path.dirname(block.filePath) + '/' + partial.file;
      partial.markupText = fs.readFileSync(partial.file, 'utf8');
      // NOTE: mutates the section object so markup() now yields file text.
      section.markup = function() {
        return partial.markupText;
      };
    }
    return {
      header: generateDescription(section.header(), {noWrapper: true}),
      description: generateDescription(section.description()),
      modifiers: jsonModifiers(section.modifiers()),
      deprecated: section.deprecated(),
      experimental: section.experimental(),
      reference: section.reference(),
      markup: section.markup() ? section.markup().toString() : null
    };
  });
}
/**
 * Serialise KSS modifier objects into plain JSON records with 1-based ids.
 *
 * @param {Object[]} modifiers - KSS modifiers (accessor-style API).
 * @returns {Object[]} plain serialisable modifier records.
 */
function jsonModifiers(modifiers) {
  return modifiers.map(function(modifier, idx) {
    var record = {
      id: idx + 1,
      name: modifier.name(),
      description: modifier.description(),
      className: modifier.className(),
      markup: modifier.markup() ? modifier.markup().toString() : null
    };
    return record;
  });
}
/**
 * Intersect the line SEGMENTS p1-p2 and p3-p4 (standard parametric form).
 *
 * @param {{x:number,y:number}} p1 - first segment start.
 * @param {{x:number,y:number}} p2 - first segment end.
 * @param {{x:number,y:number}} p3 - second segment start.
 * @param {{x:number,y:number}} p4 - second segment end.
 * @returns {Object|false} intersection point as a Springy.Vector, or false
 *   when the segments are parallel or do not overlap within their extents.
 */
function intersect_line_line(p1, p2, p3, p4) {
  var denom = ((p4.y - p3.y)*(p2.x - p1.x) - (p4.x - p3.x)*(p2.y - p1.y));
  // lines are parallel
  if (denom === 0) {
    return false;
  }
  // ua/ub are the parametric positions of the crossing along each segment.
  var ua = ((p4.x - p3.x)*(p1.y - p3.y) - (p4.y - p3.y)*(p1.x - p3.x)) / denom;
  var ub = ((p2.x - p1.x)*(p1.y - p3.y) - (p2.y - p1.y)*(p1.x - p3.x)) / denom;
  // Outside [0,1] on either segment means the infinite lines cross, but the
  // segments themselves do not.
  if (ua < 0 || ua > 1 || ub < 0 || ub > 1) {
    return false;
  }
  return new Springy.Vector(p1.x + ua * (p2.x - p1.x), p1.y + ua * (p2.y - p1.y));
}
/**
 * Delete the objects in an S3 bucket, optionally restricted to keys
 * starting with `keyPrefix`. Resolves immediately when the bucket is empty.
 *
 * NOTE(review): only the first page of listObjects results is processed
 * (S3 pages at 1000 keys) — buckets with more objects are only partially
 * emptied per call; confirm whether pagination is handled upstream.
 *
 * @param {Object} aws - serverless AWS provider (request(service, op, params)).
 * @param {string} bucketName - bucket to empty.
 * @param {string} [keyPrefix] - only delete keys with this prefix.
 * @returns {Promise} resolves when the delete request completes (or no-op).
 */
function emptyBucket(aws, bucketName, keyPrefix) {
  return listObjectsInBucket(aws, bucketName).then(resp => {
    const contents = resp.Contents;
    let testPrefix = false,
      prefixRegexp;
    if (!contents[0]) {
      return Promise.resolve();
    } else {
      if (keyPrefix) {
        testPrefix = true;
        prefixRegexp = new RegExp('^' + keyPrefix);
      }
      const objects = contents.map(function(content) {
        return {Key: content.Key};
      }).filter(content => !testPrefix || prefixRegexp.test(content.Key));
      const params = {
        Bucket: bucketName,
        Delete: { Objects: objects }
      };
      return aws.request('S3', 'deleteObjects', params);
    }
  });
}
/**
 * Apply static-website configuration to an S3 bucket: either a blanket
 * redirect (redirectAllRequestsTo) or index/error documents, plus optional
 * routing rules. camelCase option keys are converted to the PascalCase the
 * AWS API expects.
 *
 * NOTE: mutates entries of `routingRules` in place when coercing
 * httpRedirectCode / httpErrorCodeReturnedEquals to strings.
 *
 * @param {Object} aws - serverless AWS provider (request(service, op, params)).
 * @param {string} bucketName - target bucket.
 * @param {string} indexDocument - index document key suffix.
 * @param {string} errorDocument - error document key.
 * @param {Object} [redirectAllRequestsTo] - { hostName, protocol? }.
 * @param {Object[]} [routingRules] - rules with .redirect and optional .condition.
 * @returns {Promise} resolves when putBucketWebsite completes.
 */
function configureBucket(
  aws,
  bucketName,
  indexDocument,
  errorDocument,
  redirectAllRequestsTo,
  routingRules
) {
  const params = {
    Bucket: bucketName,
    WebsiteConfiguration: {}
  };
  if (redirectAllRequestsTo) {
    // Blanket redirect mode: index/error documents are mutually exclusive
    // with RedirectAllRequestsTo per the S3 API.
    params.WebsiteConfiguration.RedirectAllRequestsTo = {};
    params.WebsiteConfiguration.RedirectAllRequestsTo.HostName = redirectAllRequestsTo.hostName;
    if (redirectAllRequestsTo.protocol) {
      params.WebsiteConfiguration.RedirectAllRequestsTo.Protocol = redirectAllRequestsTo.protocol;
    }
  } else {
    // AWS's terminology (Suffix/Key) here is weird. The following is how you specify
    // index and error documents for the bucket. See docs:
    // https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putBucketWebsite-property
    params.WebsiteConfiguration.IndexDocument = { Suffix: indexDocument };
    params.WebsiteConfiguration.ErrorDocument = { Key: errorDocument };
  }
  if (routingRules) {
    params.WebsiteConfiguration.RoutingRules = [];
    routingRules.forEach(r => {
      const routingRule = {
        Redirect: {}
      };
      const redirectProps = [
        'hostName',
        'httpRedirectCode',
        'protocol',
        'replaceKeyPrefixWith',
        'replaceKeyWith'
      ];
      redirectProps.forEach(p => {
        if (r.redirect[p]) {
          if (p === 'httpRedirectCode') {
            // S3 expects the code as a string.
            r.redirect[p] = r.redirect[p].toString();
          }
          // AWS expects the redirect properties to be PascalCase, while our API
          // uses camelCase. Converting here.
          routingRule.Redirect[p.charAt(0).toUpperCase() + p.slice(1)] = r.redirect[p];
        }
      });
      if (r.condition) {
        routingRule.Condition = {};
        const conditionProps = ['httpErrorCodeReturnedEquals', 'keyPrefixEquals'];
        conditionProps.forEach(p => {
          if (r.condition[p]) {
            if (p === 'httpErrorCodeReturnedEquals') {
              r.condition[p] = r.condition[p].toString();
            }
            // AWS expects the redirect conditions to be PascalCase, while our API
            // uses camelCase. Converting here.
            routingRule.Condition[p.charAt(0).toUpperCase() + p.slice(1)] = r.condition[p];
          }
        });
      }
      params.WebsiteConfiguration.RoutingRules.push(routingRule);
    });
  }
  return aws.request('S3', 'putBucketWebsite', params);
}
/**
 * Attach a bucket policy: the caller's custom policy when given, otherwise
 * a default public-read policy granting s3:GetObject on every object.
 *
 * @param {Object} aws - serverless AWS provider (request(service, op, params)).
 * @param {string} bucketName - target bucket.
 * @param {Object} [customPolicy] - full policy document to use instead.
 * @returns {Promise} resolves when putBucketPolicy completes.
 */
function configurePolicyForBucket(aws, bucketName, customPolicy) {
  const publicReadPolicy = {
    Version: '2012-10-17',
    Statement: [
      {
        Effect: 'Allow',
        Principal: {
          AWS: '*'
        },
        Action: 's3:GetObject',
        Resource: `arn:aws:s3:::${bucketName}/*`
      }
    ]
  };
  const policy = customPolicy || publicReadPolicy;
  return aws.request('S3', 'putBucketPolicy', {
    Bucket: bucketName,
    Policy: JSON.stringify(policy)
  });
}
/**
 * Apply the packaged default CORS policy (resources/CORSPolicy) to a bucket.
 *
 * @param {Object} aws - serverless AWS provider (request(service, op, params)).
 * @param {string} bucketName - target bucket.
 * @returns {Promise} resolves when putBucketCors completes.
 */
function configureCorsForBucket(aws, bucketName) {
  const params = {
    Bucket: bucketName,
    CORSConfiguration: require('./resources/CORSPolicy')
  };
  return aws.request('S3', 'putBucketCors', params);
}
/**
 * Upload a directory tree to S3. Files are grouped by the upload-order spec;
 * groups upload sequentially (each group waits for the previous), while the
 * files within one group upload in parallel.
 *
 * @param {Object} aws - serverless AWS provider.
 * @param {string} bucketName - target bucket.
 * @param {string} clientRoot - local directory whose contents are uploaded.
 * @param {Object} headerSpec - per-file header configuration.
 * @param {Object} orderSpec - ordering configuration for grouped uploads.
 * @param {string} [keyPrefix] - prefix prepended to every object key.
 * @returns {Promise<Array>} accumulated putObject results for all files.
 */
function uploadDirectory(aws, bucketName, clientRoot, headerSpec, orderSpec, keyPrefix) {
  const allFiles = getFileList(clientRoot);
  const filesGroupedByOrder = groupFilesByOrder(allFiles, orderSpec);
  // Chain the groups: each iteration awaits the previous groups' uploads.
  return filesGroupedByOrder.reduce((existingUploads, files) => {
    return existingUploads.then(existingResults => {
      const uploadList = buildUploadList(files, clientRoot, headerSpec, keyPrefix);
      return Promise.all(
        uploadList.map(u => uploadFile(aws, bucketName, u.filePath, u.fileKey, u.headers))
      ).then(currentResults => existingResults.concat(currentResults));
    });
  }, Promise.resolve([]));
}
/**
 * Upload a single file to S3 via putObject. Well-known HTTP headers map to
 * their dedicated putObject parameters; anything else becomes x-amz-meta
 * user metadata.
 *
 * @param {Object} aws - serverless AWS provider (request(service, op, params)).
 * @param {string} bucketName - target bucket.
 * @param {string} filePath - local file to read.
 * @param {string} fileKey - destination object key.
 * @param {Object<string,string>} headers - headers configured for this file.
 * @returns {Promise} resolves when putObject completes.
 */
function uploadFile(aws, bucketName, filePath, fileKey, headers) {
  const baseHeaderKeys = [
    'Cache-Control',
    'Content-Disposition',
    'Content-Encoding',
    'Content-Language',
    'Content-Type',
    'Expires',
    'Website-Redirect-Location'
  ];
  const fileBuffer = fs.readFileSync(filePath);
  const params = {
    Bucket: bucketName,
    Key: fileKey,
    Body: fileBuffer,
    ContentType: mime.lookup(filePath)
  };
  Object.keys(headers).forEach(h => {
    if (baseHeaderKeys.includes(h)) {
      // Strip ALL hyphens to form the SDK parameter name. The previous
      // h.replace('-', '') only removed the FIRST hyphen, producing the
      // invalid parameter 'WebsiteRedirect-Location' for
      // 'Website-Redirect-Location'.
      params[h.replace(/-/g, '')] = headers[h];
    } else {
      // Unknown headers become user metadata (x-amz-meta-*).
      if (!params.Metadata) {
        params.Metadata = {};
      }
      params.Metadata[h] = headers[h];
    }
  });
  return aws.request('S3', 'putObject', params);
}
/**
 * Split a list into keyed and keyless items for list-diffing: keyed items
 * are indexed by their key -> position, keyless ones collect into `free`.
 *
 * @param {Array} list - items to index.
 * @param {string|Function} key - key name or extractor passed to getItemKey.
 * @returns {{keyIndex: Object, free: Array}}
 */
function makeKeyIndexAndFree (list, key) {
  var keyIndex = {}
  var free = []
  list.forEach(function (item, position) {
    var itemKey = getItemKey(item, key)
    if (itemKey) {
      keyIndex[itemKey] = position
    } else {
      // No usable key: the item can be reused anywhere.
      free.push(item)
    }
  })
  return {
    keyIndex: keyIndex,
    free: free
  }
}
q28153 | setIndexesForSegments | train | function setIndexesForSegments(
segmentsObject = null,
serviceSegmentsObject = null
) {
const segments = segmentsObject
? Object.keys(segmentsObject).map(k => segmentsObject[k])
: null;
const serviceSegments = serviceSegmentsObject
? Object.keys(serviceSegmentsObject).map(k => serviceSegmentsObject[k])
: null;
if (segments === null && serviceSegments === null) {
return { segments, serviceSegments };
}
if (segments !== null && serviceSegments === null) {
const segmentsNew = segments.map((segment, key) => ({
...segment,
index: key + 1,
}));
return { segments: segmentsNew, serviceSegments };
}
if (segments === null && serviceSegments !== null) {
const serviceSegmentsNew = serviceSegments.map(
(segment, key) => ({
...segment,
index: key + 1,
})
);
return { segments, serviceSegments: serviceSegmentsNew };
}
const maxSegmentsTravelOrder = segments.reduce(travelOrderReducer, 0);
const maxServiceSegmentsTravelOrder = serviceSegments.reduce(travelOrderReducer, 0);
const maxOrder = Math.max(
maxSegmentsTravelOrder,
maxServiceSegmentsTravelOrder
);
const allSegments = [];
for (let i = 1; i <= maxOrder; i += 1) {
segments.forEach(s => (Number(s.TravelOrder) === i ? allSegments.push(s) : null));
serviceSegments.forEach(s => (Number(s.TravelOrder) === i ? allSegments.push(s) : null));
}
const indexedSegments = allSegments.map((s, k) => ({ ...s, index: k + 1 }));
return {
segments: indexedSegments.filter(s => s.SegmentType === undefined),
serviceSegments: indexedSegments.filter(s => s.SegmentType === 'Service'),
};
} | javascript | {
"resource": ""
} |
q28154 | cookieParser | train | function cookieParser (secret, options) {
var secrets = !secret || Array.isArray(secret)
? (secret || [])
: [secret]
return function cookieParser (req, res, next) {
if (req.cookies) {
return next()
}
var cookies = req.headers.cookie
req.secret = secrets[0]
req.cookies = Object.create(null)
req.signedCookies = Object.create(null)
// no cookies
if (!cookies) {
return next()
}
req.cookies = cookie.parse(cookies, options)
// parse signed cookies
if (secrets.length !== 0) {
req.signedCookies = signedCookies(req.cookies, secrets)
req.signedCookies = JSONCookies(req.signedCookies)
}
// parse JSON cookies
req.cookies = JSONCookies(req.cookies)
next()
}
} | javascript | {
"resource": ""
} |
q28155 | JSONCookie | train | function JSONCookie (str) {
if (typeof str !== 'string' || str.substr(0, 2) !== 'j:') {
return undefined
}
try {
return JSON.parse(str.slice(2))
} catch (err) {
return undefined
}
} | javascript | {
"resource": ""
} |
q28156 | JSONCookies | train | function JSONCookies (obj) {
var cookies = Object.keys(obj)
var key
var val
for (var i = 0; i < cookies.length; i++) {
key = cookies[i]
val = JSONCookie(obj[key])
if (val) {
obj[key] = val
}
}
return obj
} | javascript | {
"resource": ""
} |
q28157 | train | function () {
try {
return new TextDecoder().decode(new TextEncoder().encode("test"), {stream: true}) === "test";
} catch (error) {
console.log(error);
}
return false;
} | javascript | {
"resource": ""
} | |
q28158 | train | function (searchQuery) {
var selector = page.googleSearch.elements.searchInput;
// return a promise so the calling function knows the task has completed
return driver.findElement(selector).sendKeys(searchQuery, selenium.Key.ENTER);
} | javascript | {
"resource": ""
} | |
q28159 | getDriverInstance | train | function getDriverInstance() {
var driver;
switch (browserName || '') {
case 'firefox': {
driver = new FireFoxDriver();
}
break;
case 'phantomjs': {
driver = new PhantomJSDriver();
}
break;
case 'electron': {
driver = new ElectronDriver();
}
break;
case 'chrome': {
driver = new ChromeDriver();
}
break;
// try to load from file
default: {
var driverFileName = path.resolve(process.cwd(), browserName);
if (!fs.isFileSync(driverFileName)) {
throw new Error('Could not find driver file: ' + driverFileName);
}
driver = require(driverFileName)();
}
}
return driver;
} | javascript | {
"resource": ""
} |
q28160 | getEyesInstance | train | function getEyesInstance() {
if (global.eyesKey) {
var eyes = new Eyes();
// retrieve eyes api key from config file in the project root as defined by the user
eyes.setApiKey(global.eyesKey);
return eyes;
}
return null;
} | javascript | {
"resource": ""
} |
q28161 | createWorld | train | function createWorld() {
var runtime = {
driver: null, // the browser object
eyes: null,
selenium: selenium, // the raw nodejs selenium driver
By: selenium.By, // in keeping with Java expose selenium By
by: selenium.By, // provide a javascript lowercase version
until: selenium.until, // provide easy access to selenium until methods
expect: expect, // expose chai expect to allow variable testing
assert: assert, // expose chai assert to allow variable testing
trace: consoleInfo, // expose an info method to log output to the console in a readable/visible format
page: global.page || {}, // empty page objects placeholder
shared: global.shared || {} // empty shared objects placeholder
};
// expose properties to step definition methods via global variables
Object.keys(runtime).forEach(function (key) {
if (key === 'driver' && browserTeardownStrategy !== 'always') {
return;
}
// make property/method available as a global (no this. prefix required)
global[key] = runtime[key];
});
} | javascript | {
"resource": ""
} |
q28162 | importSupportObjects | train | function importSupportObjects() {
// import shared objects from multiple paths (after global vars have been created)
if (global.sharedObjectPaths && Array.isArray(global.sharedObjectPaths) && global.sharedObjectPaths.length > 0) {
var allDirs = {};
// first require directories into objects by directory
global.sharedObjectPaths.forEach(function (itemPath) {
if (fs.existsSync(itemPath)) {
var dir = requireDir(itemPath, { camelcase: true, recurse: true });
merge(allDirs, dir);
}
});
// if we managed to import some directories, expose them
if (Object.keys(allDirs).length > 0) {
// expose globally
global.shared = allDirs;
}
}
// import page objects (after global vars have been created)
if (global.pageObjectPath && fs.existsSync(global.pageObjectPath)) {
// require all page objects using camel case as object names
global.page = requireDir(global.pageObjectPath, { camelcase: true, recurse: true });
}
// add helpers
global.helpers = require('../runtime/helpers.js');
} | javascript | {
"resource": ""
} |
q28163 | train | function(url, waitInSeconds) {
// use either passed in timeout or global default
var timeout = (waitInSeconds) ? (waitInSeconds * 1000) : DEFAULT_TIMEOUT;
// load the url and wait for it to complete
return driver.get(url).then(function() {
// now wait for the body element to be present
return driver.wait(until.elementLocated(by.css('body')), timeout);
});
} | javascript | {
"resource": ""
} | |
q28164 | train | function (htmlCssSelector, attributeName) {
// get the element from the page
return driver.findElement(by.css(htmlCssSelector)).then(function(el) {
return el.getAttribute(attributeName);
});
} | javascript | {
"resource": ""
} | |
q28165 | train | function(elementSelector, attributeName, waitInMilliseconds) {
// use either passed in timeout or global default
var timeout = waitInMilliseconds || DEFAULT_TIMEOUT;
// readable error message
var timeoutMessage = attributeName + ' does not exists after ' + waitInMilliseconds + ' milliseconds';
// repeatedly execute the test until it's true or we timeout
return driver.wait(function() {
// get the html attribute value using helper method
return helpers.getAttributeValue(elementSelector, attributeName).then(function(value) {
// attribute exists if value is not null
return value !== null;
});
}, timeout, timeoutMessage);
} | javascript | {
"resource": ""
} | |
q28166 | train | function () {
if (!els.length) return
els.map(function (el) {
el.parentElement.removeChild(el)
})
els = []
} | javascript | {
"resource": ""
} | |
q28167 | train | function (obj, el) {
var self = this
Object.keys(obj).map(function (prop) {
var sh = self.shorthand[prop] || prop
if (sh.match(/(body|undo|replace)/g)) return
if (sh === 'inner') {
el.textContent = obj[prop]
return
}
el.setAttribute(sh, obj[prop])
})
return el
} | javascript | {
"resource": ""
} | |
q28168 | train | function (obj) {
if (!obj) return
diffTitle.before = opt.complement
var title = obj.inner + ' ' + (obj.separator || opt.separator) +
' ' + (obj.complement || opt.complement)
window.document.title = title.trim()
} | javascript | {
"resource": ""
} | |
q28169 | train | function (arr, tag, place, update) {
var self = this
if (!arr) return
arr.map(function (obj) {
var parent = (obj.body) ? self.getPlace('body') : self.getPlace(place)
var el = window.document.getElementById(obj.id) || window.document.createElement(tag)
// Elements that will substitute data
if (el.hasAttribute('id')) {
self.prepareElement(obj, el)
return
}
// Other elements
el = self.prepareElement(obj, el)
// Updated elements
if (update) {
diffEls.push(el)
return
}
// Append Elements
self.add(obj, el, parent)
})
} | javascript | {
"resource": ""
} | |
q28170 | VueHead | train | function VueHead (Vue, options) {
if (installed) return
installed = true
if (options) {
Vue.util.extend(opt, options)
}
/**
* Initializes and updates the elements in the head
* @param {Boolean} update
*/
function init (update) {
var self = this
var head = (typeof self.$options.head === 'function') ? self.$options.head.bind(self)() : self.$options.head
if (!head) return
Object.keys(head).map(function (key) {
var prop = head[key]
if (!prop) return
var obj = (typeof prop === 'function') ? head[key].bind(self)() : head[key]
if (key === 'title') {
util[key](obj)
return
}
util.handle(obj, key, 'head', update)
})
self.$emit('okHead')
}
/**
* Remove the meta tags elements in the head
*/
function destroy () {
if (!this.$options.head) return
util.undoTitle(diffTitle)
util.undo()
}
// v1
if (Vue.version.match(/[1].(.)+/g)) {
Vue.mixin({
ready: function () {
init.bind(this)()
},
destroyed: function () {
destroy.bind(this)()
},
events: {
updateHead: function () {
init.bind(this)(true)
util.update()
}
}
})
}
// v2
if (Vue.version.match(/[2].(.)+/g)) {
Vue.mixin({
created: function () {
var self = this
self.$on('updateHead', function () {
init.bind(self)(true)
util.update()
})
},
mounted: function () {
init.bind(this)()
},
beforeDestroy: function () {
destroy.bind(this)()
}
})
}
} | javascript | {
"resource": ""
} |
q28171 | init | train | function init (update) {
var self = this
var head = (typeof self.$options.head === 'function') ? self.$options.head.bind(self)() : self.$options.head
if (!head) return
Object.keys(head).map(function (key) {
var prop = head[key]
if (!prop) return
var obj = (typeof prop === 'function') ? head[key].bind(self)() : head[key]
if (key === 'title') {
util[key](obj)
return
}
util.handle(obj, key, 'head', update)
})
self.$emit('okHead')
} | javascript | {
"resource": ""
} |
q28172 | gatherHeaders | train | function gatherHeaders(item) {
var ret, i, l;
if (isHashArray(item)) {
//lets assume a multidimesional array with item 0 bing the title
i = -1;
l = item.length;
ret = [];
while (++i < l) {
ret[i] = item[i][0];
}
} else if (isArray(item)) {
ret = item;
} else {
ret = keys(item);
}
return ret;
} | javascript | {
"resource": ""
} |
q28173 | transformHashData | train | function transformHashData(stream, item) {
var vals = [], row = [], headers = stream.headers, i = -1, headersLength = stream.headersLength;
if (stream.totalCount++) {
row.push(stream.rowDelimiter);
}
while (++i < headersLength) {
vals[i] = item[headers[i]];
}
row.push(stream.formatter(vals));
return row.join("");
} | javascript | {
"resource": ""
} |
q28174 | transformArrayData | train | function transformArrayData(stream, item, cb) {
var row = [];
if (stream.totalCount++) {
row.push(stream.rowDelimiter);
}
row.push(stream.formatter(item));
return row.join("");
} | javascript | {
"resource": ""
} |
q28175 | transformHashArrayData | train | function transformHashArrayData(stream, item) {
var vals = [], row = [], i = -1, headersLength = stream.headersLength;
if (stream.totalCount++) {
row.push(stream.rowDelimiter);
}
while (++i < headersLength) {
vals[i] = item[i][1];
}
row.push(stream.formatter(vals));
return row.join("");
} | javascript | {
"resource": ""
} |
q28176 | transformItem | train | function transformItem(stream, item) {
var ret;
if (isArray(item)) {
if (isHashArray(item)) {
ret = transformHashArrayData(stream, item);
} else {
ret = transformArrayData(stream, item);
}
} else {
ret = transformHashData(stream, item);
}
return ret;
} | javascript | {
"resource": ""
} |
q28177 | bundleJavaScript | train | async function bundleJavaScript() {
const bundle = await rollup.rollup({
input: `${__dirname}/src/beedle.js`,
plugins: [
uglify()
]
});
await bundle.write({
format: 'umd',
name: 'beedle',
file: 'beedle.js',
dir: `${__dirname}/dist/`,
});
} | javascript | {
"resource": ""
} |
q28178 | _next | train | function _next (res) {
if (res && !options.skipParse) {
res = [].concat(res)
}
return next(res || result, cb)
} | javascript | {
"resource": ""
} |
q28179 | lazyResult | train | function lazyResult (render, tree) {
return {
get html () {
return render(tree, tree.options)
},
tree: tree,
messages: tree.messages
}
} | javascript | {
"resource": ""
} |
q28180 | match | train | function match (expression, cb) {
return Array.isArray(expression)
? traverse(this, function (node) {
for (var i = 0; i < expression.length; i++) {
if (compare(expression[i], node)) return cb(node)
}
return node
})
: traverse(this, function (node) {
if (compare(expression, node)) return cb(node)
return node
})
} | javascript | {
"resource": ""
} |
q28181 | executeCreate | train | function executeCreate(node) {
let element;
let children = [];
if (node.type === types.text) {
// Create a text node using the text content from the default key.
element = document.createTextNode(node.data[""]);
} else {
const nodeData = node.data;
// Create a DOM element.
element = document.createElement(node.name);
// Set data, events, and attributes.
for (let key in nodeData) {
const value = nodeData[key];
if (key[0] === "@") {
let MoonEvents = element.MoonEvents;
if (MoonEvents === undefined) {
MoonEvents = element.MoonEvents = {
[key]: value
};
} else {
MoonEvents[key] = value;
}
element.addEventListener(key.slice(1), ($event) => {
MoonEvents[key]($event);
});
} else if (key !== "children" && value !== false) {
element.setAttribute(key, value);
}
}
// Recursively append children.
const nodeDataChildren = nodeData.children;
for (let i = 0; i < nodeDataChildren.length; i++) {
const childOld = executeCreate(nodeDataChildren[i]);
element.appendChild(childOld.element);
children.push(childOld);
}
}
// Return an old node with a reference to the immutable node and mutable
// element. This is to help performance and allow static nodes to be reused.
return {
element,
node,
children
};
} | javascript | {
"resource": ""
} |
q28182 | executeView | train | function executeView(nodes, parents, indexes) {
while (true) {
let node = nodes.pop();
const parent = parents.pop();
const index = indexes.pop();
if (node.type === types.component) {
// Execute the component to get the component view.
node = components[node.name](node.data);
// Set the root view or current node to the new component view.
if (parent === null) {
setViewNew(node);
} else {
parent.data.children[index] = node;
}
}
// Execute the views of the children.
const children = node.data.children;
for (let i = 0; i < children.length; i++) {
nodes.push(children[i]);
parents.push(node);
indexes.push(i);
}
if (nodes.length === 0) {
// Move to the diff phase if there is nothing left to do.
executeDiff([viewOld], [viewNew], []);
break;
} else if (performance.now() - executeStart >= 16) {
// If the current frame doesn't have sufficient time left to keep
// running then continue executing the view in the next frame.
requestAnimationFrame(() => {
executeStart = performance.now();
executeView(nodes, parents, indexes);
});
break;
}
}
} | javascript | {
"resource": ""
} |
q28183 | executeDiff | train | function executeDiff(nodesOld, nodesNew, patches) {
while (true) {
const nodeOld = nodesOld.pop();
const nodeOldNode = nodeOld.node;
const nodeNew = nodesNew.pop();
// If they have the same reference (hoisted) then skip diffing.
if (nodeOldNode !== nodeNew) {
if (nodeOldNode.name !== nodeNew.name) {
// If they have different names, then replace the old node with the
// new one.
patches.push({
type: patchTypes.replaceNode,
nodeOld,
nodeNew,
nodeParent: null
});
} else if (nodeOldNode.type === types.text) {
// If they both are text, then update the text content.
if (nodeOldNode.data[""] !== nodeNew.data[""]) {
patches.push({
type: patchTypes.updateText,
nodeOld,
nodeNew,
nodeParent: null
});
}
} else {
// If they both are normal elements, then update attributes, update
// events, and diff the children for appends, deletes, or recursive
// updates.
patches.push({
type: patchTypes.updateData,
nodeOld,
nodeNew,
nodeParent: null
});
const childrenOld = nodeOld.children;
const childrenNew = nodeNew.data.children;
const childrenOldLength = childrenOld.length;
const childrenNewLength = childrenNew.length;
if (childrenOldLength === childrenNewLength) {
// If the children have the same length then update both as
// usual.
for (let i = 0; i < childrenOldLength; i++) {
nodesOld.push(childrenOld[i]);
nodesNew.push(childrenNew[i]);
}
} else if (childrenOldLength > childrenNewLength) {
// If there are more old children than new children, update the
// corresponding ones and remove the extra old children.
for (let i = 0; i < childrenNewLength; i++) {
nodesOld.push(childrenOld[i]);
nodesNew.push(childrenNew[i]);
}
for (let i = childrenNewLength; i < childrenOldLength; i++) {
patches.push({
type: patchTypes.removeNode,
nodeOld: childrenOld[i],
nodeNew: null,
nodeParent: nodeOld
});
}
} else {
// If there are more new children than old children, update the
// corresponding ones and append the extra new children.
for (let i = 0; i < childrenOldLength; i++) {
nodesOld.push(childrenOld[i]);
nodesNew.push(childrenNew[i]);
}
for (let i = childrenOldLength; i < childrenNewLength; i++) {
patches.push({
type: patchTypes.appendNode,
nodeOld: null,
nodeNew: childrenNew[i],
nodeParent: nodeOld
});
}
}
}
}
if (nodesOld.length === 0) {
// Move to the patch phase if there is nothing left to do.
executePatch(patches);
break;
} else if (performance.now() - executeStart >= 16) {
// If the current frame doesn't have sufficient time left to keep
// running then continue diffing in the next frame.
requestAnimationFrame(() => {
executeStart = performance.now();
executeDiff(nodesOld, nodesNew, patches);
});
break;
}
}
} | javascript | {
"resource": ""
} |
q28184 | executePatch | train | function executePatch(patches) {
for (let i = 0; i < patches.length; i++) {
const patch = patches[i];
switch (patch.type) {
case patchTypes.updateText: {
// Update text of a node with new text.
const nodeOld = patch.nodeOld;
const nodeNew = patch.nodeNew;
nodeOld.element.textContent = nodeNew.data[""];
nodeOld.node = nodeNew;
break;
}
case patchTypes.updateData: {
// Set attributes and events of a node with new data.
const nodeOld = patch.nodeOld;
const nodeOldNodeData = nodeOld.node.data;
const nodeOldElement = nodeOld.element;
const nodeNew = patch.nodeNew;
const nodeNewData = nodeNew.data;
// Set attributes on the DOM element.
for (let key in nodeNewData) {
const value = nodeNewData[key];
if (key[0] === "@") {
// Update the event listener.
nodeOldElement.MoonEvents[key] = value;
} else if (key !== "children") {
// Remove the attribute if the value is false, and update it
// otherwise.
if (value === false) {
nodeOldElement.removeAttribute(key);
} else {
nodeOldElement.setAttribute(key, value);
}
}
}
// Remove old attributes.
for (let key in nodeOldNodeData) {
if (!(key in nodeNewData)) {
nodeOldElement.removeAttribute(key);
}
}
nodeOld.node = nodeNew;
break;
}
case patchTypes.appendNode: {
// Append a node to the parent.
const nodeParent = patch.nodeParent;
const nodeOldNew = executeCreate(patch.nodeNew);
nodeParent.element.appendChild(nodeOldNew.element);
nodeParent.children.push(nodeOldNew);
break;
}
case patchTypes.removeNode: {
// Remove a node from the parent.
const nodeParent = patch.nodeParent;
// Pops the last child because the patches still hold a reference
// to them. The diff phase can only create this patch when there
// are extra old children, and popping nodes off of the end is more
// efficient than removing at a specific index, especially because
// they are equivalent in this case.
nodeParent.element.removeChild(nodeParent.children.pop().element);
break;
}
case patchTypes.replaceNode: {
// Replaces an old node with a new node.
const nodeOld = patch.nodeOld;
const nodeOldElement = nodeOld.element;
const nodeNew = patch.nodeNew;
const nodeOldNew = executeCreate(nodeNew);
const nodeOldNewElement = nodeOldNew.element;
nodeOldElement.parentNode.replaceChild(nodeOldNewElement, nodeOldElement);
nodeOld.element = nodeOldNewElement;
nodeOld.node = nodeOldNew.node;
nodeOld.children = nodeOldNew.children;
break;
}
}
}
// Remove the current execution from the queue.
executeQueue.shift();
// If there is new data in the execution queue, continue to it.
if (executeQueue.length !== 0) {
if (performance.now() - executeStart >= 16) {
// If the current frame doesn't have sufficient time left to keep
// running then start the next execution in the next frame.
requestAnimationFrame(() => {
executeStart = performance.now();
executeNext();
});
} else {
executeNext();
}
}
} | javascript | {
"resource": ""
} |
q28185 | executeNext | train | function executeNext() {
// Get the next data update.
const dataNew = executeQueue[0];
// Merge new data into current data.
for (let key in dataNew) {
data[key] = dataNew[key];
}
// Begin executing the view.
const viewNew = viewCurrent(data);
setViewNew(viewNew);
executeView([viewNew], [null], [0]);
} | javascript | {
"resource": ""
} |
q28186 | scopeExpression | train | function scopeExpression(expression) {
return expression.replace(expressionRE, (match, name) =>
(
name === undefined ||
name[0] === "$" ||
globals.indexOf(name) !== -1
) ?
match :
"data." + name
);
} | javascript | {
"resource": ""
} |
q28187 | lexError | train | function lexError(message, input, index) {
let lexMessage = message + "\n\n";
// Show input characters surrounding the source of the error.
for (
let i = Math.max(0, index - 16);
i < Math.min(index + 16, input.length);
i++
) {
lexMessage += input[i];
}
error(lexMessage);
} | javascript | {
"resource": ""
} |
q28188 | ParseError | train | function ParseError(message, start, end, next) {
this.message = message;
this.start = start;
this.end = end;
this.next = next;
} | javascript | {
"resource": ""
} |
q28189 | currency | train | function currency(value, opts) {
let that = this;
if(!(that instanceof currency)) {
return new currency(value, opts);
}
let settings = Object.assign({}, defaults, opts)
, precision = pow(settings.precision)
, v = parse(value, settings);
that.intValue = v;
that.value = v / precision;
// Set default incremental value
settings.increment = settings.increment || (1 / precision);
// Support vedic numbering systems
// see: https://en.wikipedia.org/wiki/Indian_numbering_system
if(settings.useVedic) {
settings.groups = vedicRegex;
} else {
settings.groups = groupRegex;
}
// Intended for internal usage only - subject to change
this._settings = settings;
this._precision = precision;
} | javascript | {
"resource": ""
} |
q28190 | alphabetSort | train | function alphabetSort(nodes) {
// use toLowerCase to keep `case insensitive`
return nodes.sort((...comparison) => {
return asciiSort(...comparison.map(val => getCellValue(val).toLowerCase()));
});
} | javascript | {
"resource": ""
} |
q28191 | parseType | train | function parseType (type) {
var size
var ret
if (isArray(type)) {
size = parseTypeArray(type)
var subArray = type.slice(0, type.lastIndexOf('['))
subArray = parseType(subArray)
ret = {
isArray: true,
name: type,
size: size,
memoryUsage: size === 'dynamic' ? 32 : subArray.memoryUsage * size,
subArray: subArray
}
return ret
} else {
var rawType
switch (type) {
case 'address':
rawType = 'uint160'
break
case 'bool':
rawType = 'uint8'
break
case 'string':
rawType = 'bytes'
break
}
ret = {
rawType: rawType,
name: type,
memoryUsage: 32
}
if ((type.startsWith('bytes') && type !== 'bytes') || type.startsWith('uint') || type.startsWith('int')) {
ret.size = parseTypeN(type)
} else if (type.startsWith('ufixed') || type.startsWith('fixed')) {
ret.size = parseTypeNxM(type)
}
if (type.startsWith('bytes') && type !== 'bytes' && (ret.size < 1 || ret.size > 32)) {
throw new Error('Invalid bytes<N> width: ' + ret.size)
}
if ((type.startsWith('uint') || type.startsWith('int')) && (ret.size % 8 || ret.size < 8 || ret.size > 256)) {
throw new Error('Invalid int/uint<N> width: ' + ret.size)
}
return ret
}
} | javascript | {
"resource": ""
} |
q28192 | train | function(name, value, expires, path, domain, httponly) {
if (!name) {
throw new Error("A name is required to create a cookie.");
}
// Parse date to timestamp - consider it never expiring if timestamp is not
// passed to the function
if (expires) {
if (typeof expires !== "number") {
expires = (new Date(expires)).getTime();
}
} else {
expires = -1;
}
this.name = name;
this.value = value || "";
this.expires = expires;
this.path = path || "/";
this.domain = domain || "*";
this.httponly = Boolean(httponly);
} | javascript | {
"resource": ""
} | |
q28193 | Cache | train | function Cache(cacheLoadParameter, cacheBackend) {
// Ensure parameters are how we want them...
cacheBackend = typeof cacheBackend === "function" ? cacheBackend : FilesystemBackend;
cacheLoadParameter = cacheLoadParameter instanceof Array ? cacheLoadParameter : [cacheLoadParameter];
// Now we can just run the factory.
this.datastore = cacheBackend.apply(cacheBackend, cacheLoadParameter);
// Instruct the backend to load up.
this.datastore.load();
} | javascript | {
"resource": ""
} |
q28194 | compare | train | function compare(a, b) {
for (var key in a) {
if (a.hasOwnProperty(key)) {
if (typeof a[key] !== typeof b[key]) {
return false;
}
if (typeof a[key] === "object") {
if (!compare(a[key], b[key])) {
return false;
}
} else if (a[key] !== b[key]) {
return false;
}
}
}
return true;
} | javascript | {
"resource": ""
} |
q28195 | deepAssign | train | function deepAssign(object, source) {
for (var key in source) {
if (source.hasOwnProperty(key)) {
if (typeof object[key] === "object" && typeof source[key] === "object") {
deepAssign(object[key], source[key]);
} else {
object[key] = source[key];
}
}
}
return object;
} | javascript | {
"resource": ""
} |
q28196 | train | function() {
Array.call(this);
/**
* Speeds up {@link FetchQueue.oldestUnfetchedItem} by storing the index at
* which the latest oldest unfetched queue item was found.
* @name FetchQueue._oldestUnfetchedIndex
* @private
* @type {Number}
*/
Object.defineProperty(this, "_oldestUnfetchedIndex", {
enumerable: false,
writable: true,
value: 0
});
/**
* Serves as a cache for what URL's have been fetched. Keys are URL's,
* values are booleans.
* @name FetchQueue._scanIndex
* @private
* @type {Object}
*/
Object.defineProperty(this, "_scanIndex", {
enumerable: false,
writable: true,
value: {}
});
/**
* Controls what properties can be operated on with the
* {@link FetchQueue#min}, {@link FetchQueue#avg} and {@link FetchQueue#max}
* methods.
* @name FetchQueue._allowedStatistics
* @type {Array}
*/
Object.defineProperty(this, "_allowedStatistics", {
enumerable: false,
writable: true,
value: [
"actualDataSize",
"contentLength",
"downloadTime",
"requestLatency",
"requestTime"
]
});
} | javascript | {
"resource": ""
} | |
q28197 | FSBackend | train | function FSBackend(loadParameter) {
this.loaded = false;
this.index = [];
this.location = typeof loadParameter === "string" && loadParameter.length > 0 ? loadParameter : process.cwd() + "/cache/";
this.location = this.location.substr(this.location.length - 1) === "/" ? this.location : this.location + "/";
} | javascript | {
"resource": ""
} |
q28198 | train | function(string) {
var result = /\ssrcset\s*=\s*("|')(.*?)\1/.exec(string);
return Array.isArray(result) ? String(result[2]).split(",").map(function(string) {
return string.trim().split(/\s+/)[0];
}) : "";
} | javascript | {
"resource": ""
} | |
q28199 | isSubdomainOf | train | function isSubdomainOf(subdomain, host) {
// Comparisons must be case-insensitive
subdomain = subdomain.toLowerCase();
host = host.toLowerCase();
// If we're ignoring www, remove it from both
// (if www is the first domain component...)
if (crawler.ignoreWWWDomain) {
subdomain = subdomain.replace(/^www./ig, "");
host = host.replace(/^www./ig, "");
}
// They should be the same flipped around!
return subdomain.split("").reverse().join("").substr(0, host.length) ===
host.split("").reverse().join("");
} | javascript | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.