_id
stringlengths 2
6
| title
stringlengths 0
58
| partition
stringclasses 3
values | text
stringlengths 52
373k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q55200
|
run
|
train
|
// Cordova after-prepare hook entry point: locate each platform's built
// `www` directory and hand it off to processFolders() for minification.
// Android's assets moved between cordova-android versions, so both the
// legacy and the Android-Studio-style locations are probed.
function run() {
  platforms.forEach(function(platform) {
    var wwwPath;
    if (platform === 'android') {
      wwwPath = path.join(platformPath, platform, 'assets', 'www');
      if (!fs.existsSync(wwwPath)) {
        // Newer cordova-android project layout.
        wwwPath = path.join(platformPath, platform, 'app', 'src', 'main', 'assets', 'www');
      }
    } else if (platform === 'ios' || platform === 'browser' ||
               platform === 'wp8' || platform === 'windows') {
      wwwPath = path.join(platformPath, platform, 'www');
    } else {
      console.log('this hook only supports android, ios, wp8, windows, and browser currently');
      return; // skip unsupported platforms, keep processing the rest
    }
    processFolders(wwwPath);
  });
}
|
javascript
|
{
"resource": ""
}
|
q55201
|
processFolders
|
train
|
// Run the file-compression pass over every configured folder below wwwPath.
function processFolders(wwwPath) {
  for (var i = 0; i < foldersToProcess.length; i++) {
    processFiles(path.join(wwwPath, foldersToProcess[i]));
  }
}
|
javascript
|
{
"resource": ""
}
|
q55202
|
compress
|
train
|
// Compress one asset file in place, dispatching on its extension:
//   .js  -> ng-annotate (adds Angular DI annotations), then UglifyJS
//   .css -> clean-css-style minifier
// Any other extension is logged and left untouched. The minified output
// overwrites the original file.
// NOTE(review): `result.error` from UglifyJS.minify is never checked; with
// uglify-es/terser-style APIs a failed minify returns {error} instead of
// throwing, which would write `undefined` over the file — confirm which
// UglifyJS version is in use.
function compress(file) {
var ext = path.extname(file),
res,
source,
result;
switch (ext) {
case '.js':
console.log('uglifying js file ' + file);
res = ngAnnotate(String(fs.readFileSync(file, 'utf8')), {
add: true
});
result = UglifyJS.minify(res.src, hookConfig.uglifyJsOptions);
fs.writeFileSync(file, result.code, 'utf8'); // overwrite the original unminified file
break;
case '.css':
console.log('minifying css file ' + file);
source = fs.readFileSync(file, 'utf8');
result = cssMinifier.minify(source);
fs.writeFileSync(file, result.styles, 'utf8'); // overwrite the original unminified file
break;
default:
console.log('encountered a ' + ext + ' file, not compressing it');
break;
}
}
|
javascript
|
{
"resource": ""
}
|
q55203
|
hasAttr
|
train
|
/**
 * Determine whether an extended attribute is present on a path.
 * Calls back with (null, true|false). A missing file (ENOENT) and a
 * missing attribute (ENOATTR) both report `false`; any other error is
 * forwarded to the callback.
 */
function hasAttr(fs, path, attr, callback) {
  fs.getxattr(path, attr, function(err, attrVal) {
    if (err) {
      // File doesn't exist locally at all -> attribute trivially absent.
      if (err.code === 'ENOENT') {
        return callback(null, false);
      }
      // Anything other than "attribute not set" is a real failure.
      if (err.code !== 'ENOATTR') {
        return callback(err);
      }
    }
    callback(null, Boolean(attrVal));
  });
}
|
javascript
|
{
"resource": ""
}
|
q55204
|
removeAttr
|
train
|
/**
 * Remove an extended attribute from a path or an open file descriptor.
 * When `isFd` is not strictly `true`, the call form is
 * (fs, path, attr, callback) and the path-based removexattr is used.
 * A missing attribute (ENOATTR) is not treated as an error.
 */
function removeAttr(fs, pathOrFd, attr, isFd, callback) {
  var useFd = isFd === true;
  if (!useFd) {
    // Shifted-argument form: the 4th positional argument is the callback.
    callback = isFd;
  }
  fs[useFd ? 'fremovexattr' : 'removexattr'](pathOrFd, attr, function(err) {
    if (err && err.code !== 'ENOATTR') {
      return callback(err);
    }
    callback();
  });
}
|
javascript
|
{
"resource": ""
}
|
q55205
|
getAttr
|
train
|
/**
 * Read an extended attribute from a path or an open file descriptor.
 * When `isFd` is not strictly `true`, the call form is
 * (fs, path, attr, callback). ENOATTR is mapped to a successful callback
 * with an undefined value rather than an error.
 */
function getAttr(fs, pathOrFd, attr, isFd, callback) {
  var useFd = isFd === true;
  if (!useFd) {
    // Shifted-argument form: the 4th positional argument is the callback.
    callback = isFd;
  }
  fs[useFd ? 'fgetxattr' : 'getxattr'](pathOrFd, attr, function(err, value) {
    if (err && err.code !== 'ENOATTR') {
      return callback(err);
    }
    callback(null, value);
  });
}
|
javascript
|
{
"resource": ""
}
|
q55206
|
forceCopy
|
train
|
/**
 * Copy oldPath over newPath, replacing any existing destination file.
 * An ENOENT from the initial unlink (destination absent) is ignored;
 * every other error aborts the copy and is passed to the callback.
 */
function forceCopy(fs, oldPath, newPath, callback) {
  fs.unlink(newPath, function(unlinkErr) {
    if (unlinkErr && unlinkErr.code !== 'ENOENT') {
      return callback(unlinkErr);
    }
    fs.readFile(oldPath, function(readErr, buf) {
      if (readErr) {
        return callback(readErr);
      }
      fs.writeFile(newPath, buf, callback);
    });
  });
}
|
javascript
|
{
"resource": ""
}
|
q55207
|
isPathUnsynced
|
train
|
// True in the callback when `path` carries the "unsynced" marker attribute.
function isPathUnsynced(fs, path, callback) {
hasAttr(fs, path, constants.attributes.unsynced, callback);
}
|
javascript
|
{
"resource": ""
}
|
q55208
|
removeUnsynced
|
train
|
// Clear the "unsynced" marker attribute from `path` (no-op if absent).
function removeUnsynced(fs, path, callback) {
removeAttr(fs, path, constants.attributes.unsynced, callback);
}
|
javascript
|
{
"resource": ""
}
|
q55209
|
setUnsynced
|
train
|
// Mark `path` as unsynced, stamping the current timestamp as the value.
function setUnsynced(fs, path, callback) {
fs.setxattr(path, constants.attributes.unsynced, Date.now(), callback);
}
|
javascript
|
{
"resource": ""
}
|
q55210
|
getUnsynced
|
train
|
// Read the "unsynced" timestamp for `path`. Relies on getAttr's
// argument shifting: with 4 args, `callback` lands in the isFd slot.
function getUnsynced(fs, path, callback) {
getAttr(fs, path, constants.attributes.unsynced, callback);
}
|
javascript
|
{
"resource": ""
}
|
q55211
|
removeChecksum
|
train
|
// Clear the stored checksum attribute from `path` (no-op if absent).
function removeChecksum(fs, path, callback) {
removeAttr(fs, path, constants.attributes.checksum, callback);
}
|
javascript
|
{
"resource": ""
}
|
q55212
|
setChecksum
|
train
|
// Stamp `path` with the given checksum as an extended attribute.
function setChecksum(fs, path, checksum, callback) {
fs.setxattr(path, constants.attributes.checksum, checksum, callback);
}
|
javascript
|
{
"resource": ""
}
|
q55213
|
getChecksum
|
train
|
// Read the stored checksum attribute for `path`. Relies on getAttr's
// argument shifting: with 4 args, `callback` lands in the isFd slot.
function getChecksum(fs, path, callback) {
getAttr(fs, path, constants.attributes.checksum, callback);
}
|
javascript
|
{
"resource": ""
}
|
q55214
|
isPathPartial
|
train
|
// True in the callback when `path` carries the "partial" marker attribute.
function isPathPartial(fs, path, callback) {
hasAttr(fs, path, constants.attributes.partial, callback);
}
|
javascript
|
{
"resource": ""
}
|
q55215
|
removePartial
|
train
|
// Clear the "partial" marker attribute from `path` (no-op if absent).
function removePartial(fs, path, callback) {
removeAttr(fs, path, constants.attributes.partial, callback);
}
|
javascript
|
{
"resource": ""
}
|
q55216
|
setPartial
|
train
|
// Mark `path` as partially synced, recording the node count as the value.
function setPartial(fs, path, nodeCount, callback) {
fs.setxattr(path, constants.attributes.partial, nodeCount, callback);
}
|
javascript
|
{
"resource": ""
}
|
q55217
|
getPartial
|
train
|
// Read the "partial" node count for `path`. Relies on getAttr's
// argument shifting: with 4 args, `callback` lands in the isFd slot.
function getPartial(fs, path, callback) {
getAttr(fs, path, constants.attributes.partial, callback);
}
|
javascript
|
{
"resource": ""
}
|
q55218
|
setPathsToSync
|
train
|
// Store the pending paths-to-sync list on `path` as an extended attribute.
function setPathsToSync(fs, path, pathsToSync, callback) {
fs.setxattr(path, constants.attributes.pathsToSync, pathsToSync, callback);
}
|
javascript
|
{
"resource": ""
}
|
q55219
|
getPathsToSync
|
train
|
// Read the pending paths-to-sync list from `path`. Relies on getAttr's
// argument shifting: with 4 args, `callback` lands in the isFd slot.
function getPathsToSync(fs, path, callback) {
getAttr(fs, path, constants.attributes.pathsToSync, callback);
}
|
javascript
|
{
"resource": ""
}
|
q55220
|
request
|
train
|
// Attempt to acquire the per-user sync lock for `path` on behalf of
// `client`. Strategy:
//   1. hsetnx: atomically claim the lock if nobody holds it.
//   2. If held, publish a lock-override request to the current owner and
//      listen for a 'lock-response'.
//   3. If no response arrives within CLIENT_TIMEOUT_MS, assume the owner
//      (or its server) crashed and take the lock with hset.
// Calls back with (err) or (null, SyncLock instance).
// NOTE(review): `client._handleLockResponseFn` appears to be a one-slot
// registration; a second concurrent request() on the same client would
// overwrite it — confirm callers serialize lock requests.
function request(client, path, callback) {
var key = SyncLock.generateKey(client.username);
var id = client.id;
// Try to set this key/value pair, but fail if the path for the key already exists.
redis.hsetnx(key, path, id, function(err, reply) {
if(err) {
log.error({err: err, client: client}, 'Error trying to set redis key with hsetnx');
return callback(err);
}
// reply === 1 means hsetnx created the field, i.e. we won the race.
if(reply === 1) {
// Success, we have the lock (path for the key was set). Return a new SyncLock instance
var lock = new SyncLock(key, id, path);
log.debug({client: client, syncLock: lock}, 'Lock acquired.');
return callback(null, lock);
}
// Path for key was not set (held by another client). See if the lock owner would be
// willing to let us take it. We'll wait a bit for a reply, and if
// we don't get one, assume the client holding the lock, or its server,
// has crashed, and the lock is OK to take.
// Act if we don't hear back from the lock owner in a reasonable
// amount of time, and set the lock ourselves.
var waitTimer = setTimeout(function() {
redis.removeListener('lock-response', client._handleLockResponseFn);
client._handleLockResponseFn = null;
redis.hset(key, path, id, function(err) {
if(err) {
log.error({err: err, client: client}, 'Error setting redis lock key.');
return callback(err);
}
var lock = new SyncLock(key, id, path);
log.debug({client: client, syncLock: lock}, 'Lock request timeout, setting lock manually.');
callback(null, lock);
});
}, CLIENT_TIMEOUT_MS);
// Don't let the pending timer keep the process alive on shutdown.
waitTimer.unref();
// Listen for a response from the client holding the lock
client._handleLockResponseFn = function(message) {
handleLockResponse(message, key, path, client, waitTimer, callback);
};
redis.on('lock-response', client._handleLockResponseFn);
// Ask the client holding the lock to give it to us
log.debug({client: client}, 'Requesting lock override for ' + path);
redis.publish(Constants.server.lockRequestChannel, JSON.stringify({key: key, id: id, path: path}));
});
}
|
javascript
|
{
"resource": ""
}
|
q55221
|
isUserLocked
|
train
|
/**
 * Check whether any client of `username` currently holds the sync lock
 * for `path`. Calls back with (null, boolean); redis errors are logged
 * and forwarded unchanged.
 */
function isUserLocked(username, path, callback) {
  var key = SyncLock.generateKey(username);
  redis.hget(key, path, function(err, value) {
    if (err) {
      log.error(err, 'Error getting redis lock key %s.', key);
      return callback(err);
    }
    callback(null, Boolean(value));
  });
}
|
javascript
|
{
"resource": ""
}
|
q55222
|
validateParams
|
train
|
/**
 * Validate the common (fs, secondArgument) pair shared by the rsync
 * helpers. Returns an EINVAL error describing the first missing
 * argument, or undefined when both are present.
 */
function validateParams(fs, param2) {
  if (!fs) {
    return new Errors.EINVAL('No filesystem provided');
  }
  if (!param2) {
    return new Errors.EINVAL('Second argument must be specified');
  }
  return undefined;
}
|
javascript
|
{
"resource": ""
}
|
q55223
|
calcWeak32
|
train
|
/**
 * Adler-32-style weak rolling checksum.
 *
 * Without `prev`, sums the first `len` bytes of `data`, where `len` is
 * (end - start + 1) when both indices are >= 0, else data.length.
 * With `prev` (the previous window's {a, b}), performs the O(1)
 * incremental update for a window shifted right by one byte:
 * data[start-1] leaves, data[end-1] enters.
 * Returns {a, b, sum} with `b` packed into the high 16 bits of `sum`.
 */
function calcWeak32(data, prev, start, end) {
  var MOD = 65521;     // largest prime below 2^16, as in Adler-32
  var SHIFT = 1 << 16;
  var a = 0;
  var b = 0;
  if (prev) {
    // Incremental path: subtract the outgoing byte, add the incoming
    // one, and adjust the position-weighted sum.
    var outgoing = data[start - 1];
    var incoming = data[end - 1];
    a = (prev.a - outgoing + incoming) % MOD;
    b = (prev.b - (end - start) * outgoing + a) % MOD;
  } else {
    var count = (start >= 0 && end >= 0) ? (end - start + 1) : data.length;
    for (var i = 0; i < count; i++) {
      var v = data[i];
      a += v;
      b += (count - i) * v;
    }
    a %= MOD;
    b %= MOD;
  }
  return { a: a, b: b, sum: a + b * SHIFT };
}
|
javascript
|
{
"resource": ""
}
|
q55224
|
createHashtable
|
train
|
/**
 * Bucket block checksums by their 16-bit weak hash so roll() can look up
 * candidate matches in O(1). Returns a map of
 * weak16 -> array of checksum entries sharing that hash.
 */
function createHashtable(checksums) {
  var table = {};
  checksums.forEach(function(checksum) {
    var bucket = calcWeak16(checksum.weak);
    if (!table[bucket]) {
      table[bucket] = [];
    }
    table[bucket].push(checksum);
  });
  return table;
}
|
javascript
|
{
"resource": ""
}
|
q55225
|
roll
|
train
|
// Core rsync "rolling" pass: slide a blockSize window over `data`,
// matching windows against the remote block checksums. Produces a patch
// list of entries that are either {index} (block matched verbatim),
// {data, index} (literal bytes followed by a matched block), or {data}
// (trailing literal bytes with no match).
function roll(data, checksums, blockSize) {
var results = [];
var hashtable = createHashtable(checksums);
var length = data.length;
var start = 0;
var end = blockSize > length ? length : blockSize;
// Updated when a block matches
var lastMatchedEnd = 0;
// This gets updated every iteration with the previous weak 32bit hash
var prevRollingWeak = null;
var weak;
var weak16;
var match;
var d;
var len;
var mightMatch;
var chunk;
var strong;
var hashtable_weak16;
var hashtable_weak16i;
for (; end <= length; start++, end++) {
// Weak hash is updated incrementally from the previous window.
weak = calcWeak32(data, prevRollingWeak, start, end);
weak16 = calcWeak16(weak.sum);
match = false;
d = null;
prevRollingWeak = weak;
hashtable_weak16 = hashtable[weak16];
if (hashtable_weak16) {
len = hashtable_weak16.length;
for (var i = 0; i < len; i++) {
hashtable_weak16i = hashtable_weak16[i];
// Weak 32-bit hash must match first; only then pay for the
// strong (md5) comparison to rule out collisions.
if (hashtable_weak16i.weak === weak.sum) {
mightMatch = hashtable_weak16i;
chunk = data.slice(start, end);
strong = md5sum(chunk);
if (mightMatch.strong === strong) {
match = mightMatch;
break;
}
}
}
}
if (match) {
if(start < lastMatchedEnd) {
// Window overlaps the previously matched block.
// NOTE(review): the `lastMatchedEnd - 1` offset here looks
// asymmetric with the branch below — confirm the intended
// boundary byte is not duplicated/dropped.
d = data.slice(lastMatchedEnd - 1, end);
results.push({
data: d,
index: match.index
});
} else if (start - lastMatchedEnd > 0) {
// Literal bytes accumulated between the last match and this one.
d = data.slice(lastMatchedEnd, start);
results.push({
data: d,
index: match.index
});
} else {
// Match is flush against the previous one: index only, no data.
results.push({
index: match.index
});
}
lastMatchedEnd = end;
} else if (end === length) {
// No match and last block
d = data.slice(lastMatchedEnd);
results.push({
data: d
});
}
}
return results;
}
|
javascript
|
{
"resource": ""
}
|
q55226
|
blockChecksums
|
train
|
/**
 * Compute {index, weak, strong} checksums for consecutive `size`-byte
 * blocks of the file at `path`. A missing file (ENOENT) is treated as
 * empty content and yields an empty list; any other read error is
 * forwarded to the callback.
 */
function blockChecksums(fs, path, size, callback) {
  fs.readFile(path, function(err, data) {
    if (err) {
      if (err.code !== 'ENOENT') {
        return callback(err);
      }
      // Non-existent file: checksum empty content.
      data = [];
    }
    var result = [];
    var length = data.length;
    var start = 0;
    var end = size > length ? length : size;
    var blockIndex = 0;
    while (start < length) {
      var chunk = data.slice(start, end);
      result.push({
        index: blockIndex,
        weak: calcWeak32(chunk).sum,
        strong: md5sum(chunk)
      });
      // Advance the window, clamping the final block to the data length.
      blockIndex++;
      start += size;
      end = (end + size) > length ? length : end + size;
    }
    callback(null, result);
  });
}
|
javascript
|
{
"resource": ""
}
|
q55227
|
getChecksum
|
train
|
/**
 * Calculate the MD5 checksum of the file at `path`. A non-existent file
 * hashes to the empty string; any other read error is forwarded.
 */
function getChecksum(fs, path, callback) {
  fs.readFile(path, function(err, data) {
    if (err) {
      if (err.code === 'ENOENT') {
        // File does not exist so the checksum is an empty string
        return callback(null, "");
      }
      return callback(err);
    }
    callback(null, md5sum(data));
  });
}
|
javascript
|
{
"resource": ""
}
|
q55228
|
generateChecksums
|
train
|
// Build a list of {path, type, checksum} nodes for `paths` on `fs`,
// skipping paths marked unsynced. When `stampNode` is truthy, each
// existing node is also stamped with its checksum as an extended
// attribute. Directories get an undefined checksum. Calls back with
// (err) or (null, checksumList).
// NOTE(review): when stampNode is actually the callback (3-arg call),
// `findCallback(callback, stampNode)` is invoked with callback already
// non-function — confirm findCallback's argument order handles this.
function generateChecksums(fs, paths, stampNode, callback) {
// Maybe stampNode was not passed in
if(typeof callback !== 'function') {
callback = findCallback(callback, stampNode);
stampNode = false;
}
var paramError = validateParams(fs, paths);
if(paramError) {
return callback(paramError);
}
var checksumList = [];
// Record type for one entry in checksumList.
function ChecksumNode(path, type, checksum) {
this.path = path;
this.type = type;
this.checksum = checksum;
}
// Append a node to checksumList; `checksum` is optional (directories).
function addChecksumNode(path, nodeType, checksum, callback) {
var checksumNode;
// If no checksum was passed in
if(typeof checksum === 'function') {
callback = checksum;
checksumNode = new ChecksumNode(path, nodeType);
} else {
checksumNode = new ChecksumNode(path, nodeType, checksum);
}
checksumList.push(checksumNode);
callback();
}
// Only calculate the checksums for synced paths
function maybeAddChecksumNode(path, nodeType, callback) {
fsUtils.isPathUnsynced(fs, path, function(err, unsynced) {
if(err) {
return callback(err);
}
if(unsynced) {
return callback();
}
getChecksum(fs, path, function(err, checksum) {
if(err) {
return callback(err);
}
// If we shouldn't add the checksum stamp or
// the node does not exist (cannot add a stamp)
// immediately add the checksum
if(!stampNode || checksum === "") {
return addChecksumNode(path, nodeType, checksum, callback);
}
// Stamp the node with the checksum
fsUtils.setChecksum(fs, path, checksum, function(err) {
if(err) {
return callback(err);
}
addChecksumNode(path, nodeType, checksum, callback);
});
});
});
}
// Dispatch one path: missing paths and files/links get (maybe) a
// checksum; directories are recorded without one.
function calcChecksum(path, callback) {
fs.lstat(path, function(err, stat) {
var nodeType = stat && stat.type;
if(err) {
if(err.code !== 'ENOENT') {
return callback(err);
}
// Checksums for non-existent files
maybeAddChecksumNode(path, nodeType, callback);
} else if(stat.isDirectory()) {
// Directory checksums are not calculated i.e. are undefined
addChecksumNode(path, nodeType, callback);
} else {
// Checksums for synced files/links
maybeAddChecksumNode(path, nodeType, callback);
}
});
}
// Process paths one at a time so checksumList keeps input order.
async.eachSeries(paths, calcChecksum, function(err) {
if(err) {
return callback(err);
}
callback(null, checksumList);
});
}
|
javascript
|
{
"resource": ""
}
|
q55229
|
maybeAddChecksumNode
|
train
|
/**
 * Compute and record a checksum node for `path`, skipping paths that are
 * marked unsynced. When the enclosing `stampNode` flag is set and the
 * node exists (non-empty checksum), the checksum is first written onto
 * the node as an extended attribute.
 * Relies on the enclosing scope for fs, stampNode, getChecksum and
 * addChecksumNode.
 */
function maybeAddChecksumNode(path, nodeType, callback) {
  fsUtils.isPathUnsynced(fs, path, function(err, unsynced) {
    if (err) {
      return callback(err);
    }
    if (unsynced) {
      // Unsynced paths are deliberately excluded from the checksum list.
      return callback();
    }
    getChecksum(fs, path, function(checksumErr, checksum) {
      if (checksumErr) {
        return callback(checksumErr);
      }
      var shouldStamp = stampNode && checksum !== "";
      if (!shouldStamp) {
        // Either stamping was not requested, or the node does not exist
        // (empty checksum) and cannot carry an attribute stamp.
        return addChecksumNode(path, nodeType, checksum, callback);
      }
      fsUtils.setChecksum(fs, path, checksum, function(stampErr) {
        if (stampErr) {
          return callback(stampErr);
        }
        addChecksumNode(path, nodeType, checksum, callback);
      });
    });
  });
}
|
javascript
|
{
"resource": ""
}
|
q55230
|
compareContents
|
train
|
/**
 * Compare a previously generated checksum list against the contents of
 * the given filesystem. Calls back with (null, true) when everything
 * matches, (null, false) on any checksum/type mismatch, or (err) for a
 * real failure.
 */
function compareContents(fs, checksumList, callback) {
  var ECHKSUM = "Checksums do not match";
  var paramError = validateParams(fs, checksumList);
  if(paramError) {
    return callback(paramError);
  }
  // Compare one checksum node against the local fs node at its path.
  function compare(checksumNode, callback) {
    var path = checksumNode.path;
    fs.lstat(path, function(err, stat) {
      if(err && err.code !== 'ENOENT') {
        return callback(err);
      }
      // If the types of the nodes on each fs do not match
      // i.e. /a is a file on fs1 and /a is a directory on fs2
      if(!err && checksumNode.type !== stat.type) {
        return callback(ECHKSUM);
      }
      // If the node type is a directory, checksum should not exist
      if(!err && stat.isDirectory()) {
        if(!checksumNode.checksum) {
          return callback();
        }
        // BUGFIX: this callback previously fell through to the
        // getChecksum comparison below, invoking the callback twice for
        // mismatched directories. Return here to terminate the branch.
        return callback(ECHKSUM);
      }
      // Checksum comparison for a non-existent path or file/link
      getChecksum(fs, path, function(err, checksum) {
        if(err) {
          return callback(err);
        }
        if(checksum !== checksumNode.checksum) {
          return callback(ECHKSUM);
        }
        callback();
      });
    });
  }
  async.eachSeries(checksumList, compare, function(err) {
    // ECHKSUM is the sentinel for "contents differ", not a real error.
    if(err && err !== ECHKSUM) {
      return callback(err);
    }
    callback(null, err !== ECHKSUM);
  });
}
|
javascript
|
{
"resource": ""
}
|
q55231
|
createParentDirectories
|
train
|
/**
 * Ensure every ancestor directory of `path` exists, creating them with
 * mkdirp. EEXIST is ignored; any other error is passed to the callback.
 */
function createParentDirectories(path, callback) {
  var shell = new fs.Shell();
  shell.mkdirp(Path.dirname(path), function(err) {
    if (err && err.code !== 'EEXIST') {
      return callback(err);
    }
    callback();
  });
}
|
javascript
|
{
"resource": ""
}
|
q55232
|
findPathIndexInArray
|
train
|
/**
 * Locate the first element of `array` whose `path` property equals the
 * given path. Returns the element's index, or -1 when absent.
 */
function findPathIndexInArray(array, path) {
  var index = -1;
  array.some(function(entry, i) {
    if (entry.path === path) {
      index = i;
      return true; // stop scanning at the first hit
    }
    return false;
  });
  return index;
}
|
javascript
|
{
"resource": ""
}
|
q55233
|
handle404
|
train
|
/**
 * Send a JSON-formatted 404 response for the requested URL.
 */
function handle404(url, res) {
  write({
    error: {
      code: 404,
      message: 'The requested URL ' + url + ' was not found on this server.'
    }
  }, res, 404);
}
|
javascript
|
{
"resource": ""
}
|
q55234
|
handleDir
|
train
|
/**
 * Respond with a recursive directory listing for `path`. Listing
 * failures are logged and reported to the client as a 404.
 */
function handleDir(fs, path, res) {
  var shell = new fs.Shell();
  shell.ls(path, {recursive: true}, function(err, listing) {
    if (err) {
      log.error(err, 'Unable to get listing for path `%s`', path);
      return handle404(path, res);
    }
    write(listing, res);
  });
}
|
javascript
|
{
"resource": ""
}
|
q55235
|
ensureLock
|
train
|
/**
 * Verify the client still owns an active sync lock before running a
 * sync step. Returns true when a lock is held; otherwise logs an error
 * (with a stack, via a throwaway Error) and returns false.
 */
function ensureLock(client, path) {
  var lock = client.lock;
  var holdsLock = Boolean(lock) && !('unlocked' in lock);
  if (holdsLock) {
    return true;
  }
  // Create an error so we get a stack, too.
  var err = new Error('Attempted sync step without lock.');
  log.error({client: client, err: err}, 'Client should own lock but does not for ' + path);
  return false;
}
|
javascript
|
{
"resource": ""
}
|
q55236
|
checkFileSizeLimit
|
train
|
/**
 * Scan a sync source list for any entry whose `size` exceeds
 * MAX_SYNC_SIZE_BYTES. On the first violation: release the client's
 * lock, send a maxsizeExceeded error message, and return false.
 * Returns true when all entries are within the limit.
 */
function checkFileSizeLimit(client, srcList) {
  function maxSizeExceeded(obj) {
    client.lock.release(function(err) {
      if (err) {
        log.error({err: err, client: client}, 'Error releasing sync lock');
      }
      releaseLock(client);
      var errorMsg = SyncMessage.error.maxsizeExceeded;
      errorMsg.content = {path: obj.path};
      client.sendMessage(errorMsg);
    });
  }
  var keys = Object.keys(srcList);
  for (var i = 0; i < keys.length; i++) {
    var obj = srcList[keys[i]];
    for (var prop in obj) {
      if (obj.hasOwnProperty(prop) && prop === 'size' && obj.size > MAX_SYNC_SIZE_BYTES) {
        // Fail this sync, contains a file that is too large.
        log.warn({client: client},
                 'Client tried to exceed file sync size limit: file was %s bytes, limit is %s',
                 obj.size, MAX_SYNC_SIZE_BYTES);
        maxSizeExceeded(obj);
        return false;
      }
    }
  }
  return true;
}
|
javascript
|
{
"resource": ""
}
|
q55237
|
maybeReleaseLock
|
train
|
/**
 * Release the client's sync lock (if one is actually held) and tear the
 * handler down, then invoke the enclosing `callback`. Three states are
 * handled: no lock, a stale already-unlocked reference, and a held lock
 * that must be released. Relies on `self` and `callback` from the
 * enclosing scope.
 */
function maybeReleaseLock() {
  var lock = self.client.lock;
  function done(err) {
    log.debug({client: self.client}, 'Closed client sync handler');
    self.client.lock = null;
    self.client = null;
    callback(err);
  }
  // Nothing to release: no lock at all, or already unlocked.
  if (!lock || lock.unlocked) {
    return done();
  }
  // Holding the lock — release before tearing down.
  lock.release(function(err) {
    if (err) {
      log.error({err: err, client: self.client}, 'Error releasing sync lock');
    }
    done(err);
  });
}
|
javascript
|
{
"resource": ""
}
|
q55238
|
getUsernameByToken
|
train
|
/**
 * Reverse-lookup the username whose token list contains `token` in the
 * in-memory auth table. Returns null when no user holds the token.
 */
function getUsernameByToken(token) {
  for (var username in authTable) {
    if (authTable[username].indexOf(token) !== -1) {
      return username;
    }
  }
  return null;
}
|
javascript
|
{
"resource": ""
}
|
q55239
|
handlePatchAckResponse
|
train
|
// Handle the client's acknowledgement that a patch was applied upstream.
// Clears the path's "unsynced" marker, and for regular files stamps the
// freshly computed checksum onto the node, before kicking off the next
// queued sync. On any failure the upstream sync is delayed and the error
// routed through onError().
// NOTE(review): relies on enclosing scope for data, fs, rawFs,
// syncManager and onError — presumably a sync-protocol handler closure.
function handlePatchAckResponse() {
var syncedPath = data.content.path;
// Final step shared by all success paths: drop the unsynced marker
// (best-effort; ENOENT is fine) and advance to the next sync.
function complete() {
fsUtils.removeUnsynced(fs, syncedPath, function(err) {
if(err && err.code !== 'ENOENT') {
log.error('Failed to remove unsynced attribute for ' + syncedPath + ' in handlePatchAckResponse, complete()');
}
syncManager.syncNext(syncedPath);
});
}
fs.lstat(syncedPath, function(err, stats) {
if(err) {
if(err.code !== 'ENOENT') {
log.error('Failed to access ' + syncedPath + ' in handlePatchAckResponse');
// Delay the upstream sync, then surface the original lstat error.
return fs.delaySync(function(delayErr, delayedPath) {
if(delayErr) {
log.error('Failed to delay upstream sync for ' + delayedPath + ' in handlePatchAckResponse');
}
onError(syncManager, err);
});
}
// Non-existent paths usually due to renames or
// deletes cannot be stamped with a checksum
return complete();
}
// Only regular files get checksum stamps; dirs/links finish here.
if(!stats.isFile()) {
return complete();
}
rsyncUtils.getChecksum(rawFs, syncedPath, function(err, checksum) {
if(err) {
log.error('Failed to get the checksum for ' + syncedPath + ' in handlePatchAckResponse');
return fs.delaySync(function(delayErr, delayedPath) {
if(delayErr) {
log.error('Failed to delay upstream sync for ' + delayedPath + ' in handlePatchAckResponse while getting checksum');
}
onError(syncManager, err);
});
}
// Stamp the new checksum on the node so future syncs can detect change.
fsUtils.setChecksum(rawFs, syncedPath, checksum, function(err) {
if(err) {
log.error('Failed to stamp the checksum for ' + syncedPath + ' in handlePatchAckResponse');
return fs.delaySync(function(delayErr, delayedPath) {
if(delayErr) {
log.error('Failed to delay upstream sync for ' + delayedPath + ' in handlePatchAckResponse while setting checksum');
}
onError(syncManager, err);
});
}
complete();
});
});
});
}
|
javascript
|
{
"resource": ""
}
|
q55240
|
convert_json_to_objects
|
train
|
/**
 * Deserialize parsed JSON into model instances. `this` must be the model
 * constructor (invoke via convert_json_to_objects.call(Model, data)).
 * Arrays map element-wise to instances; a single object maps to one
 * instance. The second constructor argument `true` flags construction
 * from JSON.
 */
function convert_json_to_objects(data) {
  const Model = this
  if (!Array.isArray(data)) {
    return new Model(data, true)
  }
  return data.map((item) => new Model(item, true))
}
|
javascript
|
{
"resource": ""
}
|
q55241
|
runClient
|
train
|
// Main message loop for an authenticated client: parse incoming
// text-frame SyncMessages, account received bytes, and delegate each
// message to the client's protocol handler. Binary frames and
// unparseable payloads are answered with a format-error SyncMessage.
// Finishes by moving the client to LISTENING and sending AUTHZ.
function runClient(client) {
var ws = client.ws;
// Reply helper for malformed input: send a format-error SyncMessage.
function invalidMessage() {
var message = SyncMessage.error.format;
message.content = {error: 'Unable to parse/handle message, invalid message format.'};
client.sendMessage(message);
}
ws.onmessage = function(msg, flags) {
var data;
var message;
var info;
// Only text frames carry protocol messages; binary frames are rejected.
if(!flags || !flags.binary) {
try {
// Keep track of how much data we receive
info = client.info();
if(info) {
info.bytesReceived += Buffer.byteLength(msg.data, 'utf8');
}
data = JSON.parse(msg.data);
message = SyncMessage.parse(data);
} catch(error) {
log.error({client: client, err: error}, 'Unable to parse/handle client message. Data was `%s`', msg.data);
return invalidMessage();
}
// Delegate ws messages to the sync protocol handler at this point
client.handler.handleMessage(message);
} else {
log.warn({client: client}, 'Expected string but got binary data over web socket.');
invalidMessage();
}
};
// Send an AUTHZ response to let client know normal sync'ing can begin.
client.state = States.LISTENING;
client.sendMessage(SyncMessage.response.authz);
log.debug({client: client}, 'Starting authorized client session');
}
|
javascript
|
{
"resource": ""
}
|
q55242
|
initClient
|
train
|
/**
 * First-contact handler for a newly connected websocket client.
 * Waits for the client's first message, which must be a JSON payload
 * containing an auth token; validates the token, attaches the user's
 * filesystem, then hands the connection over to runClient() for normal
 * sync operation. Invalid payloads/tokens close the socket.
 */
function initClient(client) {
  var ws = client.ws;
  client.state = States.CONNECTING;
  // Wait until we get the user's token so we can finish authorizing
  ws.onmessage = function(msg) {
    var data;
    var info;
    try {
      // Keep track of how much data we receive
      info = client.info();
      if(info) {
        info.bytesReceived += Buffer.byteLength(msg.data, 'utf8');
      }
      data = JSON.parse(msg.data);
    } catch(err) {
      log.error({client: client, err: err}, 'Error parsing client token. Data was `%s`', msg.data);
      // BUGFIX: the original called ClientInfo.remove(token) here, but
      // `token` is only assigned after a successful parse (var hoisting
      // made it silently `undefined`), so there is no token to remove
      // yet — the call was dropped.
      client.close({
        code: 1011,
        message: 'Error: token could not be parsed.'
      });
      return;
    }
    // Authorize user
    var token = data.token;
    var username = WebsocketAuth.getAuthorizedUsername(token);
    if (!username) {
      log.warn({client: client}, 'Client sent an invalid or expired token (could not get username): token=%s', token);
      ClientInfo.remove(token);
      client.close({
        code: 1008,
        message: 'Error: invalid token.'
      });
      return;
    }
    // Update client details now that he/she is authenticated
    client.id = token;
    client.username = username;
    client.fs = filesystem.create(username);
    ClientInfo.update(client);
    log.info({client: client}, 'Client connected');
    runClient(client);
  };
}
|
javascript
|
{
"resource": ""
}
|
q55243
|
remove
|
train
|
/**
 * Remove a client from the active-clients list. No-op when the list has
 * been torn down or the client is not present.
 */
function remove(client) {
  if (!clients) {
    return;
  }
  var position = clients.indexOf(client);
  if (position !== -1) {
    clients.splice(position, 1);
  }
}
|
javascript
|
{
"resource": ""
}
|
q55244
|
add
|
train
|
/**
 * Register a newly connected client: arrange automatic removal when it
 * closes, append it to the active list (creating the list lazily), and
 * begin its authentication handshake.
 */
function add(client) {
  // Auto-remove clients on close
  client.once('closed', function() {
    remove(client);
  });
  if (!clients) {
    clients = [];
  }
  clients.push(client);
  initClient(client);
}
|
javascript
|
{
"resource": ""
}
|
q55245
|
shutdown
|
train
|
/**
 * Close every connected client and invoke `callback` once all have
 * finished closing. Clients already closing/closed are only waited on;
 * empty slots in the list count as closed immediately.
 */
function shutdown(callback) {
  var closed = 0;
  var connected = clients ? clients.length : 0;
  function maybeFinished() {
    closed += 1;
    if (closed < connected) {
      log.info('[Shutdown] Closed client %s of %s.', closed, connected);
      return;
    }
    // Last one out: drop the list and report completion.
    clients = null;
    log.info('[Shutdown] All client connections safely closed.');
    callback();
  }
  if (connected === 0) {
    return maybeFinished();
  }
  for (var i = 0; i < connected; i++) {
    var client = clients[i] || null;
    if (!client) {
      maybeFinished();
      continue;
    }
    client.once('closed', maybeFinished);
    // Only issue close() to clients not already on their way out.
    if (client.state !== States.CLOSING && client.state !== States.CLOSED) {
      client.close();
    }
  }
}
|
javascript
|
{
"resource": ""
}
|
q55246
|
apply_data
|
train
|
/**
 * Attach query data and/or file uploads to the pending superagent-style
 * request held on `this`, firing the optional `startCb` hook first.
 * GET requests receive `query` as query-string parameters; other methods
 * send it as the request body. `files` may be a FormData instance (sent
 * directly) or a map of field-name -> file attached individually.
 */
function apply_data(query, files) {
  if (startCb) {
    startCb()
  }
  if (query !== undefined) {
    this.request = this.method === 'get'
      ? this.request.query(query)
      : this.request.send(query)
  }
  if (files === undefined) {
    return
  }
  if (files instanceof FormData) {
    this.request = this.request.send(files)
  } else {
    Object.keys(files).forEach((key) => {
      this.request = this.request.attach(key, files[key])
    })
  }
}
|
javascript
|
{
"resource": ""
}
|
q55247
|
handleFile
|
train
|
/**
 * Serve a file over HTTP with a MIME type derived from its extension.
 * UTF-8 text types are read as strings, everything else as raw data;
 * unreadable paths produce a 404.
 */
function handleFile(fs, path, res) {
  var contentType = mime.lookup(path);
  var isUtf8Text = mime.charsets.lookup(contentType) === "UTF-8";
  fs.readFile(path, {encoding: isUtf8Text ? "utf8" : null}, function(err, data) {
    if (err) {
      log.error(err, 'Unable to read file path `%s`', path);
      return handle404(path, res);
    }
    write(data, contentType, res);
  });
}
|
javascript
|
{
"resource": ""
}
|
q55248
|
handleDir
|
train
|
// Render an Apache-style HTML directory index for `path`: a header with
// a parent-directory link, one table row per entry (icon chosen by
// entry type/extension), and a version footer. Listing failures are
// answered with a 404.
function handleDir(fs, path, res) {
var sh = new fs.Shell();
var parent = Path.dirname(path);
// Static page chrome surrounding the generated rows.
var header = '<!DOCTYPE html>' +
'<html><head><title>Index of ' + path + '</title></head>' +
'<body><h1>Index of ' + path + '</h1>' +
'<table><tr><th><img src="/icons/blank.png" alt="[ICO]"></th>' +
'<th><a href="#">Name</a></th><th><a href="#">Last modified</a></th>' +
'<th><a href="#">Size</a></th><th><a href="#">Description</a></th></tr>' +
'<tr><th colspan="5"><hr></th></tr>' +
'<tr><td valign="top"><img src="/icons/back.png" alt="[DIR]"></td>' +
'<td><a href="' + parent + '">Parent Directory</a> </td><td> </td>' +
'<td align="right"> - </td><td> </td></tr>';
var footer = '<tr><th colspan="5"><hr></th></tr>' +
'</table><address>MakeDrive/' + version + ' (Web)</address>' +
'</body></html>';
// Format a single listing row; falls back to the generic icon/alt text.
function row(icon, alt, href, name, modified, size) {
icon = icon || '/icons/unknown.png';
alt = alt || '[ ]';
modified = util.formatDate(new Date(modified));
size = util.formatSize(size);
return '<tr><td valign="top"><img src="' + icon + '" alt="' + alt + '"></td><td>' +
'<a href="' + href + '">' + name + '</a> </td>' +
'<td align="right">' + modified + ' </td>' +
'<td align="right">' + size + '</td><td> </td></tr>';
}
// Turn the shell listing into table rows and send the composed page.
function processEntries(entries) {
var rows = '';
entries.forEach(function(entry) {
var name = Path.basename(entry.path);
var ext = Path.extname(entry.path);
var href = Path.join('/p', path, entry.path);
var icon;
var alt;
// Pick an icon by entry type, then by file extension category.
if(entry.type === 'DIRECTORY') {
icon = '/icons/folder.png';
alt = '[DIR]';
} else { // file
if(util.isImage(ext)) {
icon = '/icons/image2.png';
alt = '[IMG]';
} else if(util.isMedia(ext)) {
icon = '/icons/movie.png';
alt = '[MOV]';
} else {
icon = '/icons/text.png';
alt = '[TXT]';
}
}
rows += row(icon, alt, href, name, entry.modified, entry.size);
});
var content = header + rows + footer;
write(content, 'text/html', res);
}
sh.ls(path, function(err, list) {
if(err) {
log.error(err, 'Unable to get listing for path `%s`', path);
handle404(path, res);
return;
}
processEntries(list);
});
}
|
javascript
|
{
"resource": ""
}
|
q55249
|
onmessage
|
train
|
/**
 * Redis pub/sub dispatcher: re-emit incoming channel messages as local
 * events on this module. Messages arriving during shutdown are dropped;
 * unknown channels are logged.
 */
function onmessage(channel, message) {
  if (closing) {
    return;
  }
  var eventForChannel = {};
  eventForChannel[ChannelConstants.syncChannel] = 'sync';
  eventForChannel[ChannelConstants.lockRequestChannel] = 'lock-request';
  eventForChannel[ChannelConstants.lockResponseChannel] = 'lock-response';
  var event = eventForChannel[channel];
  if (event) {
    module.exports.emit(event, message);
  } else {
    log.warn('[Redis] Got unexpected message on channel `%s`. Message was: `%s`', channel, message);
  }
}
|
javascript
|
{
"resource": ""
}
|
q55250
|
windowCloseHandler
|
train
|
/**
 * beforeunload handler: when a sync is underway (and the warning option
 * is enabled), prompt the user before allowing the window to close.
 * Returning the message (and setting returnValue) triggers the browser
 * confirmation dialog.
 */
function windowCloseHandler(event) {
  if (!options.windowCloseWarning || sync.state !== sync.SYNC_SYNCING) {
    return;
  }
  var confirmationMessage = "Sync currently underway, are you sure you want to close?";
  (event || global.event).returnValue = confirmationMessage;
  return confirmationMessage;
}
|
javascript
|
{
"resource": ""
}
|
q55251
|
continuousBundle
|
train
|
// Watch-mode build: wrap browserify in watchify so any dependency change
// re-runs the bundle. Build errors and successful rebuilds raise desktop
// notifications via `notifier`; logs go to the terminal. Returns the
// initial bundle stream.
function continuousBundle() {
// Rebuild helper, reused for the initial build and every update event.
const bundle = b => b.bundle()
.on('error', (err) => {
notifier.notify({
title: 'Browserify Error',
message: err.message,
});
gutil.log('Browserify Error', err);
})
.pipe(source('crudl.js'))
.pipe(gulp.dest(dist))
.on('end', () => {
notifier.notify({
title: 'Browserify',
message: 'OK',
});
})
// debug: true emits source maps for development builds.
const opts = assign({}, watchify.args, browersifyOptions, { debug: true });
const bundler = watchify(browserify(opts).transform(babelify.configure(babelifyOptions)));
bundler.on('update', () => bundle(bundler)); // on any dep update, runs the bundler
bundler.on('log', gutil.log); // output build logs to terminal
return bundle(bundler)
}
|
javascript
|
{
"resource": ""
}
|
q55252
|
bundleDevelopment
|
train
|
/**
 * One-shot development build: browserify + babelify with source maps
 * (debug: true), written unminified to `${dist}/crudl.js`.
 */
function bundleDevelopment() {
  const opts = assign({}, browersifyOptions, { debug: true });
  const bundler = browserify(opts).transform(babelify.configure(babelifyOptions));
  bundler.on('log', gutil.log); // output build logs to terminal
  const stream = bundler.bundle();
  stream.on('error', (err) => { // log errors if they happen
    gutil.log('Browserify Error', err);
  });
  return stream
    .pipe(source('crudl.js'))
    .pipe(gulp.dest(dist))
    .on('end', () => {
      gutil.log(`Successfully build ${gutil.colors.magenta(`${dist}/crudl.js`)}`)
    })
}
|
javascript
|
{
"resource": ""
}
|
q55253
|
bundleProduction
|
train
|
// Production build: browserify + babelify, envify with NODE_ENV set to
// "production" (applied globally so dependencies are purged too), then
// uglify the bundle, prepend the license header, and write
// `${dist}/crudl.min.js`.
function bundleProduction() {
return browserify(browersifyOptions)
.transform(babelify.configure(babelifyOptions))
.transform(envify({ _: 'purge', NODE_ENV: 'production' }), { global: true })
.on('log', gutil.log)
.bundle()
// minify
.pipe(source('crudl.js'))
.pipe(buffer())
.pipe(uglify())
.pipe(rename('crudl.min.js'))
.on('error', gutil.log.bind(gutil, 'uglify:'))
// Prepend the license
.pipe(concat('crudl.min.js'))
.pipe(concat.header(`/* LICENSE: ${packageJSON.license} */\n`))
// Copy to dist
.pipe(gulp.dest(dist))
.on('end', () => {
gutil.log(`Successfully build ${gutil.colors.magenta(`${dist}/crudl.min.js`)}`)
})
}
|
javascript
|
{
"resource": ""
}
|
q55254
|
sassWatch
|
train
|
/**
 * Watch the Sass sources and trigger the 'sass-compile' task whenever
 * one changes, logging which file changed and how.
 */
function sassWatch() {
  const watcher = gulp.watch(sassSrcFiles, ['sass-compile']);
  return watcher.on('change', (event) => {
    gutil.log(`File ${event.path} was ${event.type}, running tasks...`);
  });
}
|
javascript
|
{
"resource": ""
}
|
q55255
|
transit
|
train
|
/**
 * Immutable state transition: return a deep copy of `state` with the
 * lodash-style path `variable` set to `value`, leaving the original
 * state untouched.
 */
function transit(state, variable, value) {
  // FIXME: deep copying of the whole state can be eventually slow...
  const nextState = cloneDeep(state)
  set(nextState, variable, value)
  return nextState
}
|
javascript
|
{
"resource": ""
}
|
q55256
|
getStats
|
train
|
/**
 * Aggregate per-database statistics into instance-wide totals.
 *
 * @param {Object} results - async.auto results containing `databases`,
 *   an array of objects with numeric document_count / storage_size /
 *   index_count / index_size fields.
 * @param {Function} done - node-style callback, invoked with
 *   (null, totals) where totals maps each key to its summed value.
 */
function getStats(results, done) {
  const databases = results.databases;
  const keys = ['document_count', 'storage_size', 'index_count', 'index_size'];
  const stats = {};
  // forEach, not map: these loops exist purely for their side effects.
  keys.forEach(function(k) {
    stats[k] = 0;
  });
  databases.forEach(function(db) {
    keys.forEach(function(k) {
      stats[k] += db[k];
    });
  });
  done(null, stats);
}
|
javascript
|
{
"resource": ""
}
|
q55257
|
getInstanceDetail
|
train
|
// Gather a full MongoDB instance overview by running a dependency graph
// of probes with async.auto: host/build/cmdline info, genuine-MongoDB
// detection, database and collection inventories (filtered by the
// user's privileges), plus hierarchy and aggregate stats. Intermediate
// task outputs are stripped before the final results reach `done`.
function getInstanceDetail(client, db, done) {
// Task graph: each entry is either a seed value (attach) or
// [dependencies..., taskFn] in async.auto form.
const tasks = {
client: attach.bind(null, client),
db: attach.bind(null, db),
userInfo: ['client', 'db', getUserInfo],
host: ['client', 'db', getHostInfo],
build: ['client', 'db', getBuildInfo],
cmdLineOpts: ['client', 'db', getCmdLineOpts],
genuineMongoDB: ['build', 'cmdLineOpts', getGenuineMongoDB],
listDatabases: ['client', 'db', 'userInfo', listDatabases],
allowedDatabases: ['userInfo', getAllowedDatabases],
databases: [
'client',
'db',
'listDatabases',
'allowedDatabases',
getDatabases
],
listCollections: ['client', 'db', 'databases', listCollections],
allowedCollections: ['userInfo', getAllowedCollections],
collections: [
'client',
'db',
'listCollections',
'allowedCollections',
getCollections
],
hierarchy: ['databases', 'collections', getHierarchy],
stats: ['databases', getStats]
};
async.auto(tasks, function(err, results) {
if (err) {
// report error
return done(err);
}
// cleanup: drop intermediate task outputs from the public result.
results = omit(results, [
'db',
'listDatabases',
'allowedDatabases',
'userInfo',
'listCollections',
'allowedCollections',
'cmdLineOpts'
]);
return done(null, results);
});
}
|
javascript
|
{
"resource": ""
}
|
q55258
|
saveReturnUrlToSession
|
train
|
// Remember the URL the user was trying to reach so they can be redirected
// back after login. The URL is stored only when it is on the allow-list,
// the navigation did not originate from our own host, and no return URL
// has been stored in the session yet.
function saveReturnUrlToSession(req, isReturnUrlAllowed) {
  const referrer = req.get("referrer")
  const cameFromThisHost = Boolean(
    referrer && url.parse(referrer).hostname === req.hostname
  )
  const urlAllowed = isReturnUrlAllowed(req.originalUrl)
  const alreadyStored = Boolean(req.session && req.session.returnTo)
  if (urlAllowed && !cameFromThisHost && !alreadyStored) {
    req.session.returnTo = req.originalUrl
  }
}
|
javascript
|
{
"resource": ""
}
|
q55259
|
stop
|
train
|
// Stop any running workers managed by this runner. Delegates to
// killIfRunning and reports the outcome through `done(err)`.
function stop(opts, done) {
  debug('stopping...');
  killIfRunning(opts, function(err) {
    debug('Any running workers have been sent a stop command');
    done(err);
  });
}
|
javascript
|
{
"resource": ""
}
|
q55260
|
configure
|
train
|
/**
 * Normalize runner options by layering the supplied opts over MONGODB_*
 * environment variables over built-in defaults, then prepare the pid,
 * log and data directories.
 *
 * @param {Object} opts - parsed CLI options (reassigned via defaults()).
 * @param {Function} done - callback(err) once directories are ready; for
 *   the 'stop' action it is invoked immediately with no setup.
 */
function configure(opts, done) {
  // Drop minimist's positional-arguments key so it doesn't leak into the spec.
  delete opts._;
  opts = defaults(opts, {
    topology: process.env.MONGODB_TOPOLOGY || 'standalone'
  });
  // Deployment name defaults to the topology name.
  opts = defaults(opts, {
    name: opts.topology
  });
  // NOTE(review): env vars are strings — MONGODB_PURGE='false' is truthy
  // here, and MONGODB_PORT leaves `port` as a string; confirm downstream
  // consumers tolerate both.
  opts = defaults(opts, {
    logpath: untildify(
      process.env.MONGODB_LOGPATH ||
        format('~/.mongodb/runner/%s.log', opts.name)
    ),
    pidpath: untildify(process.env.MONGODB_PIDPATH || '~/.mongodb/runner/pid'),
    port: process.env.MONGODB_PORT || 27017,
    mongodBin: process.env.MONGOD_BIN || 'mongod',
    mongosBin: process.env.MONGOS_BIN || 'mongos',
    storageEngine: process.env.MONGODB_STORAGE_ENGINE,
    auth_mechanism: process.env.MONGODB_AUTH_MECHANISM || 'none',
    purge: process.env.MONGODB_PURGE || true
  });
  // MongoDB < 3.0 doesn't understand the storageEngine argument and
  // will fail to start if provided!
  // NOTE(review): this is a lexicographic string comparison.
  if (opts.version < '3.0') {
    delete opts.storageEngine;
  }
  // Replica sets get member-count defaults.
  if (opts.topology === 'replicaset') {
    opts = defaults(opts, {
      arbiters: process.env.MONGODB_ARBITERS || 0,
      secondaries: process.env.MONGODB_SECONDARIES || 2,
      passives: process.env.MONGODB_PASSIVES || 0
    });
  }
  // Sharded clusters additionally get shard/router/config defaults.
  if (opts.topology === 'cluster') {
    opts = defaults(opts, {
      shards: process.env.MONGODB_SHARDS || 1, // -> replsets
      routers: process.env.MONGODB_ROUTERS || 1, // -> mongoses
      configs: process.env.MONGODB_CONFIGS || 1,
      shardPort: process.env.MONGODB_SHARDS_PORT || 31000, // -> replsetStartPort
      configPort: process.env.MONGODB_CONFIGS_PORT || 35000, // -> configStartPort
      arbiters: process.env.MONGODB_ARBITERS || 0,
      secondaries: process.env.MONGODB_SECONDARIES || 2,
      passives: process.env.MONGODB_PASSIVES || 0
    });
  }
  debug('Ready to process spec', opts);
  // Stopping needs no directory setup.
  if (opts.action === 'stop') {
    return done();
  }
  async.series(
    [
      mkdirp.bind(null, opts.pidpath),
      getDbPath.bind(null, opts),
      createLogsDirectory.bind(null, opts)
    ],
    done
  );
}
|
javascript
|
{
"resource": ""
}
|
q55261
|
mongodb_runner_mocha_before
|
train
|
/**
 * Create a mocha `before` hook that guarantees a mongodb deployment is
 * listening on opts.port, starting one via the runner when necessary.
 * If one is already on the requested port, sets the
 * MONGODB_RUNNER_MOCHA_SKIP_STOP env flag so the matching `after` hook
 * leaves it running.
 *
 * @param {Object|Function} opts - options ({port, timeout, slow}), or the
 *   mocha `done` callback when used directly as the hook.
 * @returns {Function} the hook function for mocha.
 */
function mongodb_runner_mocha_before(opts) {
  if (typeof opts === 'function') {
    // So you can just do `before(require('mongodb-runner/mocha/before'));`
    return mongodb_runner_mocha_before({}).apply(this, arguments);
  }
  opts = opts || {};
  defaults(opts, {
    port: 27017,
    timeout: 10000,
    slow: 10000
  });
  return function(done) {
    this.timeout(opts.timeout);
    this.slow(opts.slow);
    debug('checking if mongodb is running...');
    running(function(err, res) {
      if (err) {
        // Detection failure is treated as "not running": try to start one.
        debug('mongodb detection failed so going to try and start one');
        runner({
          port: opts.port,
          action: 'start'
        }, done);
        return;
      }
      if (res && res.length > 0) {
        if (res[0].port === opts.port) {
          // Flag for the `after` hook: we didn't start this deployment,
          // so it must not stop it either.
          process.env.MONGODB_RUNNER_MOCHA_SKIP_STOP = '1';
          debug('mongodb already running on `localhost:%s` '
            + 'so we won\'t start a new one', opts.port);
          done();
          return;
        }
        // Running, but on the wrong port for these tests.
        debug('mongodb already running, but its on '
          + '`localhost:%d` and we need `localhost:%s` for '
          + 'the tests so starting up a new one.', res[0].port, opts.port);
        runner({
          action: 'start',
          port: opts.port
        }, done);
        return;
      }
      debug('no mongodb running so starting one up');
      runner({
        action: 'start',
        port: opts.port
      }, done);
      return;
    });
  };
}
|
javascript
|
{
"resource": ""
}
|
q55262
|
mongodb_runner_mocha_after
|
train
|
/**
 * Create a mocha `after` hook that stops the mongodb started by the
 * matching `before` hook. If MONGODB_RUNNER_MOCHA_SKIP_STOP is set (the
 * deployment was already running before the tests), it is left running.
 *
 * @param {Object|Function} opts - options ({port}), or the mocha `done`
 *   callback when used directly as the hook.
 * @returns {Function} the hook function for mocha.
 */
function mongodb_runner_mocha_after(opts) {
  if (typeof opts === 'function') {
    // So you can just do `after(require('mongodb-runner/mocha/after'));`
    return mongodb_runner_mocha_after({}).apply(this, arguments);
  }
  opts = opts || {};
  defaults(opts, {
    port: 27017
  });
  return function(done) {
    if (process.env.MONGODB_RUNNER_MOCHA_SKIP_STOP) {
      debug('not stopping mongodb as it was not started by mocha/before');
      // BUG FIX: assigning `undefined` to a process.env key coerces it to
      // the string 'undefined' (which is truthy), so the flag never
      // actually cleared. `delete` is the only way to unset an env var.
      delete process.env.MONGODB_RUNNER_MOCHA_SKIP_STOP;
      done();
      return;
    }
    debug('stopping mongodb...');
    runner({
      port: opts.port,
      action: 'stop'
    }, done);
  };
}
|
javascript
|
{
"resource": ""
}
|
q55263
|
tryParse
|
train
|
// Parse a URL string into a minimal config object with protocol, host and
// numeric port. Returns null for empty/falsy input. The port is NaN when
// the URL carries no explicit port.
function tryParse(url) {
  if (!url) {
    return null;
  }
  var parsed = urlParse(url);
  return {
    protocol: parsed.protocol,
    host: parsed.hostname,
    port: parseInt(parsed.port, 10)
  };
}
|
javascript
|
{
"resource": ""
}
|
q55264
|
mixinProxying
|
train
|
/**
 * Monkey-patch an http(s) Agent so every connection it makes goes through
 * a proxy. Stores proxyOpts on agent.proxy, then overrides two methods
 * (keeping the originals to delegate to):
 *  - createConnection: connect to the proxy host:port, ignoring the
 *    requested destination.
 *  - addRequest: rewrite req.path into an absolute URL so the proxy knows
 *    the real destination, optionally forcing a configured localAddress.
 */
function mixinProxying(agent, proxyOpts) {
  agent.proxy = proxyOpts;
  // Keep references to the original methods so the overrides can delegate.
  var orig = _.pick(agent, 'createConnection', 'addRequest');
  // Make the tcp or tls connection go to the proxy, ignoring the
  // destination host:port arguments.
  agent.createConnection = function(port, host, options) {
    return orig.createConnection.call(this,
      this.proxy.port, this.proxy.host, options);
  };
  // tell the proxy where we really want to go by fully-qualifying the path
  // part. Force a localAddress if one was configured
  agent.addRequest = function(req, host, port, localAddress) {
    req.path = this.proxy.innerProtocol + '//' + host + ':' + port + req.path;
    if (this.proxy.localAddress) {
      localAddress = this.proxy.localAddress;
    }
    return orig.addRequest.call(this, req, host, port, localAddress);
  };
}
|
javascript
|
{
"resource": ""
}
|
q55265
|
train
|
// Detach the view's root element from the DOM (if attached), mark the view
// as not rendered, and tear down its bindings. Returns `this` for chaining.
function () {
  if (this.el && this.el.parentNode) this.el.parentNode.removeChild(this.el);
  this._rendered = false;
  this._downsertBindings();
  return this;
}
|
javascript
|
{
"resource": ""
}
|
|
q55266
|
train
|
// Render this view from a template (string or function), replacing the
// current root element in place when one is already attached to the DOM.
// Throws if no template is available or if the template produced more than
// one root node (a document fragment). Returns `this` for chaining.
function (context, templateArg) {
  var template = templateArg || this.template;
  if (!template) throw new Error('Template string or function needed.');
  // Function templates are invoked with the view as `this` and `context`
  // (defaulting to the view itself) as argument.
  var newDom = isString(template) ? template : template.call(this, context || this);
  if (isString(newDom)) newDom = domify(newDom);
  var parent = this.el && this.el.parentNode;
  if (parent) parent.replaceChild(newDom, this.el);
  if (newDom.nodeName === '#document-fragment') throw new Error('Views can only have one root element, including comment nodes.');
  this.el = newDom;
  return this;
}
|
javascript
|
{
"resource": ""
}
|
|
q55267
|
checkSchemaHasReferences
|
train
|
/**
 * Recursively check whether a JSON schema (or any nested subschema)
 * contains a `$ref` reference anywhere in its tree.
 *
 * @param {Object} schema - JSON-schema-like plain object.
 * @returns {boolean} true if a truthy `$ref` key appears at any depth.
 */
function checkSchemaHasReferences(schema) {
  if (schema.$ref) {
    return true;
  }
  return Object.values(schema).some((value) => {
    if (Array.isArray(value)) {
      return value.some(checkSchemaHasReferences);
    }
    // Native checks replace the former lodash _.isArray/_.isObject calls;
    // schema values are JSON data, so lodash's broader function-as-object
    // handling is irrelevant here. Guard against null (typeof null ===
    // 'object').
    if (value !== null && typeof value === 'object') {
      return checkSchemaHasReferences(value);
    }
    return false;
  });
}
|
javascript
|
{
"resource": ""
}
|
q55268
|
findReferences
|
train
|
/**
 * Collect every `$ref` value reachable through the standard JSON-schema
 * composition keywords, in document order: allOf, anyOf, oneOf, not,
 * items, additionalItems, properties, patternProperties,
 * additionalProperties.
 *
 * @param {Object} schema - JSON-schema-like plain object.
 * @returns {string[]} all discovered reference strings (may be empty).
 */
function findReferences(schema) {
  if (schema.$ref) {
    return [schema.$ref];
  }
  const references = [];
  // Recurse into a subschema and append whatever it yields.
  const collect = (subschema) => {
    references.push(...findReferences(subschema));
  };
  // Combinator keywords holding arrays of subschemas.
  ['allOf', 'anyOf', 'oneOf'].forEach((keyword) => {
    if (schema[keyword]) {
      schema[keyword].forEach(collect);
    }
  });
  if (schema.not) {
    collect(schema.not);
  }
  // Array keywords: `items` may be a single schema or a tuple of schemas.
  if (schema.items) {
    if (Array.isArray(schema.items)) {
      schema.items.forEach(collect);
    } else {
      collect(schema.items);
    }
  }
  if (schema.additionalItems && typeof schema.additionalItems === 'object') {
    collect(schema.additionalItems);
  }
  // Object keywords: named and pattern-matched property schemas.
  ['properties', 'patternProperties'].forEach((keyword) => {
    if (schema[keyword]) {
      Object.keys(schema[keyword]).forEach((key) => collect(schema[keyword][key]));
    }
  });
  if (schema.additionalProperties && typeof schema.additionalProperties === 'object') {
    collect(schema.additionalProperties);
  }
  return references;
}
|
javascript
|
{
"resource": ""
}
|
q55269
|
getTime
|
train
|
// Format a duration in seconds as a human-readable "Hh Mmin Ss" string.
function getTime(time) {
  var remainder = time % 3600;
  var parts = [
    Math.floor(time / 3600) + 'h',
    Math.floor(remainder / 60) + 'min',
    (remainder % 60) + 's'
  ];
  return parts.join(' ');
}
|
javascript
|
{
"resource": ""
}
|
q55270
|
isStream
|
train
|
// Classify a value's stream type: 'web' for a WHATWG ReadableStream,
// 'node' for a Node.js readable stream (only when NodeReadableStream is
// available in this environment), false for anything else.
function isStream(input) {
  if (ReadableStream.prototype.isPrototypeOf(input)) {
    return 'web';
  }
  // NodeReadableStream is a module-level reference that is unset outside Node.
  if (NodeReadableStream && NodeReadableStream.prototype.isPrototypeOf(input)) {
    return 'node';
  }
  return false;
}
|
javascript
|
{
"resource": ""
}
|
q55271
|
getPushSettings
|
train
|
/**
 * Build the connection settings for the Unified Push server, proxied
 * through millicore.
 *
 * @param {Object} opts - must contain widget (project id), instance
 *   (app id), appapikey, millicore host and port.
 * @returns {Object} settings with url, headers and placeholder credentials.
 */
function getPushSettings(opts) {
  assert.ok(opts, 'opts is undefined');
  var headers = {
    'X-Project-Id': opts.widget,
    'X-App-Id': opts.instance
  };
  fhutils.addAppApiKeyHeader(headers, opts.appapikey);
  return {
    url: 'https://' + opts.millicore + ':' + opts.port + '/box/api/unifiedpush/mbaas/',
    applicationId: "fake", // we have to use fake ID, it will be added by supercore
    masterSecret: "fake", // we have to use fake secret, it will be added by supercore
    headers: headers
  };
}
|
javascript
|
{
"resource": ""
}
|
q55272
|
toStream
|
train
|
// Coerce any input into a web ReadableStream: node streams are wrapped via
// nodeToWeb, web streams pass through unchanged, and any other value
// becomes a one-chunk stream containing that value.
function toStream(input) {
  let streamType = isStream(input);
  if (streamType === 'node') {
    return nodeToWeb(input);
  } else if (streamType) {
    return input;
  }
  return new ReadableStream({
    start(controller) {
      controller.enqueue(input);
      controller.close();
    }
  });
}
|
javascript
|
{
"resource": ""
}
|
q55273
|
concat
|
train
|
// Concatenate a homogeneous list of values. If any entry is a stream the
// result is a stream; otherwise strings are joined, Node Buffers are
// Buffer.concat'ed, and anything else is treated as Uint8Arrays.
function concat(list) {
  if (list.some(isStream)) {
    return concatStream(list);
  }
  if (typeof list[0] === 'string') {
    return list.join('');
  }
  // NodeBuffer is only set when running under Node.
  if (NodeBuffer && NodeBuffer.isBuffer(list[0])) {
    return NodeBuffer.concat(list);
  }
  return concatUint8Array(list);
}
|
javascript
|
{
"resource": ""
}
|
q55274
|
concatStream
|
train
|
// Concatenate a list of streams (or stream-coercible values) into one
// readable stream, piping each source in list order into a shared
// pass-through transform.
function concatStream(list) {
  list = list.map(toStream);
  // Cancelling the combined stream cancels every source still piping.
  // (`transforms` is referenced before its declaration below; the closure
  // only runs after it has been assigned.)
  const transform = transformWithCancel(async function(reason) {
    await Promise.all(transforms.map(stream => cancel(stream, reason)));
  });
  // Chain the pipes so sources write strictly in order; only the final
  // pipe is allowed to close the shared writable.
  let prev = Promise.resolve();
  const transforms = list.map((stream, i) => transformPair(stream, (readable, writable) => {
    prev = prev.then(() => pipe(readable, transform.writable, {
      preventClose: i !== list.length - 1
    }));
    return prev;
  }));
  return transform.readable;
}
|
javascript
|
{
"resource": ""
}
|
q55275
|
getWriter
|
train
|
// Acquire a writer for the stream, patching releaseLock() to first swallow
// the writer's `closed` rejection so that releasing the lock on an errored
// stream does not surface an unhandled promise rejection.
function getWriter(input) {
  const writer = input.getWriter();
  const releaseLock = writer.releaseLock;
  writer.releaseLock = () => {
    writer.closed.catch(function() {});
    releaseLock.call(writer);
  };
  return writer;
}
|
javascript
|
{
"resource": ""
}
|
q55276
|
pipe
|
train
|
/**
 * Pipe `input` (stream or stream-coercible value) into a writable stream.
 * Any chunks previously unshifted onto the input under [externalBuffer]
 * are written first so they are not lost.
 * NOTE(review): the catch swallows every error silently, so pipe failures
 * are only observable through the streams themselves — this appears
 * deliberate (error propagation happens via the streams), but confirm
 * before relying on it.
 */
async function pipe(input, target, options) {
  input = toStream(input);
  try {
    if (input[externalBuffer]) {
      const writer = getWriter(target);
      for (let i = 0; i < input[externalBuffer].length; i++) {
        await writer.ready;
        await writer.write(input[externalBuffer][i]);
      }
      writer.releaseLock();
    }
    return await input.pipeTo(target, options);
  } catch(e) {}
}
|
javascript
|
{
"resource": ""
}
|
q55277
|
transformRaw
|
train
|
// Pipe `input` through a new TransformStream built from `options`
// (transform/flush callbacks) and return its readable side.
function transformRaw(input, options) {
  const transformStream = new TransformStream(options);
  pipe(input, transformStream.writable);
  return transformStream.readable;
}
|
javascript
|
{
"resource": ""
}
|
q55278
|
transformWithCancel
|
train
|
/**
 * Create a linked { readable, writable } pair that passes chunks through
 * unchanged, with manual backpressure (highWaterMark 0: a write only
 * completes once the readable side has pulled) and a caller-supplied
 * cancel handler on the readable side.
 */
function transformWithCancel(cancel) {
  // True when the readable side pulled before the next write arrived.
  let pulled = false;
  // Resolver parked by a write that is waiting for the next pull.
  let backpressureChangePromiseResolve;
  let outputController;
  return {
    readable: new ReadableStream({
      start(controller) {
        outputController = controller;
      },
      pull() {
        if (backpressureChangePromiseResolve) {
          // A write is parked: let it complete now.
          backpressureChangePromiseResolve();
        } else {
          // Pull arrived first: remember it so the next write is immediate.
          pulled = true;
        }
      },
      cancel
    }, {highWaterMark: 0}),
    writable: new WritableStream({
      write: async function(chunk) {
        outputController.enqueue(chunk);
        if (!pulled) {
          // Wait until the readable side pulls before acking the write.
          await new Promise(resolve => {
            backpressureChangePromiseResolve = resolve;
          });
          backpressureChangePromiseResolve = null;
        } else {
          pulled = false;
        }
      },
      close: outputController.close.bind(outputController),
      abort: outputController.error.bind(outputController)
    })
  };
}
|
javascript
|
{
"resource": ""
}
|
q55279
|
transform
|
train
|
/**
 * Apply `process` to each chunk of `input` and `finish` once at the end.
 * For stream input, returns a transformed stream (undefined results are
 * dropped; callback errors become stream errors). For plain values,
 * returns the concatenation of process(input) and finish(), dropping
 * whichever is undefined.
 */
function transform(input, process = () => undefined, finish = () => undefined) {
  if (isStream(input)) {
    return transformRaw(input, {
      async transform(value, controller) {
        try {
          const result = await process(value);
          if (result !== undefined) controller.enqueue(result);
        } catch(e) {
          controller.error(e);
        }
      },
      async flush(controller) {
        try {
          const result = await finish();
          if (result !== undefined) controller.enqueue(result);
        } catch(e) {
          controller.error(e);
        }
      }
    });
  }
  // Non-stream path: run both callbacks synchronously and merge results.
  const result1 = process(input);
  const result2 = finish();
  if (result1 !== undefined && result2 !== undefined) return concat([result1, result2]);
  return result1 !== undefined ? result1 : result2;
}
|
javascript
|
{
"resource": ""
}
|
q55280
|
transformPair
|
train
|
/**
 * Hand both ends of a transform to the caller: `fn(readable, writable)`
 * receives the input's data and the output sink. Cancelling the returned
 * readable errors the incoming side, waits for the input pipe to settle,
 * then yields one macrotask so pending handlers can run.
 */
function transformPair(input, fn) {
  let incomingTransformController;
  // Identity transform used purely to capture a controller for the input.
  const incoming = new TransformStream({
    start(controller) {
      incomingTransformController = controller;
    }
  });
  const pipeDonePromise = pipe(input, incoming.writable);
  const outgoing = transformWithCancel(async function() {
    incomingTransformController.error(new Error('Readable side was canceled.'));
    await pipeDonePromise;
    // Yield one macrotask before reporting the cancel as complete.
    await new Promise(setTimeout);
  });
  fn(incoming.readable, outgoing.writable);
  return outgoing.readable;
}
|
javascript
|
{
"resource": ""
}
|
q55281
|
passiveClone
|
train
|
/**
 * Create a "passive" clone of `input`: a stream that mirrors every chunk
 * of the original without consuming it — data still flows through to the
 * original's consumer (the original is swapped for the tee'd copy via
 * overwrite). Non-stream inputs are simply copied with slice.
 */
function passiveClone(input) {
  if (isStream(input)) {
    return new ReadableStream({
      start(controller) {
        const transformed = transformPair(input, async (readable, writable) => {
          const reader = getReader(readable);
          const writer = getWriter(writable);
          try {
            while (true) {
              await writer.ready;
              const { done, value } = await reader.read();
              if (done) {
                // Clone may already be closed/cancelled; ignore that.
                try { controller.close(); } catch(e) {}
                await writer.close();
                return;
              }
              // Mirror the chunk into the clone, then pass it downstream.
              try { controller.enqueue(value); } catch(e) {}
              await writer.write(value);
            }
          } catch(e) {
            controller.error(e);
            await writer.abort(e);
          }
        });
        // Replace the original stream with the passthrough copy.
        overwrite(input, transformed);
      }
    });
  }
  return slice(input);
}
|
javascript
|
{
"resource": ""
}
|
q55282
|
slice
|
train
|
/**
 * Slice a stream, Uint8Array, Buffer or string with Array-like semantics
 * (negative indices count from the end; end defaults to Infinity).
 * Stream inputs are sliced lazily where the index combination allows it.
 */
function slice(input, begin=0, end=Infinity) {
  if (isStream(input)) {
    // Forward window: count bytes and terminate once past `end`.
    if (begin >= 0 && end >= 0) {
      let bytesRead = 0;
      return transformRaw(input, {
        transform(value, controller) {
          if (bytesRead < end) {
            if (bytesRead + value.length >= begin) {
              controller.enqueue(slice(value, Math.max(begin - bytesRead, 0), end - bytesRead));
            }
            bytesRead += value.length;
          } else {
            controller.terminate();
          }
        }
      });
    }
    // Tail slice (e.g. slice(s, -n)): buffer only the trailing chunks,
    // then slice their concatenation once the stream finishes.
    if (begin < 0 && (end < 0 || end === Infinity)) {
      let lastBytes = [];
      return transform(input, value => {
        if (value.length >= -begin) lastBytes = [value];
        else lastBytes.push(value);
      }, () => slice(concat(lastBytes), begin, end));
    }
    // Drop-from-the-end slice (slice(s, 0, -n)): hold back the last |end|
    // bytes so they are never emitted.
    if (begin === 0 && end < 0) {
      let lastBytes;
      return transform(input, value => {
        const returnValue = lastBytes ? concat([lastBytes, value]) : value;
        if (returnValue.length >= -end) {
          lastBytes = slice(returnValue, end);
          return slice(returnValue, begin, end);
        } else {
          lastBytes = returnValue;
        }
      });
    }
    // Remaining index combinations: read everything, then slice in memory.
    console.warn(`stream.slice(input, ${begin}, ${end}) not implemented efficiently.`);
    return fromAsync(async () => slice(await readToEnd(input), begin, end));
  }
  // Include any previously unshifted data in the sliceable value.
  if (input[externalBuffer]) {
    input = concat(input[externalBuffer].concat([input]));
  }
  // Uint8Array (but not Node Buffer): subarray shares memory, no copy.
  if (isUint8Array(input) && !(NodeBuffer && NodeBuffer.isBuffer(input))) {
    if (end === Infinity) end = input.length;
    return input.subarray(begin, end);
  }
  return input.slice(begin, end);
}
|
javascript
|
{
"resource": ""
}
|
q55283
|
cancel
|
train
|
// Cancel `input` if it is a cancellable stream (returning the cancel
// promise); resolve to undefined for anything else.
async function cancel(input, reason) {
  if (isStream(input) && input.cancel) {
    return input.cancel(reason);
  }
}
|
javascript
|
{
"resource": ""
}
|
q55284
|
fromAsync
|
train
|
// Wrap an async function in a single-chunk ReadableStream: the first pull
// awaits fn(), enqueues its result and closes; a rejection becomes a
// stream error.
function fromAsync(fn) {
  return new ReadableStream({
    async pull(controller) {
      try {
        const value = await fn();
        controller.enqueue(value);
        controller.close();
      } catch (e) {
        controller.error(e);
      }
    }
  });
}
|
javascript
|
{
"resource": ""
}
|
q55285
|
mongoConnectionStringOS3
|
train
|
// On OpenShift 3, fetch the app's MongoDB connection string from the MBaaS
// for the configured domain/environment, cache it in the
// FH_MONGODB_CONN_URL env var, and return it via `cb(err, url)`.
function mongoConnectionStringOS3(cb) {
  debug('Running in OpenShift 3, requesting db connection string from MBaaS');
  mbaasClient.app.databaseConnectionString({
    "domain": config.fhmbaas.domain,
    "environment": config.fhmbaas.environment
  }, function retrieved(err, resp) {
    if (err) {
      return cb(err);
    }
    // Cache for later lookups within this process.
    process.env.FH_MONGODB_CONN_URL = resp.url;
    return cb(undefined, resp.url);
  });
}
|
javascript
|
{
"resource": ""
}
|
q55286
|
findFieldDefinition
|
train
|
// Locate the field definition in self.form matching params.fieldId or
// params.fieldCode. Returns undefined when neither param is set or no
// field matches. Note: iteration does not short-circuit, so when ids or
// codes are duplicated across pages the LAST matching field wins.
function findFieldDefinition() {
  var foundField;
  if (!(params.fieldId || params.fieldCode)) {
    return undefined;
  }
  //Iterating through each of the pages to find a matching field.
  _.each(self.form.pages, function(page) {
    _.each(page.fields, function(field) {
      var fieldId = field._id;
      var fieldCode = field.fieldCode;
      if (fieldId === params.fieldId || fieldCode === params.fieldCode) {
        foundField = field;
      }
    });
  });
  return foundField;
}
|
javascript
|
{
"resource": ""
}
|
q55287
|
processInputValue
|
train
|
/**
 * Validate and record a single submitted field value at `params.index`.
 * File-based fields (photo/signature/file) are registered in
 * self.filesToUpload under a generated placeholder hash so the file can
 * be streamed to storage later.
 *
 * @returns {string|undefined} an error message on failure, or undefined
 *   on success (the value is stored in self.fieldValues[field._id][index]).
 */
function processInputValue() {
  var value = params.value;
  var index = params.index || 0;
  var fieldType = field.type;
  //Checking for a value.
  if (typeof(value) === "undefined" || value === null) {
    return "No value entered.";
  }
  /**
   * File-base fields (photo, signature and file) need to stream the file to the mongo server.
   */
  if (fieldType === "photo" || fieldType === "signature" || fieldType === "file") {
    //The stream must be a paused stream.
    var fileURI = value.fileStream;
    delete value.fileStream;
    //It must be possible to stream the object to the database.
    var isString=(typeof(fileURI) === "string");
    if (!isString) {
      return "Expected a string URI object when streaming a file-based field ";
    }
    if (!(value.fileName && value.fileSize && value.fileType)) {
      return "Invalid file parameters. Params: " + JSON.stringify(value);
    }
    //Generating a random file hash name.
    var hashName = "filePlaceHolder" + Date.now() + Math.floor(Math.random() * 10000000000000);
    // BUG FIX: `Date.now` (without parentheses) assigned the function
    // itself rather than the current timestamp; invoke it so a number is
    // stored in value.fileUpdateTime.
    var fileUpdateTime = Date.now();
    self.filesToUpload[hashName] = {
      fieldId: field._id,
      fileStream: fileURI
    };
    value.hashName = hashName;
    value.fileUpdateTime = fileUpdateTime;
  }
  self.fieldValues[field._id] = self.fieldValues[field._id] || [];
  self.fieldValues[field._id][index] = value;
  return undefined;
}
|
javascript
|
{
"resource": ""
}
|
q55288
|
FHapi
|
train
|
/**
 * Assemble the $fh API surface from its sub-modules (cache, db, forms,
 * push, sync, session, auth, ...), wire the sync service to MongoDB/Redis,
 * and expose mbaasExpress/shutdown helpers.
 *
 * @param {Object} cfg - service configuration, passed to each sub-module;
 *   cfg.redis.{host,port} are used to build the sync Redis URL.
 * @returns {Object} the fully wired api object.
 */
function FHapi(cfg) {
  var api = {
    getVersion: function() {
      //Getting The Version of fh-mbaas-api
      return packageJSON.version;
    },
    cache: require('./cache')(cfg),
    db: require('./db')(cfg),
    events: new EventEmitter(),
    forms: require('./forms')(cfg),
    log: false,
    stringify: false,
    parse: false,
    push: require('./push')(cfg),
    call: require('./call')(cfg),
    util: false,
    redisPort: cfg.redis.port || '6379',
    redisHost: cfg.redis.host || 'localhost',
    session: require('./session')(cfg),
    stats: require('./stats')(cfg),
    sync: sync,
    act: require('./act')(cfg),
    // `service` is an alias built from the same module as `act`.
    service: require('./act')(cfg),
    sec: sec.security,
    auth: require('./auth')(cfg),
    host: require('./host'),
    permission_map: require('fh-db').permission_map,
    hash: function(opts, callback) {
      var p = {
        act: 'hash',
        params: opts
      };
      sec.security(p, callback);
    },
    web: require('./web')(cfg)
  };
  api.sync.setEventEmitter(api.events);
  var redisUrl = 'redis://' + api.redisHost + ':' + api.redisPort;
  // Sync needs a MongoDB connection; without one it is simply disabled
  // (warn, don't crash).
  getDBUrl(api, function(err, connectionString) {
    if (err) {
      console.warn('Warning! Could not get a mongodb connection string. Sync will not work. (', err, ')');
      return;
    } else if (!connectionString) {
      console.warn('Warning! Could not get a mongodb connection string. Sync will not work. If running in a Dynofarm/FeedHenry MBaaS, ensure the database is upgraded');
      return;
    }
    var poolSize = parseInt(process.env.SYNC_MONGODB_POOLSIZE) || 50;
    api.sync.connect(connectionString, {poolSize: poolSize}, redisUrl, function(err) {
      if (err) {
        console.error('Error starting the sync server (', err, ')');
      }
    });
  });
  // Express middleware factory bound to this api instance.
  api.mbaasExpress = function(opts) {
    opts = opts || {};
    opts.api = api;
    return require('fh-mbaas-express')(opts);
  };
  api.shutdown = function(cb) {
    // Sync service has a setInterval loop running which will prevent fh-mbaas-api from exiting cleanly.
    // Call stopAll to ensure Sync exits clenaly.
    api.sync.stopAll(cb);
  };
  return api;
}
|
javascript
|
{
"resource": ""
}
|
q55289
|
Reader
|
train
|
/**
 * Unified reader over web streams, node streams and plain values.
 * Stream inputs get a real stream reader (node streams are converted to
 * web streams first); a plain value is exposed as a single read that
 * yields the value once, tracked via doneReadingSet so re-wrapping the
 * same value does not read it twice.
 */
function Reader(input) {
  this.stream = input;
  // Carry over any data previously unshifted onto the input.
  if (input[externalBuffer]) {
    this[externalBuffer] = input[externalBuffer].slice();
  }
  let streamType = streams.isStream(input);
  if (streamType === 'node') {
    input = streams.nodeToWeb(input);
  }
  if (streamType) {
    const reader = input.getReader();
    this._read = reader.read.bind(reader);
    this._releaseLock = () => {
      // Swallow the closed rejection so releasing the lock on an errored
      // stream does not surface an unhandled promise rejection.
      reader.closed.catch(function() {});
      reader.releaseLock();
    };
    return;
  }
  // Non-stream input: emit the value exactly once.
  let doneReading = false;
  this._read = async () => {
    if (doneReading || doneReadingSet.has(input)) {
      return { value: undefined, done: true };
    }
    doneReading = true;
    return { value: input, done: false };
  };
  this._releaseLock = () => {
    if (doneReading) {
      try {
        // Adding a primitive to the WeakSet throws; ignore that case.
        doneReadingSet.add(input);
      } catch(e) {}
    }
  };
}
|
javascript
|
{
"resource": ""
}
|
q55290
|
createMarkdownTemplate
|
train
|
// Build a markdown document for a parsed Vue component via json2md,
// using the API tables produced by buildTables.
// NOTE(review): the `file` parameter is currently unused — kept for
// interface compatibility with callers; confirm before removing.
function createMarkdownTemplate(file, vueObj) {
  let json2mdTemplate = [], mdTablesTemplate;
  mdTablesTemplate = buildTables(vueObj)
  if(mdTablesTemplate.length > 0) {
    json2mdTemplate = json2mdTemplate.concat(mdTablesTemplate)
  }
  return json2md(json2mdTemplate)
}
|
javascript
|
{
"resource": ""
}
|
q55291
|
buildTables
|
train
|
// Assemble the json2md template sections for a parsed Vue component:
// a heading followed by the props/methods/events/slots tables (each table
// builder returns null when it has nothing to show). Returns [] when no
// table produced any rows.
function buildTables(vueObj) {
  const updatedTemplate = [{h3: "<button class='title'>PROPS, METHODS, EVENTS, SLOTS</button>"}]
  const sections = [
    tableFromProps(vueObj["props"]),
    tableFromMethods(vueObj["methods"]),
    tableFromEvents(vueObj["events"]),
    tableFromSlots(vueObj["slots"])
  ]
  sections.forEach((mdTable) => {
    if (mdTable != null) {
      updatedTemplate.push(mdTable)
    }
  })
  return updatedTemplate.length > 1 ? updatedTemplate : []
}
|
javascript
|
{
"resource": ""
}
|
q55292
|
tableFromProps
|
train
|
/**
 * Build a json2md table object documenting a component's props.
 *
 * @param {Object} propsObj - vue-docgen props map (name -> descriptor).
 * @returns {Object|null} {table: {headers, rows}} or null when no rows.
 */
function tableFromProps(propsObj) {
  const headers = ["Prop Name", "Type", "Default", "Require", "Description"]
  let rows = []
  // construct rows of table from object of properties
  for(const prop in propsObj) {
    // Don't document properties with `@ignore` tag. Guard against
    // descriptors that carry no `tags` object at all (previously crashed).
    if (propsObj[prop].tags && propsObj[prop].tags.ignore) {
      continue
    }
    let cols = []
    cols.push(`${prop}`) // property name
    cols.push(propsObj[prop]["type"] ? propsObj[prop]["type"]["name"].replace(/\|/g, ',') : 'unknown') // type of the property
    cols.push(propsObj[prop]["defaultValue"] ? propsObj[prop]["defaultValue"]["value"] : 'n/a') // property default value
    cols.push(propsObj[prop]["required"] ? 'true' : 'false') // property is required
    cols.push(`${propsObj[prop]["description"]}`) // description of the property
    rows.push(cols)
  }
  return rows.length > 0 ? {table: {headers, rows}} : null
}
|
javascript
|
{
"resource": ""
}
|
q55293
|
tableFromMethods
|
train
|
/**
 * Build a json2md table object documenting a component's public methods.
 * Each row lists name, a newline-separated parameter summary, and the
 * description.
 *
 * FIX: the header row previously declared a "Type" column that no row
 * ever populated, misaligning every table; headers now match the three
 * columns actually produced.
 *
 * @param {Array} methodsArr - vue-docgen method descriptors.
 * @returns {Object|null} {table: {headers, rows}} or null when no rows.
 */
function tableFromMethods(methodsArr) {
  const headers = ["Method Name", "Parameters", "Description"]
  let rows = []
  // construct rows of table array of methods
  methodsArr.forEach((method) => {
    let cols = []
    cols.push(method["name"]) // method name
    let paramList = ''
    method["params"].forEach((param) => {
      paramList += `${param["name"]}: ${param["type"]["name"]} - ${param["description"]}\n`
    })
    cols.push(paramList) // list of method parameters
    cols.push(`${method["description"]}`) // description of the method
    rows.push(cols);
  })
  return rows.length > 0 ? {table: {headers, rows}} : null
}
|
javascript
|
{
"resource": ""
}
|
q55294
|
tableFromEvents
|
train
|
// Build a json2md table object documenting a component's events.
// The Type column joins the event's type names with '|' (a trailing
// separator is only added while a truthy next entry exists, matching the
// original join behavior). Returns null when there are no events.
function tableFromEvents(eventsObj) {
  const headers = ["Event Name", "Type", "Description"]
  const rows = []
  for(const evt in eventsObj) {
    const names = eventsObj[evt]["type"]["names"]
    let typeList = ''
    names.forEach((type, idx) => {
      typeList += type
      if (names[idx + 1]) {
        typeList += '|'
      }
    })
    rows.push([`${evt}`, typeList, `${eventsObj[evt]["description"]}`])
  }
  return rows.length > 0 ? {table: {headers, rows}} : null
}
|
javascript
|
{
"resource": ""
}
|
q55295
|
tableFromSlots
|
train
|
// Build a json2md table object documenting a component's slots: one row
// per slot with its name and (possibly empty) description. Returns null
// when the component declares no slots.
function tableFromSlots(slotsObj) {
  const headers = ["Slot", "Description"]
  const rows = Object.keys(slotsObj).map((slot) => [
    `${slot}`,
    `${slotsObj[slot]["description"] || ''}`
  ])
  return rows.length > 0 ? {table: {headers, rows}} : null
}
|
javascript
|
{
"resource": ""
}
|
q55296
|
buildAPIs
|
train
|
// Merge the props/methods/events/slots API sections of a parsed Vue
// component into a single object; section builders returning null are
// skipped. Always returns an object (possibly empty).
function buildAPIs(vueObj) {
  const compAPIObj = {}
  for (const buildSection of [propsAPIObject, methodsAPIObject, eventsAPIObject, slotsAPIObject]) {
    const section = buildSection(vueObj)
    if (section !== null) {
      Object.assign(compAPIObj, section)
    }
  }
  return compAPIObj
}
|
javascript
|
{
"resource": ""
}
|
q55297
|
propsAPIObject
|
train
|
/**
 * Build the {props: [...]} API section for a parsed Vue component, one
 * entry per documented prop with name/type/default/description.
 *
 * @param {Object} vueObj - vue-docgen output; only `props` is read.
 * @returns {Object|null} {props} or null when no props are documented.
 */
function propsAPIObject(vueObj) {
  const propsObj = vueObj["props"] || {}
  let props = []
  // construct array of objects for props
  for (const prop in propsObj) {
    if (propsObj.hasOwnProperty(prop)) {
      // Don't document properties with `@ignore` tag. Guard against
      // descriptors with no `tags` object at all (previously crashed).
      if (propsObj[prop].tags && propsObj[prop].tags.ignore) {
        continue
      }
      // object representing a single prop
      // NOTE(review): the `!== " "` comparison only matters when
      // defaultValue is literally the string " "; kept as-is.
      const ele = {
        "name": `${prop}`,
        "type": propsObj[prop]["type"] ? propsObj[prop]["type"]["name"] : 'unknown',
        "default": (propsObj[prop]["defaultValue"] && propsObj[prop]["defaultValue"] !== " ") ?
          propsObj[prop]["defaultValue"]["value"] : 'n/a',
        "description": `${propsObj[prop]["description"] || 'MISSING DESCRIPTION'}`
      }
      props.push(ele)
    }
  }
  return props.length > 0 ? {props} : null
}
|
javascript
|
{
"resource": ""
}
|
q55298
|
methodsAPIObject
|
train
|
// Build the {methods: [...]} API section for a parsed Vue component.
// Each entry carries the method name, a newline-separated parameter
// summary ("name: Type - description"), and the description (with a
// MISSING DESCRIPTION placeholder). Returns null when there are no
// public methods.
function methodsAPIObject(vueObj) {
  const methodsArr = vueObj["methods"] || []
  const methods = methodsArr.map((method) => {
    let paramList = ''
    for (const param of method["params"]) {
      paramList += `${param["name"]}: ${param["type"]["name"]} - ${param["description"]}\n`
    }
    return {
      "name": `${method["name"]}`,
      "parameters": paramList,
      "description": `${method["description"] || 'MISSING DESCRIPTION'}`
    }
  })
  return methods.length > 0 ? {methods} : null
}
|
javascript
|
{
"resource": ""
}
|
q55299
|
eventsAPIObject
|
train
|
// Build the {events: [...]} API section for a parsed Vue component.
// Each entry carries the event name, its type names joined with '|'
// (separator added only while a truthy next entry exists, matching the
// original behavior), and the description (with a MISSING DESCRIPTION
// placeholder). Returns null when there are no events.
function eventsAPIObject(vueObj) {
  const eventsObj = vueObj["events"] || {}
  const events = []
  for (const evt in eventsObj) {
    if (!eventsObj.hasOwnProperty(evt)) {
      continue
    }
    const names = eventsObj[evt]["type"]["names"]
    let typeList = ''
    names.forEach((type, idx) => {
      typeList += type
      if (names[idx + 1]) {
        typeList += '|'
      }
    })
    events.push({
      "name": `${evt}`,
      "type": typeList,
      "description": `${eventsObj[evt]["description"] || 'MISSING DESCRIPTION'}`
    })
  }
  return events.length > 0 ? {events} : null
}
|
javascript
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.