_id
stringlengths
2
6
title
stringlengths
0
58
partition
stringclasses
3 values
text
stringlengths
52
373k
language
stringclasses
1 value
meta_information
dict
q63400
prepare
test
/**
 * Build a CommitBuilder from the pending changes of the current working state.
 * Sets: the empty flag (clean working state), the parent SHA (current head),
 * the merged tree entries, and the map of blobs (contents of changes without
 * a SHA) that must be created on flush.
 * @param {RepositoryState} repoState
 * @param {Object} opts - mutated in place, then passed to CommitBuilder.create
 * @return {CommitBuilder}
 */
function prepare(repoState, opts) { const workingState = repoState.getCurrentState(); const changes = workingState.getChanges(); // Is this an empty commit ? opts.empty = workingState.isClean(); // Parent SHA opts.parents = new Immutable.List([ workingState.getHead() ]); // Get merged tree (with applied changes) opts.treeEntries = WorkingUtils.getMergedTreeEntries(workingState); // Create map of blobs that needs to be created opts.blobs = changes.filter((change) => { return !change.hasSha(); }).map((change) => { return change.getContent(); }); return CommitBuilder.create(opts); }
javascript
{ "resource": "" }
q63401
flush
test
/**
 * Flush a commit through the driver and forward the branch to it.
 * Skips empty, non-merge commits when options.ignoreEmpty is true (default).
 * On a non fast-forward error, attaches the created commit to err.commit so
 * callers can merge it back, then rethrows. Resolves with a RepositoryState
 * whose working state for the branch has been re-fetched.
 * @param {RepositoryState} repoState
 * @param {Driver} driver
 * @param {CommitBuilder} commitBuilder
 * @param {Object} [options] - branch (default: current), ignoreEmpty (true)
 * @return {Promise<RepositoryState>}
 */
function flush(repoState, driver, commitBuilder, options = {}) { options = Object.assign({ branch: repoState.getCurrentBranch(), ignoreEmpty: true }, options); if (options.ignoreEmpty && commitBuilder.isEmpty() && commitBuilder.getParents().count() < 2) { return Q(repoState); } // Create new commit return driver.flushCommit(commitBuilder) // Forward the branch .then((commit) => { return driver.forwardBranch(options.branch, commit.getSha()) // Fetch new workingState and replace old one .then(function updateBranch() { const updated = options.branch.merge({ commit }); return repoState.updateBranch(options.branch, updated); }, function nonFF(err) { if (err.code === ERRORS.NOT_FAST_FORWARD) { // Provide the created commit to allow merging it back. err.commit = commit; } throw err; }); }) .then(function updateWorkingState(forwardedRepoState) { const forwardedBranch = forwardedRepoState.getBranch(options.branch.getFullName()); return RepoUtils.fetchTree(forwardedRepoState, driver, forwardedBranch); }); }
javascript
{ "resource": "" }
q63402
format
test
/**
 * Format the message arguments and read the `color` property off the result
 * (colors-style String prototype extensions attach color getters by name).
 * @param {String} color - property name to read from the formatted string
 * @param {Array} messages - arguments forwarded to util.format
 * @return {*} the property value, or undefined when there are no messages or
 *             `color` is not a string
 */
function format(color, messages) {
    if (messages.length === 0 || typeof color !== 'string') {
        return;
    }
    var formatted = util.format.apply(null, messages);
    return formatted[color];
}
javascript
{ "resource": "" }
q63403
push
test
/**
 * Push the local branch to a remote repository.
 * @param {RepositoryState} repoState
 * @param {Driver} driver
 * @param {Object} [opts] - branch (default: current), force (false),
 *                          remote ({ name: 'origin' })
 * @return {Promise<RepositoryState>} the unchanged repoState
 */
function push(repoState, driver, opts = {}) {
    const defaults = {
        branch: repoState.getCurrentBranch(),
        force: false,
        remote: { name: 'origin' }
    };
    opts = Object.assign(defaults, opts);

    // Can fail with NOT_FAST_FORWARD
    // TODO update remote branch in repoState list of branches
    return driver.push(opts)
        .thenResolve(repoState);
}
javascript
{ "resource": "" }
q63404
pull
test
/**
 * Pull a branch from the remote, then refresh the branch list to pick up the
 * new SHA and re-fetch the branch's tree.
 * @param {RepositoryState} repoState
 * @param {Driver} driver
 * @param {Object} [opts] - branch (default: current), force (false),
 *                          remote ({ name: 'origin' })
 * @return {Promise<RepositoryState>}
 */
function pull(repoState, driver, opts = {}) { opts = Object.assign({ branch: repoState.getCurrentBranch(), force: false, remote: { name: 'origin' } }, opts); return driver.pull(opts) // Update branch SHA .then(() => { return driver.fetchBranches(); }) .then((branches) => { const updatedBranch = branches.find((br) => { return br.name === opts.branch.name; }); repoState = repoState.updateBranch(opts.branch, updatedBranch); return RepoUtils.fetchTree(repoState, driver, updatedBranch); }); }
javascript
{ "resource": "" }
q63405
compareRefs
test
/**
 * Compare two refs (Branch instances or ref strings): find their common
 * ancestor commit, fetch the working states of parent/base/head (an empty
 * WorkingState when there is no parent), and diff the three trees into a
 * TreeConflict.
 * @param {Driver} driver
 * @param {Branch|String} base
 * @param {Branch|String} head
 * @return {Promise<TreeConflict>}
 */
function compareRefs(driver, base, head) { const baseRef = base instanceof Branch ? base.getFullName() : base; const headRef = head instanceof Branch ? head.getFullName() : head; return driver.findParentCommit(baseRef, headRef) .then((parentCommit) => { // There can be no parent commit return Q.all([ parentCommit ? parentCommit.getSha() : null, baseRef, headRef ].map((ref) => { return ref ? driver.fetchWorkingState(ref) : WorkingState.createEmpty(); })); }) .spread((parent, base, head) => { const conflicts = _compareTrees(parent.getTreeEntries(), base.getTreeEntries(), head.getTreeEntries()); return new TreeConflict({ base, head, parent, conflicts }); }); }
javascript
{ "resource": "" }
q63406
solveTree
test
/**
 * Merge a map of solved conflicts into a TreeConflict. Any conflict still
 * unsolved afterwards falls back to keeping the base version.
 * @param {TreeConflict} treeConflict
 * @param {Map<Path, Conflict>} solved
 * @return {TreeConflict} with every conflict solved
 */
function solveTree(treeConflict, solved) {
    const allSolved = treeConflict.getConflicts()
        .merge(solved)
        .map((conflict) => {
            // Default any unresolved conflict to the base version
            return conflict.isSolved() ? conflict : conflict.keepBase();
        });
    return treeConflict.set('conflicts', allSolved);
}
javascript
{ "resource": "" }
q63407
mergeCommit
test
/**
 * Create a merge CommitBuilder from a solved TreeConflict.
 * Tree entries come from the solved conflicts; entries without a SHA are
 * mapped to their solved content so the blobs can be created on flush.
 * @param {TreeConflict} treeConflict - must be fully solved
 * @param {Array<SHA>} parents - parent commit SHAs for the merge
 * @param {Object} [options] - author, message (default 'Merged commit')
 * @return {CommitBuilder}
 */
function mergeCommit(treeConflict, parents, options) { options = options || {}; const opts = {}; // Assume the commit is not empty opts.empty = false; // Parent SHAs opts.parents = new Immutable.List(parents); opts.author = options.author; opts.message = options.message || 'Merged commit'; // Get the solved tree entries const solvedEntries = _getSolvedEntries(treeConflict); opts.treeEntries = solvedEntries; // Create map of blobs that needs to be created const solvedConflicts = treeConflict.getConflicts(); opts.blobs = solvedEntries.filter((treeEntry) => { return !treeEntry.hasSha(); }).map((treeEntry, path) => { return solvedConflicts.get(path).getSolvedContent(); }); return CommitBuilder.create(opts); }
javascript
{ "resource": "" }
q63408
_getSolvedEntries
test
/**
 * Compute the final tree entries of a solved TreeConflict: the parent's
 * entries, overlaid with the base and head diffs, overlaid with the conflict
 * resolutions (null marks a deletion and is filtered out at the end).
 * @param {TreeConflict} treeConflict
 * @return {Map<Path, TreeEntry>}
 */
function _getSolvedEntries(treeConflict) { const parentEntries = treeConflict.getParent().getTreeEntries(); const baseEntries = treeConflict.getBase().getTreeEntries(); const headEntries = treeConflict.getHead().getTreeEntries(); const baseDiff = _diffEntries(parentEntries, baseEntries); const headDiff = _diffEntries(parentEntries, headEntries); const resolvedEntries = treeConflict.getConflicts().map((solvedConflict) => { // Convert to TreeEntries (or null for deletion) if (solvedConflict.isDeleted()) { return null; } else { return new TreeEntry({ sha: solvedConflict.getSolvedSha() || null }); } }); return parentEntries.merge(baseDiff, headDiff, resolvedEntries) // Remove deleted entries .filter(function nonNull(entry) { return entry !== null; }); }
javascript
{ "resource": "" }
q63409
addBlob
test
/**
 * Store a blob in the cache, keyed by its SHA.
 * @param {Cache} cache
 * @param {String} sha
 * @param {Blob} blob
 * @return {Cache} a new cache containing the blob
 */
function addBlob(cache, sha, blob) {
    const updatedBlobs = cache.getBlobs().set(sha, blob);
    return cache.set('blobs', updatedBlobs);
}
javascript
{ "resource": "" }
q63410
get
test
/**
 * Build a TreeNode hierarchy for a directory of the current working state.
 * Walks every file path under dirPath, creating intermediate directory nodes
 * on demand; leaf nodes carry FileUtils.stat results, directory nodes carry
 * File.createDir values.
 * @param {RepositoryState} repoState
 * @param {Path} dirPath - trailing slashes are normalized away
 * @return {TreeNode<File>}
 */
function get(repoState, dirPath) { // Remove trailing '/' etc. const normDirPath = Path.join(dirPath, '.'); const filepaths = DirUtils.readFilenamesRecursive(repoState, normDirPath); const tree = { value: File.createDir(normDirPath), children: {} }; for (let i = 0; i < filepaths.length; i++) { const relativePath = Path.relative(normDirPath, filepaths[i]); const parts = relativePath.split('/'); let node = tree; let prefix = normDirPath; for (let j = 0; j < parts.length; j++) { const head = parts[j]; const isLeaf = (j === parts.length - 1); prefix = Path.join(prefix, head); // Create node if doesn't exist if (!node.children[head]) { if (isLeaf) { node.children[head] = { value: FileUtils.stat(repoState, filepaths[i]) }; } else { node.children[head] = { value: File.createDir(prefix), children: {} }; } } node = node.children[head]; } } return TreeNode.fromJS(tree); }
javascript
{ "resource": "" }
q63411
normCreatedCommit
test
/**
 * Normalize a GitHub API response for a created commit ("git commit" shape,
 * message and author at the top level) into a Commit.
 * @param {Object} ghCommit - raw GitHub payload
 * @return {Commit}
 */
function normCreatedCommit(ghCommit) {
    const parentShas = ghCommit.parents.map((parent) => parent.sha);
    return Commit.create({
        sha: ghCommit.sha,
        message: ghCommit.message,
        author: getSimpleAuthor(ghCommit.author),
        date: ghCommit.author.date,
        parents: parentShas
    });
}
javascript
{ "resource": "" }
q63412
normListedCommit
test
/**
 * Normalize a GitHub API commit from the "list commits" endpoint (nested
 * `commit` object) into a Commit, including the changed files list.
 * @param {Object} ghCommit - raw GitHub payload
 * @return {Commit}
 */
function normListedCommit(ghCommit) {
    return Commit.create({
        sha: ghCommit.sha,
        message: ghCommit.commit.message,
        author: getCompleteAuthor(ghCommit),
        date: ghCommit.commit.author.date,
        files: ghCommit.files,
        parents: ghCommit.parents.map((parent) => parent.sha)
    });
}
javascript
{ "resource": "" }
q63413
stat
test
/**
 * Get a File descriptor for a path in the current working state.
 * The blob SHA is resolved from pending changes first, then from the tree
 * entry; content comes from the cache (by SHA) or from the change itself.
 * When the blob is not fetched, the size falls back to the tree entry with
 * the same SHA (e.g. a moved-but-unfetched file).
 * @param {RepositoryState} repoState
 * @param {Path} filepath
 * @return {File} type FILE, with fileSize and (possibly undefined) content
 */
function stat(repoState, filepath) { const workingState = repoState.getCurrentState(); // Lookup potential changes const change = workingState.getChanges().get(filepath); // Lookup file entry const treeEntry = workingState.getTreeEntries().get(filepath); // Determine SHA of the blob let blobSHA; if (change) { blobSHA = change.getSha(); } else { blobSHA = treeEntry.getSha(); } // Get the blob from change or cache let blob; if (blobSHA) { // Get content from cache blob = repoState.getCache().getBlob(blobSHA); } else { // No sha, so it must be in changes blob = change.getContent(); } let fileSize; if (blob) { fileSize = blob.getByteLength(); } else { // It might have been moved (but not fetched) const originalEntry = workingState.getTreeEntries().find((entry) => { return entry.getSha() === blobSHA; }); fileSize = originalEntry.getBlobSize(); } return new File({ type: FILETYPE.FILE, fileSize, path: filepath, content: blob }); }
javascript
{ "resource": "" }
q63414
readAsString
test
/**
 * Read a file's content decoded as a string.
 * @param {RepositoryState} repoState
 * @param {Path} filepath
 * @param {String} [encoding]
 * @return {String}
 */
function readAsString(repoState, filepath, encoding) {
    return read(repoState, filepath).getAsString(encoding);
}
javascript
{ "resource": "" }
q63415
exists
test
/**
 * Check whether a file exists in the current working state (tree entries
 * with pending changes applied).
 * @param {RepositoryState} repoState
 * @param {Path} filepath
 * @return {Boolean}
 */
function exists(repoState, filepath) {
    const working = repoState.getCurrentState();
    return WorkingUtils.getMergedTreeEntries(working).has(filepath);
}
javascript
{ "resource": "" }
q63416
remove
test
/**
 * Record the removal of a file as a pending change.
 * @param {RepositoryState} repoState
 * @param {Path} filepath
 * @return {RepositoryState}
 * @throws fileNotFound when the file does not exist
 */
function remove(repoState, filepath) {
    if (!exists(repoState, filepath)) {
        throw error.fileNotFound(filepath);
    }
    return ChangeUtils.setChange(repoState, filepath, Change.createRemove());
}
javascript
{ "resource": "" }
q63417
move
test
/**
 * Rename a file: create the new path (reusing the blob SHA when known,
 * otherwise copying the read content as a buffer) and remove the old path.
 * No-op when both paths are equal.
 * @param {RepositoryState} repoState
 * @param {Path} filepath - existing path
 * @param {Path} newFilepath - destination path
 * @return {RepositoryState}
 */
function move(repoState, filepath, newFilepath) { if (filepath === newFilepath) { return repoState; } const initialWorkingState = repoState.getCurrentState(); // Create new file, with Sha if possible const sha = WorkingUtils.findSha(initialWorkingState, filepath); let changeNewFile; if (sha) { changeNewFile = Change.createCreateFromSha(sha); } else { // Content not available as blob const blob = read(repoState, filepath); const contentBuffer = blob.getAsBuffer(); changeNewFile = Change.createCreate(contentBuffer); } // Remove old file const removedRepoState = remove(repoState, filepath); // Add new file return ChangeUtils.setChange(removedRepoState, newFilepath, changeNewFile); }
javascript
{ "resource": "" }
q63418
hasChanged
test
/**
 * Determine whether a file differs between two repository states.
 * A file differs when it exists in only one of the states, or when both
 * exist with different blob SHAs (or with unfetchable pending content that
 * compares unequal as strings).
 * @param {RepositoryState} previousState
 * @param {RepositoryState} newState
 * @param {Path} filepath
 * @return {Boolean}
 */
function hasChanged(previousState, newState, filepath) {
    const previouslyExists = exists(previousState, filepath);
    const newExists = exists(newState, filepath);

    if (!previouslyExists && !newExists) {
        // Still non existing
        return false;
    } else if (previouslyExists !== newExists) {
        // The file is absent from one state. Reuses the flags computed above
        // (the original called exists() twice more here for the same answer).
        return true;
    } else {
        // Both files exist: compare their SHAs
        const prevWorking = previousState.getCurrentState();
        const newWorking = newState.getCurrentState();

        const prevSha = WorkingUtils.findSha(prevWorking, filepath);
        const newSha = WorkingUtils.findSha(newWorking, filepath);

        if (prevSha === null && newSha === null) {
            // Both are in pending changes; compare their contents directly
            return read(previousState, filepath).getAsString()
                !== read(newState, filepath).getAsString();
        } else {
            // Content changed if SHAs are different, or one of them is null
            return prevSha !== newSha;
        }
    }
}
javascript
{ "resource": "" }
q63419
setup
test
/**
 * Test fixture setup: build the default config and, unless `connection` is
 * 'direct', enable an HTTP gateway proxy with credentials (plus explicit
 * Via/Host headers for 'non-rfc-proxy'), then wire up nock interceptors and
 * the express app under test. Resets the shared `handles` map.
 * @param {String} connection - 'direct', a proxied mode, or 'non-rfc-proxy'
 * @param {Function} done - called when the express server is listening
 */
function setup(connection, done) { var config = createDefaultConfig(), options = { proxy: false, headers: {} }; // reset the global variable with handles to port numbers handles = {}; if (connection !== 'direct') { options.proxy = true; config.proxy.gateway = { protocol: 'http:', host: 'localhost', port: 0, auth: 'proxyuser:C0mp13x_!d0rd$$@P!' }; // optionally test a non-RFC proxy that expects explicit values // for the Via and/or Host request headers if (connection === 'non-rfc-proxy') { config.proxy.headers['Via'] = 'http://jedi.example.com'; config.proxy.headers['Host'] = 'force.example.com'; } // the config map will be mutated by the json-proxy library, so clone it options.headers = require('util')._extend(config.proxy.headers); configureLanProxy(options, config, function() { configureNock(options, config); configureExpress(config, done); }) } else { configureNock(options, config); configureExpress(config, done); } }
javascript
{ "resource": "" }
q63420
configureNock
test
/**
 * Define the nock HTTP interceptors used by the tests and store them in the
 * module-level `rules` array. Real network access is disabled except for
 * localhost. When proxying, every interceptor asserts the via/host request
 * headers; all interceptors assert the injected x-test-header.
 * @param {Object} options - proxy flag and expected header overrides
 * @param {Object} config - proxy gateway configuration (port read here)
 */
function configureNock(options, config) { var result = {}; // deny all real net connections except for localhost nock.disableNetConnect(); nock.enableNetConnect('localhost'); function createNock(url) { var instance = nock(url), expectedViaHeader = options.headers['Via'] || 'http://localhost:' + config.proxy.gateway.port, expectedHostHeader = options.headers['Host'] || /.*/; if (options.proxy === true) { // verify that the request was actually proxied // optionally support downstream proxies with non-RFC expectations on // the Host and Via request headers instance.matchHeader('via', expectedViaHeader); instance.matchHeader('host', expectedHostHeader); } // verify the injected header instance.matchHeader('x-test-header', 'John Doe'); return instance; } rules = [ createNock('http://api.example.com') .get('/api/hello') .reply(200, '{ "hello": "world" }') .get('/account?id=1&email=2&sort=asc') .reply(200, '{ "email": "john.doe@example.com" }') .get('/api/notfound') .reply(404), createNock('http://rewrite.example.com') .get('/hello') .reply(200, '{ "hello": "world" }') .get('/foo/bar') .reply(200, '{ "foo": "bar" }'), createNock('http://www.example.com') .get('/foo/12345/bar') .reply(200, '{ "foo": "bar" }') .get('/subapp/junction/customer/1') .reply(200, '{ "id": 1 }'), createNock('https://api.example.biz') .get('/issue/8') .reply(200, '{ "reporter": "@heygrady" }'), createNock('https://secure.example.com') .get('/secure/api/hello') .reply(200, '{ "hello": "world" }') .get('/secure/api/notfound') .reply(404), createNock('https://authorization.example.com') .matchHeader('X-Test-Header-Function', 'Bearer 0123456789abcdef') .get('/token') .reply(200, '{ "author": "ehtb" }') ]; }
javascript
{ "resource": "" }
q63421
configureExpress
test
/**
 * Start the express app under test on a free port, serving a temp directory
 * containing one static file (index.txt) behind the json-proxy middleware.
 * Stores filepath/port/server on the shared `handles` map.
 * @param {Object} config - json-proxy configuration
 * @param {Function} done - called with (null, port) once listening
 */
function configureExpress(config, done) { var portfinder = require('portfinder'); tmp.dir(function(err, filepath){ handles.filepath = filepath; portfinder.getPort(function (err, port) { if (err) throw(err); handles.port = port; fs.writeFileSync(path.join(handles.filepath, 'index.txt'), 'hello, world'); app.use(proxy.initialize(config)); app.use(express.static(handles.filepath)); handles.server = require('http').createServer(app); handles.server.listen(handles.port, function() { done(null, handles.port); }); }); }); }
javascript
{ "resource": "" }
q63422
configureLanProxy
test
/**
 * Start a fake LAN HTTP proxy that validates proxied requests
 * (X-Forwarded-Url target, Basic proxy credentials, injected Via header),
 * rewrites the x-forwarded-for/via/host headers, and pipes the request
 * through to its real target. Listens on a free port, written back to
 * config.proxy.gateway.port.
 * NOTE(review): `responseHostHeader` is assigned without appearing in the
 * var declaration list, so it leaks as an implicit global — verify intended.
 * @param {Object} options - expected header overrides (Via/Host)
 * @param {Object} config - proxy gateway config (auth credentials used here)
 * @param {Function} done - called once the gateway server is listening
 */
function configureLanProxy(options, config, done) { var portfinder = require('portfinder'), request = require('request'), credentials = config.proxy.gateway.auth, gatewayPort, expectedAuthorizationHeader, requestViaHeader, responseViaHeader; handles = handles || {}; handles.gatewayServer = require('http').createServer(function (req, res) { expectedAuthorizationHeader = 'Basic ' + new Buffer(credentials).toString('base64'); // HACK: node 0.12.x appears to inject a slash at the front // of absolute URLs // ex., GET http://www.example.com --> GET /http://www.exampel.com if (req.url.charAt(0) === '/') { req.url = req.url.substr(1); } // validate the proxy target if (req.url !== req.headers['x-forwarded-url']) { res.writeHead(500); res.end('{ "error": 500, "message": "invalid proxy request, expected X-Forwarded-Url header ' + req.headers['x-forwarded-url'] + '" }'); return; } // validate the proxy credentials if (req.headers['authorization'] !== expectedAuthorizationHeader) { res.writeHead(401); res.end('{ "error": 401, "message": "invalid proxy credentials, expected ' + expectedAuthorizationHeader + '" }'); return; } // determine if we are using a proxy that is not RFC compliant requestViaHeader = options.headers['Via'] || '127.0.0.1:' + handles.port; responseHostHeader = options.headers['Host'] || req.headers['host']; responseViaHeader = options.headers['Via'] || 'http://localhost:' + gatewayPort; // validate the via header was injected and points to 127.0.0.1 in either ipv4 or ipv6 format if (req.headers['via'] === undefined || req.headers['via'] === null || req.headers['via'].indexOf(requestViaHeader) === -1) { res.writeHead(400); res.end('{ "error": 400, "message": "invalid via header, expected ' + requestViaHeader + '" }'); return; } // strip the proxy credentials header req.headers['authorization'] = null; // simulate the behavior of x-forwarded-for with multiple proxies req.headers['x-forwarded-for'] = [req.headers['x-forwarded-for'], req.headers['via']].join(', 
'); // change the via header to this server req.headers['via'] = responseViaHeader; req.headers['host'] = responseHostHeader; var errorCallback = function errorCallback(err, repsonse, body) { if (err) { res.writeHead(500); res.end(JSON.stringify({ "error": 500, "message": err.message })); return; } } request(req, errorCallback).pipe(res); }); portfinder.getPort(function (err, port) { if (err) done(err); config.proxy.gateway.port = port; gatewayPort = port; handles.gatewayServer.listen(port, function() { done(null); }); }); }
javascript
{ "resource": "" }
q63423
cleanup
test
/**
 * Tear down the test fixtures: assert every nock rule was consumed, clear
 * all interceptors, stop the servers, delete the temp static file, and reset
 * the shared state.
 * @param {Function} done - callback invoked when cleanup completes
 */
function cleanup(done) {
    config = null;
    // Assert each nock interceptor was fully consumed, then drop them all
    rules.forEach((rule) => rule.done());
    nock.cleanAll();
    handles.server.close();
    const gateway = handles.gatewayServer;
    if (gateway !== undefined && gateway !== null) {
        gateway.close();
    }
    fs.unlinkSync(path.join(handles.filepath, '/index.txt'));
    handles = null;
    done();
}
javascript
{ "resource": "" }
q63424
setChange
test
/**
 * Record a Change for a path in the current working state, simplifying when
 * possible: removing a file unknown to the tree drops the change entirely,
 * and re-creating a file that already exists becomes an UPDATE.
 * @param {RepositoryState} repoState
 * @param {Path} filepath
 * @param {Change} change
 * @return {RepositoryState}
 */
function setChange(repoState, filepath, change) { let workingState = repoState.getCurrentState(); let changes = workingState.getChanges(); const type = change.getType(); // Simplify change when possible if (type === CHANGE_TYPE.REMOVE && !workingState.getTreeEntries().has(filepath)) { // Removing a file that did not exist before changes = changes.delete(filepath); } else if (type === CHANGE_TYPE.CREATE && workingState.getTreeEntries().has(filepath)) { // Adding back a file that existed already changes = changes.set(filepath, change.set('type', CHANGE_TYPE.UPDATE)); } else { // Push changes to list changes = changes.set(filepath, change); } // Update workingState and repoState workingState = workingState.set('changes', changes); return RepoUtils.updateCurrentWorkingState(repoState, workingState); }
javascript
{ "resource": "" }
q63425
revertAll
test
/**
 * Discard every pending change of the current working state.
 * @param {RepositoryState} repoState
 * @return {RepositoryState}
 */
function revertAll(repoState) {
    const working = repoState.getCurrentState()
        .set('changes', new Immutable.OrderedMap());
    return RepoUtils.updateCurrentWorkingState(repoState, working);
}
javascript
{ "resource": "" }
q63426
revertForFile
test
/**
 * Discard the pending change for a single file, if any.
 * @param {RepositoryState} repoState
 * @param {Path} filePath
 * @return {RepositoryState}
 */
function revertForFile(repoState, filePath) {
    const working = repoState.getCurrentState();
    const remaining = working.getChanges().delete(filePath);
    return RepoUtils.updateCurrentWorkingState(
        repoState,
        working.set('changes', remaining)
    );
}
javascript
{ "resource": "" }
q63427
revertForDir
test
/**
 * Discard every pending change located under a directory.
 * @param {RepositoryState} repoState
 * @param {Path} dirPath
 * @return {RepositoryState}
 */
function revertForDir(repoState, dirPath) {
    const working = repoState.getCurrentState();
    // Keep only the changes outside of dirPath
    const kept = working.getChanges().filter((change, filePath) => {
        return !PathUtils.contains(dirPath, filePath);
    });
    return RepoUtils.updateCurrentWorkingState(
        repoState,
        working.set('changes', kept)
    );
}
javascript
{ "resource": "" }
q63428
revertAllRemoved
test
/**
 * Revert all file removals: drop every pending REMOVE change so the removed
 * files reappear in the working state.
 * FIX: the filter previously KEPT only the REMOVE changes (discarding every
 * other pending change), the inverse of what the function name promises; it
 * now keeps every change that is not a removal.
 * @param {RepositoryState} repoState
 * @return {RepositoryState}
 */
function revertAllRemoved(repoState) {
    let workingState = repoState.getCurrentState();

    // Keep only the changes that are not removals
    const changes = workingState.getChanges().filter(
        (change) => {
            return change.getType() !== CHANGE_TYPE.REMOVE;
        }
    );

    // Update workingState and repoState
    workingState = workingState.set('changes', changes);
    return RepoUtils.updateCurrentWorkingState(repoState, workingState);
}
javascript
{ "resource": "" }
q63429
normPath
test
/**
 * Normalize a repository path: collapse '.'/'..' segments and strip any
 * leading or trailing slash; the root/current directory becomes ''.
 * @param {String} p
 * @return {String} the normalized path
 */
function normPath(p) {
    let result = path.normalize(p);
    if (result[0] == '/') {
        result = result.slice(1);
    }
    if (result[result.length - 1] == '/') {
        result = result.slice(0, -1);
    }
    return result == '.' ? '' : result;
}
javascript
{ "resource": "" }
q63430
pathContains
test
/**
 * Check whether `path` is inside directory `dir`. An empty/falsy dir means
 * the repository root, which contains everything.
 * @param {String} dir
 * @param {String} path
 * @return {Boolean}
 */
function pathContains(dir, path) {
    const prefix = dir ? normPath(dir) + '/' : dir;
    return normPath(path).indexOf(prefix) === 0;
}
javascript
{ "resource": "" }
q63431
readFilenamesRecursive
test
/**
 * List every file path of the current working state located under a
 * directory (recursively).
 * @param {RepositoryState} repoState
 * @param {Path} dirName
 * @return {Array<Path>}
 */
function readFilenamesRecursive(repoState, dirName) {
    const normalized = PathUtils.norm(dirName);
    const fileSet = WorkingUtils.getMergedFileSet(repoState.getCurrentState());
    return fileSet
        .filter((filePath) => PathUtils.contains(normalized, filePath))
        .toArray();
}
javascript
{ "resource": "" }
q63432
move
test
/**
 * Move a whole directory: every file under dirName is moved to the same
 * relative path under newDirName.
 * @param {RepositoryState} repoState
 * @param {Path} dirName
 * @param {Path} newDirName
 * @return {RepositoryState}
 */
function move(repoState, dirName, newDirName) {
    const filesToMove = readFilenamesRecursive(repoState, dirName);
    return filesToMove.reduce((state, oldPath) => {
        const relative = Path.relative(dirName, oldPath);
        const newPath = Path.join(newDirName, relative);
        return FileUtils.move(state, oldPath, newPath);
    }, repoState);
}
javascript
{ "resource": "" }
q63433
create
test
/**
 * Create a new branch from a base branch (default: current), add it to the
 * branch list, set up its working state (reusing the base working state,
 * optionally cleaned of pending changes), and optionally checkout.
 * Falls back to fetching the tree when the base has no local working state.
 * @param {RepositoryState} repositoryState
 * @param {Driver} driver
 * @param {String} name - name for the new branch
 * @param {Object} [opts] - base (current branch), checkout (true),
 *                          clean (true), cleanBase (false)
 * @return {Promise<RepositoryState>}
 */
function create(repositoryState, driver, name, opts = {}) { const { // Base branch for the new branch base = repositoryState.getCurrentBranch(), // Fetch the working state and switch to it ? checkout = true, // Drop changes from base branch the new working state ? clean = true, // Drop changes from the base branch ? cleanBase = false } = opts; let createdBranch; return driver.createBranch(base, name) // Update list of branches .then((branch) => { createdBranch = branch; let branches = repositoryState.getBranches(); branches = branches.push(createdBranch); return repositoryState.set('branches', branches); }) // Update working state or fetch it if needed .then((repoState) => { let baseWk = repoState.getWorkingStateForBranch(base); if (!baseWk) { return checkout ? RepoUtils.fetchTree(repoState, driver, createdBranch) : repoState; } // Reuse base WorkingState clean const headWk = clean ? baseWk.asClean() : baseWk; repoState = RepoUtils.updateWorkingState(repoState, createdBranch, headWk); // Clean base WorkingState baseWk = cleanBase ? baseWk.asClean() : baseWk; repoState = RepoUtils.updateWorkingState(repoState, base, baseWk); return repoState; }) // Checkout the branch .then((repoState) => { if (!checkout) { return repoState; } return RepoUtils.checkout(repoState, createdBranch); }); }
javascript
{ "resource": "" }
q63434
update
test
/**
 * Refresh a branch (default: current) from the remote and re-fetch its tree.
 * Resolves with the unchanged repoState when the branch no longer exists.
 * @param {RepositoryState} repoState
 * @param {Driver} driver
 * @param {String} [branchName]
 * @return {Promise<RepositoryState>}
 */
function update(repoState, driver, branchName) {
    const fullName = Normalize.branchName(
        branchName || repoState.getCurrentBranch()
    );
    return driver.fetchBranches().then((branches) => {
        const match = branches.find(
            (branch) => branch.getFullName() === fullName
        );
        if (!match) {
            return repoState;
        }
        return RepoUtils.fetchTree(repoState, driver, match);
    });
}
javascript
{ "resource": "" }
q63435
remove
test
/**
 * Delete a branch on the driver, then drop it from the repository state.
 * @param {RepositoryState} repoState
 * @param {Driver} driver
 * @param {Branch} branch
 * @return {Promise<RepositoryState>}
 */
function remove(repoState, driver, branch) {
    return driver.deleteBranch(branch)
        .then(() => repoState.updateBranch(branch, null));
}
javascript
{ "resource": "" }
q63436
fetch
test
/**
 * Fetch a blob by SHA and store it in the repository cache.
 * Resolves immediately when the blob is already cached.
 * @param {RepositoryState} repoState
 * @param {Driver} driver
 * @param {String} sha
 * @return {Promise<RepositoryState>}
 */
function fetch(repoState, driver, sha) {
    // No-op when the blob is already in the cache
    if (isFetched(repoState, sha)) {
        return Q(repoState);
    }
    return driver.fetchBlob(sha).then((blob) => {
        const newCache = CacheUtils.addBlob(repoState.getCache(), sha, blob);
        return repoState.set('cache', newCache);
    });
}
javascript
{ "resource": "" }
q63437
test
function (context, options, callback) { // add the current request to the queue context.retryQueue.push([options, callback]); // bail if the token is currently being refreshed if (context.refreshActive) { return false; } // ready to refresh context.refreshActive = true; return request({ uri: baseUrl + '/oauth2/token', method: 'POST', headers: { 'Authorization': 'Basic ' + new Buffer(context.clientId + ':' + context.clientSecret).toString('base64'), 'User-Agent': userAgent }, form: { grant_type: 'client_credentials' } }, function (err, res, body) { context.refreshActive = false; // if anything but a 200 is returned from the token refresh call, we return the error to the // caller and blow out the retry queue if (res.statusCode != 200) { context.retryQueue = []; return callback && callback(res.body, res); } // set the access token on the connection var token = JSON.parse(body); context.accessToken = token.access_token; // process the queue of requests for the current connection while (0 < context.retryQueue.length) { var reqArgs = context.retryQueue.pop(); context.apiRequest(reqArgs[0], reqArgs[1]); } }); }
javascript
{ "resource": "" }
q63438
PokitDok
test
/**
 * Create a PokitDok API client connection.
 * @param {String} clientId
 * @param {String} clientSecret
 * @param {String} [version] - API version, defaults to 'v4'
 * @constructor
 */
function PokitDok(clientId, clientSecret, version) {
    // credentials used for the OAuth2 token exchange
    this.clientId = clientId;
    this.clientSecret = clientSecret;
    // default to the v4 API when no version is supplied
    this.version = version || 'v4';
    // token state: no token yet, no refresh in flight, nothing queued
    this.accessToken = null;
    this.refreshActive = false;
    this.retryQueue = [];
}
javascript
{ "resource": "" }
q63439
featureArrayToFeatureString
test
/**
 * Convert a dense numeric feature array to a libsvm-style feature string
 * (" index:value" pairs). A truthy bias is emitted first at
 * firstFeatureNumber and shifts all feature indices by one; falsy feature
 * values are skipped; values are printed with 5 significant digits.
 * @param {Array<Number>} features
 * @param {Number} bias
 * @param {Number} firstFeatureNumber - index of the first feature (or bias)
 * @return {String}
 * @throws {Error} when features is not an array
 */
function featureArrayToFeatureString(features, bias, firstFeatureNumber) {
    if (!Array.isArray(features))
        throw new Error("Expected an array, but got "+JSON.stringify(features));
    var offset = firstFeatureNumber + (bias ? 1 : 0);
    var line = bias ? " " + firstFeatureNumber + ":" + bias : "";
    features.forEach(function(value, index) {
        if (value)
            line += " " + (index + offset) + ":" + value.toPrecision(5);
    });
    return line;
}
javascript
{ "resource": "" }
q63440
test
function(feature) { if (!(feature in this.featureNameToFeatureIndex)) { var newIndex = this.featureIndexToFeatureName.length; this.featureIndexToFeatureName.push(feature); this.featureNameToFeatureIndex[feature] = newIndex; } }
javascript
{ "resource": "" }
q63441
test
/**
 * Register several features at once: from an array, each element is a
 * feature name; from an object, each key is a feature name.
 * @param {Array|Object} hash
 * @throws {Error} on any other input type
 */
function(hash) { if (hash instanceof Array) { for (var index in hash) this.addFeature(hash[index]); } else if (hash instanceof Object) { for (var feature in hash) this.addFeature(feature); } else throw new Error("FeatureLookupTable.addFeatures expects a hash or an array, but got: "+JSON.stringify(hash)); }
javascript
{ "resource": "" }
q63442
test
/**
 * Convert a feature hash (or array of feature names) to a dense array
 * indexed by the lookup table, registering unseen features first. Every
 * position is initialized to 0; array inputs mark their features as true,
 * object inputs copy the feature values.
 * @param {Array|Object} hash
 * @return {Array}
 * @throws {Error} on any other input type
 */
function(hash) { this.addFeatures(hash); var array = []; for (var featureIndex=0; featureIndex<this.featureIndexToFeatureName.length; ++featureIndex) array[featureIndex]=0; if (hash instanceof Array) { for (var i in hash) array[this.featureNameToFeatureIndex[hash[i]]] = true; } else if (hash instanceof Object) { for (var feature in hash) array[this.featureNameToFeatureIndex[feature]] = hash[feature]; } else throw new Error("Unsupported type: "+JSON.stringify(hash)); return array; }
javascript
{ "resource": "" }
q63443
test
/**
 * Convert a list of feature hashes to dense arrays over the full feature
 * set, registering any new features first; features missing from a hash
 * become 0.
 * @param {Array<Object>} hashes
 * @return {Array<Array>}
 */
function(hashes) { this.addFeaturess(hashes); var arrays = []; for (var i=0; i<hashes.length; ++i) { arrays[i] = []; for (var feature in this.featureNameToFeatureIndex) arrays[i][this.featureNameToFeatureIndex[feature]] = hashes[i][feature] || 0; } return arrays; }
javascript
{ "resource": "" }
q63444
test
function(array) { var hash = {}; for (var feature in this.featureNameToFeatureIndex) { if (array[this.featureNameToFeatureIndex[feature]]) hash[feature] = array[this.featureNameToFeatureIndex[feature]]; } return hash; }
javascript
{ "resource": "" }
q63445
test
function(arrays) { var hashes = []; for (var i=0; i<arrays.length; ++i) hashes[i] = this.arrayToHash(arrays[i]); return hashes; }
javascript
{ "resource": "" }
q63446
test
/**
 * Online-train the label tree: each multi-part label is split into its first
 * part (superlabel) and the remaining parts; this node's classifier is
 * trained on the superlabels, then training recurses into lazily created
 * branches with the label continuations.
 * @param sample - the feature sample
 * @param {Array<Array>} splitLabels - labels already split into part arrays
 * @param {Object} treeNode - {superlabelClassifier, mapSuperlabelToBranch}
 */
function(sample, splitLabels, treeNode) { var superlabels = {}; // the first parts of each of the splitLabels var mapSuperlabelToRest = {}; // each value is a list of continuations of the key. for (var i in splitLabels) { var splitLabel = splitLabels[i]; var superlabel = splitLabel[0]; superlabels[superlabel] = true; if (splitLabel.length>1) { if (!mapSuperlabelToRest[superlabel]) mapSuperlabelToRest[superlabel] = []; mapSuperlabelToRest[superlabel].push(splitLabel.slice(1)); } } treeNode.superlabelClassifier.trainOnline(sample, Object.keys(superlabels)); for (var superlabel in mapSuperlabelToRest) { if (!(superlabel in treeNode.mapSuperlabelToBranch)) { treeNode.mapSuperlabelToBranch[superlabel] = { superlabelClassifier: this.newMultilabelClassifier(), mapSuperlabelToBranch: {} } } this.trainOnlineRecursive(sample, mapSuperlabelToRest[superlabel], treeNode.mapSuperlabelToBranch[superlabel]); } }
javascript
{ "resource": "" }
q63447
test
/**
 * Batch-train the label tree: for every datum, split its labels into the
 * first part (superlabel) and the continuations; train this node's
 * classifier on the superlabel dataset, then recurse per superlabel with
 * the continuation datasets into lazily created branches.
 * @param {Array<{input, output}>} dataset - output is an array of labels
 *        already split into part arrays
 * @param {Object} treeNode - {superlabelClassifier, mapSuperlabelToBranch}
 */
function(dataset, treeNode) { var superlabelsDataset = []; var mapSuperlabelToRestDataset = {}; dataset.forEach(function(datum) { var splitLabels = datum.output; // [ [ 'Offer', 'Leased Car', 'Without leased car' ], [ 'Offer', 'Working Hours', '9 hours' ] ] var superlabels = {}; // the first parts of each of the splitLabels var mapSuperlabelToRest = {}; // each value is a list of continuations of the key. for (var i in splitLabels) { var splitLabel = splitLabels[i];//[ 'Offer', 'Leased Car', 'Without leased car' ] var superlabel = splitLabel[0]; superlabels[superlabel] = true; //superlabels['Offer'] = true if (splitLabel.length>1) { // if it have more than one label (superlabel) if (!mapSuperlabelToRest[superlabel]) mapSuperlabelToRest[superlabel] = []; mapSuperlabelToRest[superlabel].push(splitLabel.slice(1));//['Leased Car', 'Without leased car'] } } /* Sample of mapSuperlabelToRest { Offer: [ [ 'Leased Car', 'Without leased car' ], [ 'Working Hours', '9 hours' ] ] } Sample of superlabelsDataset, initial dataset with superlabel instead of entire output '. 
[end]': 0.965080896043587 }, output: [ 'Offer' ] } ] */ superlabelsDataset.push({ input: datum.input, output: Object.keys(superlabels) }); for (var superlabel in mapSuperlabelToRest) { if (!(superlabel in mapSuperlabelToRestDataset)) mapSuperlabelToRestDataset[superlabel] = []; mapSuperlabelToRestDataset[superlabel].push({ input: datum.input, output: mapSuperlabelToRest[superlabel] }); } }, this); /* Sample of mapSuperlabelToRestDataset { Offer: [ { input: [Object], output: [["Leased Car","Without leased car"],["Working Hours","9 hours"]] } ] } */ // train the classifier only on superlabels treeNode.superlabelClassifier.trainBatch(superlabelsDataset); for (var superlabel in mapSuperlabelToRestDataset) { if (!(superlabel in treeNode.mapSuperlabelToBranch)) { treeNode.mapSuperlabelToBranch[superlabel] = { superlabelClassifier: this.newMultilabelClassifier(), mapSuperlabelToBranch: {} } } /* train the next level classifier for a give superlabel classifier superlabel (from loop) with the dataset from new structure mapSuperlabelToRestDataset (see above) */ this.trainBatchRecursive(mapSuperlabelToRestDataset[superlabel], treeNode.mapSuperlabelToBranch[superlabel]); } }
javascript
{ "resource": "" }
q63448
test
/**
 * Classify by walking the label tree: obtain superlabels from this node's
 * classifier, then for each superlabel that has a branch, prepend it to the
 * branch's recursive classifications; superlabels without a branch are
 * returned on their own. With explain>0 the result is
 * {classes, explanation}, otherwise just the array of split labels.
 * @param sample - the feature sample
 * @param {Number} explain - explanation verbosity level
 * @param {Object} treeNode - {superlabelClassifier, mapSuperlabelToBranch}
 * @param {Number} [depth] - recursion depth for explanations (default 1)
 */
function(sample, explain, treeNode, depth) { if (!depth) depth = 1; // classify the superlabel var superlabelsWithExplain = treeNode.superlabelClassifier.classify(sample, explain); var superlabels = (explain>0? superlabelsWithExplain.classes: superlabelsWithExplain); var splitLabels = []; if (explain>0) { var explanations = ["depth="+depth+": "+superlabels, superlabelsWithExplain.explanation]; } // for all superlabels that were classified, may be there are more than one that were classified with it for (var i in superlabels) { var superlabel = superlabels[i]; var splitLabel = [superlabel]; // classifier of [Offer] types / second level / classifies Offer's parameters var branch = treeNode.mapSuperlabelToBranch[superlabel]; if (branch) { // [ [ 'Without leased car' ] ] var branchLabelsWithExplain = this.classifyRecursive(sample, explain, branch, depth+1); var branchLabels = (explain>0? branchLabelsWithExplain.classes: branchLabelsWithExplain); for (var j in branchLabels) splitLabels.push(splitLabel.concat(branchLabels[j])); if (explain>0) explanations = explanations.concat(branchLabelsWithExplain.explanation); } else { splitLabels.push(splitLabel); } } return (explain>0? {classes: splitLabels, explanation: explanations}: splitLabels); }
javascript
{ "resource": "" }
q63449
test
function(opts) { if (!('binaryClassifierType' in opts)) { console.dir(opts); throw new Error("opts must contain binaryClassifierType"); } if (!opts.binaryClassifierType) { console.dir(opts); throw new Error("opts.binaryClassifierType is null"); } this.binaryClassifierType = opts.binaryClassifierType; this.classifier = new this.binaryClassifierType(); switch (opts.segmentSplitStrategy) { case 'shortestSegment': this.segmentSplitStrategy = this.shortestSegmentSplitStrategy; break; case 'longestSegment': this.segmentSplitStrategy = this.longestSegmentSplitStrategy; break; case 'cheapestSegment': this.segmentSplitStrategy = this.cheapestSegmentSplitStrategy; break; default: this.segmentSplitStrategy = null; } this.mapClassnameToClassifier = {}; }
javascript
{ "resource": "" }
q63450
test
function(opts) { this.retrain_count = opts.retrain_count || 10; this.Constant = opts.Constant || 5.0; this.weights = { //DUMMY_CLASS:{} }; this.weights_sum = { //DUMMY_CLASS:{} }; this.seenFeatures = {}; this.num_iterations = 0 }
javascript
{ "resource": "" }
q63451
test
function(classes) { classes = hash.normalized(classes); for (var aClass in classes) { if (!(aClass in this.weights)) { this.weights[aClass]={}; this.weights_sum[aClass]={}; } } }
javascript
{ "resource": "" }
q63452
test
function(opts) { opts = opts || {}; if (!opts.multilabelClassifierType) { console.dir(opts); throw new Error("opts.multilabelClassifierType is null"); } if (!opts.numberofclassifiers) { console.dir(opts); throw new Error("opts.numberofclassifiers is null"); } // this.splitLabel = opts.splitLabel || function(label) {return label.split(/@/);} this.classifier = this.intializeClassifiers(opts.numberofclassifiers, opts.multilabelClassifierType) }
javascript
{ "resource": "" }
q63453
test
function(expected, actual) { this.count++; if (expected && actual) this.TP++; if (!expected && actual) this.FP++; if (expected && !actual) this.FN++; if (!expected && !actual) this.TN++; if (expected==actual) this.TRUE++; }
javascript
{ "resource": "" }
q63454
test
function (expectedClasses, actualClasses ) { var explanations = []; actualClasses = hash.normalized(actualClasses); expectedClasses = hash.normalized(expectedClasses); var allTrue = true; if (!(Object.keys(expectedClasses)[0] in this.confusion)) this.confusion[Object.keys(expectedClasses)[0]] = {} if (!(Object.keys(actualClasses)[0] in this.confusion[Object.keys(expectedClasses)[0]])) this.confusion[Object.keys(expectedClasses)[0]][Object.keys(actualClasses)[0]] = 0 this.confusion[Object.keys(expectedClasses)[0]][Object.keys(actualClasses)[0]] += 1 for (var actualClass in actualClasses) { if (!(actualClass in this.confusion)) this.confusion[actualClass]={} if (!(actualClass in this.labels)) { this.labels[actualClass]={} this.labels[actualClass]['TP']=0 this.labels[actualClass]['FP']=0 this.labels[actualClass]['FN']=0 } if (actualClass in expectedClasses) { this.labels[actualClass]['TP'] += 1 } else { this.labels[actualClass]['FP'] += 1 } } for (var expectedClass in expectedClasses) { if (!(expectedClass in this.labels)) { this.labels[expectedClass]={} this.labels[expectedClass]['TP']=0 this.labels[expectedClass]['FP']=0 this.labels[expectedClass]['FN']=0 } if (!(expectedClass in actualClasses)) { this.labels[expectedClass]['FN'] += 1 } } }
javascript
{ "resource": "" }
q63455
test
function (expectedClasses, actualClasses, logTruePositives) { var explanations = []; actualClasses = hash.normalized(actualClasses); expectedClasses = hash.normalized(expectedClasses); var allTrue = true; for (var actualClass in actualClasses) { if (actualClass in expectedClasses) { if (logTruePositives) explanations.push("\t\t+++ TRUE POSITIVE: "+actualClass); this.TP++; } else { explanations.push("\t\t--- FALSE POSITIVE: "+actualClass); this.FP++; allTrue = false; } } for (var expectedClass in expectedClasses) { if (!(expectedClass in actualClasses)) { explanations.push("\t\t--- FALSE NEGATIVE: "+expectedClass); this.FN++; allTrue = false; } } if (allTrue) { if (logTruePositives) explanations.push("\t\t*** ALL TRUE!"); this.TRUE++; } this.count++; return explanations; }
javascript
{ "resource": "" }
q63456
test
function (expectedClasses, actualClasses, logTruePositives ) { var explanations = {}; explanations['TP'] = []; explanations['FP'] = []; explanations['FN'] = []; actualClasses = hash.normalized(actualClasses); expectedClasses = hash.normalized(expectedClasses); var allTrue = true; for (var actualClass in actualClasses) { if (actualClass in expectedClasses) { if (logTruePositives) explanations['TP'].push(actualClass); this.TP++; } else { explanations['FP'].push(actualClass); this.FP++; allTrue = false; } } for (var expectedClass in expectedClasses) { if (!(expectedClass in actualClasses)) { explanations['FN'].push(expectedClass); this.FN++; allTrue = false; } } if (allTrue) { // if ((logTruePositives)&& (!only_false_cases)) explanations.push("\t\t*** ALL TRUE!"); this.TRUE++; } this.count++; _.each(explanations, function(value, key, list){ // explanations[key] = _.sortBy(explanations[key], function(num){ num }); explanations[key].sort() }, this) if (explanations['FP'].length == 0) delete explanations['FP'] if (explanations['FN'].length == 0) delete explanations['FN'] return explanations; }
javascript
{ "resource": "" }
q63457
test
/**
 * Trains the SVM-perf classifier on a full dataset by writing the samples
 * to a feature file, shelling out to the external `svm_perf_learn` binary,
 * and loading the resulting model back into this object via setModel().
 *
 * @param dataset array of {input, output} samples (outputs are binarized)
 * @throws {Error} when the external command reports a failure
 */
function(dataset) {
    if (this.debug) console.log("trainBatch start");
    // Unique suffix so concurrent trainings don't clobber each other's files.
    var timestamp = new Date().getTime()+"_"+process.pid
    var learnFile = svmcommon.writeDatasetToFile(dataset, this.bias, /*binarize=*/true, this.model_file_prefix+"_"+timestamp, "SvmPerf", FIRST_FEATURE_NUMBER);
    var modelFile = learnFile.replace(/[.]learn/,".model");
    var command = "svm_perf_learn "+this.learn_args+" "+learnFile + " "+modelFile;
    if (this.debug) console.log("running "+command);
    console.log(command)
    var result = execSync(command);
    // NOTE(review): this assumes `execSync` returns an object with a `code`
    // property (the old `execSync` npm package API). Node's built-in
    // child_process.execSync returns a Buffer and throws instead -- confirm
    // which `execSync` is in scope here.
    if (result.code>0) {
        console.dir(result);
        console.log(fs.readFileSync(learnFile, 'utf-8'));
        throw new Error("Failed to execute: "+command);
    }
    // Parse and store the learned model.
    this.setModel(fs.readFileSync(modelFile, "utf-8"));
    if (this.debug) console.log("trainBatch end");
}
javascript
{ "resource": "" }
q63458
modelStringToModelMap
test
/**
 * Parses the textual model produced by svm_perf_learn into a sparse map
 * from (zero-based) feature index to weight.
 *
 * @param {string} modelString full contents of an svm-perf model file
 * @returns {Object} map feature-index -> weight (near-zero weights dropped);
 *                   if a bias term exists, its weight sits at index 0
 * @throws {Error} when the string does not match the svm-perf model format
 */
function modelStringToModelMap(modelString) {
    var matches = SVM_PERF_MODEL_PATTERN.exec(modelString);
    if (!matches) {
        console.log(modelString);
        throw new Error("Model does not match SVM-perf format");
    }
    // matches[1] is the threshold; not needed because we manage our own bias.
    var featuresAndWeights = matches[2].split(" ");
    var mapFeatureToWeight = {};
    // featuresAndWeights[0] is alpha*y, which is always 1 in svm-perf output.
    for (var i = 1; i < featuresAndWeights.length; ++i) {
        var featureAndWeight = featuresAndWeights[i];
        var featureWeight = featureAndWeight.split(":");
        if (featureWeight.length != 2)
            throw new Error("Model featureAndWeight doesn't match svm-perf pattern: featureAndWeight=" + featureAndWeight);
        var feature = parseInt(featureWeight[0]);
        if (feature <= 0)
            // BUGFIX: was `throw new IllegalArgumentException(...)`, which is
            // not defined in JavaScript and would itself raise a
            // ReferenceError instead of the intended error.
            throw new Error("Non-positive feature id: featureAndWeight=" + featureAndWeight);
        var weight = parseFloat(featureWeight[1]);
        // Keep only weights of meaningful magnitude; shift ids so that
        // feature numbering starts at 0.
        // Note: if there is a bias, then mapFeatureToWeight[0] is its weight.
        if (Math.abs(weight) >= MIN_WEIGHT)
            mapFeatureToWeight[feature - FIRST_FEATURE_NUMBER] = weight;
    }
    return mapFeatureToWeight;
}
javascript
{ "resource": "" }
q63459
test
function(dataset, relationName, featureLookupTable) { var arff = "% Automatically generated by Node.js\n"; arff += "@relation "+relationName+"\n"; featureLookupTable.featureIndexToFeatureName.forEach(function(featureName) { if (_.isUndefined(featureName)) arff += "@attribute undefined {0,1}"+"\n"; else if (!_.isString(featureName)) throw new Error("Expected featureName to be a string, but found "+JSON.stringify(featureName)); else arff += "@attribute "+featureName.replace(/[^a-zA-Z0-9]/g, "_")+" "+"{0,1}"+"\n"; }); arff += "\n@data\n"; dataset.forEach(function(datum) { var datumArff = _.clone(datum.input, {}); for (var i=0; i<datum.output.length; ++i) datumArff[datum.output[i]]=1; //console.dir(datumArff); var array = featureLookupTable.hashToArray(datumArff); arff += array + "\n"; }); return arff; }
javascript
{ "resource": "" }
q63460
SvmLinear
test
function SvmLinear(opts) {
    // Small helper: read an option with a fallback default.
    function opt(name, fallback) {
        return opts[name] || fallback;
    }
    this.learn_args = opt('learn_args', "");
    this.model_file_prefix = opt('model_file_prefix', null);
    this.bias = opt('bias', 1.0);
    this.multiclass = opt('multiclass', false);
    this.debug = opt('debug', false);
    this.train_command = opt('train_command', 'liblinear_train');
    this.test_command = opt('test_command', 'liblinear_test');
    this.timestamp = "";

    // Fail fast when the external trainer binary is not on the PATH.
    if (!SvmLinear.isInstalled()) {
        var msg = "Cannot find the executable 'liblinear_train'. Please download it from the LibLinear website, and put a link to it in your path.";
        console.error(msg);
        throw new Error(msg);
    }
}
javascript
{ "resource": "" }
q63461
test
function(dataset) { this.timestamp = new Date().getTime()+"_"+process.pid // check for multilabel _.each(dataset, function(datum, key, list){ if (_.isArray(datum.output)) if (datum.output.length > 1) { console.log("Multi-label is not allowed") console.log(JSON.stringify(darum.output, null, 4)) process.exit(0) } }, this) // convert all arraay-like outputs to just values dataset = _.map(dataset, function(datum){ if (_.isArray(datum.output)) datum.output = datum.output[0] return datum }); this.allLabels = _(dataset).map(function(datum){return datum.output}); this.allLabels = _.uniq(_.flatten(this.allLabels)) // dataset = _.map(dataset, function(datum){ // datum.output = this.allLabels.indexOf(datum.output) // return datum }); if (this.allLabels.length==1) // a single label return; //console.log(util.inspect(dataset,{depth:1})); if (this.debug) console.log("trainBatch start"); var learnFile = svmcommon.writeDatasetToFile( dataset, this.bias, /*binarize=*/false, this.model_file_prefix+"_"+this.timestamp, "SvmLinear", FIRST_FEATURE_NUMBER); var modelFile = learnFile.replace(/[.]learn/,".model"); var command = this.train_command+" "+this.learn_args+" "+learnFile + " "+modelFile; console.log("running "+command); var result = child_process.execSync(command); if (result.code>0) { console.dir(result); console.log(fs.readFileSync(learnFile, 'utf-8')); throw new Error("Failed to execute: "+command); } this.modelFileString = modelFile; if (this.debug) console.log("trainBatch end"); }
javascript
{ "resource": "" }
q63462
modelStringToModelMap
test
/**
 * Parses the textual model file produced by liblinear into a nested map:
 * label -> (feature index -> weight). Each line of the weight matrix is a
 * feature (row); each column corresponds to a label from the model header.
 *
 * @param {string} modelString full contents of the liblinear model file
 * @returns {Object} map from label to a sparse feature->weight map
 * @throws {Error} when the string does not match the expected format
 */
function modelStringToModelMap(modelString) {
    var matches = LIB_LINEAR_MODEL_PATTERN.exec(modelString);
    if (!matches) {
        console.log(modelString);
        throw new Error("Model does not match SVM-Linear format");
    };
    var labels = matches[1].split(/\s+/);
    // Pre-create an (initially empty) weight map per label.
    var mapLabelToMapFeatureToWeight = {};
    for (var iLabel in labels) {
        var label = labels[iLabel];
        mapLabelToMapFeatureToWeight[label]={};
    }
    var weightsMatrix = matches[3];
    // each line represents a feature; each column represents a label:
    var weightsLines = weightsMatrix.split(NEWLINE);
    for (var feature in weightsLines) {
        var weights = weightsLines[feature].split(/\s+/);
        weights.pop(); // ignore last weight, which is empty (trailing space)
        if (weights.length==0) continue; // ignore empty lines
        // Binary models store a single weight column: the second label's
        // weight is the negation of the first (string negation coerces).
        if (weights.length==1 && labels.length==2)
            weights[1] = -weights[0];
        if (weights.length!=labels.length)
            throw new Error("Model does not match SVM-Linear format: there are "+labels.length+" labels ("+labels+") and "+weights.length+" weights ("+weights+")");
        for (var iLabel in labels) {
            var label = labels[iLabel];
            var weight = parseFloat(weights[iLabel]);
            // Drop near-zero weights to keep the map sparse.
            if (Math.abs(weight)>=MIN_WEIGHT)
                mapLabelToMapFeatureToWeight[label][feature]=weight;
        }
    }
    return mapLabelToMapFeatureToWeight;
}
javascript
{ "resource": "" }
q63463
test
function(sample, labels) { labels = multilabelutils.normalizeOutputLabels(labels); for (var l in labels) { var positiveLabel = labels[l]; this.makeSureClassifierExists(positiveLabel); this.mapClassnameToClassifier[positiveLabel].trainOnline(sample, 1); } for (var negativeLabel in this.mapClassnameToClassifier) { if (labels.indexOf(negativeLabel)<0) this.mapClassnameToClassifier[negativeLabel].trainOnline(sample, 0); } }
javascript
{ "resource": "" }
q63464
test
function(opts) { if (!opts.multiclassClassifierType) { console.dir(opts); throw new Error("opts.multiclassClassifierType not found"); } this.multiclassClassifierType = opts.multiclassClassifierType; this.featureExtractor = FeaturesUnit.normalize(opts.featureExtractor); this.multiclassClassifier = new this.multiclassClassifierType(); }
javascript
{ "resource": "" }
q63465
concatOptionDataArrays
test
/**
 * Merges the array stored under `prop` on `options` with the one on `data`,
 * writing the concatenation back to `options[prop]`.
 *
 * If neither object has an own property `prop`, `options` is left untouched.
 * Non-array values on either side are ignored, so whenever at least one side
 * defines `prop`, the result is always an array.
 *
 * Improvement: the trivial lodash `_.has` / `_.isArray` calls are replaced
 * with the native equivalents, dropping the third-party dependency.
 *
 * @param {Object} options target object; receives the combined array
 * @param {Object} data secondary source object
 * @param {string} prop property name to combine
 */
function concatOptionDataArrays(options, data, prop) {
    var has = Object.prototype.hasOwnProperty;
    // Nothing to do when neither side defines the property at all.
    if (!has.call(options, prop) && !has.call(data, prop)) {
        return;
    }
    var combined = [];
    if (Array.isArray(options[prop])) {
        combined = combined.concat(options[prop]);
    }
    if (Array.isArray(data[prop])) {
        combined = combined.concat(data[prop]);
    }
    options[prop] = combined;
}
javascript
{ "resource": "" }
q63466
preorder
test
/**
 * Recursively removes nodes matching `test` from a tree, visiting in
 * pre-order. Returns the (possibly pruned) node, or null when the node
 * itself should be removed.
 *
 * NOTE(review): `is`, `test` and `cascade` come from the enclosing closure.
 */
function preorder(node, nodeIndex, parent) {
  var children
  var length
  var index
  var position
  var child

  // A matching node is removed (the caller drops the null).
  if (is(test, node, nodeIndex, parent)) {
    return null
  }

  children = node.children

  // Leaf (or childless) nodes survive as-is.
  if (!children || children.length === 0) {
    return node
  }

  // Move all living children to the beginning of the children array.
  position = 0
  length = children.length
  index = -1

  while (++index < length) {
    child = preorder(children[index], index, node)

    if (child) {
      children[position++] = child
    }
  }

  // Cascade delete: with `cascade` set, a parent left with no surviving
  // children is removed as well.
  if (cascade && position === 0) {
    return null
  }

  // Drop other nodes (truncate the compacted children array in place).
  children.length = position

  return node
}
javascript
{ "resource": "" }
q63467
filterRelations
test
function filterRelations(relation) {
    // Raw payload from the `included` section whose id matches this relation.
    var included = includedData.find(function (entry) {
        return entry.id === relation.id;
    });
    // Instantiate the registered model class for this relation type, then
    // recursively resolve that model's own relations.
    var model = new (getModel(relation.type))(included);
    return checkForRelations(model, model.data);
}
javascript
{ "resource": "" }
q63468
test
function (bundleName, filter) { var bundle, files = []; bundle = this._bundles[bundleName]; if (!bundle) { throw new Error('Unknown bundle "' + bundleName + '"'); } Object.keys(bundle.files).forEach(function (fullpath) { var res = { ext: libpath.extname(fullpath).substr(1) }; if (this._filterResource(res, filter)) { files.push(fullpath); } }, this); return files; }
javascript
{ "resource": "" }
q63469
test
function (bundleName, filter) { var bundle = this._bundles[bundleName]; if (!bundle) { throw new Error('Unknown bundle "' + bundleName + '"'); } return this._walkBundleResources(bundle, filter); }
javascript
{ "resource": "" }
q63470
test
function (filter) { var self = this, ress = []; Object.keys(this._bundles).forEach(function (bundleName) { var bundle = self._bundles[bundleName]; self._walkBundleResources(bundle, filter).forEach(function (res) { ress.push(res); }); }); return ress; }
javascript
{ "resource": "" }
q63471
test
function (filter) { var bundleName, bundles = this._bundles, bundleNames = []; if ('function' !== typeof filter) { return Object.keys(this._bundles); } for (bundleName in bundles) { if (bundles.hasOwnProperty(bundleName)) { if (filter(bundles[bundleName])) { bundleNames.push(bundleName); } } } return bundleNames; }
javascript
{ "resource": "" }
q63472
test
/**
 * Finds the name of the bundle whose base directory contains `findPath`.
 * When several registered bundle paths contain it, the deepest (longest)
 * one wins, so nested bundles shadow their parents.
 *
 * @param {string} findPath filesystem path to look up
 * @returns {string|undefined} bundle name, or undefined when no bundle
 *          contains the path (see NOTE below)
 */
function (findPath) {
    // FUTURE OPTIMIZATION: use a more complicated datastructure for faster lookups
    var found = {}, // length: path
        longest;
    // expands path in case of symlinks
    findPath = libfs.realpathSync(findPath); // searchs based on expanded path
    Object.keys(this._bundlePaths).forEach(function (bundlePath) {
        // A bundle matches when findPath is inside (or exactly at) its base
        // directory; the separator check prevents "/foo" matching "/foobar".
        if (0 === findPath.indexOf(bundlePath) &&
                (findPath.length === bundlePath.length ||
                 libpath.sep === findPath.charAt(bundlePath.length))) {
            found[bundlePath.length] = bundlePath;
        }
    });
    // NOTE(review): when nothing matches, Math.max over zero keys yields
    // -Infinity and the lookup returns undefined -- callers appear to rely
    // on that; confirm before changing.
    longest = Math.max.apply(Math, Object.keys(found));
    return this._bundlePaths[found[longest]];
}
javascript
{ "resource": "" }
q63473
test
function (baseDirectory, name, version, pkg, options) { var seed; seed = { baseDirectory: baseDirectory, name: name, version: version }; if (pkg) { seed.name = (pkg.locator && pkg.locator.name ? pkg.locator.name : pkg.name); seed.version = pkg.version; seed.options = pkg.locator; seed.pkg = pkg; } if (options) { if (seed.options) { // merge options under seed.options mix(seed.options, options); } else { seed.options = options; } } return seed; }
javascript
{ "resource": "" }
q63474
test
function (seed, parent) { var bundle, ruleset = this._loadRuleset(seed), msg; if (seed.options.location) { // This is fairly legacy, and we might be able to remove it. seed.baseDirectory = libpath.resolve(seed.baseDirectory, seed.options.location); } if (!ruleset) { msg = 'Bundle "' + seed.name + '" has unknown ruleset ' + JSON.stringify(seed.options.ruleset); if (seed.options.rulesets) { msg += ' in rulesets ' + JSON.stringify(seed.options.rulesets); } throw new Error(msg); } bundle = new Bundle(seed.baseDirectory, seed.options); bundle.name = seed.name; bundle.version = seed.version; bundle.type = ruleset._name; this._bundles[bundle.name] = bundle; this._bundlePaths[libfs.realpathSync(bundle.baseDirectory)] = bundle.name; // wire into parent if (parent) { if (!parent.bundles) { parent.bundles = {}; } parent.bundles[bundle.name] = bundle; } return bundle; }
javascript
{ "resource": "" }
q63475
test
/**
 * Routes a single filesystem path into the bundle system: it may be
 * skipped, may seed a new sub-bundle, or may become a resource of the
 * owning bundle (via _onFile).
 *
 * @param {string} fullPath absolute path of the file or directory
 */
function (fullPath) {
    var bundleName,
        bundle,
        ruleset,
        relativePath,
        pathParts,
        subBundleSeed,
        res;
    bundleName = this._getBundleNameByPath(fullPath);
    bundle = this._bundles[bundleName];
    // Path relative to the owning bundle's base directory.
    // NOTE(review): if fullPath is NOT under bundle.baseDirectory,
    // relativePath stays undefined and the indexOf below would throw --
    // callers apparently guarantee containment; confirm.
    if (bundle.baseDirectory === fullPath.substr(0, bundle.baseDirectory.length)) {
        relativePath = fullPath.substr(bundle.baseDirectory.length + 1);
    }
    // This mainly happens during watch(), since we skip node_modules
    // in _walkBundle().
    if (relativePath.indexOf('node_modules') === 0) {
        pathParts = relativePath.split(libpath.sep);
        // Descend through nested node_modules to find the owning package.
        while (pathParts[0] === 'node_modules' && pathParts.length >= 2) {
            pathParts.shift();
            bundleName = pathParts.shift();
        }
        relativePath = pathParts.join(libpath.sep);
        bundle = this._bundles[bundleName];
        // The package's directory is not a resource (... and is mostly uninteresting).
        if (!relativePath) {
            return;
        }
        // unknown bundle
        if (!bundle) {
            return;
        }
    }
    ruleset = this._loadRuleset(bundle);
    // Honor the ruleset's skip patterns.
    if (ruleset._skip && this._ruleSkip(fullPath, relativePath, ruleset._skip)) {
        return;
    }
    // A path matching a "bundle seed" pattern starts a new sub-bundle.
    if (ruleset._bundles) {
        subBundleSeed = this._ruleBundles(fullPath, relativePath, ruleset._bundles, bundle);
        if (subBundleSeed) {
            // sub-bundle inherits options.rulesets from parent
            if (!subBundleSeed.options) {
                subBundleSeed.options = {};
            }
            if (!subBundleSeed.options.rulesets) {
                subBundleSeed.options.rulesets = bundle.options.rulesets;
            }
            this._makeBundle(subBundleSeed, bundle);
            return;
        }
    }
    // This is the base "meta" for a file. If a rule matches we'll
    // augment this.
    res = {
        bundleName: bundleName,
        fullPath: fullPath,
        relativePath: relativePath,
        ext: libpath.extname(fullPath).substr(1)
    };
    this._onFile(res, ruleset);
}
javascript
{ "resource": "" }
q63476
test
function (fullPath, relativePath, rule) { var r, regex; relativePath = BundleLocator._toUnixPath(relativePath); for (r = 0; r < rule.length; r += 1) { regex = rule[r]; if (regex.test(relativePath)) { return true; } } return false; }
javascript
{ "resource": "" }
q63477
test
/**
 * Matches a file against every (non-directive) rule in the ruleset; the
 * first matching rule turns the file into a typed resource and forwards it
 * to _onResource().
 *
 * NOTE(review): relies on the ruleset's property enumeration order -- the
 * first rule whose regex matches wins.
 *
 * @param {Object} res partial resource meta (bundleName/fullPath/relativePath/ext)
 * @param {Object} ruleset rule map; keys starting with "_" are directives
 */
function (res, ruleset) {
    var bundle = this._bundles[res.bundleName],
        ruleName,
        rule,
        relativePath = BundleLocator._toUnixPath(res.relativePath),
        match;
    // Every seen file is recorded on the bundle, resource or not.
    bundle.files[res.fullPath] = true;
    for (ruleName in ruleset) {
        if (ruleset.hasOwnProperty(ruleName)) {
            // Rules that start with "_" are special directives,
            // and have already been handle by the time we get here.
            if ('_' !== ruleName.charAt(0)) {
                rule = ruleset[ruleName];
                match = relativePath.match(rule.regex);
                if (match) {
                    // Capture groups supply the name/subtype/selector.
                    res.name = match[rule.nameKey || 1];
                    res.type = ruleName;
                    if (rule.subtypeKey) {
                        res.subtype = match[rule.subtypeKey] || '';
                    }
                    if (rule.selectorKey && match[rule.selectorKey]) {
                        res.selector = match[rule.selectorKey];
                    } else {
                        res.selector = DEFAULT_SELECTOR;
                    }
                    // file will become a resource after the first match
                    return this._onResource(res);
                }
            }
        }
    }
}
javascript
{ "resource": "" }
q63478
test
function (res) { var bundle = this._bundles[res.bundleName], type = res.type, subtype, selector = res.selector, name = res.name; if (!bundle.resources[selector]) { bundle.resources[selector] = {}; } if (!bundle.resources[selector][type]) { bundle.resources[selector][type] = {}; } if (res.hasOwnProperty('subtype')) { subtype = res.subtype; if (!bundle.resources[selector][type][subtype]) { bundle.resources[selector][type][subtype] = {}; } bundle.resources[selector][type][subtype][name] = res; } else { bundle.resources[selector][type][name] = res; } }
javascript
{ "resource": "" }
q63479
test
function (res, filter) { if (!filter || Object.keys(filter).length === 0) { return true; } var prop; for (prop in filter) { if ('extensions' === prop) { // sugar for users if ('string' === typeof filter.extensions) { filter.extensions = filter.extensions.split(','); } if (!filter.extensions || filter.extensions.indexOf(res.ext) === -1) { return false; } } else if ('types' === prop) { // sugar for users if ('string' === typeof filter.types) { filter.types = filter.types.split(','); } if (!filter.types || filter.types.indexOf(res.type) === -1) { return false; } } else { return false; // unknown filters should fail to pass } } return true; }
javascript
{ "resource": "" }
q63480
test
function (pkgDepths) { // pkgDepths -> depth: [metas] var depths, minDepth, maxDepth, seeds; depths = Object.keys(pkgDepths); minDepth = Math.min.apply(Math, depths); maxDepth = Math.max.apply(Math, depths); seeds = pkgDepths[minDepth]; if (1 === seeds.length) { if (minDepth !== maxDepth) { debug('multiple "' + seeds[0].name + '" packages found, using version ' + seeds[0].version + ' from ' + seeds[0].baseDirectory); } return seeds[0]; } seeds.sort(function (a, b) { return libsemver.rcompare(a.version, b.version); }); debug('multiple "' + seeds[0].name + '" packages found, using version ' + seeds[0].version + ' from ' + seeds[0].baseDirectory); return seeds[0]; }
javascript
{ "resource": "" }
q63481
test
function (all) { var byDepth = {}; // name: depth: [metas] all.forEach(function (seed) { if (!byDepth[seed.name]) { byDepth[seed.name] = {}; } if (!byDepth[seed.name][seed.npmDepth]) { byDepth[seed.name][seed.npmDepth] = []; } byDepth[seed.name][seed.npmDepth].push(seed); }); return Object.keys(byDepth).map(function (name) { return this._dedupeSeeds(byDepth[name]); }, this); }
javascript
{ "resource": "" }
q63482
test
/**
 * Registers a bundle from its seed and synchronously walks its directory
 * tree, feeding every file and directory into _processFile(). Excluded
 * directories (configured excludes, node_modules, dot-dirs) are pruned
 * from the walk.
 *
 * @param {Object} bundleSeed seed describing the bundle to create
 * @returns {Bundle} the created bundle
 */
function (bundleSeed) {
    var self = this,
        parentName,
        parent,
        bundle,
        filters;
    // TODO -- merge options (second arg) over bundleSeed.options
    parentName = this._getBundleNameByPath(libpath.dirname(bundleSeed.baseDirectory));
    parent = this._bundles[parentName];
    bundle = this._makeBundle(bundleSeed, parent);
    this._bundles[bundle.name] = bundle;
    filters = this._options.exclude.concat(['node_modules', /^\./]);
    // adding the bundle dir itself for BC
    this._processFile(bundle.baseDirectory);
    walk.walkSync(bundle.baseDirectory, {
        filters: [],
        listeners: {
            directories: function (root, dirStatsArray, next) {
                var i,
                    dirStats,
                    exclude;
                // True when the directory name matches this exclude pattern.
                function filterDir(filter) {
                    if (dirStats.name.match(filter)) {
                        return true;
                    }
                }
                // Iterate backwards so splicing doesn't skip entries.
                for (i = dirStatsArray.length - 1; i >= 0; i -= 1) {
                    dirStats = dirStatsArray[i];
                    exclude = filters.some(filterDir);
                    if (exclude) {
                        // the sync walk api is pretty bad, it requires to
                        // mutate the actual dir array
                        dirStatsArray.splice(i, 1);
                    } else {
                        self._processFile(libpath.join(root, dirStats.name));
                    }
                }
                next();
            },
            file: function (root, fileStats, next) {
                self._processFile(libpath.join(root, fileStats.name));
                next();
            },
            errors: function (root, nodeStatsArray, next) {
                // Ignore walk errors and keep going.
                next();
            }
        }
    });
    return bundle;
}
javascript
{ "resource": "" }
q63483
test
function (srcObject, excludeKeys) { var destObject = {}, key; for (key in srcObject) { if (srcObject.hasOwnProperty(key)) { if (-1 === excludeKeys.indexOf(key)) { destObject[key] = srcObject[key]; } } } return destObject; }
javascript
{ "resource": "" }
q63484
Bundle
test
function Bundle(baseDirectory, options) {
    this.options = options || {};
    this.baseDirectory = baseDirectory;
    // Default name is the directory's basename; callers may overwrite it.
    this.name = libpath.basename(baseDirectory);
    this.type = undefined;
    this.files = {};      // fullPath -> true
    this.resources = {};  // selector -> type (-> subtype) -> name -> resource
}
javascript
{ "resource": "" }
q63485
getBaseScales
test
function getBaseScales(type, domain, range, nice, tickCount) {
  // Choose the d3 scale factory for the axis type; linear is the default.
  let factory = scaleLinear
  if (type === 'time') factory = scaleUtc
  else if (type === 'log') factory = scaleLog
  const scale = createScale(factory, domain, range)
  // Optionally round the domain to "nice" values for the given tick count.
  if (nice) scale.nice(tickCount)
  return scale
}
javascript
{ "resource": "" }
q63486
BufferingTracer
test
function BufferingTracer(tracer, options) {
    options = options || {};
    // Underlying tracer that receives the buffered traces.
    this._tracer = tracer;
    // Flush when this many traces accumulate...
    this._maxTraces = options.maxTraces || 50;
    // ...or after this many milliseconds (the option is given in seconds).
    this._sendInterval = options.sendInterval ? options.sendInterval * 1000 : 10 * 1000;
    this._lastSentTs = Date.now();
    this._buffer = [];
    this._stopped = false;
    // Periodic flush timer; _periodicSendFunction is defined on the prototype.
    this._periodSendTimeoutId = setTimeout(
        this._periodicSendFunction.bind(this),
        this._sendInterval
    );
}
javascript
{ "resource": "" }
q63487
build
test
/**
 * Scans the gulpfile sources for documented tasks and (re)builds the global
 * `reflection` structure: group name -> task name -> {name, desc, dep, args,
 * order, group, public}.
 *
 * @param {Gulp} gulp the gulp instance whose registered tasks are inspected
 */
function build(gulp) {
    // Gather the raw source to scan for doc-comments. For typescript or an
    // explicit gulpfile we read that single file; otherwise we concatenate
    // every non-module JS file currently in require.cache, so tasks defined
    // in required files are not lost.
    // @see https://github.com/Mikhus/gulp-help-doc/issues/2
    // currently this is not supported for typescript
    var source = OPTIONS.isTypescript ?
        fs.readFileSync('gulpfile.ts').toString() :
        OPTIONS.gulpfile ?
            fs.readFileSync(OPTIONS.gulpfile).toString() :
            Object.keys(require.cache || {'gulpfile.js': ''}).map(function(file) {
                if (!/node_modules|\.json$/.test(file)) {
                    return fs.readFileSync(file).toString() + '\n';
                }
            }).join('');
    // Pattern sources for /** ... @task {name} ... */ blocks and their
    // @arg/@order/@group directives; compiled both globally and locally.
    var rxDoc = '\\/\\*\\*\\r?\n(((?!\\*\\/)[\\s\\S])*?)' +
        '@task\\s+\\{(.*)?\\}((?!\\*\\/)[\\s\\S])*?\\*\\/';
    var rxArgs = '@arg\\s+\\{(.*?)\\}(.*?)\\r?\\n';
    var rxOrder = '@order\\s+\\{(\\d+)\\}(.*?)\\r?\\n';
    var rxGroup = '@group\\s+\\{(.*?)\\}(.*?)\\r?\\n';
    var globalRxDoc = new RegExp(rxDoc, 'g');
    var localRxDoc = new RegExp(rxDoc);
    var globalRxArgs = new RegExp(rxArgs, 'g');
    var localRxArgs = new RegExp(rxArgs);
    var globalRxOrder = new RegExp(rxOrder, 'g');
    var localRxOrder = new RegExp(rxOrder);
    var globalRxGroup = new RegExp(rxGroup, 'g');
    var localRxGroup = new RegExp(rxGroup);
    var jsDoc = (source.match(globalRxDoc) || []);
    var tasks = gulpTasks(gulp);
    // Seed reflection with every registered task; descriptions filled below.
    Object.keys(tasks).forEach(function (task) {
        reflection[task] = {
            name: tasks[task].name,
            desc: '',
            dep: tasks[task].dep
        };
    });
    jsDoc.map(function (block) {
        var parts = block.match(localRxDoc);
        var name = parts[3].trim();
        var desc = parts[1].replace(/\s*\*/g, ' ')
            .replace(/\s{2,}/g, ' ')
            .trim();
        // Ignore doc-blocks for tasks gulp doesn't actually know about.
        if (!reflection[name]) {
            return;
        }
        reflection[name].desc = desc;
        reflection[name].public = true;
        // @arg {name} description entries for this task.
        reflection[name].args = (block.match(globalRxArgs) || [])
            .map(function (def) {
                var argsParts = def.match(localRxArgs);
                return {
                    name: argsParts[1],
                    desc: argsParts[2].replace(/\s*\*/g, ' ')
                        .replace(/\s{2,}/g, ' ')
                        .trim()
                };
            });
        // @order {n}: tasks without an explicit order sort last.
        reflection[name].order = (function () {
            var orderParts = block.match(globalRxOrder);
            if (orderParts) {
                return +orderParts[0].match(localRxOrder)[1];
            }
            return Number.MAX_SAFE_INTEGER;
        })();
        // @group {name}: defaults to the configured group name.
        reflection[name].group = (function () {
            var groupParts = block.match(globalRxGroup);
            if (groupParts) {
                return groupParts[0].match(localRxGroup)[1];
            }
            return OPTIONS.defaultGroupName;
        })();
    });
    // Re-group tasks using user-defined groups
    var tmp = {};
    Object.keys(reflection).forEach(function(task) {
        var group = reflection[task].group || OPTIONS.defaultGroupName;
        tmp[group] = tmp[group] || {};
        tmp[group][task] = reflection[task];
    });
    reflection = tmp;
}
javascript
{ "resource": "" }
q63488
chunk
test
function chunk(str, maxLen) {
    // Target line width: explicit maxLen, or the globally configured width.
    var len = maxLen || OPTIONS.lineWidth;
    var sliceStart = 0;
    var pos = len;
    var out = [];
    // Scan forward; whenever a space is found at/after the target width,
    // emit the chunk (including that space) and jump ahead by len again.
    while (str[pos]) {
        if (str[pos++] === ' ') {
            out.push(str.substring(sliceStart, pos));
            sliceStart = pos;
            pos += len;
        }
    }
    out.push(str.substr(sliceStart));
    return out;
}
javascript
{ "resource": "" }
q63489
usage
test
function usage(gulp, options) {
    // Merge caller options over the module defaults before building/printing.
    if (options) {
        Object.assign(OPTIONS, options);
    }
    // build() and print() run synchronously inside the executor; the promise
    // simply signals completion to the caller.
    return new Promise(function(resolve) {
        build(gulp);
        print();
        resolve();
    });
}
javascript
{ "resource": "" }
q63490
filterArray
test
function filterArray (arr, toKeep) {
  // In-place filter: entries rejected by `toKeep` are spliced out; the
  // cursor only advances past entries that are kept.
  var cursor = 0
  while (cursor < arr.length) {
    if (toKeep(arr[cursor])) {
      cursor += 1
    } else {
      arr.splice(cursor, 1)
    }
  }
}
javascript
{ "resource": "" }
q63491
cssExtract
test
/**
 * Browserify plugin that extracts CSS injected via `insert-css` or
 * `sheetify/insert` calls out of the JS bundle and into a separate
 * file (or a caller-supplied writable stream).
 *
 * @param {Object} bundle browserify instance
 * @param {Object} [opts] options:
 *   - out/o: output file path, or a function returning a writable stream
 *   - sourceMap: enable source maps (defaults to the bundle's debug flag)
 */
function cssExtract (bundle, opts) {
  opts = opts || {}
  // Output target: a path written on completion, or a stream factory.
  var outFile = opts.out || opts.o || 'bundle.css'
  // Default source-map behavior follows browserify's own debug flag.
  var sourceMap = d(opts.sourceMap, bundle && bundle._options && bundle._options.debug, false)

  assert.equal(typeof bundle, 'object', 'bundle should be an object')
  assert.equal(typeof opts, 'object', 'opts should be an object')

  // every time .bundle is called, attach hook
  bundle.on('reset', addHooks)
  addHooks()

  function addHooks () {
    const extractStream = through.obj(write, flush)
    const writeStream = (typeof outFile === 'function') ? outFile() : bl(writeComplete)

    // run before the "label" step in browserify pipeline
    bundle.pipeline.get('label').unshift(extractStream)

    function write (chunk, enc, cb) {
      // Performance boost: don't do ast parsing unless we know it's needed
      if (!/(insert-css|sheetify\/insert)/.test(chunk.source)) {
        return cb(null, chunk)
      }

      var source = from2(chunk.source)
      // Statically evaluate the css-inserting calls: divert the CSS text to
      // writeStream and replace each call with `null` in the JS source.
      var sm = staticModule({
        'insert-css': function (src) {
          writeStream.write(String(src) + '\n')
          return from2('null')
        },
        'sheetify/insert': function (src) {
          writeStream.write(String(src) + '\n')
          return from2('null')
        }
      }, { sourceMap: sourceMap })

      source.pipe(sm).pipe(bl(complete))

      function complete (err, source) {
        if (err) return extractStream.emit('error', err)
        chunk.source = String(source)
        cb(null, chunk)
      }
    }

    // close stream and signal end
    function flush (cb) {
      writeStream.end()
      cb()
    }

    // When buffering to a file path, write the collected CSS on completion.
    function writeComplete (err, buffer) {
      if (err) return extractStream.emit('error', err)
      fs.writeFileSync(outFile, buffer)
    }
  }
}
javascript
{ "resource": "" }
q63492
validatePlaceholders
test
function validatePlaceholders(
  { id, idPlural, translations },
  validationErrors
) {
  // Matches {{placeholderName}}, including non-ASCII placeholder names.
  let placeholderPattern = /{{\s*(\S+?)\s*?}}/g;
  let allowed = id.match(placeholderPattern) || [];

  // Placeholders that only appear in the plural form are also allowed.
  if (idPlural) {
    for (let placeholder of idPlural.match(placeholderPattern) || []) {
      if (!allowed.includes(placeholder)) {
        allowed.push(placeholder);
      }
    }
  }

  // Without placeholders in the source there is nothing to validate.
  if (!allowed.length) {
    return;
  }

  for (let translation of translations) {
    let found = translation.match(placeholderPattern) || [];
    // Any placeholder in the translation that the source does not define
    // indicates a mistranslated placeholder.
    let invalidPlaceholder = found.find(
      (placeholder) => !allowed.includes(placeholder)
    );
    if (invalidPlaceholder) {
      validationErrors.push({
        id,
        translation,
        message: `The placeholder "${invalidPlaceholder}" seems to be wrongly translated. Allowed: ${allowed.join(
          ', '
        )}`,
        level: 'ERROR'
      });
    }
  }
}
javascript
{ "resource": "" }
q63493
groupGettextItems
test
function groupGettextItems(gettextItems) {
  // Drop items without a message id, order them by file name then line
  // number, and merge items sharing both message id and context.
  let sorted = gettextItems
    .filter((item) => item.messageId)
    .sort((item1, item2) => {
      return (
        item1.loc.fileName.localeCompare(item2.loc.fileName) ||
        item1.loc.line - item2.loc.line
      );
    });

  let groups = [];
  for (let item of sorted) {
    let group = groups.find(
      (candidate) =>
        candidate.messageId === item.messageId &&
        candidate.messageContext === item.messageContext
    );

    if (!group) {
      // First occurrence: the group carries the item's fields, but collects
      // all locations in `locs` instead of a single `loc`.
      group = Object.assign({}, item);
      group.locs = [item.loc];
      delete group.loc;
      groups.push(group);
      continue;
    }

    group.locs.push(item.loc);
    // Although it is an edge case, it is possible for two translations to
    // have the same messageId while only one of them has a plural, e.g.
    // {{t 'Find item'}} {{n 'Find item' 'Find items' count}} -- so adopt a
    // plural form if this group is still missing one.
    if (!group.messageIdPlural && item.messageIdPlural) {
      group.messageIdPlural = item.messageIdPlural;
    }
  }
  return groups;
}
javascript
{ "resource": "" }
q63494
traverseJson
test
function traverseJson(json, callback) {
  // Visit every translation entry as (value, containingObject, key), so the
  // callback may rewrite entries in place via containingObject[key].
  let { translations } = json;
  for (let namespace of Object.keys(translations)) {
    let entries = translations[namespace];
    for (let key of Object.keys(entries)) {
      callback(entries[key], entries, key);
    }
  }
}
javascript
{ "resource": "" }
q63495
findAllDependencies
test
function findAllDependencies(file, knownDependencies, sourceDirectories, knownFiles) {
    knownDependencies = knownDependencies || [];
    if (typeof knownFiles === "undefined") {
        knownFiles = [];
    } else if (knownFiles.indexOf(file) > -1) {
        // Already processed this file: nothing new to discover.
        // (Note: returns the plain array, not a promise, matching the
        // original behavior.)
        return knownDependencies;
    }

    // Run the real discovery and unwrap the accumulated dependency list.
    function collect(dirs) {
        return findAllDependenciesHelp(file, knownDependencies, dirs, knownFiles)
            .then(function(result) {
                return result.knownDependencies;
            });
    }

    if (sourceDirectories) {
        return collect(sourceDirectories);
    }
    // No source dirs given: derive them from the project's elm-package config.
    return getBaseDir(file)
        .then(getElmPackageSourceDirectories)
        .then(collect);
}
javascript
{ "resource": "" }
q63496
parse
test
/**
 * Parses a URL query string into a plain object of decoded key/value pairs.
 *
 * Fixes over the previous version:
 * - values containing "=" are no longer truncated (only the FIRST "=" in a
 *   pair separates key from value);
 * - an empty query (or lone "?") now yields {} instead of {"": ""}.
 *
 * A pair without "=" (e.g. "flag") maps to the empty string.
 *
 * @param {string} query query string, with or without a leading "?"
 * @returns {Object} map of decoded keys to decoded values
 */
function parse(query) {
    // Strip a leading "?" so full location.search strings are accepted.
    if (query.charAt(0) === "?") query = query.slice(1);
    var obj = {};
    if (query === "") return obj;
    var pairs = query.split("&");
    for (var i = 0; i < pairs.length; i++) {
        var pair = pairs[i];
        // Split on the first "=" only, so values may themselves contain "=".
        var eq = pair.indexOf("=");
        var key, value;
        if (eq === -1) {
            key = decodeURIComponent(pair);
            value = "";
        } else {
            key = decodeURIComponent(pair.slice(0, eq));
            value = decodeURIComponent(pair.slice(eq + 1));
        }
        obj[key] = value;
    }
    return obj;
}
javascript
{ "resource": "" }
q63497
stringify
test
function stringify(obj) {
    // Encode each enumerable key/value pair and join the pairs with "&".
    var parts = [];
    for (var key in obj) {
        parts.push(encodeURIComponent(key) + "=" + encodeURIComponent(obj[key]));
    }
    return parts.join("&");
}
javascript
{ "resource": "" }
q63498
_compileAny
test
/**
 * Compiles any supported value into the accumulating compile state:
 * options.html collects markup strings, options.dom collects DOM nodes,
 * options.types/options.css collect OJ type information. Dispatches on
 * the runtime type of `any`; null, undefined and plain objects are ignored.
 *
 * @param any value to compile (array/tag, string, boolean, number,
 *            function, date, or OJ type/instance)
 * @param options shared compile state (html/dom/types/css, each optional)
 */
function _compileAny(any, options){
  // Array: a tag structure
  if (oj.isArray(any))
    _compileTag(any, options)

  // String: raw HTML when it starts with '<', otherwise a text node
  else if (oj.isString(any)){
    if (options.html != null)
      options.html.push(any)
    if (any.length > 0 && any[0] === '<'){
      var root = document.createElement('div')
      root.innerHTML = any
      if (options.dom != null)
        options.dom.appendChild(root)
    } else {
      if (options.dom != null)
        options.dom.appendChild(document.createTextNode(any))
    }

  // Boolean or Number: stringified
  } else if (oj.isBoolean(any) || oj.isNumber(any)){
    if (options.html != null)
      options.html.push("" + any)
    if (options.dom != null)
      options.dom.appendChild(document.createTextNode("" + any))

  // Function
  } else if (oj.isFunction(any)){
    // Wrap function call to allow full oj generation within any
    _compileAny(oj(any), options)

  // Date: locale-formatted text
  } else if (oj.isDate(any)){
    if (options.html != null)
      options.html.push("" + (any.toLocaleString()))
    if (options.dom != null)
      options.dom.appendChild(document.createTextNode("" + (any.toLocaleString())))

  // OJ Type or Instance: delegate to its own serializers
  } else if (oj.isOJ(any)){
    if (options.types != null)
      options.types.push(any)
    if (options.html != null)
      options.html.push(any.toHTML(options))
    if (options.dom != null)
      options.dom.appendChild(any.toDOM(options))
    if (options.css != null)
      _extend(options.css, any.toCSSMap(options))
  }

  // Do nothing for: null, undefined, object
}
javascript
{ "resource": "" }
q63499
_attributesBindEventsToDOM
test
/**
 * Binds the given event map to a DOM element via jQuery, returning the list
 * of jQuery binding results. 'insert' handlers are not bound to the DOM at
 * all; they are pushed onto `inserts` for manual invocation later.
 *
 * @param events map of event name -> handler (or [args...] for .apply)
 * @param el DOM element to bind to
 * @param inserts optional array collecting deferred insert callbacks
 * @returns array of jQuery binding results
 */
function _attributesBindEventsToDOM(events, el, inserts){
    var results = []
    for (var ek in events){
        var ev = events[ek]
        _a(oj.$ != null, "jquery is missing when binding a '" + ek + "' event")
        // accumulate insert events manually since DOMNodeInserted is slow and depreciated
        if (ek == 'insert' && inserts)
            // BUGFIX: capture the handler via a closure argument. The old code
            // closed over the var-scoped loop variable `ev`, so by the time the
            // deferred callback ran, `ev` could hold a LATER event's handler.
            inserts.push((function(handler){
                return function(){handler.call(el,el)}
            })(ev))
        else if (oj.isArray(ev))
            results.push(oj.$(el)[ek].apply(this, ev))
        else
            results.push(oj.$(el)[ek](ev))
    }
    return results
}
javascript
{ "resource": "" }