_id
stringlengths 2
6
| title
stringlengths 0
58
| partition
stringclasses 3
values | text
stringlengths 52
373k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q54500
|
flattenPath
|
train
|
/**
 * Flatten a vinyl file's relative path down to a joined path string.
 * With no options set, only the bare file name survives; otherwise the
 * parent directories are filtered by includeParents/subPath first.
 */
function flattenPath(file, opts) {
  var baseName = path.basename(file.path);
  // Fast path: neither option set means the flat file name is the answer.
  if (!opts.includeParents && !opts.subPath) {
    return baseName;
  }
  var segments = path.dirname(file.relative).split(path.sep);
  if (opts.includeParents) {
    segments = includeParents(segments, opts.includeParents);
  }
  if (opts.subPath) {
    segments = subPath(segments, opts.subPath);
  }
  segments.push(baseName);
  return path.join.apply(path, segments);
}
|
javascript
|
{
"resource": ""
}
|
q54501
|
train
|
/**
 * Response handler for device acknowledgements.
 * The first response is treated as the connection banner; subsequent
 * responses update line-credit bookkeeping before sending more lines.
 * NOTE(review): relies on closure state (self, seenConnectionBanner,
 * deferredSetup) defined outside this block — confirm against caller.
 */
function(r) {
  if (!seenConnectionBanner) {
    // First message from the device: announce the connection exactly once.
    self.emit('connected', r);
    seenConnectionBanner = true;
    deferredSetup.resolve(r);
    return;
  }
  if (r.hasOwnProperty("rx") && self.serialPortData === null) {
    // Device reported its receive window: resync the outstanding-line count.
    self.ignoredResponses--;
    if (!self.timedSendsOnly) {
      self.linesRequested = r.rx - 1;
    }
    // -1 is okay, that just means wait until we've sent two lines to send again
  } else if (self.ignoredResponses > 0) {
    // Still draining responses we deliberately ignore; don't send yet.
    self.ignoredResponses--;
    return;
  } else {
    if (!self.timedSendsOnly) {
      self.linesRequested++;
    }
  }
  self._sendLines();
}
|
javascript
|
{
"resource": ""
}
|
|
q54502
|
train
|
/**
 * Queue a set(k, v) onto the shared promise chain so writes run in order.
 * A failed write is reported via the 'error' event and then swallowed so
 * the chain keeps going for subsequent writes.
 */
function (k, v) {
  promiseChain = promiseChain.then(function() {
    return self.set(k, v);
  }).catch(function (e) {
    // console.log("Caught error setting {", k, ":", v, "}: ", e);
    self.emit('error', e);
    // Resolve with an empty promise so the chain continues after an error.
    return Q.fcall(function () {});
  });
}
|
javascript
|
{
"resource": ""
}
|
|
q54503
|
getFormat
|
train
|
/**
 * Derive an audio format descriptor from a variety of inputs:
 * a format-id string (or object with .id), an AudioBuffer, a typed-array
 * view, or a plain options object. Returns {} when obj is falsy.
 */
function getFormat (obj) {
  //undefined format - no format-related props, for sure
  if (!obj) return {}
  //if is string - parse format
  if (typeof obj === 'string' || obj.id) {
    return parse(obj.id || obj)
  }
  //if audio buffer - we know it’s format
  else if (isAudioBuffer(obj)) {
    // Channel data is a float typed array; its class fixes the bit depth.
    var arrayFormat = fromTypedArray(obj.getChannelData(0))
    return {
      sampleRate: obj.sampleRate,
      channels: obj.numberOfChannels,
      samplesPerFrame: obj.length,
      float: true,
      signed: true,
      bitDepth: arrayFormat.bitDepth
    }
  }
  //if is array - detect format
  else if (ArrayBuffer.isView(obj)) {
    return fromTypedArray(obj)
  }
  //FIXME: add AudioNode, stream detection
  //else detect from object
  return fromObject(obj)
}
|
javascript
|
{
"resource": ""
}
|
q54504
|
equal
|
train
|
/**
 * Two formats are equal when their ids — or, lacking a truthy id,
 * their canonical string forms — coincide.
 */
function equal (a, b) {
  var keyA = a.id ? a.id : stringify(a)
  var keyB = b.id ? b.id : stringify(b)
  return keyA === keyB
}
|
javascript
|
{
"resource": ""
}
|
q54505
|
toArrayBuffer
|
train
|
/**
 * Convert an AudioBuffer into an ArrayBuffer in the requested format.
 * Channel data is laid out planar (non-interleaved) before conversion.
 */
function toArrayBuffer (audioBuffer, format) {
  if (!isNormalized(format)) format = normalize(format)
  var data
  //convert to arraybuffer
  // Fast path: some AudioBuffer shims expose the backing store directly.
  if (audioBuffer._data) data = audioBuffer._data.buffer;
  else {
    // Copy each channel into one planar float array of the same class.
    var floatArray = audioBuffer.getChannelData(0).constructor;
    data = new floatArray(audioBuffer.length * audioBuffer.numberOfChannels);
    for (var channel = 0; channel < audioBuffer.numberOfChannels; channel++) {
      data.set(audioBuffer.getChannelData(channel), channel * audioBuffer.length);
    }
  }
  // The channel-data class determines the source bit depth.
  var arrayFormat = fromTypedArray(audioBuffer.getChannelData(0))
  var buffer = convert(data, {
    float: true,
    channels: audioBuffer.numberOfChannels,
    sampleRate: audioBuffer.sampleRate,
    interleaved: false,
    bitDepth: arrayFormat.bitDepth
  }, format)
  return buffer
}
|
javascript
|
{
"resource": ""
}
|
q54506
|
toAudioBuffer
|
train
|
/**
 * Convert a raw buffer in `format` into an AudioBuffer.
 * The data is first converted to planar 32-bit float, then copied into
 * the AudioBuffer channel by channel.
 */
function toAudioBuffer (buffer, format) {
  if (!isNormalized(format)) format = normalize(format)
  buffer = convert(buffer, format, {
    channels: format.channels,
    sampleRate: format.sampleRate,
    interleaved: false,
    float: true
  })
  // 4 bytes per float32 sample: length = byteLength / 4 / channels.
  var len = Math.floor(buffer.byteLength * .25 / format.channels)
  var audioBuffer = new AudioBuffer(null, {
    length: len,
    numberOfChannels: format.channels,
    sampleRate: format.sampleRate
  })
  // Byte span of one channel's worth of float32 samples.
  var step = len * 4
  for (var channel = 0; channel < format.channels; channel++) {
    var offset = channel * step
    var data = new Float32Array(buffer.slice(offset, offset + step))
    audioBuffer.getChannelData(channel).set(data)
  }
  return audioBuffer
}
|
javascript
|
{
"resource": ""
}
|
q54507
|
convert
|
train
|
/**
 * Convert raw audio data between two formats: bit depth, signedness,
 * float/int range, interleaving, and byte order. Accepts an ArrayBuffer,
 * any typed-array view, or an array-like of samples; returns an
 * ArrayBuffer. Conversion is skipped entirely when the formats are equal.
 */
function convert (buffer, from, to) {
  //ensure formats are full
  if (!isNormalized(from)) from = normalize(from)
  if (!isNormalized(to)) to = normalize(to)
  //convert buffer/alike to arrayBuffer
  var data
  if (buffer instanceof ArrayBuffer) {
    data = buffer
  }
  else if (ArrayBuffer.isView(buffer)) {
    // A view may window a larger buffer: slice out just the viewed bytes.
    if (buffer.byteOffset != null) data = buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
    else data = buffer.buffer;
  }
  else {
    // Array-like (or a single sample): pack into a Uint8Array's buffer.
    data = (new Uint8Array(buffer.length != null ? buffer : [buffer])).buffer
  }
  //ignore needless conversion
  if (equal(from ,to)) {
    return data
  }
  //create containers for conversion
  var fromArray = new (arrayClass(from))(data)
  //toArray is automatically filled with mapped values
  //but in some cases mapped badly, e. g. float → int(round + rotate)
  var toArray = new (arrayClass(to))(fromArray)
  //if range differ, we should apply more thoughtful mapping
  if (from.max !== to.max) {
    fromArray.forEach(function (value, idx) {
      //ignore not changed range
      //bring to 0..1
      var normalValue = (value - from.min) / (from.max - from.min)
      //bring to new format ranges
      value = normalValue * (to.max - to.min) + to.min
      //clamp (buffers does not like values outside of bounds)
      toArray[idx] = Math.max(to.min, Math.min(to.max, value))
    })
  }
  //reinterleave, if required
  if (from.interleaved != to.interleaved) {
    var channels = from.channels
    var len = Math.floor(fromArray.length / channels)
    //deinterleave
    if (from.interleaved && !to.interleaved) {
      toArray = toArray.map(function (value, idx, data) {
        var targetOffset = idx % len
        var targetChannel = ~~(idx / len)
        return data[targetOffset * channels + targetChannel]
      })
    }
    //interleave
    else if (!from.interleaved && to.interleaved) {
      toArray = toArray.map(function (value, idx, data) {
        var targetOffset = ~~(idx / channels)
        var targetChannel = idx % channels
        return data[targetChannel * len + targetOffset]
      })
    }
  }
  //ensure endianness
  // NOTE(review): endianness is only rewritten for integer targets;
  // float targets keep platform byte order — confirm this is intended.
  if (!to.float && from.byteOrder !== to.byteOrder) {
    var le = to.byteOrder === 'LE'
    var view = new DataView(toArray.buffer)
    var step = to.bitDepth / 8
    var methodName = 'set' + getDataViewSuffix(to)
    for (var i = 0, l = toArray.length; i < l; i++) {
      view[methodName](i*step, toArray[i], le)
    }
  }
  return toArray.buffer
}
|
javascript
|
{
"resource": ""
}
|
q54508
|
fromTypedArray
|
train
|
/**
 * Infer a sample format descriptor from a typed-array view's class.
 * Unrecognized views (e.g. DataView) are treated as raw unsigned bytes.
 */
function fromTypedArray (array) {
  // Build a fresh descriptor each call so callers can safely mutate it.
  var describe = function (float, signed, bitDepth) {
    return { float: float, signed: signed, bitDepth: bitDepth }
  }
  if (array instanceof Int8Array) return describe(false, true, 8)
  if (array instanceof Uint8Array || array instanceof Uint8ClampedArray) {
    return describe(false, false, 8)
  }
  if (array instanceof Int16Array) return describe(false, true, 16)
  if (array instanceof Uint16Array) return describe(false, false, 16)
  if (array instanceof Int32Array) return describe(false, true, 32)
  if (array instanceof Uint32Array) return describe(false, false, 32)
  if (array instanceof Float32Array) return describe(true, false, 32)
  if (array instanceof Float64Array) return describe(true, false, 64)
  //other dataview types are Uint8Arrays
  return describe(false, false, 8)
}
|
javascript
|
{
"resource": ""
}
|
q54509
|
fromObject
|
train
|
/**
 * Build a format descriptor from a plain object by copying the
 * whitelisted format keys, plus a couple of AudioNode-style aliases.
 */
function fromObject (obj) {
  var result = {}
  // Copy recognized format keys straight off the source object.
  formatProperties.forEach(function (prop) {
    if (obj[prop] != null) result[prop] = obj[prop]
  })
  // AudioNode-like objects spell channel count differently.
  var channelCount = obj.channelCount || obj.numberOfChannels
  if (!result.channels && channelCount) {
    result.channels = channelCount
  }
  // Some sources call the sample rate just `rate`.
  if (!result.sampleRate && obj.rate) {
    result.sampleRate = obj.rate
  }
  return result
}
|
javascript
|
{
"resource": ""
}
|
q54510
|
isObject
|
train
|
/**
 * True for any non-null, non-array object (functions included);
 * false for primitives, null, and arrays.
 */
function isObject (arg) {
  // Object(x) returns x itself only when x is already an object.
  if (arg !== Object(arg)) return false
  return Object.prototype.toString.call(arg) !== '[object Array]'
}
|
javascript
|
{
"resource": ""
}
|
q54511
|
removeKeysFrom
|
train
|
/**
 * Return a copy of `val` with the listed property names removed.
 * Circular references are first neutralized via fclone. When `recursive`
 * is true the keys are stripped at every nesting level.
 */
function removeKeysFrom (val, props, recursive = false) {
  // Replace circular values with '[Circular]'
  const obj = fclone(val)
  if (isObject(obj)) {
    return removeKeysFromObject(obj, props, recursive)
  }
  return removeKeysFromArray(obj, props, recursive)
}
|
javascript
|
{
"resource": ""
}
|
q54512
|
removeKeysFromObject
|
train
|
/**
 * Copy `obj` minus the keys listed in `props`; with `recursive` set,
 * nested objects and arrays are filtered the same way.
 */
function removeKeysFromObject (obj, props, recursive = false) {
  const out = {}
  const deep = Boolean(recursive)
  for (const key of Object.keys(obj)) {
    // Skip keys that are on the removal list.
    if (props.indexOf(key) !== -1) continue
    const value = obj[key]
    if (deep && isObject(value)) {
      out[key] = removeKeysFromObject(value, props, true)
    } else if (deep && isArray(value)) {
      out[key] = removeKeysFromArray(value, props, true)
    } else {
      out[key] = value
    }
  }
  return out
}
|
javascript
|
{
"resource": ""
}
|
q54513
|
removeKeysFromArray
|
train
|
/**
 * Copy `array`, filtering the listed keys out of any object elements
 * (and, when `recursive`, out of nested arrays/objects too).
 */
function removeKeysFromArray (array, props, recursive = false) {
  if (!array.length) {
    return []
  }
  // Map each element, dispatching on its shape.
  return array.map(function (item) {
    if (isObject(item)) return removeKeysFromObject(item, props, recursive)
    if (isArray(item)) return removeKeysFromArray(item, props, recursive)
    return item
  })
}
|
javascript
|
{
"resource": ""
}
|
q54514
|
assertEqual
|
train
|
/**
 * Chai method-assertion wrapper: strips the excluded keys from the
 * expected value before delegating to the original assertion.
 * The 'excluding' flag removes keys at the top level only;
 * 'excludingEvery' removes them recursively.
 */
function assertEqual (_super) {
  return function (val) {
    const props = utils.flag(this, 'excludingProps')
    if (utils.flag(this, 'excluding')) {
      val = removeKeysFrom(val, props)
    } else if (utils.flag(this, 'excludingEvery')) {
      val = removeKeysFrom(val, props, true)
    }
    // In case of 'use strict' and babelified code
    // (reassigning the parameter alone may not update `arguments[0]`).
    arguments[0] = val
    _super.apply(this, arguments)
  }
}
|
javascript
|
{
"resource": ""
}
|
q54515
|
getStorageObj
|
train
|
/**
 * Build a simple CRUD storage interface backed by a redis hash.
 * Objects are JSON-serialized under their `id` field within `namespace`.
 */
function getStorageObj(client, namespace) {
  return {
    // Fetch one object by id; yields null when the id is absent.
    get: function(id, cb) {
      client.hget(namespace, id, function(err, res) {
        cb(err, res ? JSON.parse(res) : null);
      });
    },
    // Persist an object; it must carry an `id` to use as the hash key.
    save: function(object, cb) {
      if (!object.id) {
        return cb(new Error('The given object must have an id property'), {});
      }
      client.hset(namespace, object.id, JSON.stringify(object), cb);
    },
    // Delete one object by id.
    remove: function(id, cb) {
      client.hdel(namespace, [id], cb);
    },
    // Fetch everything; returns an array by default, or the id->object
    // map when options.type === 'object'.
    all: function(cb, options) {
      client.hgetall(namespace, function(err, res) {
        if (err) {
          return cb(err);
        }
        var parsed,
          array = [];
        for (var i in res) {
          parsed = JSON.parse(res[i]);
          res[i] = parsed;
          array.push(parsed);
        }
        cb(null, options && options.type === 'object' ? res : array);
      });
    }
  };
}
|
javascript
|
{
"resource": ""
}
|
q54516
|
inTag
|
train
|
/**
 * Tokenizer state for the inside of a JSX tag (between '<tag' and '>').
 * Returns a CodeMirror style string (or null) and switches state.tokenize
 * back to tokenBase when leaving tag context.
 * NOTE(review): `type` is assigned without a local declaration —
 * presumably a tokenizer-level shared variable defined elsewhere; verify.
 */
function inTag(stream, state) {
  var ch = stream.next();
  if (ch == ">" || (ch == "/" && stream.eat(">"))) {
    // '>' closes the tag, '/>' self-closes; either way leave tag context.
    state.jsxTag.depth -= 1;
    state.tokenize = tokenBase;
    return ret(ch == ">" ? "endTag" : "selfcloseTag", "tag bracket");
  } else if (ch == "=") {
    type = "equals";
    return null;
  } else if (ch == "<") {
    // A '<' inside a tag is invalid; re-tokenize and mark it an error.
    state.tokenize = tokenBase;
    var next = state.tokenize(stream, state);
    return next ? next + " tag error" : "tag error";
  } else if (ch == "{") {
    state.jsxTag.brackets[state.jsxTag.depth] = 1; // counter for brackets
    state.tokenize = tokenBase;
    return null;
  } else if (ch == "}") {
    return null;
  } else if (/[\'\"]/.test(ch)) {
    // Attribute value: delegate to the quoted-string tokenizer.
    state.tokenize = inAttribute(ch);
    return state.tokenize(stream, state);
  } else {
    // Attribute or tag name: consume up to whitespace/special characters.
    stream.match(/^[^\s\u00a0=<>\"\']*[^\s\u00a0=<>\"\'\/]/);
    return "word";
  }
}
|
javascript
|
{
"resource": ""
}
|
q54517
|
applyPropDoclets
|
train
|
/**
 * Apply JSDoc doclets (@type, @required, @defaultValue) onto the prop
 * metadata entry `props[propName]`, mutating it in place.
 */
function applyPropDoclets(props, propName) {
  const prop = props[propName];
  const doclets = prop.doclets;
  // the @type doclet to provide a prop type
  // Also allows enums (oneOf) if string literals are provided
  // ex: @type {("optionA"|"optionB")}
  if (doclets.type) {
    let cleaned = cleanDocletValue(doclets.type);
    prop.type.name = cleaned;
    if (cleaned[0] === '(') {
      // Parenthesized pipe-list becomes an enum (all literals) or union.
      cleaned = cleaned.substring(1, cleaned.length - 1).split('|');
      prop.type.value = cleaned;
      prop.type.name = cleaned.every(isLiteral) ? 'enum' : 'union';
    }
  }
  // Use @required to mark a prop as required
  // useful for custom propTypes where there isn't a `.isRequired` addon
  if (doclets.required) {
    prop.required = true;
  }
  // Use @defaultValue to provide a prop's default value
  if (doclets.defaultValue) {
    prop.defaultValue = cleanDocletValue(doclets.defaultValue);
  }
}
|
javascript
|
{
"resource": ""
}
|
q54518
|
generateHTML
|
train
|
/**
 * Server-render the route corresponding to `fileName` and write the HTML
 * into the built docs directory. Resolves with the write promise.
 * NOTE(review): `error` and `redirectLocation` from match() are ignored —
 * a failed match would render an empty context; confirm this is intended.
 */
function generateHTML(fileName) {
  return new Promise( resolve => {
    // index.html maps to the root route; everything else to /<fileName>.
    const location = fileName === 'index.html' ? '/' : `/${fileName}`;
    match({routes, location}, (error, redirectLocation, renderProps) => {
      let html = ReactDOMServer.renderToString(
        <RouterContext {...renderProps} />
      );
      html = '<!doctype html>' + html;
      let write = fsp.writeFile(path.join(docsBuilt, fileName), html);
      resolve(write);
    });
  });
}
|
javascript
|
{
"resource": ""
}
|
q54519
|
AuthTicket
|
train
|
/**
 * Construct an AuthTicket from a plain JSON payload, reviving any
 * property whose name contains "Expiration" into a Date.
 * Works with or without `new`.
 */
function AuthTicket(json) {
  // Allow plain-function invocation.
  if (!(this instanceof AuthTicket)) return new AuthTicket(json);
  var self = this;
  for (var prop in json) {
    if (!json.hasOwnProperty(prop)) continue;
    // dateify the dates, this'll break if the prop name changes
    self[prop] = prop.indexOf('Expiration') !== -1 ? new Date(json[prop]) : json[prop];
  }
}
|
javascript
|
{
"resource": ""
}
|
q54520
|
Delegate
|
train
|
/**
 * Create a DOM event delegator, optionally rooted at `root`.
 * NOTE(review): `root` is forwarded to this.root(); accepted types depend
 * on that method, which is defined elsewhere.
 */
function Delegate(root) {
  /**
   * Maintain a map of listener
   * lists, keyed by event name.
   *
   * @type Object
   */
  // NOTE(review): two slots — presumably separating capture vs. bubble
  // listeners; confirm against the handle() implementation.
  this.listenerMap = [{}, {}];
  if (root) {
    this.root(root);
  }
  /** @type function() */
  // Pre-bound so the same reference can be added/removed as a DOM listener.
  this.handle = Delegate.prototype.handle.bind(this);
}
|
javascript
|
{
"resource": ""
}
|
q54521
|
loadPartials
|
train
|
/**
 * Recursively scan `template` for mustache partial references ({{> name }})
 * and load each partial's file contents into the shared `partials` map.
 * Resolution order for extension-less names: exact path, then
 * name + options.extension (when configured), then name + '.mustache'.
 * Load failures are reported via a gulp PluginError emitted on `this`.
 */
function loadPartials(template, templatePath) {
  var templateDir = path.dirname(templatePath)
  // Match "{{> name }}" using the currently configured mustache tags.
  var partialRegexp = new RegExp(
    escapeRegex(mustache.tags[0]) +
      '>\\s*(\\S+)\\s*' +
      escapeRegex(mustache.tags[1]),
    'g'
  )
  var partialMatch
  while ((partialMatch = partialRegexp.exec(template))) {
    var partialName = partialMatch[1]
    if (!partials[partialName]) {
      try {
        var partialPath = null
        var partial = null
        // ignore `partial` with file extension.
        // e.g.
        // 1, `{{> ./path/to/partial.html }}`
        // 2, `{{> ./path/to/partial. }}`
        if (path.extname(partialName) !== '') {
          partialPath = path.resolve(templateDir, partialName)
          partial = fs.readFileSync(partialPath, 'utf8')
        } else {
          // ignore `partial` file is exists without file extension.
          // e.g.
          // 1, `{{> ./path/to/partial }}` is exists.
          // 2, `{{> ./path/to/.partial }}` is exists.
          partialPath = path.resolve(templateDir, partialName)
          if (fs.existsSync(partialPath)) {
            partial = fs.readFileSync(partialPath, 'utf8')
          } else {
            // or check if `partial + options.extension` is exists.
            // e.g.
            // if `options.extension` equals ".html":
            // the `{{> ./path/to/partial }}` will load
            // `./path/to/partial.html`.
            if (typeof options.extension === 'string') {
              partialPath = path.resolve(
                templateDir,
                partialName + options.extension
              )
              if (fs.existsSync(partialPath)) {
                partial = fs.readFileSync(partialPath, 'utf8')
              }
            }
            // when `options.extension` is not a string or
            // `partialName + options.extension` does not exists.
            // try use `.mustache` extension to load `partial` file.
            if (partial === null) {
              partialPath = path.resolve(
                templateDir,
                partialName + '.mustache'
              )
              partial = fs.readFileSync(partialPath, 'utf8')
            }
          }
        }
        partials[partialName] = partial
        // Recurse so partials referenced by this partial are loaded too.
        loadPartials.call(this, partial, partialPath)
      } catch (ex) {
        this.emit(
          'error',
          new PluginError(
            'gulp-mustache',
            // use `ex.message` property instead of `partialPath`,
            // because `this.emit()` seems not a sync method.
            // also the `ex.message` property provide more details
            // about error information.
            'Unable to load partial file: ' + ex.message
          )
        )
      }
    }
  }
}
|
javascript
|
{
"resource": ""
}
|
q54522
|
notifySlack
|
train
|
/**
 * POST a message payload to a Slack incoming-webhook URL.
 * Resolves with the response text; on failure, logs and resolves with
 * the error rather than rejecting.
 */
function notifySlack ({ channel, text, webhook }) {
  const payload = { channel, text }
  const options = {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(payload)
  }
  return fetch(webhook, options)
    .then(response => response.text())
    .catch(err => {
      logToErrorConsole('Error posting to Slack webhook')
      logToErrorConsole(err)
      return err
    })
}
|
javascript
|
{
"resource": ""
}
|
q54523
|
findFile
|
train
|
/**
 * Locate a config file: the user config directory wins, the bundled
 * defaults directory is the fallback; null when neither exists.
 */
function findFile (filename) {
  const candidates = [
    configDirectory + '/' + filename,
    defaultDirectory + '/' + filename
  ]
  for (const candidate of candidates) {
    if (fs.existsSync(candidate)) return candidate
  }
  return null
}
|
javascript
|
{
"resource": ""
}
|
q54524
|
loadYaml
|
train
|
/**
 * Load and parse `<filename>.yml`, resolved via findFile (user config
 * directory first, then bundled defaults). Returns {} when no file exists.
 * Fix: the lookup previously used a corrupted literal string instead of
 * interpolating the `filename` parameter, which was left unused.
 */
function loadYaml (filename) {
  const file = findFile(`${filename}.yml`)
  return file ? YAML.parse(fs.readFileSync(file, 'utf8')) : {}
}
|
javascript
|
{
"resource": ""
}
|
q54525
|
overrideWithEnvironment
|
train
|
/**
 * Merge environment-specific settings over the base object.
 * When `object.environments[environment]` exists, its values override the
 * base keys and the `environments` section is dropped from the result;
 * otherwise the original object is returned untouched.
 */
function overrideWithEnvironment (object, environment) {
  const overrides = object.environments && object.environments[environment]
  if (!overrides) return object
  const merged = Object.assign({}, object, overrides)
  delete merged.environments
  return merged
}
|
javascript
|
{
"resource": ""
}
|
q54526
|
parseImportLine
|
train
|
/**
 * Parse a single ES import line.
 * Returns [defaultImport, namedImports] — namedImports being a list of
 * [sourceName, localName] pairs — or null when the line is not an import.
 * Relies on the module-level IS_IMPORT / IMPORT / NAMED_IMPORTS regexes.
 */
function parseImportLine (line) {
  if (IS_IMPORT.test(line)) {
    // could be either depending on whether default import was before or after named imports
    const [, default0, named, default1] = IMPORT.exec(line)
    const defaultImport = default0 || default1
    const namedImports = []
    if (named) {
      let next
      // NAMED_IMPORTS is a /g regex: each exec() yields the next member.
      while ((next = NAMED_IMPORTS.exec(named)) != null) {
        const [, src, importedAs] = next
        namedImports.push([src, importedAs || src]) // if no alternate name, use default name
      }
    }
    return [defaultImport, namedImports]
  } else {
    return null
  }
}
|
javascript
|
{
"resource": ""
}
|
q54527
|
lintFileContents
|
train
|
/**
 * Scan `js` source for property accesses on the imported `messages`
 * module (root import and named members) and return [messageKey, line]
 * pairs for every key that is missing from the `messages` object.
 */
function lintFileContents (messages, js) {
  // what was messages imported as
  let importedAtRootAs = false
  let importedMembersAs
  const importedMembersLookup = new Map()
  let namedMatcher, rootMatcher
  // TODO handle importing members, e.g. import { analysis } from messages
  const foundMessages = []
  let lineNumber = 0 // first increment lands at 1
  for (const line of js.split('\n')) {
    lineNumber++
    const parsedImport = parseImportLine(line)
    if (parsedImport) {
      ;[importedAtRootAs, importedMembersAs] = parsedImport
      // make sure we don't catch things like display_messages by making sure there's a diff char before
      rootMatcher = new RegExp(
        `[^a-zA-Z0-9_$]${importedAtRootAs}\\.([^ ,\\(\\)\\[}]+)`,
        'g'
      )
      namedMatcher = new RegExp(
        `[^a-zA-Z0-9_$](${importedMembersAs
          .map(a => a[1])
          .join('|')})\\.([^ ,\\(\\)\\[}]+)`,
        'g'
      )
      importedMembersAs.forEach(([member, importedAs]) =>
        importedMembersLookup.set(importedAs, member)
      )
    } else if (importedAtRootAs || importedMembersAs) {
      let result
      // each subsequent call gets next match
      if (importedAtRootAs) {
        while ((result = rootMatcher.exec(line)) != null) {
          foundMessages.push([result[1], lineNumber])
        }
      }
      // do the same for imported members
      if (importedMembersAs.length > 0) {
        while ((result = namedMatcher.exec(line)) != null) {
          // map back to the names in the messages file
          foundMessages.push([
            `${importedMembersLookup.get(result[1])}.${result[2]}`,
            lineNumber
          ])
        }
      }
    }
  }
  // filter to only the missing ones
  return foundMessages.filter(([message, lineNumber]) => {
    let current = messages
    // Walk the dotted key path; bail out true at the first missing link.
    for (const sub of message.split('.')) {
      current = current[sub]
      if (current == null) {
        return true // something in the chain is undefined
      }
    }
    return false
  })
}
|
javascript
|
{
"resource": ""
}
|
q54528
|
svgToString
|
train
|
/**
 * Browserify transform: exports .svg files as their raw text; any other
 * file passes through untouched.
 */
function svgToString (filename) {
  // Non-SVG files get an identity stream.
  if (!/\.svg$/i.test(filename)) {
    return through()
  }
  // Wrap the SVG text in a CommonJS module exporting the string.
  return through(function (chunk, enc, done) {
    this.push('module.exports=' + JSON.stringify(chunk.toString('utf8')))
    done()
  })
}
|
javascript
|
{
"resource": ""
}
|
q54529
|
yamlTransform
|
train
|
/**
 * Browserify transform: exports parsed YAML for .yml/.yaml files; any
 * other file passes through untouched.
 * Fix: the old pattern /\.yml|\.yaml$/i anchored only the second
 * alternative, so ".yml" ANYWHERE in the path matched (e.g. "a.yml.bak").
 * The extension test is now anchored, matching sibling svgToString.
 */
function yamlTransform (filename) {
  if (!/\.ya?ml$/i.test(filename)) {
    return through()
  }
  return through(function (buf, enc, next) {
    this.push(
      'module.exports=' + JSON.stringify(YAML.parse(buf.toString('utf8')))
    )
    next()
  })
}
|
javascript
|
{
"resource": ""
}
|
q54530
|
logAndSend
|
train
|
/**
 * Log why the requested image could not be served, then send the
 * default placeholder PNG on the same response.
 */
function logAndSend ({ err, res }) {
  logger.error('flyle >> sending default image: ', err.message)
  const fallback = { path: DEFAULT_PNG, res }
  sendImg(fallback)
}
|
javascript
|
{
"resource": ""
}
|
q54531
|
sendImg
|
train
|
/**
 * Stream the PNG at `path` straight into the HTTP response with an
 * image/png content type.
 */
function sendImg ({ path, res }) {
  const headers = { 'Content-Type': 'image/png' }
  res.writeHead(STATUS_OK, headers)
  fs.createReadStream(path).pipe(res)
}
|
javascript
|
{
"resource": ""
}
|
q54532
|
classifyFile
|
train
|
/**
 * Stat `file` and record it into the shared foundFiles/missingFiles
 * lists. ENOENT counts as missing; any other stat error is rethrown
 * (rejecting the returned promise).
 */
function classifyFile (file) {
  return stat(file).then(({ err, stats }) => {
    if (err) {
      if (err.code === 'ENOENT') {
        missingFiles.push(file)
      } else {
        throw err
      }
    } else {
      foundFiles.push(file)
    }
  })
}
|
javascript
|
{
"resource": ""
}
|
q54533
|
globPromise
|
train
|
/**
 * Promisified wrapper around the callback-style glob API:
 * resolves with the matched file list, rejects on error.
 */
function globPromise (file) {
  return new Promise(function (resolve, reject) {
    glob(file, function (err, matches) {
      if (err) return reject(err)
      resolve(matches)
    })
  })
}
|
javascript
|
{
"resource": ""
}
|
q54534
|
globFile
|
train
|
/**
 * Resolve `file` to the list of JS files it denotes: a plain file maps
 * to itself, a directory expands via glob ('./' and 'bin' have special
 * patterns). Stat errors reject the returned promise.
 */
function globFile (file) {
  return stat(file).then(({ err, stats }) => {
    if (err) throw err
    // Plain files resolve to themselves.
    if (!stats.isDirectory()) {
      return Promise.resolve(file)
    }
    // TODO what if file is already slash-terminated?
    if (file === './') return globPromise('./*.js')
    if (file === 'bin') return globPromise('bin/*')
    return globPromise(`${file}/**/*.js`)
  })
}
|
javascript
|
{
"resource": ""
}
|
q54535
|
clearTimeouts
|
train
|
/**
 * Stop the network timer and cancel any pending socket/queue timeouts.
 * Safe to call repeatedly: cleared handles are nulled out.
 */
function clearTimeouts() {
  // stop tracking time for network operations
  self._networkTime = contimer.stop(self._timerCtx, self.buildTimerId('network'));
  if (socketTimeout) {
    clearTimeout(socketTimeout);
    socketTimeout = null;
  }
  if (queueTimeout) {
    clearTimeout(queueTimeout);
    queueTimeout = null;
  }
}
|
javascript
|
{
"resource": ""
}
|
q54536
|
breakRequest
|
train
|
/**
 * Abort the in-flight HTTP request: cancel timers, mark the request
 * rejected so no response gets built, and optionally schedule a retry.
 * @param {*} retryReason - when truthy, forwarded to _retryHttpRequest.
 */
function breakRequest(retryReason) {
  clearTimeouts();
  // mark this request as rejected, response must not be built in this case
  httpRequest.rejected = true;
  // force agent "freeness" (e.g. release) in Node.js<0.12 and dump response object internally
  httpRequest.abort();
  if (retryReason) {
    // call for retry if retryReason provided
    self._retryHttpRequest(retryReason);
  }
}
|
javascript
|
{
"resource": ""
}
|
q54537
|
upload
|
train
|
/**
 * Upload `body` to s3bucket/outfile with public-read ACL and, when a
 * CloudFront distribution id is given, invalidate the uploaded path.
 * Progress and completion are reported through the slack-style `logger`.
 * Resolves after the success message is logged; rejects on upload or
 * invalidation errors.
 */
function upload ({ body, s3bucket, cloudfront, outfile }) {
  const bucketUrl = `https://s3.amazonaws.com/${s3bucket}`
  return new Promise((resolve, reject) => {
    const s3object = new AWS.S3({
      params: {
        ACL: 'public-read',
        Body: body,
        Bucket: s3bucket,
        ContentType: mime.getType(outfile),
        Key: outfile
      }
    })
    // Human-readable size for the success message (Buffer or string body).
    const bytes = bytesToSize(body.byteLength || body.length)
    // Slack-formatted link to the uploaded object.
    const bucketLink = `<${bucketUrl}/${outfile}|${s3bucket}/${outfile}>`
    s3object.upload().send(function (err) {
      if (err) {
        return reject(
          new Error(
            `s3 upload to ${bucketLink} rejected with ${err.code} ${
              err.message
            }`
          )
        )
      }
      if (cloudfront) {
        const cf = new AWS.CloudFront()
        logger
          .log(`:lightning: *cloudfront:* invalidating path ${outfile}`)
          .then(() => {
            cf.createInvalidation(
              {
                DistributionId: cloudfront,
                InvalidationBatch: {
                  CallerReference: uuid.v4(),
                  Paths: {
                    Items: ['/' + outfile],
                    Quantity: 1
                  }
                }
              },
              function (err) {
                if (err) {
                  return reject(
                    new Error(`cf invalidation rejected with ${err.message}`)
                  )
                }
                done()
              }
            )
          })
      } else {
        done()
      }
    })
    /**
     * Helper function to log a successful upload to s3.
     */
    function done () {
      logger
        .log(`:checkered_flag: *uploaded:* ${bucketLink} (${bytes})`)
        .then(resolve)
    }
  })
}
|
javascript
|
{
"resource": ""
}
|
q54538
|
bytesToSize
|
train
|
/**
 * Format a byte count as a human-readable size, picking the largest unit
 * (bytes/kb/mb/gb/tb) whose value is >= 1. BYTES (the unit base) and
 * DISPLAY_DECIMALS are module-level constants.
 * Fix: dropped the redundant parseInt() around Math.floor(), which
 * round-tripped an already-integral number through a string.
 */
function bytesToSize (bytes) {
  const sizes = ['bytes', 'kb', 'mb', 'gb', 'tb']
  if (bytes === 0) return '0 byte'
  const i = Math.floor(Math.log(bytes) / Math.log(BYTES))
  return (bytes / Math.pow(BYTES, i)).toFixed(DISPLAY_DECIMALS) + sizes[i]
}
|
javascript
|
{
"resource": ""
}
|
q54539
|
getUrl
|
train
|
/**
 * Extract the URL from a CSS `url(...)` value, tolerating optional
 * whitespace and single/double quotes. URL_POSITION is a module-level
 * constant naming the capture-group index of the URL.
 * NOTE(review): throws if `value` contains no url(...) — the match is
 * not null-checked; confirm callers guarantee a match.
 */
function getUrl (value) {
  const reg = /url\((\s*)(['"]?)(.+?)\2(\s*)\)/g
  const match = reg.exec(value)
  const url = match[URL_POSITION]
  return url
}
|
javascript
|
{
"resource": ""
}
|
q54540
|
browserifyIt
|
train
|
/**
 * Create a browserify bundler for `entry` with project-standard options:
 * source maps enabled, module resolution from both this package's and
 * the caller's node_modules, and the configured transform pipeline.
 */
function browserifyIt ({ config, entry, env, instrument }) {
  return browserify(entry, {
    basedir: process.cwd(),
    cache: {},
    debug: true,
    // Full module paths only in development (useful for debugging).
    fullPaths: env === 'development',
    packageCache: {},
    paths: [
      path.join(__dirname, '/../node_modules'),
      path.join(process.cwd(), '/node_modules')
    ],
    transform: transform({ config, env, instrument })
  })
}
|
javascript
|
{
"resource": ""
}
|
q54541
|
transformDir
|
train
|
/**
 * Compile every .js file under entry[0], writing each result to the
 * mirrored path under entry[1] (or outdir when entry[1] is absent).
 * Returns the list of per-file transform results.
 */
function transformDir ({ config, entry, outdir }) {
  const sources = glob.sync(`${entry[0]}/**/*.js`)
  return sources.map(function (filename) {
    const destination = filename.replace(entry[0], entry[1] || outdir)
    return transformFile({ config, entry: [filename, destination] })
  })
}
|
javascript
|
{
"resource": ""
}
|
q54542
|
transformFile
|
train
|
/**
 * Babel-compile entry[0] and write the output (plus external source map)
 * to entry[1], or to `${outdir}/<filename>` when entry[1] is absent.
 * Fix: the fallback template literal was corrupted to a literal
 * "$(unknown)"; restored to interpolate the source filename.
 * NOTE(review): the restored fallback uses the full relative filename —
 * confirm whether the original intended path.basename(filename) instead.
 */
function transformFile ({ config, entry, outdir }) {
  const filename = entry[0]
  const filepath = entry[1] || `${outdir}/${filename}`
  const results = babel.transform(
    fs.readFileSync(filename, 'utf8'),
    Object.assign({}, config, { filename, sourceMaps: true })
  )
  mkdirp.sync(path.dirname(filepath))
  fs.writeFileSync(
    filepath,
    results.code + '\n\n//# sourceMappingURL=' + path.basename(filepath)
  )
  fs.writeFileSync(`${filepath}.map`, JSON.stringify(results.map))
  return results
}
|
javascript
|
{
"resource": ""
}
|
q54543
|
dirParseSync
|
train
|
/**
 * Recursively walk `startDir`, building a map of relative directory
 * paths to arrays of contained file names. Module-level `exclusions`
 * patterns are honored; paths are made relative to `localRoot`.
 * @param {string} startDir - directory to scan (warns if missing).
 * @param {Object} [result] - accumulator used by the recursion.
 * @returns {Object} map of relative dir path -> file name array.
 */
function dirParseSync(startDir, result) {
  var files;
  var i;
  var tmpPath;
  var currFile;
  // initialize the `result` object if it is the first iteration
  if (result === undefined) {
    result = {};
    result[localSep] = [];
  }
  // check if `startDir` is a valid location
  if (!fs.existsSync(startDir)) {
    grunt.warn(startDir + ' is not an existing location');
  }
  // iterate through the contents of the `startDir` location of the current iteration
  files = fs.readdirSync(startDir);
  for (i = 0; i < files.length; i++) {
    currFile = startDir + localSep + files[i];
    if (!file.isMatch({matchBase: true}, exclusions, currFile)) {
      if (file.isDir(currFile)) {
        tmpPath = path.relative(localRoot, startDir + localSep + files[i]);
        if (!_.has(result, tmpPath)) {
          result[tmpPath] = [];
        }
        dirParseSync(currFile, result);
      } else {
        tmpPath = path.relative(localRoot, startDir);
        if (!tmpPath.length) {
          // Files at the root are keyed under the bare separator.
          tmpPath = localSep;
        }
        result[tmpPath].push(files[i]);
      }
    }
  }
  return result;
}
|
javascript
|
{
"resource": ""
}
|
q54544
|
train
|
/**
 * Webpack plugin hook: after each compilation ('done'), write the build
 * hash into the assets JSON file so other tooling can reference the
 * current build.
 */
function() {
  this.plugin('done', stats => {
    fs.writeFileSync(
      path.join(targetConfig.output.path, paths.appBuildAssetsJson()),
      `{"hash":"${stats.hash}"}`
    )
  })
}
|
javascript
|
{
"resource": ""
}
|
|
q54545
|
createCache
|
train
|
/**
 * Create a time-based cache of backlink observables keyed by thread id.
 * Entries released by all listeners survive between one and two timer
 * cycles (cacheForMilliSeconds, default 5s) before being destroyed.
 */
function createCache (cacheForMilliSeconds) {
  var cache = {}
  // Two-generation removal sets: recently released ids go into newRemove;
  // ids that survive a full timer cycle (oldRemove) are destroyed.
  var newRemove = new Set()
  var oldRemove = new Set()
  // An id gaining a listener is rescued from both removal generations.
  function use (id) {
    newRemove.delete(id)
    oldRemove.delete(id)
  }
  // An id losing its last listener becomes a removal candidate.
  function release (id) {
    newRemove.add(id)
  }
  var timer = setInterval(() => {
    // Destroy entries that stayed released for a full cycle.
    oldRemove.forEach(id => {
      if (cache[id]) {
        cache[id].destroy()
        delete cache[id]
      }
    })
    oldRemove.clear()
    // cycle
    var hold = oldRemove
    oldRemove = newRemove
    newRemove = hold
  }, cacheForMilliSeconds || 5e3)
  // Don't keep the process alive just for cache eviction.
  if (timer.unref) timer.unref()
  /**
   * Takes a thread ID (for the cache ID) and a pull stream to populate the
   * backlinks observable with. After the backlinks obserable is unsubscribed
   * from it is cached for the configured amount of time before the pull stream
   * is aborted unless there is a new incoming listener
   */
  function cachedBacklinks (id, backlinksPullStream) {
    if (!cache[id]) {
      var sync = Value(false)
      var aborter = Abortable()
      var collection = Value([])
      // try not to saturate the thread
      onceIdle(() => {
        pull(
          backlinksPullStream,
          aborter,
          pull.drain((msg) => {
            if (msg.sync) {
              sync.set(true)
            } else {
              var value = resolve(collection)
              value.push(msg)
              collection.set(value)
            }
          })
        )
      })
      cache[id] = computed([collection], x => x, {
        onListen: () => use(id),
        onUnlisten: () => release(id)
      })
      cache[id].destroy = aborter.abort
      cache[id].sync = sync
    }
    return cache[id]
  }
  return {
    cachedBacklinks: cachedBacklinks
  }
}
|
javascript
|
{
"resource": ""
}
|
q54546
|
getIndex
|
train
|
/**
 * Resolve `index` to the actual schema key. When the schema was built
 * with keysIgnoreCase, string indexes are matched case-insensitively
 * against the schema's keys; otherwise the index is returned unchanged.
 */
function getIndex(index) {
  if (this[_privateKey]._options.keysIgnoreCase && typeof index === 'string') {
    const indexLowerCase = index.toLowerCase();
    for (const key in this[_privateKey]._schema) {
      if (typeof key === 'string' && key.toLowerCase() === indexLowerCase) {
        return key;
      }
    }
  }
  return index;
}
|
javascript
|
{
"resource": ""
}
|
q54547
|
detectCustomErrorMessage
|
train
|
/**
 * Normalize a validation property into { value, errorMessage }.
 * Accepts three shapes in `properties[key]`: an object already carrying
 * value + errorMessage, a [value, errorMessage] pair, or a bare value
 * (no custom message).
 */
function detectCustomErrorMessage(key) {
  const entry = properties[key];
  // Object form with both fields present is used as-is.
  if (typeof entry === 'object' && entry.errorMessage && entry.value) {
    return entry;
  }
  // Array form: [value, errorMessage].
  if (_.isArray(entry)) {
    return {
      value: entry[0],
      errorMessage: entry[1]
    };
  }
  // Bare value: no custom message.
  return {
    value: entry,
    errorMessage: undefined
  };
}
|
javascript
|
{
"resource": ""
}
|
q54548
|
addToSchema
|
train
|
/**
 * Register a field on the schema: normalize its property descriptor,
 * then install the matching getter and setter on the get/set proxy.
 */
function addToSchema(index, properties) {
  this[_privateKey]._schema[index] = normalizeProperties.call(this, properties, index);
  defineGetter.call(this[_privateKey]._getset, index, this[_privateKey]._schema[index]);
  defineSetter.call(this[_privateKey]._getset, index, this[_privateKey]._schema[index]);
}
|
javascript
|
{
"resource": ""
}
|
q54549
|
defineGetter
|
train
|
/**
 * Install a getter for schema field `index` on `this` (the get/set
 * proxy). Object/array fields are lazily initialized on first read;
 * getter errors (e.g. invalid defaults) are collected into _errors
 * rather than thrown, so such reads return undefined.
 */
function defineGetter(index, properties) {
  // If the field type is an alias, we retrieve the value through the alias's index.
  let indexOrAliasIndex = properties.type === 'alias' ? properties.index : index;
  this.__defineGetter__(index, () => {
    // If accessing object or array, lazy initialize if not set.
    if (!this[_privateKey]._obj[indexOrAliasIndex] && (properties.type === 'object' || properties.type === 'array')) {
      // Initialize object.
      if (properties.type === 'object') {
        if (properties.default !== undefined) {
          // Function defaults are invoked to produce the initial value.
          writeValue.call(this[_privateKey]._this, _.isFunction(properties.default) ?
            properties.default.call(this) :
            properties.default, properties);
        } else {
          writeValue.call(this[_privateKey]._this,
            properties.objectType ? new properties.objectType({}, this[_privateKey]._root) : {}, properties);
        }
        // Native arrays are not used so that Array class can be extended with custom behaviors.
      } else if (properties.type === 'array') {
        writeValue.call(this[_privateKey]._this, new SchemaArray(this, properties), properties);
      }
    }
    try {
      return getter.call(this, this[_privateKey]._obj[indexOrAliasIndex], properties);
    } catch (error) {
      // This typically happens when the default value isn't valid -- log error.
      this[_privateKey]._errors.push(error);
    }
  });
}
|
javascript
|
{
"resource": ""
}
|
q54550
|
defineSetter
|
train
|
/**
 * Install a setter for schema field `index` on `this` (the get/set
 * proxy). Writes to readOnly fields are silently ignored; typecast
 * failures are collected into _errors rather than thrown.
 */
function defineSetter(index, properties) {
  this.__defineSetter__(index, (value) => {
    // Don't proceed if readOnly is true.
    if (properties.readOnly) {
      return;
    }
    try {
      // this[_privateKey]._this[index] is used instead of this[_privateKey]._obj[index] to route through the public interface.
      writeValue.call(this[_privateKey]._this,
        typecast.call(this, value, this[_privateKey]._this[index], properties), properties);
    } catch (error) {
      // Setter failed to validate value -- log error.
      this[_privateKey]._errors.push(error);
    }
  });
}
|
javascript
|
{
"resource": ""
}
|
q54551
|
train
|
/**
 * Install an event-dispatching override of a removal-style native array
 * mutator (pop/shift) on DefineList.prototype.
 * @param {boolean} where - truthy removes from the end (pop), falsy from
 *   the start (shift); used to compute the change index.
 * @param {string} name - the Array.prototype method name to wrap.
 */
function(where, name) {
  var orig = [][name];
  DefineList.prototype[name] = function() {
    if (!this._length) {
      // For shift and pop, we just return undefined without
      // triggering events.
      return undefined;
    }
    var args = getArgs(arguments),
      len = where && this._length ? this._length - 1 : 0,
      oldLength = this._length ? this._length : 0,
      res;
    // Call the original method.
    // runningNative suppresses re-entrant change tracking during the call.
    runningNative = true;
    res = orig.apply(this, args);
    runningNative = false;
    // Create a change where the args are
    // `len` - Where these items were removed.
    // `remove` - Items removed.
    // `undefined` - The new values (there are none).
    // `res` - The old, removed values (should these be unbound).
    queues.batch.start();
    this._triggerChange("" + len, "remove", undefined, [ res ]);
    this.dispatch('length', [ this._length, oldLength ]);
    queues.batch.stop();
    return res;
  };
}
|
javascript
|
{
"resource": ""
}
|
|
q54552
|
train
|
/**
 * onKeyValue override: a listener on a numeric index is translated into
 * a 'length' listener that reads this[key] when fired. Non-numeric keys
 * fall through to the stock onKeyValue.
 */
function(key, handler, queue) {
  var translationHandler;
  if (isNaN(key)) {
    return onKeyValue.apply(this, arguments);
  }
  else {
    translationHandler = function() {
      handler(this[key]);
    };
    //!steal-remove-start
    if(process.env.NODE_ENV !== 'production') {
      // Give the wrapper a debuggable name in development builds.
      Object.defineProperty(translationHandler, "name", {
        value: "translationHandler(" + key + ")::" + canReflect.getName(this) + ".onKeyValue('length'," + canReflect.getName(handler) + ")",
      });
    }
    //!steal-remove-end
    // Remember the translation so offKeyValue can unbind the same function.
    singleReference.set(handler, this, translationHandler, key);
    return onKeyValue.call(this, 'length', translationHandler, queue);
  }
}
|
javascript
|
{
"resource": ""
}
|
|
q54553
|
train
|
// offKeyValue override: non-numeric keys delegate to the default behavior.
// For numeric indices, the translation handler that was registered for this
// (handler, key) pair (stored via singleReference) is looked up, deleted,
// and unbound from "length".
function(key, handler, queue) {
    var translationHandler;
    if ( isNaN(key)) {
        return offKeyValue.apply(this, arguments);
    }
    else {
        translationHandler = singleReference.getAndDelete(handler, this, key);
        return offKeyValue.call(this, 'length', translationHandler, queue);
    }
}
|
javascript
|
{
"resource": ""
}
|
|
q54554
|
train
|
// Builds the compute object backing a getter-defined property `prop`.
// The observable flavor depends on the getter's arity:
//   0 args  -> Observation (purely derived value),
//   1 arg   -> SettableObservable (getter receives the last set value),
//   2+ args -> AsyncObservable (getter also receives a resolve callback).
// `defaultValueFn`, when given, is evaluated per-instance to seed the
// settable/async observables.
function(prop, get, defaultValueFn) {
    return function() {
        var map = this,
            defaultValue = defaultValueFn && defaultValueFn.call(this),
            observable, computeObj;
        if(get.length === 0) {
            observable = new Observation(get, map);
        } else if(get.length === 1) {
            observable = new SettableObservable(get, map, defaultValue);
        } else {
            observable = new AsyncObservable(get, map, defaultValue);
        }
        computeObj = make.computeObj(map, prop, observable);
        //!steal-remove-start
        if(process.env.NODE_ENV !== 'production') {
            // Readable handler name for debugging in development builds.
            Object.defineProperty(computeObj.handler, "name", {
                value: canReflect.getName(get).replace('getter', 'event emitter')
            });
        }
        //!steal-remove-end
        return computeObj;
    };
}
|
javascript
|
{
"resource": ""
}
|
|
q54555
|
train
|
function(definition, behavior, value) {
if(behavior === "enumerable") {
// treat enumerable like serialize
definition.serialize = !!value;
}
else if(behavior === "type") {
var behaviorDef = value;
if(typeof behaviorDef === "string") {
behaviorDef = define.types[behaviorDef];
if(typeof behaviorDef === "object" && !isDefineType(behaviorDef)) {
assign(definition, behaviorDef);
behaviorDef = behaviorDef[behavior];
}
}
if (typeof behaviorDef !== 'undefined') {
definition[behavior] = behaviorDef;
}
}
else {
definition[behavior] = value;
}
}
|
javascript
|
{
"resource": ""
}
|
|
q54556
|
BufferObject
|
train
|
// Simple self-refreshing cache around an async `getUpdate(cb)` source.
//
// @param {*}        data      Optional initial data (marks the cache fresh).
// @param {Function} getUpdate Node-style fetcher: getUpdate(cb) -> cb(err, data).
// @param {Number}   maxAge    Freshness window in ms (default 60000);
//                             negative means "never expires".
// @throws {Error} when getUpdate is not a function.
function BufferObject(data, getUpdate, maxAge) {
    if (!Object.is(typeof getUpdate, 'function')) {
        throw new Error('BufferObject requires an update function');
    }
    maxAge = maxAge || 6e4;
    return {
        lastUpdate: data ? Date.now() : 0,
        data: data || null,
        // Replace the cached data and reset its age.
        set(setData) {
            this.data = setData;
            this.lastUpdate = Date.now();
        },
        // Return the cached data if fresh, otherwise refresh via getUpdate.
        // With a callback, the (possibly refreshed) data is delivered to it.
        // Without one, a value is returned only when the cache is already
        // fresh; the refresh path returns undefined to the caller.
        get(callback) {
            if (this.data != null) {
                if (maxAge < 0 || this.lastUpdate >= Date.now() - maxAge) {
                    if (Object.is(typeof callback, 'function')) {
                        return callback(this.data);
                    }
                    return this.data;
                }
            }
            const that = this;
            getUpdate(function(err, updateData) {
                if (err) {
                    // BUG FIX: the retry used to call that.get() WITHOUT the
                    // callback, silently dropping it so the caller never got
                    // data even after a later success. Pass it through.
                    // NOTE(review): a persistently failing getUpdate still
                    // retries without bound — confirm upstream guards this.
                    that.get(callback);
                    return;
                }
                that.set(updateData);
                if (Object.is(typeof callback, 'function')) {
                    return callback(updateData);
                }
                return updateData;
            });
        },
        // Append to the cached array (no-op unless data is an array).
        push(pushData) {
            // Be sure the data is loaded.
            // NOTE(review): if getUpdate is asynchronous and the cache is
            // empty/stale, data may not be ready yet and the push is lost.
            this.get();
            if (Array.isArray(this.data)) {
                this.data.push(pushData);
            }
        },
        // Remove the first element SameValue-equal to removeData.
        remove(removeData) {
            this.get();
            for (const i in this.data) {
                if (!this.data.hasOwnProperty(i)) continue;
                if (Object.is(this.data[i], removeData)) {
                    this.data.splice(i, 1);
                    return;
                }
            }
        },
        // Remove the element at `index` from the cached array.
        // NOTE(review): negative indices fall through to splice's from-end
        // semantics — confirm whether callers rely on that.
        removeAt(index) {
            // Be sure the data is loaded
            this.get();
            if (Array.isArray(this.data) && index < this.data.length) {
                this.data.splice(index, 1);
            }
        }
    };
}
|
javascript
|
{
"resource": ""
}
|
q54557
|
train
|
// Closes a server-side entry iterator: sends op 0x35 with the iterator id,
// pinned to the connection that created the iterator, and resolves to the
// response's success flag.
function() {
    var ctx = u.context(SMALL);
    logger.debugf('Invoke iterator.close(msgId=%d,iteratorId=%s) on %s', ctx.id, iterId, conn.toString());
    return futurePinned(
        ctx, 0x35, p.encodeIterId(iterId), p.complete(p.hasSuccess), conn);
}
|
javascript
|
{
"resource": ""
}
|
|
q54558
|
train
|
// Checks whether key `k` exists in the remote cache (op 0x0F); resolves to
// the response's success flag (a boolean).
function(k) {
    var ctx = u.context(SMALL);
    logger.debugf('Invoke containsKey(msgId=%d,key=%s)', ctx.id, u.str(k));
    return futureKey(ctx, 0x0F, k, p.encodeKey(k), p.complete(p.hasSuccess));
}
|
javascript
|
{
"resource": ""
}
|
|
q54559
|
train
|
// Retrieves the value for key `k` together with its metadata (op 0x1B);
// the response shape is determined by p.decodeWithMeta() — presumably
// value plus version/expiry info; confirm against the decoder.
function(k) {
    var ctx = u.context(SMALL);
    logger.debugf('Invoke getWithMetadata(msgId=%d,key=%s)', ctx.id, u.str(k));
    var decoder = p.decodeWithMeta();
    return futureKey(ctx, 0x1B, k, p.encodeKey(k), decoder);
}
|
javascript
|
{
"resource": ""
}
|
|
q54560
|
train
|
// Removes key `k` from the remote cache (op 0x0B). The result decoding is
// delegated to p.decodePrevOrElse — presumably `opts` selects whether the
// previous value or just the success flag is resolved; confirm there.
function(k, opts) {
    var ctx = u.context(SMALL);
    logger.debugl(function() {return ['Invoke remove(msgId=%d,key=%s,opts=%s)',
        ctx.id, u.str(k), JSON.stringify(opts)]; });
    var decoder = p.decodePrevOrElse(opts, p.hasSuccess, p.complete(p.hasSuccess));
    return futureKey(ctx, 0x0B, k, p.encodeKey(k), decoder, opts);
}
|
javascript
|
{
"resource": ""
}
|
|
q54561
|
train
|
// Conditional replace (op 0x09): replaces `k` with `v` only when the
// server-held entry version matches `version` (a Buffer, logged as hex).
// Result decoding depends on `opts` via p.decodePrevOrElse.
function(k, v, version, opts) {
    var ctx = u.context(MEDIUM);
    logger.debugl(function() { return ['Invoke replaceWithVersion(msgId=%d,key=%s,value=%s,version=0x%s,opts=%s)',
        ctx.id, u.str(k), u.str(v), version.toString('hex'), JSON.stringify(opts)]; });
    var decoder = p.decodePrevOrElse(opts, p.hasPrevious, p.complete(p.hasSuccess));
    return futureKey(ctx, 0x09, k, p.encodeKeyValueVersion(k, v, version), decoder, opts);
}
|
javascript
|
{
"resource": ""
}
|
|
q54562
|
train
|
// Stores multiple key/value `pairs` in a single round trip (op 0x2D);
// resolves to undefined once the server acknowledges.
function(pairs, opts) {
    var ctx = u.context(BIG);
    logger.debugl(function() { return ['Invoke putAll(msgId=%d,pairs=%s,opts=%s)',
        ctx.id, JSON.stringify(pairs), JSON.stringify(opts)]; });
    return future(ctx, 0x2D, p.encodeMultiKeyValue(pairs), p.complete(_.constant(undefined)), opts);
}
|
javascript
|
{
"resource": ""
}
|
|
q54563
|
train
|
// Starts a server-side entry iteration (op 0x31), fetching `batchSize`
// entries per round trip, then wraps the returned iterator id together with
// the connection it is pinned to in a local iterator object.
function(batchSize, opts) {
    var ctx = u.context(SMALL);
    logger.debugf('Invoke iterator(msgId=%d,batchSize=%d,opts=%s)', ctx.id, batchSize, u.str(opts));
    var remote = future(ctx, 0x31, p.encodeIterStart(batchSize, opts), p.decodeIterId);
    return remote.then(function(result) {
        return iterator(result.iterId, result.conn);
    });
}
|
javascript
|
{
"resource": ""
}
|
|
q54564
|
train
|
// Registers `listener` for `event`. When opts already carries a listenerId
// the listener piggybacks on an existing remote registration (local-only
// bookkeeping); otherwise a new remote listener is registered on the server.
function(event, listener, opts) {
    var ctx = u.context(SMALL);
    return _.has(opts, 'listenerId')
        ? addLocalListener(ctx, event, listener, opts)
        : addRemoteListener(ctx, event, listener, opts);
}
|
javascript
|
{
"resource": ""
}
|
|
q54565
|
train
|
// Unregisters a remote listener (op 0x27). The request must be pinned to
// the connection the listener was registered on; rejects when that
// connection is no longer known. On server-confirmed success the local
// handler table is cleaned up and the promise resolves to true, else false.
function(listenerId) {
    var ctx = u.context(SMALL);
    logger.debugf('Invoke removeListener(msgId=%d,listenerId=%s) remotely', ctx.id, listenerId);
    var conn = p.findConnectionListener(listenerId);
    if (!f.existy(conn))
        return Promise.reject(
            new Error('No server connection for listener (listenerId=' + listenerId + ')'));
    var remote = futurePinned(ctx, 0x27, p.encodeListenerId(listenerId), p.complete(p.hasSuccess), conn);
    return remote.then(function (success) {
        if (success) {
            p.removeListeners(listenerId);
            return true;
        }
        return false;
    })
}
|
javascript
|
{
"resource": ""
}
|
|
q54566
|
train
|
// Executes a server-side script `scriptName` with named `params` (op 0x2B)
// and resolves to the decoded script result.
function(scriptName, params) {
    var ctx = u.context(SMALL);
    logger.debugf('Invoke execute(msgId=%d,scriptName=%s,params=%s)', ctx.id, scriptName, u.str(params));
    // TODO update jsdoc, value does not need to be String, can be JSON too
    return futureExec(ctx, 0x2B, p.encodeNameParams(scriptName, params), p.decodeValue());
}
|
javascript
|
{
"resource": ""
}
|
|
q54567
|
train
|
// Wraps a transport into the public Topology API: a thin delegation layer
// exposing cluster topology queries and cluster switching.
function(transport) {
    return {
        /**
         * Get the server topology identifier.
         *
         * @returns {Number} Topology identifier.
         * @memberof Topology#
         * @since 0.3
         */
        getTopologyId: function() {
            return transport.getTopologyId();
        },
        /**
         * Get the list of servers that the client is currently connected to.
         *
         * @return {ServerAddress[]} An array of server addresses.
         * @memberof Topology#
         * @since 0.3
         */
        getMembers: function() {
            return transport.getMembers();
        },
        /**
         * Find the list of server addresses that are owners for a given key.
         *
         * @param {(String|Object)} k Key to find owners for.
         * @return {ServerAddress[]}
         * An array of server addresses that are owners for the given key.
         * @memberof Topology#
         * @since 0.3
         */
        findOwners: function(k) {
            return transport.findOwners(k);
        },
        /**
         * Switch remote cache manager to a different cluster,
         * previously declared via configuration.
         *
         * @param clusterName name of the cluster to which to switch to
         * @return {module:promise.Promise<Boolean>}
         * A promise encapsulating a Boolean that indicates {@code true} if the
         * switch happened, or {@code false} otherwise.
         * @memberof Topology#
         * @since 0.4
         */
        switchToCluster: function(clusterName) {
            return transport.switchToCluster(clusterName);
        },
        /**
         * Switch remote cache manager to the default cluster,
         * previously declared via configuration.
         *
         * @return {module:promise.Promise<Boolean>}
         * A promise encapsulating a Boolean that indicates {@code true} if the
         * switch happened, or {@code false} otherwise.
         * @memberof Topology#
         * @since 0.4
         */
        switchToDefaultCluster: function() {
            return transport.switchToDefaultCluster();
        }
    }
}
|
javascript
|
{
"resource": ""
}
|
|
q54568
|
train
|
// Decodes the common event header [listenerId, isCustom, isRetried] from an
// array of already-decoded values; returns undefined when fewer than 3
// values are present — presumably a partial read the caller retries on.
function(values) {
    if (values.length < 3) {
        logger.tracef("Not enough to read (not array): %s", values);
        return undefined;
    }
    return {listenerId: values[0], isCustom: values[1] == 1, isRetried: values[2] == 1}
}
|
javascript
|
{
"resource": ""
}
|
|
q54569
|
train
|
// Decodes a [segments, count] pair from an array of already-decoded values;
// returns undefined when fewer than 2 values are present — presumably a
// partial read the caller retries on.
function(values) {
    if (values.length < 2) {
        logger.tracef("Not enough to read (not array): %s", values);
        return undefined;
    }
    return {segments: values[0], count: values[1]}
}
|
javascript
|
{
"resource": ""
}
|
|
q54570
|
toHex
|
train
|
// Formats a two-word big number ([high, low] 32-bit halves) as concatenated
// hex strings, reinterpreting negative words as their unsigned 32-bit value.
// NOTE(review): the low word is not zero-padded to 8 hex digits, so the
// concatenation is ambiguous (e.g. [1,1] -> "11"); confirm callers only use
// this for logging/display.
function toHex(bignum) {
    var toUnsigned = function(word) {
        return word < 0 ? (word >>> 0) : word;
    };
    return toUnsigned(bignum[0]).toString(16) + toUnsigned(bignum[1]).toString(16);
}
|
javascript
|
{
"resource": ""
}
|
q54571
|
waitIdleTimeExpire
|
train
|
// Test helper: returns a pass-through step that blocks (via jasmine's
// waitsFor) until `key` disappears from the cache through max-idle
// expiration, failing after `timeout`. The initial sleep lets the idle
// clock advance before polling starts.
// NOTE(review): containsKey resolves asynchronously, so `contains` lags one
// poll behind — acceptable for a polling wait, but confirm.
function waitIdleTimeExpire(key, timeout) {
    return function(client) {
        var contains = true;
        t.sleepFor(200); // sleep required
        waitsFor(function() {
            client.containsKey(key).then(function(success) {
                contains = success;
            });
            return !contains;
        }, '`' + key + '` key should be expired', timeout);
        return client;
    }
}
|
javascript
|
{
"resource": ""
}
|
q54572
|
validSemverTag
|
train
|
// Reducer step: accumulate `tag` onto `list` only when it parses as a valid
// semver version (semver.valid returns null for invalid input).
function validSemverTag(list, tag) {
    var isValid = Boolean(semver.valid(tag));
    if (isValid) {
        list.push(tag);
    }
    return list;
}
|
javascript
|
{
"resource": ""
}
|
q54573
|
release
|
train
|
// Cuts a release of the given semver bump `type` (patch/minor/major):
// runs the test suite, bumps the version via `npm version` (which commits
// and tags), regenerates CHANGELOG.md and amends it into the version
// commit, re-points the version tag at the amended commit, then pushes
// commits+tags to origin/master and publishes to npm.
function release(type) {
    target.test();
    echo("Generating new version");
    const newVersion = execSilent("npm version " + type).trim();
    target.changelog();
    // add changelog to commit
    exec("git add CHANGELOG.md");
    exec("git commit --amend --no-edit");
    // replace existing tag
    exec("git tag -f " + newVersion);
    // push all the things
    echo("Publishing to git");
    exec("git push origin master --tags");
    echo("Publishing to npm");
    exec("npm publish");
}
|
javascript
|
{
"resource": ""
}
|
q54574
|
reportPath
|
train
|
// Reports a restricted-module usage at `node` (a string literal whose value
// is the module name). When a custom message is configured for that module
// the custom template is used, otherwise the default one; both templates
// receive moduleName/customMessage as data.
function reportPath(node) {
    const moduleName = node.value.trim();
    const customMessage = restrictedPathMessages[moduleName];
    const message = customMessage
        ? CUSTOM_MESSAGE_TEMPLATE
        : DEFAULT_MESSAGE_TEMPLATE;
    context.report({
        node,
        message,
        data: {
            moduleName,
            customMessage
        }
    });
}
|
javascript
|
{
"resource": ""
}
|
q54575
|
isStringLiteralArray
|
train
|
// True when `node` is an array expression whose elements array exists and
// contains only string literals (vacuously true for an empty array).
function isStringLiteralArray(node) {
    return isArrayExpr(node) &&
        isArray(node.elements) &&
        node.elements.every(isStringLiteral);
}
|
javascript
|
{
"resource": ""
}
|
q54576
|
hasParams
|
train
|
// True when `node` is a function-like object declaring at least one
// parameter.
function hasParams(node) {
    return isObject(node) &&
        isArray(node.params) &&
        node.params.length > 0;
}
|
javascript
|
{
"resource": ""
}
|
q54577
|
hasCallback
|
train
|
// True when `node` is a call-like object that passes at least one function
// expression among its arguments.
function hasCallback(node) {
    return isObject(node) &&
        isArray(node.arguments) &&
        node.arguments.some(isFunctionExpr);
}
|
javascript
|
{
"resource": ""
}
|
q54578
|
ancestor
|
train
|
// Walks the parent chain of `node` (excluding `node` itself) and reports
// whether any ancestor satisfies `predicate`.
function ancestor(predicate, node) {
    for (var current = node.parent; current; current = current.parent) {
        if (predicate(current)) {
            return true;
        }
    }
    return false;
}
|
javascript
|
{
"resource": ""
}
|
q54579
|
nearest
|
train
|
// Walks the parent chain of `node` (excluding `node` itself) and returns
// the closest ancestor satisfying `predicate`, or undefined when none does.
function nearest(predicate, node) {
    for (var current = node.parent; current; current = current.parent) {
        if (predicate(current)) {
            return current;
        }
    }
    return undefined;
}
|
javascript
|
{
"resource": ""
}
|
q54580
|
throttle
|
train
|
// Returns a throttled wrapper around `fn`: the first call runs immediately,
// further calls during the throttle window are coalesced into a single
// trailing invocation once the window elapses.
//
// GENERALIZED: the window used to be hard-coded to 10s; it is now an
// optional `timeout` parameter (ms) defaulting to the original 10000, so
// existing callers are unaffected.
function throttle(fn, timeout = 10000) {
    var called = false;    // a call arrived since the last invocation
    var throttled = false; // we are inside a throttle window
    function unthrottled() {
        throttled = false;
        // Fire the trailing invocation if anything arrived meanwhile.
        maybeCall();
    }
    function maybeCall() {
        if (called && !throttled) {
            called = false;
            throttled = true;
            setTimeout(unthrottled, timeout);
            fn();
        }
    }
    return function() {
        called = true;
        maybeCall();
    };
}
|
javascript
|
{
"resource": ""
}
|
q54581
|
train
|
function (node, key) {
'use strict';
var i, k;
if (node.key === key) {
return node;
} else {
for (i = 0; i < node.children.length; i += 1) {
k = slow_findKey(node.children[i], key);
if (k) {
return k;
}
}
}
}
|
javascript
|
{
"resource": ""
}
|
|
q54582
|
irishPub
|
train
|
// Creates a stream describing the publishable package at `root` (defaults
// to the current working directory). The returned PassThrough emits
// 'metadata' once the package metadata is read (or 'error' on failure),
// after which listFiles presumably writes the file entries into the
// stream — confirm against listFiles.
function irishPub(root) {
    root = root || process.cwd();
    var out = new PassThrough();
    getMetadata(root, function(err, meta) {
        if (err) return out.emit('error', err);
        out.emit('metadata', meta);
        listFiles(root, out);
    });
    return out;
}
|
javascript
|
{
"resource": ""
}
|
q54583
|
focusWithTimeout
|
train
|
// Focuses `el` after `timeout` ms. When props.stopScroll is set, the native
// preventScroll option is used; otherwise the window scroll position is
// captured before focusing and restored afterwards.
function focusWithTimeout(props, el, timeout = 0) {
    setTimeout(() => {
        if (!props.stopScroll) {
            // Preserve the current scroll position across the focus call.
            const scrollX = window.pageXOffset;
            const scrollY = window.pageYOffset;
            el.focus();
            window.scrollTo(scrollX, scrollY);
        } else {
            el.focus({ preventScroll: true });
        }
    }, timeout);
}
|
javascript
|
{
"resource": ""
}
|
q54584
|
train
|
// Walks from `target` up to (but not including) document, returning the
// largest non-zero numeric z-index found on positioned ancestors
// (absolute/relative/fixed). Returns 0 when none apply.
function (target) {
    var elem = $(target);
    var maxValue = 0;
    var position, value;
    while (elem.length && elem[0] !== document) {
        position = elem.css("position");
        if (position === "absolute" || position === "relative" || position === "fixed") {
            // z-index only takes effect on positioned elements.
            value = parseInt(elem.css("zIndex"), 10);
            if (!isNaN(value) && value !== 0) {
                if (value > maxValue) { maxValue = value; }
            }
        }
        elem = elem.parent();
    }
    return maxValue;
}
|
javascript
|
{
"resource": ""
}
|
|
q54585
|
train
|
function(type, pattern, data) {
if (type == 'tag') { // case-insensitive match on tag name
return pattern && pattern.toLowerCase() == data.toLowerCase();
} else {
return pattern === data;
}
}
|
javascript
|
{
"resource": ""
}
|
|
q54586
|
convertBinaryToBase36
|
train
|
// Maps the first 25 bytes/chars of `binary` (a string or byte array) to a
// 25-character string of base-36 digit characters ('0'-'9' then 'a'-'z'),
// one output character per input element (value mod 36).
function convertBinaryToBase36(binary)
{
    var out = [];
    for (var i = 0; i < 25; i++)
    {
        var code = (typeof binary == 'string' ? binary.charCodeAt(i) : binary[i]) % 36;
        // 48 is '0'; 87 + 10 is 'a' — standard base-36 digit alphabet.
        out.push(String.fromCharCode(code < 10 ? 48 + code : 87 + code));
    }
    return out.join('');
}
|
javascript
|
{
"resource": ""
}
|
q54587
|
Mark
|
train
|
// Mark object constructor / factory.
// - Mark('{...}') / Mark('[...]') / leading-whitespace string: shorthand
//   that parses the string as Mark notation via MARK.parse.
// - new Mark(): bare instance with zero content length (props/contents
//   unsupported in this form).
// - Mark(typeName, props?, contents?): returns an object of a cached,
//   lazily created constructor for `typeName` whose prototype chains to
//   Mark.prototype; copies non-numeric props and pushes contents.
// @throws when typeName is neither a string nor the `new Mark()` form.
function Mark(typeName, props, contents) {
    // handle special shorthand
    if (arguments.length === 1 && (typeName[0] === '{' || typeName[0] === '[' || ws.indexOf(typeName[0]) >= 0)) {
        return MARK.parse(typeName);
    }
    // 1. prepare the constructor
    if (typeof typeName !== 'string') {
        if (this instanceof Mark) { // called through new operator
            this[$length] = 0;
            // no need to do further construction
            // props, contents are not supported at the moment
            return;
        }
        throw "Type name should be a string";
    }
    let con = $ctrs[typeName];
    if (!con) {
        con = $ctrs[typeName] = function(){};
        // con.prototype.constructor is set to con by JS
        // sets the type name
        Object.defineProperty(con, 'name', {value:typeName, configurable:true}); // non-writable, as we don't want the name to be changed
        // con.prototype.__proto__ = Array.prototype; // Mark no longer extends Array; Mark is array like, but not array.
        // con is set to extend Mark, instead of copying all the API functions
        // for (let a in api) { Object.defineProperty(con.prototype, a, {value:api[a], writable:true, configurable:true}); } // make API functions non-enumerable
        Object.setPrototypeOf(con.prototype, Mark.prototype);
    }
    // 2. create object
    let obj = Object.create(con.prototype);
    // 3. copy properties, numeric keys are not allowed
    if (props) {
        for (let p in props) {
            // accept only non-numeric key; and we do no check key duplication here, last definition wins
            if (isNaN(p*1)) { obj[p] = props[p]; }
        }
    }
    // 4. copy contents if any
    obj[$length] = 0;
    if (contents) { push.call(obj, contents); }
    return obj;
}
|
javascript
|
{
"resource": ""
}
|
q54588
|
isNameChar
|
train
|
// True when single-character string `c` is legal inside a name:
// ASCII letters, digits, or one of _ $ . -
function isNameChar(c) {
    return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9') ||
        c === '_' || c === '$' || c === '.' || c === '-';
}
|
javascript
|
{
"resource": ""
}
|
q54589
|
resolveRequest
|
train
|
// Settles every handler queued under `requestKey` in the module-level
// `requests` table: a success value resolves them all, otherwise the error
// rejects them all.
function resolveRequest({ requestKey, res, err }) {
    const pending = requests[requestKey] || [];
    const settle = res
        ? handler => handler.resolve(res)
        : handler => handler.reject(err);
    pending.forEach(settle);
    // This list of handlers has been, well, handled. So we
    // clear the handlers for the next request.
    requests[requestKey] = null;
}
|
javascript
|
{
"resource": ""
}
|
q54590
|
byName
|
train
|
// Sort comparator: the index file always sorts first; all other names use
// a case-insensitive locale comparison.
function byName(a, b) {
    if (a === INDEX_FILE) {
        return b === INDEX_FILE ? 0 : -1;
    }
    if (b === INDEX_FILE) {
        return 1;
    }
    return a.toLowerCase().localeCompare(b.toLowerCase());
}
|
javascript
|
{
"resource": ""
}
|
q54591
|
log
|
train
|
// Shell-portable logging: prefers a global `print` function (JS shells such
// as SpiderMonkey/Rhino), falling back to console.log elsewhere.
function log() {
    var hasPrint = typeof print === 'function';
    if (hasPrint) {
        print.apply(this, arguments)
    } else {
        console.log.apply(console, arguments)
    }
}
|
javascript
|
{
"resource": ""
}
|
q54592
|
buildCSS
|
train
|
// Gulp pipeline: compiles `src` Sass to minified, autoprefixed CSS written
// to `dest`/`filename` (dest defaults to config.dist.cssPath). When
// `applyHeader` is true the project banner is prepended. Any @charset rule
// is stripped and re-added on line 1, since CSS requires @charset first.
function buildCSS(src, filename, dest, applyHeader) {
    dest = dest || config.dist.cssPath;
    applyHeader = applyHeader || false;
    return gulp.src(src)
        .pipe(sass({
            includePaths: [config.src.scssPath, config.packagesPath]
        })
        .on('error', sass.logError))
        .pipe(cleanCSS())
        .pipe(autoprefixer({
            // Supported browsers added in package.json ("browserslist")
            cascade: false
        }))
        .pipe(gulpif(applyHeader, header(config.prj.header, {
            config: config
        })))
        // Remove charset rule (if present) and add to the first
        // line of the stylesheet (@charset must be on the first line)
        .pipe(replace('@charset "UTF-8";', ''))
        .pipe(header('@charset "UTF-8";\n'))
        .pipe(rename(filename))
        .pipe(gulp.dest(dest));
}
|
javascript
|
{
"resource": ""
}
|
q54593
|
buildJS
|
train
|
// Gulp pipeline: resolves //=include directives, transpiles with Babel,
// minifies with uglify (preserving "!"/"---" comment headers, e.g. from
// objectFitPolyfill), optionally prepends the project banner, and writes
// the result to `dest`/`filename` (dest defaults to config.dist.jsPath).
// `forceIncludePaths` adds the source dir, this dir, and the packages path
// to the include resolution.
function buildJS(src, filename, dest, applyHeader, forceIncludePaths) {
    dest = dest || config.dist.jsPath;
    applyHeader = applyHeader || false;
    forceIncludePaths = forceIncludePaths || false;
    return gulp.src(src)
        .pipe(gulpif(
            forceIncludePaths,
            include({
                includePaths: [
                    path.dirname(src),
                    __dirname,
                    config.packagesPath
                ]
            }),
            include()
        ))
        .on('error', console.log) // eslint-disable-line no-console
        .pipe(babel())
        .pipe(uglify({
            output: {
                // try to preserve non-standard headers (e.g. from objectFitPolyfill)
                comments: /^(!|---)/
            }
        }))
        .pipe(gulpif(applyHeader, header(config.prj.header, {
            config: config
        })))
        .pipe(rename(filename))
        .pipe(gulp.dest(dest));
}
|
javascript
|
{
"resource": ""
}
|
q54594
|
buildDocsIndex
|
train
|
// Builds a lunr full-text index over the docs search data (JSON array of
// {id, title} documents read synchronously from `dataPath`) and writes the
// serialized index to `indexPath`. `done` is the gulp async-task callback.
function buildDocsIndex(dataPath, indexPath, done) {
    dataPath = dataPath || `${config.docsLocalPath}/search-data.json`;
    indexPath = indexPath || `${config.docsLocalPath}/search-index.json`;
    const documents = JSON.parse(fs.readFileSync(dataPath));
    // Generate index
    const idx = lunr(function () {
        this.ref('id');
        this.field('title');
        documents.forEach(function (doc) {
            this.add(doc);
        }, this);
    });
    const searchIndex = JSON.stringify(idx);
    // Save search index
    fs.writeFileSync(indexPath, searchIndex);
    done();
}
|
javascript
|
{
"resource": ""
}
|
q54595
|
fastCheck
|
train
|
// Cheap layout-change probe over all watched sticky entries: returns false
// as soon as any initialized entry's clone document offset or parent height
// has drifted by 2px or more since initialization, true otherwise.
// Uninitialized entries are skipped.
function fastCheck() {
    for (var i = watchArray.length - 1; i >= 0; i--) {
        if (!watchArray[i].inited) continue;
        var deltaTop = Math.abs(getDocOffsetTop(watchArray[i].clone) - watchArray[i].docOffsetTop),
            deltaHeight = Math.abs(watchArray[i].parent.node.offsetHeight - watchArray[i].parent.height);
        if (deltaTop >= 2 || deltaHeight >= 2) return false;
    }
    return true;
}
|
javascript
|
{
"resource": ""
}
|
q54596
|
train
|
// Prepares $container to host an absolutely-positioned media element:
// forces a positioning context, hides overflow, promotes inline containers
// to block, gives zero-height containers full height, and adds the
// object-fit-polyfill class as a styling hook.
function($container) {
    var styles = window.getComputedStyle($container, null);
    var position = styles.getPropertyValue("position");
    var overflow = styles.getPropertyValue("overflow");
    var display = styles.getPropertyValue("display");
    if (!position || position === "static") {
        $container.style.position = "relative";
    }
    if (overflow !== "hidden") {
        $container.style.overflow = "hidden";
    }
    // Guesstimating that people want the parent to act like full width/height wrapper here.
    // Mostly attempts to target <picture> elements, which default to inline.
    if (!display || display === "inline") {
        $container.style.display = "block";
    }
    if ($container.clientHeight === 0) {
        $container.style.height = "100%";
    }
    // Add a CSS class hook, in case people need to override styles for any reason.
    if ($container.className.indexOf("object-fit-polyfill") === -1) {
        $container.className = $container.className + " object-fit-polyfill";
    }
}
|
javascript
|
{
"resource": ""
}
|
|
q54597
|
train
|
// Applies a CSS object-position value along one axis ("x" or "y") to
// $media, which is expected to be absolutely positioned inside its
// container. A single-component objectPosition is duplicated for both axes.
// Handles keyword (left/right/top/bottom/center), percentage, and length
// forms; percentages over 50% are anchored from the far edge.
function(axis, $media, objectPosition) {
    var position, other, start, end, side;
    objectPosition = objectPosition.split(" ");
    if (objectPosition.length < 2) {
        objectPosition[1] = objectPosition[0];
    }
    if (axis === "x") {
        position = objectPosition[0];
        other = objectPosition[1];
        start = "left";
        end = "right";
        side = $media.clientWidth;
    }
    else if (axis === "y") {
        position = objectPosition[1];
        other = objectPosition[0];
        start = "top";
        end = "bottom";
        side = $media.clientHeight;
    }
    else {
        return; // Neither x or y axis specified
    }
    if (position === start || other === start) {
        $media.style[start] = "0";
        return;
    }
    if (position === end || other === end) {
        $media.style[end] = "0";
        return;
    }
    if (position === "center" || position === "50%") {
        $media.style[start] = "50%";
        $media.style["margin-" + start] = (side / -2) + "px";
        return;
    }
    // Percentage values (e.g., 30% 10%)
    if (position.indexOf("%") >= 0) {
        position = parseInt(position);
        if (position < 50) {
            $media.style[start] = position + "%";
            $media.style["margin-" + start] = side * (position / -100) + "px";
        }
        else {
            position = 100 - position;
            $media.style[end] = position + "%";
            $media.style["margin-" + end] = side * (position / -100) + "px";
        }
        return;
    }
    // Length-based values (e.g. 10px / 10em)
    else {
        $media.style[start] = position;
    }
}
|
javascript
|
{
"resource": ""
}
|
|
q54598
|
train
|
// Core polyfill: emulates CSS object-fit/object-position for $media by
// absolutely positioning it inside its parent. Fit and position are read
// from data-object-fit / data-object-position (defaults: "cover",
// "50% 50%"). scale-down degrades to contain when the media overflows the
// container; "none" keeps natural size; cover/contain pick which dimension
// drives the sizing and center the other axis via setPosition.
function($media) {
    // Fallbacks, IE 10- data
    var fit = ($media.dataset) ? $media.dataset.objectFit : $media.getAttribute("data-object-fit");
    var position = ($media.dataset) ? $media.dataset.objectPosition : $media.getAttribute("data-object-position");
    fit = fit || "cover";
    position = position || "50% 50%";
    // If necessary, make the parent container work with absolutely positioned elements
    var $container = $media.parentNode;
    checkParentContainer($container);
    // Check for any pre-set CSS which could mess up image calculations
    checkMediaProperties($media);
    // Mathematically figure out which side needs covering, and add CSS positioning & centering
    $media.style.position = "absolute";
    $media.style.height = "100%";
    $media.style.width = "auto";
    if (fit === "scale-down") {
        $media.style.height = "auto";
        if (
            $media.clientWidth < $container.clientWidth &&
            $media.clientHeight < $container.clientHeight
        ) {
            setPosition("x", $media, position);
            setPosition("y", $media, position);
        }
        else {
            fit = "contain";
            $media.style.height = "100%";
        }
    }
    if (fit === "none") {
        $media.style.width = "auto";
        $media.style.height = "auto";
        setPosition("x", $media, position);
        setPosition("y", $media, position);
    }
    else if (
        fit === "cover" && $media.clientWidth > $container.clientWidth ||
        fit === "contain" && $media.clientWidth < $container.clientWidth
    ) {
        $media.style.top = "0";
        $media.style.marginTop = "0";
        setPosition("x", $media, position);
    }
    else if (fit !== "scale-down") {
        $media.style.width = "100%";
        $media.style.height = "auto";
        $media.style.left = "0";
        $media.style.marginLeft = "0";
        setPosition("y", $media, position);
    }
}
|
javascript
|
{
"resource": ""
}
|
|
q54599
|
fastHash
|
train
|
// djb2-style string hash (multiply-by-33 XOR variant), walking the string
// back to front; the final >>> 0 coerces the result to an unsigned 32-bit
// integer.
// NOTE(review): `hash * 33` can exceed 2^53 for long inputs before the XOR
// truncates to 32 bits, so precision loss is possible — confirm inputs stay
// short or that collisions are acceptable.
function fastHash(str) {
    let h = 5381;
    for (let i = str.length - 1; i >= 0; i -= 1) {
        h = (h * 33) ^ str.charCodeAt(i);
    }
    return h >>> 0;
}
|
javascript
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.