_id stringlengths 2 6 | title stringlengths 0 58 | partition stringclasses 3 values | text stringlengths 52 373k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
// Returns whether the given value is an HTML element node. Prefers the DOM2
// `instanceof HTMLElement` check against the element's owning window; falls
// back to duck-typing (nodeType === 1 plus a string nodeName) for
// environments where HTMLElement isn't an object.
function isHTMLElement(o) {
  var ownerDoc = o && o.ownerDocument;
  // resolve the window that owns the node; default to the global window
  var wnd = ownerDoc ? (ownerDoc.parentWindow || window) : window;
  return (
    typeof wnd.HTMLElement == TYPES.o ?
      o instanceof wnd.HTMLElement : //DOM2
      o && typeof o == TYPES.o && o !== null && o.nodeType === 1 && typeof o.nodeName == TYPES.s
  );
}
"resource": ""
} |
// Computes the symmetric difference of two arrays: every element that occurs
// in exactly one of the arrays. Elements are coerced to strings because they
// are tracked as object keys, and the returned array contains those string
// keys (integer-like keys enumerate in ascending numeric order).
function getArrayDifferences(a1, a2) {
  var marks = { };
  var result = [];
  var idx;

  // mark every element of the first array
  for (idx = 0; idx < a1.length; idx++)
    marks[a1[idx]] = true;

  // toggle marks for the second array: shared elements cancel out,
  // elements seen only in a2 get marked
  for (idx = 0; idx < a2.length; idx++) {
    if (marks[a2[idx]])
      delete marks[a2[idx]];
    else
      marks[a2[idx]] = true;
  }

  // whatever is still marked occurs in exactly one of the two arrays
  for (var key in marks)
    result.push(key);

  return result;
}
"resource": ""
} |
// Parses a value to a number (base-10 integer by default, float when
// `toFloat` is truthy); unparsable input yields 0 instead of NaN.
function parseToZeroOrNumber(value, toFloat) {
  var result = 0;
  var parsed = toFloat ? parseFloat(value) : parseInt(value, 10);
  if (!isNaN(parsed))
    result = parsed;
  return result;
}
"resource": ""
} |
// Collects cursor/row/column metrics for the target textarea. Returns
// undefined when the element doesn't expose selectionStart (not a textarea).
// Relies on the enclosing scope's _targetElementNative / _targetElement.
function getTextareaInfo() {
  var cursorPos = _targetElementNative.selectionStart;
  if (cursorPos === undefined)
    return;

  var value = _targetElement.val();
  var rows = value.split("\n");
  // rows up to (and including) the one holding the cursor
  var rowsBeforeCursor = value.substr(0, cursorPos).split("\n");

  // find the widest row (1-based index) and its column count
  var widestRow = 0;
  var lastCol = 0;
  for (var i = 0; i < rows.length; i++) {
    var cols = rows[i].length;
    if (cols > lastCol) {
      widestRow = i + 1;
      lastCol = cols;
    }
  }

  return {
    _cursorRow: rowsBeforeCursor.length, //cursorRow
    _cursorColumn: rowsBeforeCursor[rowsBeforeCursor.length - 1].length, //cursorCol
    _rows: rows.length, //rows
    _columns: lastCol, //cols
    _widestRow: widestRow, //wRow
    _cursorPosition: cursorPos, //pos
    _cursorMax: value.length //max
  };
}
"resource": ""
} |
// Builds a <div> HTML string. `classesOrAttrs` may be a string (used as the
// class attribute) or a plain object of attributes ('className' is mapped to
// 'class'); `content` becomes the inner HTML when provided.
function generateDiv(classesOrAttrs, content) {
  var attrText = _strEmpty;
  if (classesOrAttrs) {
    if (type(classesOrAttrs) == TYPES.s) {
      attrText = 'class="' + classesOrAttrs + '"';
    }
    else if (FRAMEWORK.isPlainObject(classesOrAttrs)) {
      for (var key in classesOrAttrs)
        attrText += (key === 'className' ? 'class' : key) + '="' + classesOrAttrs[key] + '" ';
    }
  }
  return '<div ' + attrText + '>' + (content ? content : _strEmpty) + '</div>';
}
"resource": ""
} |
// Reads a (possibly nested) property value from an object by following a
// dot-separated path (e.g. 'a.b.c'). Returns undefined as soon as a path
// segment is not an own property of the current object.
function getObjectPropVal(obj, path) {
  var splits = path.split(_strDot);
  var i = 0;
  var val;
  for(; i < splits.length; i++) {
    // bail out when the current segment isn't an own property
    if(!obj.hasOwnProperty(splits[i]))
      return;
    val = obj[splits[i]];
    // descend into the value when it is an object
    // NOTE(review): `i < splits.length` is always true inside this loop —
    // `i + 1 < splits.length` was probably intended; harmless as written
    // because descending on the final segment doesn't change `val`.
    if(i < splits.length && type(val) == TYPES.o)
      obj = val;
  }
  return val;
}
"resource": ""
} |
// Writes a value into an object at a dot-separated path (e.g. 'a.b.c') by
// building a nested object for the path and deep-extending it into `obj`.
function setObjectPropVal(obj, path, val) {
  var keys = path.split(_strDot);
  var root = { };
  var node = root;
  for (var i = 0; i < keys.length; i++) {
    // intermediate segments become objects; the final segment gets the value
    node = node[keys[i]] = (i + 1 < keys.length) ? { } : val;
  }
  FRAMEWORK.extend(obj, root, true);
}
"resource": ""
} |
// Returns true when `current` differs from `cache` on two properties
// (defaulting to 'w' and 'h'), when `cache` is missing, or when `force` is
// true. Note: when only `prop1` is passed (prop2 and force undefined) and it
// isn't `true`, it is discarded and the 'w'/'h' defaults apply — preserved
// from the original implementation.
function checkCacheDouble(current, cache, prop1, prop2, force) {
  if (force === true)
    return force;
  if (prop2 === undefined && force === undefined) {
    if (prop1 === true)
      return prop1;
    prop1 = undefined;
  }
  var first = prop1 === undefined ? 'w' : prop1;
  var second = prop2 === undefined ? 'h' : prop2;
  if (cache === undefined)
    return true;
  return current[first] !== cache[first] || current[second] !== cache[second];
}
"resource": ""
} |
// Returns true when any of the top/right/bottom/left (t/r/b/l) values of
// `current` differs from `cache`, or when `cache` is missing entirely.
function checkCacheTRBL(current, cache) {
  if (cache === undefined)
    return true;
  var sides = ['t', 'r', 'b', 'l'];
  for (var i = 0; i < sides.length; i++) {
    if (current[sides[i]] !== cache[sides[i]])
      return true;
  }
  return false;
}
"resource": ""
} |
// Validates the GitHub-release options: resolves (with true) when no release
// is requested, rejects when a release is requested without an access token,
// and resolves (with undefined) otherwise.
function validGh (opts) {
  // no GitHub release requested — nothing to validate
  if (!opts.ghrelease) {
    return Promise.resolve(true)
  }
  // a release without a token cannot proceed
  return opts.ghtoken
    ? Promise.resolve()
    : Promise.reject(new Error('Missing GitHub access token. Have you set `AEGIR_GHTOKEN`?'))
}
"resource": ""
} |
// Resolves when the git working tree is clean; rejects with an error when
// `git status -s` reports any pending changes.
function isDirty () {
  const rawGit = pify(git.raw.bind(git))
  return rawGit(['status', '-s']).then((out) => {
    // any non-whitespace output means uncommitted changes
    if (out && out.trim().length > 0) {
      throw new Error('Dirty git repo, aborting')
    }
  })
}
"resource": ""
} |
// Determines whether OCSP validation should be skipped for the given host.
// Note: short-circuit order matters — the parameter lookup only happens when
// insecure-connect is off, and the raw (possibly non-boolean) truthy value
// may be passed through to the caller.
function isOcspValidationDisabled(host)
{
  // ocsp is disabled if insecure-connect is enabled, or if we've disabled ocsp
  // for non-snowflake endpoints and the host is a non-snowflake endpoint
  return GlobalConfig.isInsecureConnect() || (Parameters.getValue(
    Parameters.names.JS_DRIVER_DISABLE_OCSP_FOR_NON_SF_ENDPOINTS) &&
    !REGEX_SNOWFLAKE_ENDPOINT.test(host));
}
"resource": ""
} |
/**
 * Validates a certificate chain via OCSP. Walks up the chain, validates each
 * certificate concurrently, caches OCSP responses, and invokes the callback
 * once all validations complete — with the first error encountered, if any.
 *
 * @param cert the leaf certificate (with issuerCertificate links).
 * @param cb invoked as cb(err) when validation finishes.
 */
function validateCertChain(cert, cb)
{
  // walk up the certificate chain and collect all the certificates in an
  // array; stop at a self-signed certificate (issuer fingerprint matches)
  var certs = [];
  while (cert && cert.issuerCertificate &&
         (cert.fingerprint !== cert.issuerCertificate.fingerprint))
  {
    certs.push(cert);
    cert = cert.issuerCertificate;
  }

  // FIX: when there is nothing to validate (e.g. a self-signed certificate
  // or a missing issuer link), the original never invoked the callback and
  // the caller would hang forever; complete successfully instead
  if (certs.length === 0)
  {
    process.nextTick(cb);
    return;
  }

  // errors encountered while validating each certificate in the chain
  var errors = new Array(certs.length);
  var completed = 0;

  /**
   * Called for every certificate as we traverse the certificate chain and
   * validate each one.
   *
   * @param certs
   * @param index
   */
  var eachCallback = function(certs, index)
  {
    var cert = certs[index];
    validateCert(cert, function(err, data)
    {
      completed++;
      errors[index] = err;

      // if we have an ocsp response, cache it
      if (data)
      {
        getOcspResponseCache().set(cert, data);
      }

      // if this is the last request to complete
      if (completed === certs.length)
      {
        // if we saw one or more errors, invoke the callback with the first
        // error we saw; otherwise invoke the callback without any error
        var error;
        for (var errorIndex = 0, length = errors.length;
             errorIndex < length; errorIndex++)
        {
          error = errors[errorIndex];
          if (error)
          {
            break;
          }
        }
        cb(error);
      }
    });
  };

  // fire off requests to validate all the certificates in the chain
  for (var index = 0, length = certs.length; index < length; index++)
  {
    eachCallback(certs, index);
  }
}
"resource": ""
} |
// NOTE(review): this fragment duplicates the per-certificate callback created
// inside validateCertChain; it depends on closure variables (completed,
// errors, cb, validateCert, getOcspResponseCache) that are not defined here.
function(certs, index)
{
  var cert = certs[index];
  validateCert(cert, function(err, data)
  {
    // record the outcome for this certificate
    completed++;
    errors[index] = err;
    // if we have an ocsp response, cache it
    if (data)
    {
      getOcspResponseCache().set(cert, data);
    }
    // if this is the last request to complete
    if (completed === certs.length)
    {
      // if we saw one or more errors, invoke the callback with the first
      // error we saw; otherwise invoke the callback without any error
      for (var errorIndex = 0, length = errors.length;
           errorIndex < length; errorIndex++)
      {
        var error = errors[errorIndex];
        if (error)
        {
          break;
        }
      }
      cb(error);
    }
  });
}
"resource": ""
} | |
/**
 * Validates a single certificate via OCSP, serving the status from the
 * response cache when possible.
 *
 * @param cert the certificate to validate.
 * @param cb invoked as cb(err, data) once validation completes.
 */
function validateCert(cert, cb)
{
  // if we already have an entry in the cache, use it
  var ocspResponse = getOcspResponseCache().get(cert);
  if (ocspResponse)
  {
    // complete asynchronously to keep the callback contract consistent
    process.nextTick(function()
    {
      Logger.getInstance().trace('Returning OCSP status for certificate %s ' +
        'from cache', cert.serialNumber);
      cb(null, ocspResponse);
    });
    return;
  }

  var decoded;
  try
  {
    decoded = CertUtil.decode(cert);
  }
  catch (e)
  {
    // FIX: the original fell through after scheduling cb(e) and still called
    // check() with an undefined certificate, invoking the callback twice
    // (or crashing); report the decode error and stop
    process.nextTick(function()
    {
      cb(e);
    });
    return;
  }

  // issue a request to get the ocsp status
  check(decoded, cb);
}
"resource": ""
} |
// Appends a query parameter (paramName=paramValue) to a url, using '&' when
// the url already has a query string and '?' otherwise.
// NOTE(review): paramName/paramValue are not URL-encoded here — callers are
// presumably expected to pass safe values; confirm before reuse.
function (url, paramName, paramValue)
{
  // if the specified url is valid
  var urlAsObject = Url.parse(url);
  if (urlAsObject)
  {
    // if the url already has query parameters, use '&' as the separator
    // when appending the additional query parameter, otherwise use '?'
    url += (urlAsObject.search ? '&' : '?') + paramName + '=' + paramValue;
  }
  return url;
}
"resource": ""
} | |
/**
 * In-memory logger: formats every message with its level tag and keeps it in
 * a bounded buffer. All level-specific methods forward to a shared core
 * implementation created via Core.createLogger().
 *
 * @param {Object} options
 */
function Logger(options)
{
  /**
   * The array to which all log messages will be added.
   *
   * @type {String[]}
   */
  var buffer = [];

  /**
   * Logs a message at a given level by prefixing it with the level tag and
   * appending it to the buffer, evicting the oldest entries when the buffer
   * is at capacity.
   *
   * @param {String} levelTag the tag associated with the level at which to
   *   log the message.
   * @param {String} message the message to log.
   * @param {Number} bufferMaxLength the maximum size to which the message
   *   buffer can grow.
   */
  var logMessage = function(levelTag, message, bufferMaxLength)
  {
    var taggedMessage = Util.format('%s: %s', levelTag, message);

    // make room for the new message if necessary
    while (buffer.length >= bufferMaxLength)
    {
      buffer.shift();
    }
    buffer.push(taggedMessage);
  };

  // create an inner implementation to which all our methods will be forwarded
  var common = Core.createLogger(options, logMessage);

  /**
   * Configures this logger.
   *
   * @param {Object} options
   */
  this.configure = function(options)
  {
    common.configure(options);
  };

  /**
   * Returns the current log level.
   *
   * @returns {Number}
   */
  this.getLevel = function()
  {
    return common.getLevelNumber();
  };

  // generate one forwarding method per log level (error, warn, info, debug,
  // trace); each simply delegates to the common implementation
  var self = this;
  ['error', 'warn', 'info', 'debug', 'trace'].forEach(function(levelTag)
  {
    /**
     * Logs a given message at this level.
     *
     * @param {String} message
     */
    self[levelTag] = function(message)
    {
      common[levelTag].apply(common, arguments);
    };
  });

  /**
   * Returns the log buffer. A shallow copy (slice) is sufficient because the
   * buffer only contains strings.
   *
   * @returns {String[]}
   */
  this.getLogBuffer = function()
  {
    return buffer.slice();
  };
}
"resource": ""
} |
/**
 * Holds everything a connection needs at runtime: its configuration and the
 * services (sf, largeResultSet) built on top of the given http client. When
 * a config object is supplied (deserialization), the sf service is restored
 * from it.
 *
 * @param {ConnectionConfig} connectionConfig
 * @param {Object} httpClient
 * @param {Object} [config] previously serialized state.
 */
function ConnectionContext(connectionConfig, httpClient, config)
{
  // validate input
  Errors.assertInternal(Util.isObject(connectionConfig));
  Errors.assertInternal(Util.isObject(httpClient));

  // when deserializing, verify the config object has the expected shape and
  // pull out the sf-service configuration
  var sfServiceConfig;
  if (Util.exists(config))
  {
    Errors.assertInternal(Util.isObject(config));
    Errors.assertInternal(Util.isObject(config.services));
    Errors.assertInternal(Util.isObject(config.services.sf));

    sfServiceConfig = config.services.sf;
  }

  // all the services available to the connection
  var services =
  {
    sf             : new SfService(connectionConfig, httpClient, sfServiceConfig),
    largeResultSet : new LargeResultSetService(connectionConfig, httpClient)
  };

  /**
   * Returns the ConnectionConfig for use by the connection.
   *
   * @returns {ConnectionConfig}
   */
  this.getConnectionConfig = function()
  {
    return connectionConfig;
  };

  /**
   * Returns a map that contains all the available services.
   *
   * @returns {Object}
   */
  this.getServices = function()
  {
    return services;
  };

  /**
   * Returns a configuration object that can be passed back to the
   * ConnectionContext constructor to recreate this instance's state.
   *
   * @returns {Object}
   */
  this.getConfig = function()
  {
    return {
      services:
      {
        sf: services.sf.getConfig()
      }
    };
  };
}
"resource": ""
} |
/**
 * Represents a Snowflake timestamp: the raw epoch/nano/scale/format pieces
 * plus a Moment instance positioned in the given timezone.
 *
 * @param {Number} epochSeconds
 * @param {Number} nanoSeconds
 * @param {Number} scale
 * @param {String|Number} timezone a timezone name or a utc offset in minutes.
 * @param {String} format
 */
function SfTimestamp(epochSeconds, nanoSeconds, scale, timezone, format)
{
  // fall back to sensible defaults for missing or invalid inputs
  if (!Util.isNumber(epochSeconds)) epochSeconds = 0;
  if (!Util.isNumber(nanoSeconds)) nanoSeconds = 0;
  if (!Util.isNumber(scale)) scale = 0;
  if (!Util.isString(format)) format = '';

  // keep the raw pieces around for later formatting
  this.epochSeconds = epochSeconds;
  this.nanoSeconds = nanoSeconds;
  this.scale = scale;
  this.timezone = timezone;
  this.format = format;

  // build a moment from the computed epoch milliseconds
  var moment = Moment((epochSeconds * 1000) + (nanoSeconds / 1000000));

  // apply the timezone: string values are timezone names (tz()), numeric
  // values are utc offsets (utcOffset())
  if (Util.isString(timezone))
  {
    moment = moment.tz(timezone);
  }
  else if (Util.isNumber(timezone))
  {
    moment = moment.utcOffset(timezone);
  }

  // save the moment
  this.moment = moment;
}
"resource": ""
} |
/**
 * Builds the Snowflake core module instance: wires up the logger and the
 * connection class, and exposes createConnection / (de)serialization /
 * configure entry points plus the native-type constants.
 *
 * @param {Object} options must supply a loggerClass and either an httpClient
 *   instance or an httpClientClass.
 *
 * @returns {Object} the module instance.
 */
function Core(options)
{
  // validate input
  Errors.assertInternal(Util.isObject(options));
  Errors.assertInternal(
    Util.exists(options.httpClient || options.httpClientClass));
  Errors.assertInternal(Util.exists(options.loggerClass));

  // install the environment-specific logger implementation
  Logger.setInstance(new (options.loggerClass)());

  // use the caller-supplied connection class when present (it must be an
  // object or a function); otherwise fall back to the default Connection
  var ConnectionImpl = options.connectionClass;
  if (Util.exists(ConnectionImpl))
  {
    Errors.assertInternal(
      Util.isObject(ConnectionImpl) || Util.isFunction(ConnectionImpl));
  }
  else
  {
    ConnectionImpl = Connection;
  }

  var qaMode = options.qaMode;
  var clientInfo = options.client;

  /**
   * Creates a new Connection instance.
   *
   * @param {Object} connectionOptions
   * @param {Object} [config] previously serialized state; when present,
   *   credential validation is skipped because the config already carries
   *   the tokens needed to authenticate (account/username/password are not
   *   required).
   *
   * @returns {Object}
   */
  var buildConnection = function buildConnection(connectionOptions, config)
  {
    var connectionConfig =
      new ConnectionConfig(connectionOptions, !config, qaMode, clientInfo);

    // reuse the module-level http client when one was supplied, otherwise
    // build a fresh one for this connection
    var httpClient = options.httpClient ||
      new options.httpClientClass(connectionConfig);

    return new ConnectionImpl(
      new ConnectionContext(connectionConfig, httpClient, config));
  };

  var instance =
  {
    /**
     * Creates a connection object that can be used to communicate with
     * Snowflake.
     *
     * @param {Object} options
     *
     * @returns {Object}
     */
    createConnection: function(options)
    {
      return buildConnection(options);
    },

    /**
     * Deserializes a serialized connection.
     *
     * @param {Object} options
     * @param {String} serializedConnection
     *
     * @returns {Object}
     */
    deserializeConnection: function(options, serializedConnection)
    {
      // the serialized config must exist and be a string
      Errors.checkArgumentExists(Util.exists(serializedConnection),
        ErrorCodes.ERR_CONN_DESERIALIZE_MISSING_CONFIG);
      Errors.checkArgumentValid(Util.isString(serializedConnection),
        ErrorCodes.ERR_CONN_DESERIALIZE_INVALID_CONFIG_TYPE);

      // try to json-parse it; parse failures are swallowed so that the
      // object-check below reports a single, consistent error
      var config;
      try
      {
        config = JSON.parse(serializedConnection);
      }
      catch (err) {}
      finally
      {
        Errors.checkArgumentValid(Util.isObject(config),
          ErrorCodes.ERR_CONN_DESERIALIZE_INVALID_CONFIG_FORM);
      }

      return buildConnection(options, config);
    },

    /**
     * Serializes a given connection.
     *
     * @param {Object} connection
     *
     * @returns {String} a serialized version of the connection.
     */
    serializeConnection: function(connection)
    {
      return connection ? connection.serialize() : connection;
    },

    /**
     * Configures this instance of the Snowflake core module.
     *
     * @param {Object} options may contain logLevel and insecureConnect.
     */
    configure: function(options)
    {
      var logTag = options.logLevel;
      if (Util.exists(logTag))
      {
        // check that the specified value is a valid tag
        Errors.checkArgumentValid(LoggerCore.isValidLogTag(logTag),
          ErrorCodes.ERR_GLOGAL_CONFIGURE_INVALID_LOG_LEVEL);
        Logger.getInstance().configure(
        {
          level: LoggerCore.logTagToLevel(logTag)
        });
      }

      var insecureConnect = options.insecureConnect;
      if (Util.exists(insecureConnect))
      {
        // check that the specified value is a boolean
        Errors.checkArgumentValid(Util.isBoolean(insecureConnect),
          ErrorCodes.ERR_GLOBAL_CONFIGURE_INVALID_INSECURE_CONNECT);
        GlobalConfig.setInsecureConnect(insecureConnect);
      }
    }
  };

  // expose the native data types as read-only constants
  var nativeTypeValues = DataTypes.NativeTypes.values;
  Object.defineProperties(instance,
  {
    STRING  : { value: nativeTypeValues.STRING },
    BOOLEAN : { value: nativeTypeValues.BOOLEAN },
    NUMBER  : { value: nativeTypeValues.NUMBER },
    DATE    : { value: nativeTypeValues.DATE },
    JSON    : { value: nativeTypeValues.JSON }
  });

  return instance;
}
"resource": ""
} |
// NOTE(review): duplicate of the deserializeConnection method built inside
// Core; relies on closure names (createConnection, Errors, Util, ErrorCodes)
// that are not defined in this fragment.
function(options, serializedConnection)
{
  // check for missing serializedConfig
  Errors.checkArgumentExists(Util.exists(serializedConnection),
    ErrorCodes.ERR_CONN_DESERIALIZE_MISSING_CONFIG);
  // check for invalid serializedConfig
  Errors.checkArgumentValid(Util.isString(serializedConnection),
    ErrorCodes.ERR_CONN_DESERIALIZE_INVALID_CONFIG_TYPE);
  // try to json-parse serializedConfig; parse failures are deliberately
  // swallowed so the object-check in the finally block reports the error
  var config;
  try
  {
    config = JSON.parse(serializedConnection);
  }
  catch (err) {}
  finally
  {
    // if serializedConfig can't be parsed to json, throw an error
    Errors.checkArgumentValid(Util.isObject(config),
      ErrorCodes.ERR_CONN_DESERIALIZE_INVALID_CONFIG_FORM);
  }
  return createConnection(options, config);
}
"resource": ""
} | |
// NOTE(review): duplicate of the configure method built inside Core; relies
// on closure names (Util, Errors, ErrorCodes, LoggerCore, Logger,
// GlobalConfig) that are not defined in this fragment.
function(options)
{
  var logTag = options.logLevel;
  if (Util.exists(logTag))
  {
    // check that the specified value is a valid tag
    Errors.checkArgumentValid(LoggerCore.isValidLogTag(logTag),
      ErrorCodes.ERR_GLOGAL_CONFIGURE_INVALID_LOG_LEVEL);
    Logger.getInstance().configure(
    {
      level: LoggerCore.logTagToLevel(logTag)
    });
  }
  var insecureConnect = options.insecureConnect;
  if (Util.exists(insecureConnect))
  {
    // check that the specified value is a boolean
    Errors.checkArgumentValid(Util.isBoolean(insecureConnect),
      ErrorCodes.ERR_GLOBAL_CONFIGURE_INVALID_INSECURE_CONNECT);
    GlobalConfig.setInsecureConnect(insecureConnect);
  }
}
"resource": ""
} | |
/**
 * Node.js logger backed by winston (console + snowflake.log file transports).
 * The winston logger is created lazily on first use so that its level/levels
 * reflect the configured common implementation; all level-specific methods
 * forward to the shared core logger.
 *
 * @param {Object} options
 */
function Logger(options)
{
  var common;
  var winstonLogger;

  /**
   * Logs a message at a given level, lazily creating the underlying winston
   * logger on first use.
   *
   * @param {String} levelTag the tag associated with the level at which to
   *   log the message.
   * @param {String} message the message to log.
   * @param {Number} bufferMaxLength the maximum size to which the message
   *   buffer can grow.
   */
  var logMessage = function(levelTag, message, bufferMaxLength)
  {
    // initialize the winston logger if needed
    if (!winstonLogger)
    {
      winstonLogger = new winston.createLogger(
      {
        transports:
        [
          new (winston.transports.Console)(),
          new (winston.transports.File)({ filename: 'snowflake.log' })
        ],
        level: common.getLevelTag(),
        levels: common.getLevelTagsMap()
      });
    }

    // winston exposes one method per level tag
    winstonLogger[levelTag](message);
  };

  // create an inner implementation to which all our methods will be forwarded
  common = Core.createLogger(options, logMessage);

  /**
   * Configures this logger.
   *
   * @param {Object} options
   */
  this.configure = function(options)
  {
    common.configure(options);
  };

  /**
   * Returns the current log level.
   *
   * @returns {Number}
   */
  this.getLevel = function()
  {
    return common.getLevelNumber();
  };

  // generate one forwarding method per log level (error, warn, info, debug,
  // trace); each simply delegates to the common implementation
  var self = this;
  ['error', 'warn', 'info', 'debug', 'trace'].forEach(function(levelTag)
  {
    /**
     * Logs a given message at this level.
     *
     * @param {String} message
     */
    self[levelTag] = function(message)
    {
      common[levelTag].apply(common, arguments);
    };
  });

  /**
   * Returns the log buffer.
   *
   * @returns {String[]}
   */
  this.getLogBuffer = function()
  {
    return common.getLogBuffer();
  };
}
"resource": ""
} |
/**
 * Builds the array of parameter descriptors used by the driver (timeouts,
 * prefetch sizes, retry policies, etc.), each with a name, default value,
 * validator and an optional external flag.
 *
 * @returns {Object[]}
 */
function createParameters()
{
  var numberUtil = Util.number;
  var isNonNegativeInteger = numberUtil.isNonNegativeInteger.bind(numberUtil);
  var isPositiveInteger = numberUtil.isPositiveInteger.bind(numberUtil);
  var isNonNegativeNumber = numberUtil.isNonNegative.bind(numberUtil);

  // builds one parameter descriptor; `external` is only added when true so
  // the descriptor shape matches what consumers expect
  var descriptor = function(name, defaultValue, validate, external)
  {
    var param =
    {
      name         : name,
      defaultValue : defaultValue
    };
    if (external)
    {
      param.external = true;
    }
    param.validate = validate;
    return param;
  };

  return [
    descriptor(PARAM_TIMEOUT, 90 * 1000, isPositiveInteger, true),
    descriptor(PARAM_RESULT_PREFETCH, 2, isPositiveInteger, true),
    descriptor(PARAM_RESULT_STREAM_INTERRUPTS, 3, isPositiveInteger),
    // for now we set chunk cache size to 1, which is same as disabling the
    // chunk cache; otherwise, cache will explode memory when fetching large
    // result set
    descriptor(PARAM_RESULT_CHUNK_CACHE_SIZE, 1, isPositiveInteger),
    descriptor(PARAM_RESULT_PROCESSING_BATCH_SIZE, 1000, isPositiveInteger),
    descriptor(PARAM_RESULT_PROCESSING_BATCH_DURATION, 100, isPositiveInteger),
    descriptor(PARAM_ROW_STREAM_HIGH_WATER_MARK, 10, isPositiveInteger),
    descriptor(PARAM_RETRY_LARGE_RESULT_SET_MAX_NUM_RETRIES, 10, isNonNegativeInteger),
    descriptor(PARAM_RETRY_LARGE_RESULT_SET_MAX_SLEEP_TIME, 16, isNonNegativeInteger),
    descriptor(PARAM_RETRY_SF_MAX_NUM_RETRIES, 1000, isNonNegativeInteger),
    descriptor(PARAM_RETRY_SF_STARTING_SLEEP_TIME, 0.25, isNonNegativeNumber),
    descriptor(PARAM_RETRY_SF_MAX_SLEEP_TIME, 16, isNonNegativeNumber)
  ];
}
"resource": ""
} |
// NOTE(review): duplicate of the configure() method created by the logger
// core; it mutates closure variables (includeTimestamp, bufferMaxLength,
// messageMaxLength, currlevelObject) and reads constants
// (DEFAULT_BUFFER_MAX_LENGTH, DEFAULT_MESSAGE_MAX_LENGTH, DEFAULT_LEVEL,
// MAP_LOG_LEVEL_TO_OBJECT) that are not defined in this fragment.
function(options)
{
  var localIncludeTimestamp;
  var localBufferMaxLength;
  var localMessageMaxLength;
  var localLevel;
  // if an options argument is specified
  if (Util.exists(options))
  {
    // make sure it's an object
    Errors.assertInternal(Util.isObject(options));
    localIncludeTimestamp = options.includeTimestamp;
    localBufferMaxLength = options.bufferMaxLength;
    localMessageMaxLength = options.messageMaxLength;
    localLevel = options.level;
  }
  // if an includeTimestamp options is specified, convert it to a boolean
  if (Util.exists(localIncludeTimestamp))
  {
    includeTimestamp = !!localIncludeTimestamp;
  }
  else if (!Util.exists(includeTimestamp))
  {
    // default to true
    includeTimestamp = true;
  }
  // if a bufferMaxLength option is specified, make sure
  // it's a positive integer before updating the logger option
  if (Util.exists(localBufferMaxLength))
  {
    Errors.assertInternal(
      Util.number.isPositiveInteger(localBufferMaxLength));
    bufferMaxLength = localBufferMaxLength;
  }
  // initialize logger option if configure() hasn't been called before
  else if (!Util.exists(bufferMaxLength))
  {
    bufferMaxLength = DEFAULT_BUFFER_MAX_LENGTH;
  }
  // if a messageMaxLength option is specified, make sure
  // it's a positive integer before updating the logger option
  if (Util.exists(localMessageMaxLength))
  {
    Errors.assertInternal(
      Util.number.isPositiveInteger(localMessageMaxLength));
    messageMaxLength = localMessageMaxLength;
  }
  // initialize logger option if configure() hasn't been called before
  else if (!Util.exists(messageMaxLength))
  {
    messageMaxLength = DEFAULT_MESSAGE_MAX_LENGTH;
  }
  // if a level option is specified, make sure
  // it's valid before updating the logger option
  if (Util.exists(localLevel))
  {
    Errors.assertInternal(
      MAP_LOG_LEVEL_TO_OBJECT.hasOwnProperty(localLevel));
    currlevelObject = MAP_LOG_LEVEL_TO_OBJECT[localLevel];
  }
  // initialize logger option if configure() hasn't been called before
  else if (!Util.exists(currlevelObject))
  {
    currlevelObject = DEFAULT_LEVEL;
  }
}
"resource": ""
} | |
/**
 * A named, mutable parameter value.
 *
 * @param {Object} options must contain a String name and an existing value.
 */
function Parameter(options)
{
  // validate input
  Errors.assertInternal(Util.isObject(options));
  Errors.assertInternal(Util.isString(options.name));
  Errors.assertInternal(Util.exists(options.value));

  var name = options.name;
  var value = options.value;

  /**
   * Returns the name of the parameter.
   *
   * @returns {String}
   */
  this.getName = function()
  {
    return name;
  };

  /**
   * Returns the current value of the parameter.
   *
   * @returns {*}
   */
  this.getValue = function()
  {
    return value;
  };

  /**
   * Replaces the value of the parameter.
   *
   * @param {*} targetValue
   */
  this.setValue = function(targetValue)
  {
    value = targetValue;
  };
}
"resource": ""
} |
/**
 * Creates an Error with driver-specific metadata: a name, code, message
 * (explicit or built from the code's template plus message arguments), sql
 * state (explicit or derived from the code), payload fields, and — for
 * asynchronous errors — an externalize() method that copies the public
 * properties onto a fresh Error.
 *
 * @param {String} name
 * @param {Object} options
 *
 * @returns {Error}
 */
function createError(name, options)
{
  // TODO: validate that name is a string and options is an object
  var error = new Error();
  error.name = name;

  var code = options.code;
  error.code = code;

  // resolve the message: use the explicit one when given, otherwise build it
  // from the code's template, formatted with any message arguments
  var message = options.message;
  if (!message)
  {
    var messageTemplate = errorMessages[code];
    var messageArgs = options.messageArgs;
    message = messageArgs ?
      Util.format.apply(Util, [messageTemplate].concat(messageArgs)) :
      messageTemplate;
  }
  error.message = message;

  // derive the sql state from the error code when not explicitly provided
  error.sqlState = options.sqlState || errCodeToSqlState[code];

  // attach the remaining payload fields
  error.data = options.data;
  error.response = options.response;
  error.responseBody = options.responseBody;
  error.cause = options.cause;
  error.isFatal = options.isFatal;

  // asynchronous errors get an externalize() method
  if (!options.synchronous)
  {
    error.externalize = function(errorCode, errorMessageArgs, sqlState)
    {
      var propNames =
      [
        'name',
        'code',
        'message',
        'sqlState',
        'data',
        'response',
        'responseBody',
        'cause',
        'isFatal',
        'stack'
      ];

      // copy every existing property onto a fresh Error
      var externalizedError = new Error();
      for (var index = 0, length = propNames.length; index < length; index++)
      {
        var propName = propNames[index];
        var propValue = this[propName];
        if (Util.exists(propValue))
        {
          externalizedError[propName] = propValue;
        }
      }
      return externalizedError;
    };
  }

  return error;
}
"resource": ""
} |
/**
 * LRU cache for OCSP responses, keyed by certificate id. Entries are evicted
 * on read when they are older than maxAge or when the current time falls
 * outside the response's validity window.
 *
 * @param {Number} capacity maximum number of cached responses.
 * @param {Number} maxAge maximum entry age in milliseconds.
 */
function OcspResponseCache(capacity, maxAge)
{
  // validate input
  Errors.assertInternal(Util.number.isPositiveInteger(capacity));
  Errors.assertInternal(Util.number.isPositiveInteger(maxAge));

  // underlying LRU store
  var cache = new SimpleCache({ maxSize: capacity });

  /**
   * Adds an entry to the cache, recording when it was stored.
   *
   * @param cert
   * @param response
   */
  this.set = function set(cert, response)
  {
    cache.set(CertUtil.buildCertId(cert),
    {
      response : response,
      savedAt  : Date.now()
    });
  };

  /**
   * Returns a cached response, evicting (and returning null for) entries
   * that have expired or whose validity window doesn't cover the present.
   *
   * @param cert
   * @returns {*}
   */
  this.get = function get(cert)
  {
    var certId = CertUtil.buildCertId(cert);
    var entry = cache.get(certId);
    if (!entry)
    {
      return undefined;
    }

    var response = entry.response;
    var now = Date.now();
    var tooOld = (now - entry.savedAt) > maxAge;
    var outsideValidity =
      (response.thisUpdate > now) || (response.nextUpdate < now);
    if (tooOld || outsideValidity)
    {
      cache.del(certId);
      return null;
    }
    return response;
  };
}
"resource": ""
} |
/**
 * Streams result chunks: each read() loads the next chunk (prefetching up to
 * prefetchSize chunks ahead) and emits a 'data' event when it completes.
 * Errors close the stream with a resume callback; exhausting all chunks
 * closes the stream normally.
 *
 * @param {Object} options must contain a chunk array and a non-negative
 *   prefetchSize.
 */
function ResultStream(options)
{
  // validate input
  Errors.assertInternal(Util.isObject(options));

  var chunks = options.chunks;
  var prefetchSize = options.prefetchSize;
  Errors.assertInternal(Util.isArray(chunks));
  Errors.assertInternal(Util.isNumber(prefetchSize) && (prefetchSize >= 0));

  // index of the next chunk to hand out
  var nextIndex = 0;
  var self = this;

  /**
   * Handles a chunk's 'loadcomplete' event: on success, advances to the next
   * chunk and emits 'data'; on failure, closes the stream with the error and
   * a callback that allows resuming from the interruption point.
   *
   * @param {Error} err
   * @param {Chunk} chunk
   */
  var handleLoadComplete = function(err, chunk)
  {
    // one-shot handler
    chunk.removeListener('loadcomplete', handleLoadComplete);

    if (err)
    {
      // pass fetchNextChunk so a caller can fix the problem and ask us to
      // continue from where we got interrupted
      close(self, err, fetchNextChunk);
    }
    else
    {
      nextIndex++;
      self.emit('data', chunk);
    }
  };

  /**
   * Starts loading the next chunk plus up to prefetchSize chunks beyond it,
   * or closes the stream when no chunks remain.
   */
  var fetchNextChunk = function()
  {
    // the window of chunks whose contents need to be fetched
    var lookahead = chunks.slice(nextIndex, nextIndex + prefetchSize + 1);
    var nextChunk = lookahead[0];

    // all chunks have been read
    if (!nextChunk)
    {
      self.asyncClose();
      return;
    }

    // kick off loads for every chunk in the window that isn't already loading
    for (var i = 0; i < lookahead.length; i++)
    {
      if (!lookahead[i].isLoading())
      {
        lookahead[i].load();
      }
    }

    // get notified when the next chunk finishes loading
    nextChunk.on('loadcomplete', handleLoadComplete);
  };

  /**
   * Reads the next chunk of data in the result stream.
   */
  this.read = function()
  {
    // TODO: if there are no more chunks to read, should we raise an error?
    // TODO: what if we're already in the middle of a read?
    fetchNextChunk();
  };
}
"resource": ""
} |
// NOTE(review): duplicate of the onLoadComplete handler defined inside
// ResultStream; relies on closure variables (onLoadComplete, start, self,
// close, doLoad) that are not defined in this fragment.
function(err, chunk)
{
  // unsubscribe from the 'loadcomplete' event
  chunk.removeListener('loadcomplete', onLoadComplete);
  // if the chunk load succeeded
  if (!err)
  {
    // move on to the next chunk
    start++;
    // emit an event to signal that new data is available
    self.emit('data', chunk);
  }
  else
  {
    // close the stream with an error; also, include a callback when emitting
    // the event in case someone wants to fix the problem and ask us to
    // continue from where we got interrupted
    close(self, err, doLoad);
  }
}
"resource": ""
} | |
// NOTE(review): duplicate of the doLoad function defined inside ResultStream;
// relies on closure variables (chunks, start, prefetchSize, self,
// onLoadComplete) that are not defined in this fragment.
function()
{
  // get the array of chunks whose contents need to be fetched
  var buffer = chunks.slice(start, start + prefetchSize + 1);
  // the first chunk in the buffer is the next chunk we want to load
  var nextChunk = buffer[0];
  // if we don't have anymore chunks to load, we're done
  if (!nextChunk)
  {
    self.asyncClose();
  }
  else
  {
    // fire off requests to load all the chunks in the buffer that aren't
    // already loading
    var chunk, index, length;
    for (index = 0, length = buffer.length; index < length; index++)
    {
      chunk = buffer[index];
      if (!chunk.isLoading())
      {
        chunk.load();
      }
    }
    // subscribe to the loadcomplete event on the next chunk
    nextChunk.on('loadcomplete', onLoadComplete);
  }
}
"resource": ""
} | |
/**
 * Invokes the statement complete callback. If the result is configured to be
 * streamed later, the callback fires right away; otherwise all rows are first
 * aggregated into an array and passed to the callback as the last argument.
 *
 * @param {Object} statement the statement whose execution completed.
 * @param {Object} context the statement context.
 */
function invokeStatementComplete(statement, context)
{
  // determine whether the result will be streamed; fall back to the
  // connection-level setting when the statement doesn't specify one
  var streamResult = Util.exists(context.streamResult) ?
    context.streamResult : context.connectionConfig.getStreamResult();

  if (!streamResult)
  {
    // collect every row into an array before invoking the complete callback;
    // defer to the next tick so the callback is always invoked asynchronously
    process.nextTick(function()
    {
      var collectedRows = [];
      var rowStream = statement.streamRows();
      rowStream.on('data', function(row)
      {
        collectedRows.push(row);
      });
      rowStream.on('end', function()
      {
        context.complete(null, statement, collectedRows);
      });
      rowStream.on('error', function(err)
      {
        context.complete(Errors.externalize(err), statement);
      });
    });
  }
  else
  {
    // the result will be streamed later, so invoke the complete callback now
    context.complete(Errors.externalize(context.resultError), statement);
  }
}
"resource": ""
} |
/**
 * Creates a callback that processes a successful row-statement response body:
 * it builds a new Result on the first response, refreshes the existing Result
 * on subsequent responses, and updates the session parameters.
 *
 * @param {Object} statement the statement that issued the request.
 * @param {Object} context the statement context.
 *
 * @returns {Function} a callback that takes the response body.
 */
function createOnStatementRequestSuccRow(statement, context)
{
  return function(body)
  {
    // if we don't already have a result
    if (!context.result)
    {
      // build a result from the response
      context.result = new Result(
      {
        response : body,
        statement : statement,
        services : context.services,
        connectionConfig : context.connectionConfig
      });
      // save the statement id
      context.statementId = context.result.getStatementId();
    }
    else
    {
      // refresh the existing result
      context.result.refresh(body);
    }
    // only update the parameters if the statement isn't a post-exec statement
    if (context.type !== statementTypes.ROW_POST_EXEC)
    {
      Parameters.update(context.result.getParametersArray());
    }
  };
}
"resource": ""
} |
/**
 * Creates a new FileStatementPreExec instance: a statement that executes a
 * file-transfer command and exposes the file metadata returned by the server.
 *
 * @param {Object} statementOptions
 * @param {Object} context
 * @param {Object} services
 * @param {Object} connectionConfig
 *
 * @constructor
 */
function FileStatementPreExec(
  statementOptions, context, services, connectionConfig)
{
  // call super
  BaseStatement.apply(this, arguments);
  // add the result request headers to the context
  context.resultRequestHeaders = buildResultRequestHeadersFile();
  /**
   * Called when the statement request is successful; saves the returned file
   * metadata on the context.
   *
   * @param {Object} body
   */
  context.onStatementRequestSucc = function(body)
  {
    context.fileMetadata = body;
  };
  /**
   * Returns the file metadata generated by the statement.
   *
   * @returns {Object}
   */
  this.getFileMetadata = function()
  {
    return context.fileMetadata;
  };
  // send a request to execute the file statement
  sendRequestPreExec(context, context.onStatementRequestComp);
}
"resource": ""
} |
/**
 * Creates a new RowStatementPostExec instance: fetches the result of a
 * previously executed statement (by statement id) and exposes row fetching
 * and row streaming.
 *
 * @param {Object} statementOptions
 * @param {Object} context
 * @param {Object} services
 * @param {Object} connectionConfig
 *
 * @constructor
 */
function RowStatementPostExec(
  statementOptions, context, services, connectionConfig)
{
  // call super
  BaseStatement.apply(this, arguments);
  // add the result request headers to the context
  context.resultRequestHeaders = buildResultRequestHeadersRow();
  /**
   * Called when the statement request is successful.
   *
   * @param {Object} body
   */
  context.onStatementRequestSucc =
    createOnStatementRequestSuccRow(this, context);
  /**
   * Fetches the rows in this statement's result and invokes the each()
   * callback on each row. If startIndex and endIndex values are specified, the
   * each() callback will only be invoked on rows in the requested range. The
   * end() callback will be invoked when either all the requested rows have been
   * successfully processed, or if an error was encountered while trying to
   * fetch the requested rows.
   *
   * @param {Object} options
   */
  this.fetchRows = createFnFetchRows(this, context);
  /**
   * Streams the rows in this statement's result. If start and end values are
   * specified, only rows in the specified range are streamed.
   *
   * @param {Object} options
   */
  this.streamRows = createFnStreamRows(this, context);
  // send a request to fetch the result
  sendRequestPostExec(context, context.onStatementRequestComp);
}
"resource": ""
} |
/**
 * Creates a function that streams the rows in a statement's result. The
 * returned function validates its options (start, end, fetchAsString) before
 * constructing a RowStream.
 *
 * @param {Object} statement
 * @param {Object} context
 *
 * @returns {Function} a function that takes an optional options object and
 *   returns a new RowStream.
 */
function createFnStreamRows(statement, context)
{
  return function(options)
  {
    // if some options are specified
    if (Util.exists(options))
    {
      // check for invalid options
      // NOTE(review): this reuses the FETCH_ROWS error code for the
      // invalid-options case; possibly intentional reuse -- confirm
      Errors.checkArgumentValid(Util.isObject(options),
        ErrorCodes.ERR_STMT_FETCH_ROWS_INVALID_OPTIONS);
      // check for invalid start
      if (Util.exists(options.start))
      {
        Errors.checkArgumentValid(Util.isNumber(options.start),
          ErrorCodes.ERR_STMT_STREAM_ROWS_INVALID_START);
      }
      // check for invalid end
      if (Util.exists(options.end))
      {
        Errors.checkArgumentValid(Util.isNumber(options.end),
          ErrorCodes.ERR_STMT_STREAM_ROWS_INVALID_END);
      }
      // check for invalid fetchAsString
      var fetchAsString = options.fetchAsString;
      if (Util.exists(fetchAsString))
      {
        // check that the value is an array
        Errors.checkArgumentValid(Util.isArray(fetchAsString),
          ErrorCodes.ERR_STMT_STREAM_ROWS_INVALID_FETCH_AS_STRING);
        // check that all the array elements are valid
        var invalidValueIndex = NativeTypes.findInvalidValue(fetchAsString);
        Errors.checkArgumentValid(invalidValueIndex === -1,
          ErrorCodes.ERR_STMT_STREAM_ROWS_INVALID_FETCH_AS_STRING_VALUES,
          JSON.stringify(fetchAsString[invalidValueIndex]));
      }
    }
    return new RowStream(statement, context, options);
  };
}
"resource": ""
} |
/**
 * Fetches rows from the statement's result, transparently resuming when the
 * result stream is interrupted by a retryable large-result-set error with an
 * HTTP 403 response (e.g. an expired chunk credential), up to the configured
 * number of interrupts.
 *
 * @param {Object} options fetchRows() options, including each()/end()
 *   callbacks.
 * @param {Object} statement
 * @param {Object} context
 */
function fetchRowsFromResult(options, statement, context)
{
  var numInterrupts = 0;
  // forward to the result to get a FetchRowsOperation object
  var operation = context.result.fetchRows(options);
  // subscribe to the operation's 'complete' event
  operation.on('complete', function(err, continueCallback)
  {
    // we want to retry if the error is retryable and the
    // result stream hasn't been closed too many times
    if (Errors.isLargeResultSetError(err) && err.response &&
      (err.response.statusCode === 403) &&
      (numInterrupts <
        context.connectionConfig.getResultStreamInterrupts()))
    {
      // increment the interrupt counter
      numInterrupts++;
      // issue a request to fetch the result again; this refreshes the
      // (possibly expired) chunk information
      sendRequestPostExec(context, function(err, body)
      {
        // refresh the result
        context.onStatementRequestComp(err, body);
        // if there was no error, continue from where we got interrupted
        if (!err)
        {
          continueCallback();
        }
      });
    }
    else
    {
      endFetchRows(options, statement, context);
    }
  });
}
"resource": ""
} |
/**
 * Issues a request to cancel a statement. Cancels by statement id when one is
 * available, otherwise falls back to cancellation by request id.
 *
 * @param {Object} statementContext
 * @param {Object} statement
 * @param {Function} [callback] invoked when the cancellation request
 *   completes; receives the (externalized) error and the statement.
 */
function sendCancelStatement(statementContext, statement, callback)
{
  var url;
  var json;

  if (!statementContext.statementId)
  {
    // no statement id is available yet, so cancel by request id
    url = '/queries/v1/abort-request';
    json = { requestId: statementContext.requestId };
  }
  else
  {
    url = '/queries/' + statementContext.statementId + '/abort-request';
  }

  // fire off the cancellation request; externalize any error before handing
  // it to the caller's callback
  statementContext.services.sf.request(
  {
    method : 'POST',
    url : url,
    json : json,
    callback : function(err)
    {
      if (Util.isFunction(callback))
      {
        callback(Errors.externalize(err), statement);
      }
    }
  });
}
"resource": ""
} |
/**
 * Issues a query-request to execute a statement for the first time. Builds
 * the request json (sql text, binds, statement parameters, internal flag) and
 * routes it through the snowflake service with retry support; a 'retry' query
 * parameter is appended to the url on retries.
 *
 * @param {Object} statementContext
 * @param {Function} onResultAvailable invoked once the result is available.
 */
function sendRequestPreExec(statementContext, onResultAvailable)
{
  // get the request headers
  var headers = statementContext.resultRequestHeaders;
  // build the basic json for the request
  var json =
  {
    disableOfflineChunks : false,
    sqlText : statementContext.sqlText
  };
  // if binds are specified, build a binds map and include it in the request
  if (Util.exists(statementContext.binds))
  {
    json.bindings = buildBindsMap(statementContext.binds);
  }
  // include statement parameters if a value was specified
  if (Util.exists(statementContext.parameters))
  {
    json.parameters = statementContext.parameters;
  }
  // include the internal flag if a value was specified
  if (Util.exists(statementContext.internal))
  {
    json.isInternal = statementContext.internal;
  }
  // use the snowflake service to issue the request
  sendSfRequest(statementContext,
  {
    method : 'POST',
    headers : headers,
    url : Url.format(
    {
      pathname : '/queries/v1/query-request',
      search : QueryString.stringify(
      {
        requestId: statementContext.requestId
      })
    }),
    json: json,
    callback: buildResultRequestCallback(
      statementContext, headers, onResultAvailable)
  },
  true);
}
"resource": ""
} |
/**
 * Converts a bind variables array to the map format expected by the REST API.
 * Supports both regular binds and array (bulk) binds, where the input is an
 * array of rows; for array binds, the per-position types are derived from the
 * first row.
 *
 * @param {*[]} bindsArray
 *
 * @returns {Object} a map in which the keys are 1-based bind positions and
 *   the values are { type, value } descriptors.
 */
function buildBindsMap(bindsArray)
{
  var bindsMap = {};
  var isArrayBinding = bindsArray.length > 0 && Util.isArray(bindsArray[0]);
  // the row used to derive the logical type of each bind position
  var singleArray = isArrayBinding ? bindsArray[0] : bindsArray;

  // picks the appropriate logical data type based on a bind value
  var pickType = function(value)
  {
    if (Util.isBoolean(value))
    {
      return 'BOOLEAN';
    }
    if (Util.isObject(value) || Util.isArray(value))
    {
      return 'VARIANT';
    }
    if (Util.isNumber(value))
    {
      return 'REAL';
    }
    return 'TEXT';
  };

  // stringifies a bind value unless it's null or already a string; null is
  // left alone because the client might want to run something like
  //   sql text = update t set name = :1 where id = 1;, binds = [null]
  // and converting null to a string would bind the string 'null' instead of
  // SQL NULL
  var normalize = function(value)
  {
    return (value !== null && !Util.isString(value)) ?
      JSON.stringify(value) : value;
  };

  for (var index = 0, length = singleArray.length; index < length; index++)
  {
    var type = pickType(singleArray[index]);
    var value;
    if (isArrayBinding)
    {
      // collect the values at this position across all rows
      value = [];
      for (var rowIndex = 0; rowIndex < bindsArray.length; rowIndex++)
      {
        value.push(normalize(bindsArray[rowIndex][index]));
      }
    }
    else
    {
      value = normalize(singleArray[index]);
    }
    // bind positions are 1-based
    bindsMap[index + 1] =
    {
      type : type,
      value : value
    };
  }
  return bindsMap;
}
"resource": ""
} |
/**
 * Issues a request to fetch the result of a previously executed statement
 * (identified by its statement id).
 *
 * @param {Object} statementContext
 * @param {Function} onResultAvailable invoked once the result is available.
 */
function sendRequestPostExec(statementContext, onResultAvailable)
{
  var headers = statementContext.resultRequestHeaders;

  // build the result url for the statement
  var resultUrl = Url.format(
  {
    pathname: '/queries/' + statementContext.statementId + '/result',
    search : QueryString.stringify(
    {
      disableOfflineChunks: false
    })
  });

  // route the request through the snowflake service
  sendSfRequest(statementContext,
  {
    method : 'GET',
    headers: headers,
    url : resultUrl,
    callback: buildResultRequestCallback(
      statementContext, headers, onResultAvailable)
  });
}
"resource": ""
} |
/**
 * Issues a request through the snowflake service, retrying retryable http
 * errors (excluding HTTP 403) with decorrelated-jitter exponential backoff.
 *
 * @param {Object} statementContext
 * @param {Object} options request options (method, url, headers, json,
 *   callback).
 * @param {Boolean} [appendQueryParamOnRetry] if true, a 'retry=true' query
 *   parameter is appended to the url on every retry.
 */
function sendSfRequest(statementContext, options, appendQueryParamOnRetry)
{
  var sf = statementContext.services.sf;
  var connectionConfig = statementContext.connectionConfig;
  // clone the options so the caller's object is not mutated on retry
  options = Util.apply({}, options);
  // get the original url and callback
  var urlOrig = options.url;
  var callbackOrig = options.callback;
  var numRetries = 0;
  var maxNumRetries = connectionConfig.getRetrySfMaxNumRetries();
  var sleep = connectionConfig.getRetrySfStartingSleepTime();
  // create a function to send the request
  var sendRequest = function sendRequest()
  {
    // if this is a retry and a query parameter should be appended to the url on
    // retry, update the url
    if ((numRetries > 0) && appendQueryParamOnRetry)
    {
      options.url = Util.url.appendParam(urlOrig, 'retry', true);
    }
    sf.request(options);
  };
  // replace the specified callback with a new one that retries
  options.callback = function(err)
  {
    // if we haven't exceeded the maximum number of retries yet and the server
    // came back with a retryable error code
    if (numRetries < maxNumRetries &&
      err && Util.isRetryableHttpError(
        err.response, false // no retry for HTTP 403
      ))
    {
      // increment the retry count
      numRetries++;
      // use exponential backoff with decorrelated jitter to compute the
      // next sleep time.
      var cap = connectionConfig.getRetrySfMaxSleepTime();
      sleep = Util.nextSleepTime(1, cap, sleep);
      Logger.getInstance().debug(
        'Retrying statement with request id %s, retry count = %s',
        statementContext.requestId, numRetries);
      // wait the appropriate amount of time before retrying the request
      setTimeout(sendRequest, sleep * 1000);
    }
    else
    {
      // invoke the original callback
      callbackOrig.apply(this, arguments);
    }
  };
  // issue the request
  sendRequest();
}
"resource": ""
} |
/**
 * Builds a callback for result requests that transparently follows the
 * "result not ready yet" responses (codes '333333'/'333334'): it saves the
 * statement id from the response and keeps polling the provided result url
 * (reusing itself as the callback) until the result is available.
 *
 * @param {Object} statementContext
 * @param {Object} headers result request headers.
 * @param {Function} onResultAvailable invoked once the result is available
 *   (or a terminal error occurs).
 *
 * @returns {Function}
 */
function buildResultRequestCallback(
  statementContext, headers, onResultAvailable)
{
  var callback = function(err, body)
  {
    // if the result is not ready yet, extract the result url from the response
    // and issue a GET request to try to fetch the result again
    if (!err && body && (body.code === '333333' || body.code === '333334'))
    {
      // extract the statement id from the response and save it
      statementContext.statementId = body.data.queryId;
      // extract the result url from the response and try to get the result
      // again; this callback is reused so polling continues until the result
      // is ready
      sendSfRequest(statementContext,
      {
        method : 'GET',
        headers : headers,
        url : body.data.getResultUrl,
        callback : callback
      });
    }
    else
    {
      onResultAvailable.call(null, err, body);
    }
  };
  return callback;
}
"resource": ""
} |
/**
 * Creates a new Chunk: a contiguous window of rows in a statement result.
 * Small results are inlined in the response (no url, no compressed sizes);
 * large results reference a chunk file via 'url' plus request headers.
 *
 * @param {Object} options statement, services, startIndex, columns,
 *   mapColumnNameToIndices, statementParameters, resultVersion, rowCount and
 *   optionally compressedSize, uncompressedSize, url, chunkHeaders, rowset.
 *
 * @constructor
 */
function Chunk(options)
{
  // make sure the options object contains all the necessary information
  Errors.assertInternal(Util.isObject(options));
  Errors.assertInternal(Util.isObject(options.statement));
  Errors.assertInternal(Util.isObject(options.services));
  Errors.assertInternal(Util.isNumber(options.startIndex));
  Errors.assertInternal(Util.isArray(options.columns));
  Errors.assertInternal(Util.isObject(options.mapColumnNameToIndices));
  Errors.assertInternal(Util.isObject(options.statementParameters));
  Errors.assertInternal(Util.isString(options.resultVersion));
  Errors.assertInternal(Util.isNumber(options.rowCount));
  // if the result is small (i.e. not persisted on S3/Blob), there's no
  // compressed and uncompressed size, so default to -1
  this._compressedSize = options.compressedSize || -1;
  this._uncompressedSize = options.uncompressedSize || -1;
  // copy out other information from the options object and save it
  this._statement = options.statement;
  this._services = options.services;
  this._startIndex = options.startIndex;
  this._url = options.url;
  this._columns = options.columns;
  this._mapColumnNameToIndices = options.mapColumnNameToIndices;
  this._chunkHeaders = options.chunkHeaders;
  this._rowset = options.rowset;
  // use the start index and row count to compute the end index (inclusive)
  this._endIndex = this._startIndex + options.rowCount - 1;
  // use the start and end index to build an id for this chunk
  this._id = buildId(this._startIndex, this._endIndex);
}
"resource": ""
} |
/**
 * Invoked when a chunk finishes loading: clears the loading flag, emits the
 * 'loadcomplete' event to subscribers, and invokes the optional callback.
 *
 * NOTE(review): relies on closure state (self, callback) from the enclosing
 * chunk load implementation not visible here.
 *
 * @param {Error} err the error that occurred during the load, if any.
 */
function(err)
{
  // we're done loading
  self._isLoading = false;
  // emit an event to notify subscribers
  self.emit('loadcomplete', err, self);
  // invoke the callback if one was specified
  if (Util.isFunction(callback))
  {
    callback(err, self);
  }
}
"resource": ""
} | |
/**
 * Converts a raw rowset (an array of value arrays) into an array of row
 * objects that expose getRowIndex(), getStatement(), getColumnValue() and
 * getColumnValueAsString(). The row methods are shared closures, so only one
 * set of functions is created per rowset.
 *
 * @param {Object} statement the statement that produced the rowset.
 * @param {Number} startIndex the result-wide index of the first row.
 * @param {Array} rowset the raw rows.
 * @param {Object[]} columns the result columns.
 * @param {Object} mapColumnNameToIndices maps column names to index arrays.
 *
 * @returns {Object[]} the processed rows.
 */
function convertRowsetToRows(
  statement,
  startIndex,
  rowset,
  columns,
  mapColumnNameToIndices)
{
  // assert that rowset and columns are arrays
  Errors.assertInternal(Util.isArray(rowset));
  Errors.assertInternal(Util.isArray(columns));
  ///////////////////////////////////////////////////////////////////////////
  ////  Create functions that will be used as row methods                ////
  ///////////////////////////////////////////////////////////////////////////
  /**
   * Returns the index of this row in the result.
   *
   * @returns {Number}
   */
  var getRowIndex = function()
  {
    return this.rowIndex;
  };
  /**
   * Returns the statement that produced this row.
   *
   * @returns {*}
   */
  var getStatement = function getStatement()
  {
    return statement;
  };
  /**
   * Returns the value of a column.
   *
   * @param {String | Number} columnIdentifier this can be either the column
   *   name or the column index.
   *
   * @returns {*}
   */
  var getColumnValue = function getColumnValue(columnIdentifier)
  {
    // resolve the column identifier to the correct column if possible
    var column = resolveColumnIdentifierToColumn(
      columns, columnIdentifier, mapColumnNameToIndices);
    return column ? column.getRowValue(this) : undefined;
  };
  /**
   * Returns the value of a column as a String.
   *
   * @param {String | Number} columnIdentifier this can be either the column
   *   name or the column index.
   *
   * @returns {*}
   */
  var getColumnValueAsString = function getColumnValueAsString(columnIdentifier)
  {
    // resolve the column identifier to the correct column if possible
    var column = resolveColumnIdentifierToColumn(
      columns, columnIdentifier, mapColumnNameToIndices);
    return column ? column.getRowValueAsString(this) : undefined;
  };
  ///////////////////////////////////////////////////////////////////////////
  ////  Convert the rowset to an array of row objects                    ////
  ///////////////////////////////////////////////////////////////////////////
  // create a new array to store the processed rows
  var length = rowset.length;
  var rows = new Array(length);
  for (var index = 0; index < length; index++)
  {
    // add a new item to the rows array; _arrayProcessedColumns caches which
    // column values have already been converted for this row
    rows[index] =
    {
      _arrayProcessedColumns : [],
      values : rowset[index],
      rowIndex : startIndex + index,
      getRowIndex : getRowIndex,
      getStatement : getStatement,
      getColumnValue : getColumnValue,
      getColumnValueAsString : getColumnValueAsString
    };
  }
  return rows;
}
"resource": ""
} |
/**
 * Returns the value of a column for this row.
 *
 * @param {String | Number} columnIdentifier the column name or column index.
 *
 * @returns {*} the column value, or undefined if the identifier doesn't
 *   resolve to a column.
 */
function getColumnValue(columnIdentifier)
{
  // map the identifier to a column, if possible
  var column = resolveColumnIdentifierToColumn(
    columns, columnIdentifier, mapColumnNameToIndices);
  if (!column)
  {
    return undefined;
  }
  return column.getRowValue(this);
}
"resource": ""
} |
/**
 * Returns the value of a column for this row as a String.
 *
 * @param {String | Number} columnIdentifier the column name or column index.
 *
 * @returns {*} the string form of the column value, or undefined if the
 *   identifier doesn't resolve to a column.
 */
function getColumnValueAsString(columnIdentifier)
{
  // map the identifier to a column, if possible
  var column = resolveColumnIdentifierToColumn(
    columns, columnIdentifier, mapColumnNameToIndices);
  if (!column)
  {
    return undefined;
  }
  return column.getRowValueAsString(this);
}
"resource": ""
} |
/**
 * Resolves a column identifier -- either a column name or a column index --
 * to the corresponding column.
 *
 * @param {Object[]} columns the result columns.
 * @param {String | Number} columnIdentifier the column name or index.
 * @param {Object} mapColumnNameToIndices maps column names to index arrays.
 *
 * @returns {Object} the matching column, or undefined if the identifier
 *   doesn't resolve to a valid column.
 */
function resolveColumnIdentifierToColumn(
  columns, columnIdentifier, mapColumnNameToIndices)
{
  var columnIndex;
  if (Util.isNumber(columnIdentifier))
  {
    // numbers are treated as column indices
    columnIndex = columnIdentifier;
  }
  else if (Util.isString(columnIdentifier) &&
    mapColumnNameToIndices.hasOwnProperty(columnIdentifier))
  {
    // strings are treated as column names; resolve to the first column
    // carrying the specified name
    columnIndex = mapColumnNameToIndices[columnIdentifier][0];
  }
  // an unresolved identifier yields columns[undefined] === undefined
  return columns[columnIndex];
}
"resource": ""
} |
/**
 * Creates a new LargeResultSetService: downloads (and gunzips) result chunk
 * objects from S3/Blob, retrying retryable failures with decorrelated-jitter
 * exponential backoff.
 *
 * @param {Object} connectionConfig the connection configuration.
 * @param {Object} httpClient the http client used to issue requests.
 *
 * @constructor
 */
function LargeResultSetService(connectionConfig, httpClient)
{
  // validate input
  Errors.assertInternal(Util.isObject(connectionConfig));
  Errors.assertInternal(Util.isObject(httpClient));

  /**
   * Returns true if the given failure is worth retrying.
   *
   * @param {Object} response
   * @param {Error} err
   *
   * @returns {Boolean}
   */
  function isRetryableError(response, err)
  {
    // https://aws.amazon.com/articles/1904 (Handling Errors)
    // Note: 403's are retried because of a bug in S3/Blob
    return Util.isRetryableHttpError(
      response, true // retry HTTP 403
    ) || err && err.code === "ECONNRESET";
  }
  /**
   * Issues a request to get an object from S3/Blob.
   *
   * @param {Object} options request options (url, headers, maxNumRetries,
   *   callback).
   */
  this.getObject = function getObject(options)
  {
    var numRetries = 0, sleep = 1;
    // get the maximum number of retries
    var maxNumRetries = options.maxNumRetries;
    if (!Util.exists(maxNumRetries))
    {
      maxNumRetries = connectionConfig.getRetryLargeResultSetMaxNumRetries();
    }
    Errors.assertInternal(Util.isNumber(maxNumRetries) && maxNumRetries >= 0);
    // invoked when the request completes
    var callback = function callback(err, response, body)
    {
      if (err)
      {
        // if we haven't exceeded the maximum number of retries yet and the
        // server came back with a retryable error code.
        if (numRetries < maxNumRetries && isRetryableError(response, err))
        {
          // increment the number of retries
          numRetries++;
          // use exponential backoff with decorrelated jitter to compute the
          // next sleep time:
          var cap = connectionConfig.getRetryLargeResultSetMaxSleepTime();
          sleep = Util.nextSleepTime(1, cap, sleep);
          // wait the appropriate amount of time before retrying the request
          setTimeout(sendRequest, sleep * 1000);
          // FIX: a retry is pending, so don't fall through and invoke the
          // caller's callback with the transient error; previously the
          // callback was invoked once per retryable failure AND again when
          // the retry eventually completed
          return;
        }
        // wrap the terminal error into a network error
        err = Errors.createNetworkError(
          ErrorCodes.ERR_LARGE_RESULT_SET_NETWORK_COULD_NOT_CONNECT, err);
      }
      // if the response contains xml, build an S3/Blob error from the response
      else if (response.getResponseHeader('Content-Type') ===
        'application/xml')
      {
        err = Errors.createLargeResultSetError(
          ErrorCodes.ERR_LARGE_RESULT_SET_RESPONSE_FAILURE, response);
      }
      // if we have an error, clear the body
      if (err)
      {
        body = null;
      }
      // if a callback was specified, invoke it
      if (Util.isFunction(options.callback))
      {
        options.callback(err, body);
      }
    };
    var sendRequest = function sendRequest()
    {
      // issue a request to get the object from S3/Blob
      httpClient.request(
      {
        method: 'GET',
        url: options.url,
        headers: options.headers,
        gzip: true, // gunzip the response
        appendRequestId: false,
        callback: callback
      });
    };
    sendRequest();
  };
}
"resource": ""
} |
/**
 * Invoked when a request to fetch an object from S3/Blob completes. Retries
 * retryable failures with decorrelated-jitter backoff, converts terminal
 * failures into network / large-result-set errors, and invokes the caller's
 * callback.
 *
 * NOTE(review): relies on closure state (numRetries, maxNumRetries, sleep,
 * sendRequest, options, connectionConfig, isRetryableError) from the
 * enclosing getObject() implementation.
 *
 * NOTE(review): on the retryable path, execution falls through to the bottom
 * and invokes options.callback with the transient error even though a retry
 * has been scheduled -- this looks like a missing 'return' after setTimeout;
 * confirm.
 *
 * @param {Error} err the request error, if any.
 * @param {Object} response the http response.
 * @param {String} body the response body.
 */
function callback(err, response, body)
{
  if (err)
  {
    // if we haven't exceeded the maximum number of retries yet and the
    // server came back with a retryable error code.
    if (numRetries < maxNumRetries && isRetryableError(response, err))
    {
      // increment the number of retries
      numRetries++;
      // use exponential backoff with decorrelated jitter to compute the
      // next sleep time:
      var cap = connectionConfig.getRetryLargeResultSetMaxSleepTime();
      sleep = Util.nextSleepTime(1, cap, sleep);
      // wait the appropriate amount of time before retrying the request
      setTimeout(sendRequest, sleep * 1000);
    }
    else
    {
      // wrap the error into a network error
      err = Errors.createNetworkError(
        ErrorCodes.ERR_LARGE_RESULT_SET_NETWORK_COULD_NOT_CONNECT, err);
    }
  }
  // if the response contains xml, build an S3/Blob error from the response
  else if (response.getResponseHeader('Content-Type') ===
    'application/xml')
  {
    err = Errors.createLargeResultSetError(
      ErrorCodes.ERR_LARGE_RESULT_SET_RESPONSE_FAILURE, response);
  }
  // if we have an error, clear the body
  if (err)
  {
    body = null;
  }
  // if a callback was specified, invoke it
  if (Util.isFunction(options.callback))
  {
    options.callback(err, body);
  }
}
"resource": ""
} |
/**
 * Converts a raw DATE column value (days since epoch) to a Snowflake Date.
 *
 * @param {String} rawColumnValue the raw value (a number of days).
 * @param {Object} column the column the value belongs to.
 * @param {Object} context the conversion context.
 *
 * @returns {Object} a Snowflake Date.
 */
function convertRawDate(rawColumnValue, column, context)
{
  // the raw value is a day count; convert it to epoch seconds
  var epochSeconds = Number(rawColumnValue) * 86400;
  var timestamp = new SfTimestamp(
    epochSeconds,
    0,      // no nano seconds
    0,      // no scale required
    'UTC',  // dates carry no timezone, so use UTC
    context.format);
  return timestamp.toSfDate();
}
"resource": ""
} |
/**
 * Converts a raw TIME column value to a Snowflake Time.
 *
 * @param {String} rawColumnValue the raw value (seconds, possibly
 *   fractional).
 * @param {Object} column the column the value belongs to.
 * @param {Object} context the conversion context.
 *
 * @returns {Object} a Snowflake Time.
 */
function convertRawTime(rawColumnValue, column, context)
{
  var scale = column.getScale();
  // scale the raw value up to integral fractional-second units; the numbers
  // involved might be big, so do the arithmetic with BigNumber
  var fracSecondsBig =
    new BigNumber(rawColumnValue).times(Math.pow(10, scale));
  // times carry no timezone, so use UTC
  var timestamp = convertRawTimestampHelper(
    fracSecondsBig, scale, 'UTC', context.format);
  return timestamp.toSfTime();
}
"resource": ""
} |
/**
 * Converts a raw TIMESTAMP_LTZ column value to a Snowflake Date rendered in
 * the session's local timezone.
 *
 * @param {String} rawColumnValue the raw value (epoch seconds, possibly
 *   fractional).
 * @param {Object} column the column the value belongs to.
 * @param {Object} context the conversion context.
 *
 * @returns {Object} a Snowflake Date.
 */
function convertRawTimestampLtz(rawColumnValue, column, context)
{
  var scale = column.getScale();
  // scale the raw epoch value up to integral fractional-second units; the
  // numbers involved might be big, so do the arithmetic with BigNumber
  var fracSecondsBig =
    new BigNumber(rawColumnValue).times(Math.pow(10, scale));
  // render the timestamp in the session timezone
  var sessionTimezone = context.statementParameters['TIMEZONE'];
  var timestamp = convertRawTimestampHelper(
    fracSecondsBig, scale, sessionTimezone, context.format);
  return timestamp.toSfDate();
}
"resource": ""
} |
/**
 * Converts a raw TIMESTAMP_TZ column value to a Snowflake Date. The raw
 * encoding depends on the result version: in version '0' (or when no version
 * is available), the timezone is packed into the low 14 bits of the scaled
 * value; in later versions the raw value is '<seconds> <timezone index>'.
 *
 * @param {String} rawColumnValue
 * @param {Object} column
 * @param {Object} context
 *
 * @returns {Object} a Snowflake Date.
 */
function convertRawTimestampTz(rawColumnValue, column, context)
{
  var valFracSecsBig;
  var valFracSecsWithTzBig;
  var timezoneBig;
  var timezone;
  var timestampAndTZIndex;
  // compute the scale factor
  var columnScale = column.getScale();
  var scaleFactor = Math.pow(10, columnScale);
  var resultVersion = context.resultVersion;
  if (resultVersion === '0' || resultVersion === undefined)
  {
    // the values might be big so use BigNumber to do arithmetic
    valFracSecsBig =
      new BigNumber(rawColumnValue).times(scaleFactor);
    // for _tz, the timezone is baked into the value
    valFracSecsWithTzBig = valFracSecsBig;
    // extract everything but the lowest 14 bits to get the fractional
    // seconds (16384 === 2^14)
    valFracSecsBig =
      valFracSecsWithTzBig.dividedBy(16384).floor();
    // extract the lowest 14 bits to get the timezone; for negative values,
    // add 16384 so the modulo result is non-negative
    if (valFracSecsWithTzBig.greaterThanOrEqualTo(0))
    {
      timezoneBig = valFracSecsWithTzBig.modulo(16384);
    }
    else
    {
      timezoneBig =
        valFracSecsWithTzBig.modulo(16384).plus(16384);
    }
  }
  else
  {
    // split the value into number of seconds and timezone index
    timestampAndTZIndex = rawColumnValue.split(' ');
    // the values might be big so use BigNumber to do arithmetic
    valFracSecsBig =
      new BigNumber(timestampAndTZIndex[0]).times(scaleFactor);
    timezoneBig = new BigNumber(timestampAndTZIndex[1]);
  }
  timezone = timezoneBig.toNumber();
  // assert that the encoded timezone is valid: [0, 2880] minutes spans a
  // [-24h, +24h] offset once re-centered below
  Errors.assertInternal(timezone >= 0 && timezone <= 2880);
  // subtract 24 hours (1440 minutes) from the timezone to map [0, 2880] to
  // [-1440, 1440]
  timezone = timezone - 1440;
  // create a new snowflake date
  return convertRawTimestampHelper(
    valFracSecsBig,
    columnScale,
    timezone,
    context.format).toSfDate();
}
"resource": ""
} |
/**
 * Converts a raw VARIANT column value (a JSON-like text representation) to a
 * JavaScript value.
 *
 * @param {String} rawColumnValue
 * @param {Object} column
 * @param {Object} context
 *
 * @returns {*} the parsed value, or undefined for a null/empty input.
 */
function convertRawVariant(rawColumnValue, column, context)
{
  var ret;
  // if the input is a non-empty string, convert it to a json object
  if (Util.string.isNotNullOrEmpty(rawColumnValue))
  {
    try
    {
      // SECURITY NOTE(review): eval() on server-provided text will execute
      // any JavaScript embedded in the value; presumably eval is used instead
      // of JSON.parse() to tolerate non-strict JSON -- confirm, and consider
      // a safer parser
      ret = eval("(" + rawColumnValue + ")");
    }
    catch (parseError)
    {
      // TODO: log the error
      // throw the error
      throw parseError;
    }
  }
  return ret;
}
"resource": ""
} |
/**
 * Converts a raw BINARY column value (an uppercase hex string) to a Buffer.
 * The returned Buffer is augmented with toStringSf() and getFormat() methods
 * that honor the session's binary output format (HEX or BASE64).
 *
 * @param {String} rawColumnValue the hex-encoded binary data.
 * @param {Object} column the column the value belongs to.
 * @param {Object} context the conversion context; context.format is the
 *   binary output format.
 *
 * @returns {Buffer}
 */
function convertRawBinary(rawColumnValue, column, context)
{
  // ensure the format is one we know how to render
  var format = context.format.toUpperCase();
  Errors.assertInternal(format === "HEX" || format === "BASE64");

  // the server always sends binary data hex-encoded
  var buffer = Buffer.from(rawColumnValue, "HEX");

  buffer.getFormat = function()
  {
    return format;
  };

  if (format === "BASE64")
  {
    buffer.toStringSf = function()
    {
      return this.toString("BASE64");
    };
  }
  else
  {
    buffer.toStringSf = function()
    {
      // the raw value is already an uppercase hex string, so return it
      // directly; buffer.toString("HEX") would produce lowercase
      return rawColumnValue;
    };
  }

  return buffer;
}
"resource": ""
} |
/**
 * Extracts a column value from a row, converting the raw value on first
 * access and caching the converted value back into row.values. Invoked with
 * the column as 'this' (uses this.getIndex() and this.isVariant()).
 *
 * @param {Object} row the row to extract from; uses row.values and the
 *   row._arrayProcessedColumns cache.
 * @param {Object} context the conversion context (convert/toValue/toString).
 * @param {Boolean} asString true to extract the string form of the value.
 *
 * @returns {*}
 */
function extractFromRow(row, context, asString)
{
  var map = row._arrayProcessedColumns;
  var values = row.values;
  // get the value
  var columnIndex = this.getIndex();
  var ret = values[columnIndex];
  // if we want the value as a string, and the column is of type variant, and we
  // haven't already processed the value before, we don't need to process the
  // value, so only process if none of the aforementioned conditions are true
  // (i.e. the raw text of an unprocessed variant is already its string form)
  if (!(asString && this.isVariant() && !map[columnIndex]))
  {
    // if the column value has not been processed yet, process it, put it back
    // in the values array, and remember that the value has been processed
    if (!map[columnIndex])
    {
      if (ret !== null)
      {
        ret = values[columnIndex] =
          context.convert(values[columnIndex], this, context);
      }
      map[columnIndex] = true;
    }
    // use the appropriate extraction function depending on whether
    // we want the value or a string representation of the value
    var extractFn = !asString ? context.toValue : context.toString;
    ret = extractFn(ret);
  }
  return ret;
}
"resource": ""
} |
/**
 * Creates a new Result from a statement response: extracts the statement id,
 * version and row counts, builds chunk headers (from the qrmk if necessary),
 * converts the rowtype array to columns, and creates the chunks and the
 * chunk cache.
 *
 * @param {Object} options an object with the following properties:
 *   response         - the response body returned by the server.
 *   statement        - the statement that produced this result.
 *   services         - the container services.
 *   connectionConfig - the connection configuration.
 *
 * @constructor
 */
function Result(options)
{
  var data;
  var chunkHeaders;
  var parametersMap;
  var parametersArray;
  var length;
  var index;
  var parameter;
  var mapColumnNameToIndices;
  var columns;
  var column;
  // FIX: 'version' was previously assigned below without being declared,
  // leaking an implicit global (and throwing a ReferenceError in strict mode)
  var version;
  // assert that options is a valid object that contains a response, statement,
  // services and connection config
  Errors.assertInternal(Util.isObject(options));
  Errors.assertInternal(Util.isObject(options.response));
  Errors.assertInternal(Util.isObject(options.statement));
  Errors.assertInternal(Util.isObject(options.services));
  Errors.assertInternal(Util.isObject(options.connectionConfig));
  // save the statement, services and connection config
  this._statement = options.statement;
  this._services = options.services;
  this._connectionConfig = options.connectionConfig;
  data = options.response.data;
  this._statementId = data.queryId;
  this._version = version = String(data.version); // don't rely on the version being a number
  this._returnedRows = data.returned;
  this._totalRows = data.total;
  this._statementTypeId = data.statementTypeId;
  // if no chunk headers were specified, but a query-result-master-key (qrmk)
  // was specified, build the chunk headers from the qrmk
  chunkHeaders = data.chunkHeaders;
  if (!Util.isObject(chunkHeaders) && Util.isString(data.qrmk))
  {
    chunkHeaders =
    {
      'x-amz-server-side-encryption-customer-algorithm': 'AES256',
      'x-amz-server-side-encryption-customer-key': data.qrmk
    };
  }
  this._chunkHeaders = chunkHeaders;
  // build a session state object from the response data; this can be used to
  // get the values of the current role, current warehouse, current database,
  // etc.
  this._sessionState = createSessionState(data);
  // convert the parameters array to a map
  parametersMap = {};
  parametersArray = data.parameters;
  for (index = 0, length = parametersArray.length; index < length; index++)
  {
    parameter = parametersArray[index];
    parametersMap[parameter.name] = parameter.value;
  }
  // save the parameters array
  this._parametersArray = parametersArray;
  // TODO: add timezone related information to columns
  // create columns from the rowtype array returned in the result
  var rowtype = data.rowtype;
  var numColumns = rowtype.length;
  this._columns = columns = new Array(numColumns);
  // convert the rowtype array to an array of columns and build an inverted
  // index map in which the keys are the column names and the values are the
  // indices of the columns with the corresponding names
  this._mapColumnNameToIndices = mapColumnNameToIndices = {};
  for (index = 0; index < numColumns; index++)
  {
    // create a new column and add it to the columns array
    columns[index] = column =
      new Column(rowtype[index], index, parametersMap, version);
    // if we don't already have an index array for a column with this name,
    // create a new one, otherwise just append to the existing array of indices
    mapColumnNameToIndices[column.getName()] =
      mapColumnNameToIndices[column.getName()] || [];
    mapColumnNameToIndices[column.getName()].push(index);
  }
  // create chunks
  this._chunks = createChunks(
    data.chunks,
    data.rowset,
    this._columns,
    this._mapColumnNameToIndices,
    this._chunkHeaders,
    parametersMap,
    this._version,
    this._statement,
    this._services);
  // create a chunk cache and save a reference to it in case we need to
  // TODO: should we be clearing the cache at some point, e.g. when the result
  // is destroyed?
  this._chunkCache = createChunkCache(
    this._chunks,
    this._connectionConfig.getResultChunkCacheSize());
}
"resource": ""
} |
/**
 * Builds a session-state accessor from a statement response. The current
 * role, warehouse, database provider, database and schema are snapshotted at
 * creation time.
 *
 * @param {Object} responseData the response data to snapshot.
 *
 * @returns {Object} an object exposing getters for the session state values.
 */
function createSessionState(responseData)
{
  // snapshot the relevant values from the response so later mutations of
  // responseData don't affect the accessors
  var snapshot =
  {
    role             : responseData.finalRoleName,
    warehouse        : responseData.finalWarehouseName,
    databaseProvider : responseData.databaseProvider,
    database         : responseData.finalDatabaseName,
    schema           : responseData.finalSchemaName
  };
  return {
    getCurrentRole: function()
    {
      return snapshot.role;
    },
    getCurrentWarehouse: function()
    {
      return snapshot.warehouse;
    },
    getCurrentDatabaseProvider: function()
    {
      return snapshot.databaseProvider;
    },
    getCurrentDatabase: function()
    {
      return snapshot.database;
    },
    getCurrentSchema: function()
    {
      return snapshot.schema;
    }
  };
}
"resource": ""
} |
/**
 * Builds the Chunk instances that make up a result. If some rows were
 * returned inline (or no chunk configs were returned at all), a first chunk
 * is fabricated from the inline rowset.
 *
 * @param {Object[]} chunkCfgs the chunk config objects from the response.
 * @param {Array} rowset the inline rows from the response.
 * @param {Object[]} columns the result columns.
 * @param {Object} mapColumnNameToIndices maps column names to index arrays.
 * @param {Object} chunkHeaders headers to use when fetching remote chunks.
 * @param {Object} statementParameters
 * @param {String} resultVersion
 * @param {Object} statement
 * @param {Object} services
 *
 * @returns {Object[]} the array of Chunk instances.
 */
function createChunks(chunkCfgs,
                      rowset,
                      columns,
                      mapColumnNameToIndices,
                      chunkHeaders,
                      statementParameters,
                      resultVersion,
                      statement,
                      services)
{
  var chunks;
  var startIndex;
  var length;
  var index;
  var chunkCfg;
  // if we don't have any chunks, or if some records were returned inline,
  // fabricate a config object for the first chunk
  chunkCfgs = chunkCfgs || [];
  // FIX: the original condition was '!chunkCfgs || rowset.length > 0', which
  // is always reducible to 'rowset.length > 0' after the '|| []'
  // normalization above; check the length instead so a result with no chunk
  // configs still gets a (possibly empty) first chunk, as intended
  if (chunkCfgs.length === 0 || rowset.length > 0)
  {
    chunkCfgs.unshift(
    {
      rowCount: rowset.length,
      url: null,
      rowset: rowset
    });
  }
  chunks = new Array(chunkCfgs.length);
  // loop over the chunk config objects and build Chunk instances out of them
  startIndex = 0;
  length = chunkCfgs.length;
  for (index = 0; index < length; index++)
  {
    chunkCfg = chunkCfgs[index];
    // augment the chunk config object with additional information
    chunkCfg.statement = statement;
    chunkCfg.services = services;
    chunkCfg.startIndex = startIndex;
    chunkCfg.columns = columns;
    chunkCfg.mapColumnNameToIndices = mapColumnNameToIndices;
    chunkCfg.chunkHeaders = chunkHeaders;
    chunkCfg.statementParameters = statementParameters;
    chunkCfg.resultVersion = resultVersion;
    // increment the start index for the next chunk
    startIndex += chunkCfg.rowCount;
    // create a new Chunk from the config object, and add it to the chunks array
    chunks[index] = new Chunk(chunkCfg);
  }
  return chunks;
}
"resource": ""
} |
/**
 * 'data' handler for the result stream used by each(): receives the chunk
 * just produced by the stream, slices out the rows that fall inside the
 * requested [start, end] window, and pushes them through the caller's
 * each() callback in batches, yielding to the event loop between batches.
 *
 * NOTE(review): depends on closure state declared outside this block
 * (options, context, resultStream, start, end) — verify against the
 * enclosing function.
 */
function(chunk)
{
  // get all the rows in the current chunk that overlap with the requested
  // window
  var chunkStart = chunk.getStartIndex();
  var chunkEnd = chunk.getEndIndex();
  var rows = chunk.getRows().slice(
    Math.max(chunkStart, start) - chunkStart,
    Math.min(chunkEnd, end) + 1 - chunkStart);

  var rowIndex = 0;
  var rowsLength = rows.length;

  // create a function that can be called to batch-process rows
  var processRows = function()
  {
    // get the start position and start time
    var startIndex = rowIndex;
    var startTime = Date.now();
    var each = options.each;

    while (rowIndex < rowsLength)
    {
      // invoke the each() callback on the current row
      var ret = each(rows[rowIndex++]);
      context.numRowsProcessed++;

      // if the callback returned false, stop processing rows
      // (stoppedProcessingRows is function-scoped via var hoisting; it stays
      // undefined unless this branch runs)
      if (ret === false)
      {
        var stoppedProcessingRows = true;
        break;
      }

      // use the current position and current time to check if we've been
      // processing rows for too long; if so, leave the rest for the next
      // tick of the event loop
      if ((rowIndex - startIndex) >= context.rowBatchSize &&
          (Date.now() - startTime) > context.rowBatchDuration)
      {
        process.nextTick(processRows);
        break;
      }
    }

    // if there are no more rows for us to process in this chunk
    if (!(rowIndex < rowsLength) || stoppedProcessingRows)
    {
      // if we exhausted all the rows in this chunk and we haven't yet
      // processed all the rows we want to process, ask the result stream to
      // do another read
      if (!(rowIndex < rowsLength) &&
          context.numRowsProcessed !== context.maxNumRowsToProcess)
      {
        resultStream.read();
      }
      else
      {
        // we've either processed all the rows we wanted to process or we
        // were told to stop processing rows by the each() callback; either
        // way, close the result stream to complete the operation
        resultStream.asyncClose();
      }
    }
  };

  // start processing rows
  processRows();
}
"resource": ""
} | |
/**
 * Batch worker that drains the current row window: feeds rows to the
 * caller's each() callback, reschedules itself via process.nextTick() once a
 * batch has run for at least context.rowBatchSize rows AND longer than
 * context.rowBatchDuration ms, and, when the rows are exhausted or each()
 * returned false, either asks the result stream for another read or closes
 * it.
 *
 * NOTE(review): depends on closure state declared outside this block
 * (rows, rowIndex, rowsLength, options, context, resultStream).
 */
function()
{
  // get the start position and start time
  var startIndex = rowIndex;
  var startTime = Date.now();
  var each = options.each;

  while (rowIndex < rowsLength)
  {
    // invoke the each() callback on the current row
    var ret = each(rows[rowIndex++]);
    context.numRowsProcessed++;

    // if the callback returned false, stop processing rows
    // (stoppedProcessingRows is function-scoped via var hoisting; it stays
    // undefined unless this branch runs)
    if (ret === false)
    {
      var stoppedProcessingRows = true;
      break;
    }

    // use the current position and current time to check if we've been
    // processing rows for too long; if so, leave the rest for the next
    // tick of the event loop
    if ((rowIndex - startIndex) >= context.rowBatchSize &&
        (Date.now() - startTime) > context.rowBatchDuration)
    {
      process.nextTick(processRows);
      break;
    }
  }

  // if there are no more rows for us to process in this chunk
  if (!(rowIndex < rowsLength) || stoppedProcessingRows)
  {
    // if we exhausted all the rows in this chunk and we haven't yet
    // processed all the rows we want to process, ask the result stream to
    // do another read
    if (!(rowIndex < rowsLength) &&
        context.numRowsProcessed !== context.maxNumRowsToProcess)
    {
      resultStream.read();
    }
    else
    {
      // we've either processed all the rows we wanted to process or we
      // were told to stop processing rows by the each() callback; either
      // way, close the result stream to complete the operation
      resultStream.asyncClose();
    }
  }
}
"resource": ""
} | |
/**
 * Returns the subset of chunks that overlap with the window defined by the
 * specified start and end row indexes. The chunks array is assumed to be
 * sorted by row range, so the result is the contiguous run that begins with
 * the first overlapping chunk.
 *
 * @param {Chunk[]} chunks - chunks sorted by row range
 * @param {Number} windowStart - the first row index of the window
 * @param {Number} windowEnd - the last row index of the window
 *
 * @returns {Chunk[]} the overlapping chunks; empty when none overlap
 */
function findOverlappingChunks(chunks, windowStart, windowEnd)
{
  var overlappingChunks = [];

  if (chunks.length !== 0)
  {
    // get the index of the first chunk that overlaps with the specified
    // window; this is -1 when no chunk overlaps
    var index = findFirstOverlappingChunk(chunks, windowStart, windowEnd);

    // BUG FIX: the -1 "no overlap" sentinel previously flowed straight into
    // the loop below, where chunks[-1].overlapsWithWindow() threw a
    // TypeError; skip the scan entirely in that case
    if (index >= 0)
    {
      // iterate over the chunks starting with the first overlapping chunk
      // and keep going until there's no overlap
      for (var length = chunks.length; index < length; index++)
      {
        var chunk = chunks[index];
        if (chunk.overlapsWithWindow(windowStart, windowEnd))
        {
          overlappingChunks.push(chunk);
        }
        else
        {
          // no future chunks will overlap because the chunks array is sorted
          break;
        }
      }
    }
  }

  return overlappingChunks;
}
"resource": ""
} |
/**
 * Binary-searches a sorted array of chunks for the index of the FIRST chunk
 * that overlaps with the window [windowStartIndex, windowEndIndex].
 *
 * @param {Chunk[]} chunks - chunks sorted by row range
 * @param {Number} windowStartIndex - the first row index of the window
 * @param {Number} windowEndIndex - the last row index of the window
 *
 * @returns {Number} the index of the first overlapping chunk, or -1 when no
 *   chunk overlaps the window
 */
function findFirstOverlappingChunk(chunks, windowStartIndex, windowEndIndex)
{
  // recursive worker that narrows the search to [chunkIndexLeft,
  // chunkIndexRight]
  var helper = function(chunks,
                        chunkIndexLeft,
                        chunkIndexRight,
                        windowStartIndex,
                        windowEndIndex)
  {
    var result;
    var chunkIndexMiddle;
    var middleChunk;
    var middleChunkEndIndex;

    // initialize the return value to -1
    result = -1;

    // compute the index of the middle chunk and get the middle chunk
    chunkIndexMiddle = Math.floor((chunkIndexLeft + chunkIndexRight) / 2);
    middleChunk = chunks[chunkIndexMiddle];

    // base case: if we have two or fewer chunks left, check them directly
    if ((chunkIndexMiddle === chunkIndexLeft) ||
        (chunkIndexMiddle === chunkIndexRight))
    {
      // if we have just one chunk, and it overlaps with the specified window,
      // we've found the chunk we were looking for
      if (chunkIndexLeft === chunkIndexRight)
      {
        if (middleChunk.overlapsWithWindow(windowStartIndex, windowEndIndex))
        {
          result = chunkIndexLeft;
        }
      }
      else // we just have two chunks left to check
      {
        // if the first chunk overlaps with the specified window, that's the
        // chunk we were looking for
        if (chunks[chunkIndexLeft].overlapsWithWindow(
            windowStartIndex, windowEndIndex))
        {
          result = chunkIndexLeft;
        }
        // otherwise, if the second chunk overlaps with the specified window,
        // that's the chunk we were looking for
        else if (chunks[chunkIndexRight].overlapsWithWindow(
            windowStartIndex, windowEndIndex))
        {
          result = chunkIndexRight;
        }
      }

      return result;
    }

    // if the middle chunk does not overlap with the specified window
    if (!middleChunk.overlapsWithWindow(windowStartIndex, windowEndIndex))
    {
      middleChunkEndIndex = middleChunk.getEndIndex();

      // if the window is to the right of the middle chunk,
      // recurse on the right half; the base case above guarantees
      // middle != left here, so the range always shrinks
      if (windowStartIndex > middleChunkEndIndex)
      {
        return helper(
          chunks,
          chunkIndexMiddle,
          chunkIndexRight,
          windowStartIndex,
          windowEndIndex);
      }
      else
      {
        // recurse on the left half
        return helper(
          chunks,
          chunkIndexLeft,
          chunkIndexMiddle,
          windowStartIndex,
          windowEndIndex);
      }
    }
    else
    {
      // if the middle chunk overlaps but the chunk before it does not, the
      // middle chunk is the one we were looking
      if ((chunkIndexMiddle === 0) ||
          !chunks[chunkIndexMiddle - 1].overlapsWithWindow(
            windowStartIndex, windowEndIndex))
      {
        return chunkIndexMiddle;
      }
      else
      {
        // the middle chunk overlaps but so does an earlier one;
        // recurse on the left half to find the first
        return helper(
          chunks,
          chunkIndexLeft,
          chunkIndexMiddle,
          windowStartIndex,
          windowEndIndex);
      }
    }
  };

  return helper(chunks, 0, chunks.length - 1, windowStartIndex, windowEndIndex);
}
"resource": ""
} |
q25065 | train | function(state, transitionContext)
{
// this check is necessary to make sure we don't re-enter a transient state
// like Renewing when we're already in it
if (currentState !== state)
{
// if we have a current state, exit it; the null check is necessary
// because the currentState is undefined at bootstrap time when we
// transition to the first state
if (currentState)
{
currentState.exit();
}
// update the current state
currentState = state;
// enter the new state
currentState.enter(transitionContext);
}
} | javascript | {
"resource": ""
} | |
/**
 * Issues an HTTP request through the given http client and normalizes the
 * outcome before invoking the caller's callback: network failures, non-200
 * responses, unparsable JSON bodies and { success: false } payloads are all
 * converted to error objects, and the body is cleared whenever an error was
 * produced.
 *
 * @param {Object} requestOptions - method, headers, absoluteUrl, gzip, json,
 *   plus optional callback/scope used to report the outcome
 * @param {Object} httpClient - client exposing a request() method
 *
 * @returns {*} the value returned by httpClient.request()
 */
function sendHttpRequest(requestOptions, httpClient)
{
  return httpClient.request(
    {
      method : requestOptions.method,
      headers : requestOptions.headers,
      url : requestOptions.absoluteUrl,
      gzip : requestOptions.gzip,
      json : requestOptions.json,
      callback : function(err, response, body)
      {
        // if we got an error, wrap it into a network error
        if (err)
        {
          err = Errors.createNetworkError(
            ErrorCodes.ERR_SF_NETWORK_COULD_NOT_CONNECT, err);
        }
        // if we didn't get a 200, the request failed
        else if (!response || response.statusCode !== 200)
        {
          err = Errors.createRequestFailedError(
            ErrorCodes.ERR_SF_RESPONSE_FAILURE, response);
        }
        else
        {
          // if the response body is a non-empty string and the response is
          // supposed to contain json, try to json-parse the body
          // NOTE(review): this is an exact match; a
          // 'application/json; charset=...' content type would skip JSON
          // parsing — confirm the server never sends parameters here
          if (Util.isString(body) &&
              response.getResponseHeader('Content-Type') === 'application/json')
          {
            try
            {
              body = JSON.parse(body);
            }
            catch (parseError)
            {
              // we expected to get json
              err = Errors.createUnexpectedContentError(
                ErrorCodes.ERR_SF_RESPONSE_NOT_JSON, response.body);
            }
          }

          // if we were able to successfully json-parse the body and the success
          // flag is false, the operation we tried to perform failed
          if (body && !body.success)
          {
            var data = body.data;
            err = Errors.createOperationFailedError(
              body.code, data, body.message,
              data && data.sqlState ? data.sqlState : undefined);
          }
        }

        // if we have an error, clear the body
        if (err)
        {
          body = undefined;
        }

        // if a callback was specified, invoke it
        if (Util.isFunction(requestOptions.callback))
        {
          requestOptions.callback.apply(requestOptions.scope, [err, body]);
        }
      }
    });
}
"resource": ""
} |
/**
 * Builds the url for a login request: the fixed login path plus a query
 * string containing the warehouse, database, schema and role from the
 * connection config; parameters without a non-empty value are omitted.
 *
 * @param {Object} connectionConfig - exposes getWarehouse(), getDatabase(),
 *   getSchema() and getRole()
 *
 * @returns {String} the login request url
 */
function buildLoginUrl(connectionConfig)
{
  // candidate query-string parameters in the order they should appear
  var candidates = {
    warehouse: connectionConfig.getWarehouse(),
    databaseName: connectionConfig.getDatabase(),
    schemaName: connectionConfig.getSchema(),
    roleName: connectionConfig.getRole()
  };

  // keep only the parameters that have a non-empty value
  var queryStringObject = {};
  for (var name in candidates)
  {
    if (candidates.hasOwnProperty(name) &&
        Util.string.isNotNullOrEmpty(candidates[name]))
    {
      queryStringObject[name] = candidates[name];
    }
  }

  return Url.format(
    {
      pathname : '/session/v1/login-request',
      search : QueryString.stringify(queryStringObject)
    });
}
"resource": ""
} |
/**
 * Creates a new HttpClient bound to a connection config.
 *
 * @param {Object} connectionConfig - the connection configuration
 * @constructor
 */
function HttpClient(connectionConfig)
{
  // save the connection config
  this._connectionConfig = connectionConfig;

  // check that we have a valid request module; getRequestModule() is
  // provided elsewhere (presumably by an environment-specific subclass —
  // confirm) and must hand back an object or a factory function
  var requestModule = this.getRequestModule();
  Errors.assertInternal(
    Util.isObject(requestModule) || Util.isFunction(requestModule));
}
"resource": ""
} |
/**
 * Returns a copy of the given headers object in which the 'Accept' and
 * 'Content-Type' header names are lower-cased; all other header names keep
 * their original case. Non-object input is returned unchanged.
 *
 * This is necessary because the browser-request module, which we use to make
 * http requests in the browser, does case-sensitive checks when deciding
 * whether to inject its own default 'accept' and 'content-type' headers;
 * without the normalization an 'Accept: application/json' header would be
 * duplicated and the browser XMLHttpRequest object would send
 * 'Accept: application/json, application/json'.
 *
 * @param {Object} headers - the headers to normalize
 *
 * @returns {Object} a normalized shallow copy, or the input when it is not
 *   an object
 */
function normalizeHeaders(headers)
{
  if (!Util.isObject(headers))
  {
    return headers;
  }

  var normalized = {};
  var headerName;
  var lowerCaseName;

  for (headerName in headers)
  {
    if (!headers.hasOwnProperty(headerName))
    {
      continue;
    }

    lowerCaseName = headerName.toLowerCase();

    // only 'accept' and 'content-type' get folded to lower case
    if ((lowerCaseName === 'accept') || (lowerCaseName === 'content-type'))
    {
      normalized[lowerCaseName] = headers[headerName];
    }
    else
    {
      normalized[headerName] = headers[headerName];
    }
  }

  return normalized;
}
"resource": ""
} |
/**
 * Ensures the given response object exposes a getResponseHeader() method.
 * Transports that hand back a bare response get one patched on that does a
 * case-insensitive lookup (string header names are lower-cased) against the
 * response's headers map.
 *
 * @param {Object} response - the response to normalize (may be falsy)
 *
 * @returns {Object} the same response object, possibly augmented
 */
function normalizeResponse(response)
{
  // only patch real responses that don't already have the method
  if (response && !response.getResponseHeader)
  {
    response.getResponseHeader = function(header)
    {
      var key = Util.isString(header) ? header.toLowerCase() : header;
      return response.headers && response.headers[key];
    };
  }

  return response;
}
"resource": ""
} |
/**
 * Performs one-time stream initialization: clamps the requested [start, end]
 * row window against the rows actually returned, locates the chunks that
 * overlap the window and starts reading from them, or closes the stream
 * immediately when there is nothing to stream (empty window) or the
 * statement produced an error.
 *
 * NOTE(review): operates on closure state declared outside this block
 * (initialized, context, start, end, resultStream, readNextRow, close).
 */
function init()
{
  // the stream has now been initialized
  initialized = true;

  // if we have a result
  if (context.result)
  {
    // if no value was specified for the start index or if the specified start
    // index is negative, default to 0, otherwise truncate the fractional part
    start = (!Util.isNumber(start) || (start < 0)) ? 0 : Math.floor(start);

    // if no value was specified for the end index or if the end index is
    // larger than the row index of the last row, default to the index of the
    // last row, otherwise truncate the fractional part
    var returnedRows = context.result.getReturnedRows();
    end = (!Util.isNumber(end) || (end >= returnedRows)) ? returnedRows - 1 :
      Math.floor(end);

    // find all the chunks that overlap with the specified range
    var overlappingChunks = context.result.findOverlappingChunks(start, end);

    // if no chunks overlap or start is greater than end, we're done
    if ((overlappingChunks.length === 0) || (start > end))
    {
      process.nextTick(close);
    }
    else
    {
      // create a result stream from the overlapping chunks
      resultStream = new ResultStream(
        {
          chunks : overlappingChunks,
          prefetchSize : context.connectionConfig.getResultPrefetch()
        });

      readNextRow();
    }
  }
  else
  {
    // no result: surface the statement-level error (possibly undefined)
    close(context.resultError);
  }
}
"resource": ""
} |
/**
 * Pushes the next buffered row downstream (on the next tick). Consuming the
 * last buffered row clears the buffer so the following _read() pulls a fresh
 * chunk. Column metadata and the column-id -> extract-function map are built
 * lazily on first use.
 *
 * NOTE(review): operates on closure state declared outside this block
 * (rowBuffer, rowIndex, columns, mapColumnIdToExtractFnName, statement,
 * fetchAsString, self).
 */
function processRowBuffer()
{
  // get the row to add to the read queue
  var row = rowBuffer[rowIndex++];

  // if we just read the last row in the row buffer, clear the row buffer and
  // reset the row index so that we load the next chunk in the result stream
  // when _read() is called
  if (rowIndex === rowBuffer.length)
  {
    rowBuffer = null;
    rowIndex = 0;
  }

  // initialize the columns and column-related maps if necessary
  if (!columns)
  {
    columns = statement.getColumns();
  }
  if (!mapColumnIdToExtractFnName)
  {
    mapColumnIdToExtractFnName =
      buildMapColumnExtractFnNames(columns, fetchAsString);
  }

  // add the next row to the read queue
  process.nextTick(function()
  {
    self.push(externalizeRow(row, columns, mapColumnIdToExtractFnName));
  });
}
"resource": ""
} |
/**
 * 'data' handler for the result stream: detaches both stream listeners
 * (they are re-attached per read by readNextRow()), fills the row buffer
 * with the chunk rows that fall inside the requested [start, end] window,
 * and kicks off row processing.
 *
 * NOTE(review): mutates closure state declared outside this block
 * (resultStream, rowBuffer, rowIndex, start, end).
 */
function onResultStreamData(chunk)
{
  // unsubscribe from the result stream's 'data' and 'close' events
  resultStream.removeListener('data', onResultStreamData);
  resultStream.removeListener('close', onResultStreamClose);

  // get all the rows in the chunk that overlap with the requested window,
  // and use the resulting array as the new row buffer
  var chunkStart = chunk.getStartIndex();
  var chunkEnd = chunk.getEndIndex();
  rowBuffer = chunk.getRows().slice(
    Math.max(chunkStart, start) - chunkStart,
    Math.min(chunkEnd, end) + 1 - chunkStart);

  // reset the row index
  rowIndex = 0;

  // process the row buffer
  processRowBuffer();
}
"resource": ""
} |
/**
 * 'close' handler for the result stream: when the stream closed because of a
 * retryable error (see isResultStreamErrorRetryable) and we have not yet
 * exhausted the configured number of result-stream interrupts, refresh the
 * statement result and resume via continueCallback; otherwise close this
 * stream with the error (or cleanly when err is undefined).
 *
 * @param {Error} err - the error that closed the stream, if any
 * @param {Function} continueCallback - invoked to resume reading after a
 *   successful refresh
 */
function onResultStreamClose(err, continueCallback)
{
  // if the error is retryable and
  // the result stream hasn't been closed too many times
  if (isResultStreamErrorRetryable(err) &&
      (numResultStreamInterrupts <
        context.connectionConfig.getResultStreamInterrupts()))
  {
    numResultStreamInterrupts++;

    // fetch the statement result again
    context.refresh(function()
    {
      if (context.resultError)
      {
        close(context.resultError);
      }
      else
      {
        continueCallback();
      }
    });
  }
  else
  {
    close(err);
  }
}
"resource": ""
} |
/**
 * Tears down the streaming operation: detaches the result-stream listeners,
 * resets the per-operation closure state, and then either emits the given
 * error or signals normal end-of-stream via push(null).
 *
 * @param {Error} err - the error to surface, if any
 */
function(err)
{
  // if we have a result stream, stop listening to events on it
  if (resultStream)
  {
    resultStream.removeListener('data', onResultStreamData);
    resultStream.removeListener('close', onResultStreamClose);
  }

  // we're done, so time to clean up
  rowBuffer = null;
  rowIndex = 0;
  resultStream = null;
  numResultStreamInterrupts = 0;

  if (err)
  {
    emitError(err);
  }
  else
  {
    self.push(null);
  }
}
"resource": ""
} | |
/**
 * Emits the next row: drains the local row buffer if one is loaded,
 * otherwise subscribes to the result stream's 'data'/'close' events and
 * requests the next chunk (the 'data' handler refills the buffer).
 */
function readNextRow()
{
  // if we have a row buffer, process it
  if (rowBuffer)
  {
    processRowBuffer();
  }
  else
  {
    // subscribe to the result stream's 'data' and 'close' events
    resultStream.on('data', onResultStreamData);
    resultStream.on('close', onResultStreamClose);

    // issue a request to fetch the next chunk in the result stream
    resultStream.read();
  }
}
"resource": ""
} |
/**
 * Determines whether a result-stream error is worth retrying: only a
 * large-result-set error whose response came back with HTTP status 403
 * qualifies.
 *
 * @param {Error} error - the error that closed the result stream
 *
 * @returns {Boolean} truthy when the error is retryable
 */
function isResultStreamErrorRetryable(error)
{
  var response = error && error.response;
  return Errors.isLargeResultSetError(error) &&
    response && (response.statusCode === 403);
}
"resource": ""
} |
/**
 * Builds a map from column id to the name of the row method that should be
 * used to extract that column's value. Columns whose native type appears in
 * fetchAsString (case-insensitive) are extracted with
 * 'getColumnValueAsString'; everything else uses 'getColumnValue'.
 *
 * @param {Object[]} columns - the result columns
 * @param {String[]} [fetchAsString] - native types to retrieve as strings
 *
 * @returns {Object} column id -> extract function name
 */
function buildMapColumnExtractFnNames(columns, fetchAsString)
{
  var mapColumnIdToExtractFnName = {};
  var index;
  var length;
  var column;

  // no stringification requested: every column uses the regular extractor
  if (!Util.exists(fetchAsString))
  {
    for (index = 0, length = columns.length; index < length; index++)
    {
      mapColumnIdToExtractFnName[columns[index].getId()] = 'getColumnValue';
    }
    return mapColumnIdToExtractFnName;
  }

  // build a lookup of the native types the caller wants back as strings
  var stringifyNativeTypes = {};
  for (index = 0, length = fetchAsString.length; index < length; index++)
  {
    stringifyNativeTypes[fetchAsString[index].toUpperCase()] = true;
  }

  // pick the string extractor for columns whose native type is in the lookup
  for (index = 0, length = columns.length; index < length; index++)
  {
    column = columns[index];
    mapColumnIdToExtractFnName[column.getId()] =
      stringifyNativeTypes[DataTypes.toNativeType(column.getType())] ?
        'getColumnValueAsString' : 'getColumnValue';
  }

  return mapColumnIdToExtractFnName;
}
"resource": ""
} |
/**
 * Converts an internal row into a plain { columnName: value } object,
 * reading each value through the extract function chosen for that column.
 *
 * @param {Object} row - the internal row
 * @param {Object[]} columns - the result columns
 * @param {Object} mapColumnIdToExtractFnName - column id -> name of the row
 *   method used to extract the value
 *
 * @returns {Object} the externalized row
 */
function externalizeRow(row, columns, mapColumnIdToExtractFnName)
{
  var externalizedRow = {};
  var index = 0;

  while (index < columns.length)
  {
    var column = columns[index];
    var columnId = column.getId();

    // dispatch through the per-column extract function name
    externalizedRow[column.getName()] =
      row[mapColumnIdToExtractFnName[columnId]](columnId);

    index++;
  }

  return externalizedRow;
}
"resource": ""
} |
/**
 * Reads the current selection boundaries from a form field.
 *
 * @param {Object} field - a textarea/input element (anything exposing
 *   selectionStart/selectionEnd)
 * @returns {Object} { start, end } selection offsets
 * @throws {TypeError} when the field is not an object
 */
function getSelection (field) {
  // BUG FIX: `typeof null === 'object'`, so null used to slip past this
  // check and crash below with an opaque TypeError; reject it explicitly
  if (field === null || typeof field !== 'object') {
    throw new TypeError('The field must be an object.')
  }
  return {
    start: field.selectionStart,
    end: field.selectionEnd
  }
}
"resource": ""
} |
/**
 * Restores a selection on a form field. Uses the native setSelectionRange()
 * when available, falling back to the legacy IE TextRange API; a falsy
 * selection is a no-op.
 *
 * @param {Object|Boolean} selection - { start, end } offsets, or a falsy
 *   value meaning "nothing to restore"
 * @param {Object} field - the textarea/input element
 * @returns {null} always null
 * @throws {TypeError} when the selection or field is malformed
 */
function setSelectionRange (selection = false, field) {
  if (!selection) return null
  if (typeof selection !== 'object') {
    throw new TypeError('The selection must be an object.')
  }
  if (typeof selection.start !== 'number') {
    throw new TypeError('The selection start value must be a number.')
  }
  if (typeof selection.end !== 'number') {
    throw new TypeError('The selection end value must be a number.')
  }
  // `typeof null === 'object'` would slip past a bare typeof check and
  // crash below with a generic TypeError; reject null explicitly
  if (field === null || typeof field !== 'object') {
    throw new TypeError('The field must be an object.')
  }
  if (field.setSelectionRange) {
    field.setSelectionRange(
      selection.start,
      selection.end
    )
    return null
  }
  // legacy IE fallback via TextRange
  if (!field.createTextRange) return null
  const range = field.createTextRange()
  range.collapse(true)
  // BUG FIX: this used to call moveStart('character', selection.end), which
  // collapsed the selection to a caret at `end` instead of selecting
  // [start, end]. Move the end boundary first so dragging the start past
  // the end cannot collapse the range.
  range.moveEnd('character', selection.end)
  range.moveStart('character', selection.start)
  range.select()
  return null
}
"resource": ""
} |
/**
 * Builds the change-event payload handed to editor consumers after
 * validating every field.
 *
 * @param {String} selected - the currently selected text
 * @param {Object} selection - { start, end } numeric offsets
 * @param {String} markdown - the full markdown content
 * @param {Object} native - the native DOM event
 * @param {String} html - the rendered html content
 * @returns {Object} { selected, selection, markdown, native, html }
 * @throws {TypeError} on the first malformed argument (checked in
 *   parameter order)
 */
function createChangeEvent (selected, selection, markdown, native, html) {
  const ensure = (ok, message) => {
    if (!ok) throw new TypeError(message)
  }
  ensure(typeof selected === 'string', 'The selected content value must be a string.')
  ensure(typeof selection === 'object', 'The selection must be an object.')
  ensure(typeof selection.start === 'number', 'The selection start value must be a number.')
  ensure(typeof selection.end === 'number', 'The selection end value must be a number.')
  ensure(typeof markdown === 'string', 'The markdown content value must be a string.')
  ensure(typeof native === 'object', 'The native event must be an object.')
  ensure(typeof html === 'string', 'The html content value must be a string.')
  return {
    selected,
    selection,
    markdown,
    native,
    html
  }
}
"resource": ""
} |
/**
 * Splices the updated text over the selected range of the content.
 *
 * @param {String} content - the current content
 * @param {Object} selection - { start, end } numeric offsets; the range
 *   [start, end) is replaced
 * @param {String} updated - the replacement text
 * @returns {String} the new content
 * @throws {TypeError} on the first malformed argument (checked in
 *   parameter order)
 */
function updateContent (content, selection, updated) {
  const fail = (message) => { throw new TypeError(message) }
  if (typeof content !== 'string') fail('The content value must be a string.')
  if (typeof selection !== 'object') fail('The selection must be an object.')
  if (typeof selection.start !== 'number') fail('The selection start value must be a number.')
  if (typeof selection.end !== 'number') fail('The selection end value must be a number.')
  if (typeof updated !== 'string') fail('The updated content value must be a string.')
  const head = content.slice(0, selection.start)
  const tail = content.slice(selection.end)
  return head + updated + tail
}
"resource": ""
} |
/**
 * Extracts the currently selected slice of the content.
 *
 * @param {String} content - the full content
 * @param {Object} selection - { start, end } numeric offsets
 * @returns {String} the substring covered by [start, end)
 * @throws {TypeError} on the first malformed argument (checked in
 *   parameter order)
 */
function getSelected (content, selection) {
  const fail = (message) => { throw new TypeError(message) }
  if (typeof content !== 'string') fail('The content must be a string.')
  if (typeof selection !== 'object') fail('The selection must be an object.')
  if (typeof selection.start !== 'number') fail('The selection start value must be a number.')
  if (typeof selection.end !== 'number') fail('The selection end value must be a number.')
  const { start, end } = selection
  return content.slice(start, end)
}
"resource": ""
} |
/**
 * Stateless presentational wrapper around a native <button>.
 *
 * Spreads all received props onto the element, then re-applies className,
 * onClick, name, disabled and children explicitly and pins type='button' so
 * the element never acts as a form submit button.
 *
 * NOTE(review): the explicit props after {...props} are redundant (the
 * spread already supplies them) except for `type`, which the re-application
 * presumably makes non-overridable — confirm before simplifying.
 */
function BaseButton (props) {
  return (
    <button
      {...props}
      className={props.className}
      onClick={props.onClick}
      name={props.name}
      disabled={props.disabled}
      type='button'
      children={props.children}
    />
  )
}
"resource": ""
} |
/**
 * Holds a request callback together with the receiver (`this`) it should be
 * invoked with; the receiver defaults to the handler instance itself.
 *
 * @param {Function} callback - the callback to store
 * @param {Object} [thisp] - optional receiver for the callback
 * @constructor
 */
function RequestCallbackHandler(callback, thisp) {
  this.callback = callback;
  this.thisp = thisp || this;
}
"resource": ""
} |
/**
 * Holds a streamed-request callback together with the receiver (`this`) it
 * should be invoked with; the receiver defaults to the handler instance.
 *
 * @param {Function} callback - the callback to store
 * @param {Object} [thisp] - optional receiver for the callback
 * @constructor
 */
function StreamedRequestCallbackHandler(callback, thisp) {
  this.callback = callback;
  this.thisp = thisp || this;
}
"resource": ""
} |
/**
 * Wraps a pool-style function fn(result, a, b, c) — one that writes into a
 * caller-supplied result — into an allocating form (a, b, c) that constructs
 * a fresh ResultCons instance for every call.
 *
 * @param {Function} fn - function taking (result, arg1, arg2, arg3)
 * @param {Function} ResultCons - constructor for the result object
 * @returns {Function} the allocating wrapper (fixed arity of three)
 */
function allocifyPoolFn(fn, ResultCons) {
  // named function expression keeps 'allocFn' in stack traces
  return function allocFn(arg1, arg2, arg3) {
    return fn(new ResultCons(), arg1, arg2, arg3);
  };
}
"resource": ""
} |
/**
 * Computes descriptive statistics over a numeric sample without mutating it.
 *
 * Returns min, max, the quartiles q1/q2/q3, the upper Tukey fence
 * hi = q3 + 1.5 * IQR, the largest sample value at or below that fence
 * (whi, the "upper whisker"), and the fraction of the sample at or below
 * the whisker (whiPct).
 *
 * @param {Number[]} sample - the observations (left unmodified)
 * @returns {Object} {min, max, q1, q2, q3, hi, whi, whiPct}
 */
function descStats(sample) {
  // sort a copy numerically so the caller's array is untouched
  var S = [].concat(sample);
  S.sort(function sortOrder(a, b) {
    return a - b;
  });
  var N = S.length;
  var q1 = S[Math.floor(0.25 * N)];
  var q2 = S[Math.floor(0.50 * N)];
  // BUG FIX: the third quartile is the 75th percentile; this previously
  // read S[floor(0.70 * N)], which skewed the IQR and the whisker fence
  var q3 = S[Math.floor(0.75 * N)];
  var iqr = q3 - q1;
  // upper Tukey fence: q3 + 1.5 * IQR
  var tol = 3 * iqr / 2;
  var hi = q3 + tol;
  // scan backwards for the largest observation within the fence
  var whiIndex = N;
  while (--whiIndex > 0) {
    if (S[whiIndex] <= hi) {
      break;
    }
  }
  var whiPct = (whiIndex + 1) / N;
  var whi = S[whiIndex];
  return {
    min: S[0],
    max: S[N - 1],
    q1: q1,
    q2: q2,
    q3: q3,
    hi: hi,
    whi: whi,
    whiPct: whiPct
  };
}
"resource": ""
} |
/**
 * Wraps an OpenAPI Parameter Object definition.
 *
 * @param {Object} opOrPathObject - the owning Operation or Path object; an
 *   Operation is recognized by the presence of a 'consumes' property
 *   (NOTE(review): assumes every Operation object carries 'consumes' —
 *   confirm)
 * @param {Object} definition - the raw parameter definition
 * @param {Object} definitionFullyResolved - the definition with all JSON
 *   references resolved
 * @param {String[]} pathToDefinition - JSON path to the definition within
 *   the API document
 * @constructor
 */
function Parameter (opOrPathObject, definition, definitionFullyResolved, pathToDefinition) {
  // Assign local properties
  this.definition = definition;
  this.definitionFullyResolved = definitionFullyResolved;
  this.pathToDefinition = pathToDefinition;
  this.ptr = JsonRefs.pathToPtr(pathToDefinition);

  // Distinguish operation-level from path-level parameters
  if (_.has(opOrPathObject, 'consumes')) {
    this.operationObject = opOrPathObject;
    this.pathObject = opOrPathObject.pathObject;
  } else {
    this.operationObject = undefined;
    this.pathObject = opOrPathObject;
  }

  // Assign local properties from the OpenAPI Parameter Object definition
  _.assign(this, definitionFullyResolved);

  // When the definition carries no schema of its own, derive one from the
  // parameter definition itself
  if (_.isUndefined(this.schema)) {
    this.schema = helpers.computeParameterSchema(definitionFullyResolved);
  }

  this.pathObject.apiDefinition._debug(' %s%s (in: %s) at %s',
    _.isUndefined(this.operationObject) ? '' : ' ',
    definitionFullyResolved.name,
    definitionFullyResolved.in,
    this.ptr);
}
"resource": ""
} |
/**
 * Validates the fully-resolved API definition against the Swagger 2.0 JSON
 * Schema, rewrites hard-to-read ANY_OF/ONE_OF schema errors into friendlier
 * messages, and folds JSON Reference problems (missing refs, invalid refs,
 * superfluous ref properties) into the error/warning lists.
 *
 * @param {Object} apiDefinition - the ApiDefinition being validated
 * @returns {Object} validation results with `errors` and `warnings` arrays
 */
function validateStructure (apiDefinition) {
  var results = helpers.validateAgainstSchema(helpers.getJSONSchemaValidator(),
                                              swaggerSchema,
                                              apiDefinition.definitionFullyResolved);

  // Make complex JSON Schema validation errors easier to understand (Issue 15)
  results.errors = results.errors.map(function (error) {
    // infer what kind of definition failed from the tail of the error path
    var defType = ['additionalProperties', 'items'].indexOf(error.path[error.path.length - 1]) > -1 ?
      'schema' :
      error.path[error.path.length - 2];

    if (['ANY_OF_MISSING', 'ONE_OF_MISSING'].indexOf(error.code) > -1) {
      switch (defType) {
      case 'parameters':
        defType = 'parameter';
        break;
      case 'responses':
        defType = 'response';
        break;
      case 'schema':
        defType += ' ' + error.path[error.path.length - 1];

        // no default
      }

      error.message = 'Not a valid ' + defType + ' definition';
    }

    return error;
  });

  // Treat invalid/missing references as structural errors
  _.each(apiDefinition.references, function (refDetails, refPtr) {
    var refPath = JsonRefs.pathFromPtr(refPtr);
    var err;

    if (refDetails.missing) {
      err = {
        code: 'UNRESOLVABLE_REFERENCE',
        message: 'Reference could not be resolved: ' + refDetails.uri,
        path: refPath.concat('$ref')
      };

      if (_.has(refDetails, 'error')) {
        err.error = refDetails.error;
      }

      results.errors.push(err);
    } else if (refDetails.type === 'invalid') {
      results.errors.push({
        code: 'INVALID_REFERENCE',
        message: refDetails.error || 'Invalid JSON Reference',
        path: refPath.concat('$ref')
      });
    } else if (_.has(refDetails, 'warning')) {
      // json-refs only creates warnings for JSON References with superfluous properties which will be ignored
      results.warnings.push({
        code: 'EXTRA_REFERENCE_PROPERTIES',
        message: refDetails.warning,
        path: refPath
      });
    }
  });

  return results;
}
"resource": ""
} |
/**
 * Wraps a Swagger 2.0 API definition: keeps the raw, remotes-resolved and
 * fully-resolved forms, registers any custom formats / format generators /
 * validators supplied via options, copies the definition's own properties
 * onto the instance, and constructs a Path object for every entry in paths.
 *
 * @param {Object} definition - the raw API definition
 * @param {Object} definitionRemotesResolved - definition with remote
 *   references resolved
 * @param {Object} definitionFullyResolved - definition with all references
 *   resolved
 * @param {Object} references - metadata about the resolved references
 * @param {Object} options - processing options (customFormats,
 *   customFormatGenerators, customValidators, definition, ...)
 * @constructor
 */
function ApiDefinition (definition, definitionRemotesResolved, definitionFullyResolved, references, options) {
  var that = this;

  debug('Creating ApiDefinition from %s',
        _.isString(options.definition) ? options.definition : 'the provided OpenAPI definition');

  // Assign this so other object can use it
  this._debug = debug;

  // Assign local properties
  this.customFormats = {};
  this.customFormatGenerators = {};
  this.customValidators = [];
  this.definition = definition;
  this.definitionFullyResolved = definitionFullyResolved;
  this.definitionRemotesResolved = definitionRemotesResolved;
  this.documentationUrl = 'https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md';
  this.options = options;
  this.references = references;
  this.version = '2.0';

  // Assign local properties from the OpenAPI Object definition
  _.assign(this, definition);

  // Register custom formats
  _.each(options.customFormats, _.bind(ApiDefinition.prototype.registerFormat, this));

  // Register custom formats
  _.each(options.customFormatGenerators, _.bind(ApiDefinition.prototype.registerFormatGenerator, this));

  // Register custom validators
  _.each(options.customValidators, _.bind(ApiDefinition.prototype.registerValidator, this));

  debug(' Paths:');

  // Create the Path objects
  this.pathObjects = _.map(definitionFullyResolved.paths, function (pathDef, path) {
    return new Path(that,
                    path,
                    _.get(definitionRemotesResolved, ['paths', path]),
                    pathDef,
                    ['paths', path]);
  });
}
"resource": ""
} |
/**
 * Wraps an OpenAPI Path Object definition: records the raw and
 * fully-resolved definitions, precomputes an Express-style regular
 * expression (via path-to-regexp) for matching request paths, and builds
 * the Parameter and Operation objects declared at this path.
 *
 * @param {Object} apiDefinition - the owning ApiDefinition
 * @param {String} path - the path template (OpenAPI style, e.g. /pets/{id})
 * @param {Object} definition - the raw path definition
 * @param {Object} definitionFullyResolved - the definition with all
 *   references resolved
 * @param {String[]} pathToDefinition - JSON path to the definition within
 *   the API document
 * @constructor
 */
function Path (apiDefinition, path, definition, definitionFullyResolved, pathToDefinition) {
  var basePathPrefix = apiDefinition.definitionFullyResolved.basePath || '/';
  var that = this;
  var sanitizedPath;

  // TODO: We could/should refactor this to use the path module

  // Remove trailing slash from the basePathPrefix so we do not end up with double slashes
  if (basePathPrefix.charAt(basePathPrefix.length - 1) === '/') {
    basePathPrefix = basePathPrefix.substring(0, basePathPrefix.length - 1);
  }

  // Converts OpenAPI parameters to Express-style parameters, and also escapes all path-to-regexp special characters
  //
  // BUG FIX: these escapes previously used string patterns (e.g.
  // path.replace('(', '\\(')), which replace only the FIRST occurrence, so
  // paths containing a special character more than once were escaped
  // incompletely; use global regexes instead
  //
  // @see: https://github.com/pillarjs/path-to-regexp/issues/76#issuecomment-219085357
  sanitizedPath = basePathPrefix + path.replace(/\(/g, '\\(') // path-to-regexp
    .replace(/\)/g, '\\)') // path-to-regexp
    .replace(/:/g, '\\:') // path-to-regexp
    .replace(/\*/g, '\\*') // path-to-regexp
    .replace(/\+/g, '\\+') // path-to-regexp
    .replace(/\?/g, '\\?') // path-to-regexp
    .replace(/\{/g, ':') // OpenAPI -> Express-style
    .replace(/\}/g, ''); // OpenAPI -> Express-style

  // Assign local properties
  this.apiDefinition = apiDefinition;
  this.definition = definition;
  this.definitionFullyResolved = definitionFullyResolved;
  this.path = path;
  this.pathToDefinition = pathToDefinition;
  this.ptr = JsonRefs.pathToPtr(pathToDefinition);
  this.regexp = pathToRegexp(sanitizedPath, {sensitive: true});

  // Assign local properties from the OpenAPI Path Object definition
  _.assign(this, definitionFullyResolved);

  this._debug = this.apiDefinition._debug;

  this._debug(' %s', this.path);

  // Build the path-level parameters
  this.parameterObjects = _.map(definitionFullyResolved.parameters, function (paramDef, index) {
    var pPath = pathToDefinition.concat(['parameters', index.toString()]);

    return new Parameter(that,
                         _.get(apiDefinition.definitionRemotesResolved, pPath),
                         paramDef,
                         pPath);
  });

  this._debug(' Operations:');

  // Build an Operation for every supported HTTP method defined at this path
  this.operationObjects = _.reduce(definitionFullyResolved, function (operations, operationDef, method) {
    var oPath = pathToDefinition.concat(method);

    if (supportedHttpMethods.indexOf(method) > -1) {
      operations.push(new Operation(that, method, _.get(apiDefinition.definitionRemotesResolved, oPath), operationDef, oPath));
    }

    return operations;
  }, []);
}
"resource": ""
} |
/**
 * Wraps an OpenAPI Response Object definition for one status-code key of an
 * operation.
 *
 * @param {Object} operationObject - the owning Operation
 * @param {String} statusCode - the key this response is registered under in
 *   the operation's responses
 * @param {Object} definition - the raw response definition
 * @param {Object} definitionFullyResolved - the definition with all JSON
 *   references resolved
 * @param {String[]} pathToDefinition - JSON path to the definition within
 *   the API document
 * @constructor
 */
function Response (operationObject, statusCode, definition, definitionFullyResolved, pathToDefinition) {
  // Assign local properties
  this.definition = definition;
  this.definitionFullyResolved = definitionFullyResolved;
  this.operationObject = operationObject;
  this.pathToDefinition = pathToDefinition;
  this.ptr = JsonRefs.pathToPtr(pathToDefinition);
  this.statusCode = statusCode;

  // Assign local properties from the OpenAPI Response Object definition
  _.assign(this, definitionFullyResolved);

  this.operationObject.pathObject.apiDefinition._debug(' %s at %s', statusCode, this.ptr);
}
"resource": ""
} |
/**
 * Nikita `if_exec` condition: runs each command in options.if_exec through
 * system.execute in relax mode; succeed() is called only if every command
 * exits with code 0, otherwise skip() is called.
 *
 * NOTE(review): bound to a Nikita session (`this.log`,
 * `this.system.execute`) and to the `each` helper from the enclosing
 * module.
 */
function({options}, succeed, skip) {
  return each(options.if_exec).call((cmd, next) => {
    this.log({
      message: `Nikita \`if_exec\`: ${cmd}`,
      level: 'DEBUG',
      module: 'nikita/misc/conditions'
    });
    // relax: a non-zero exit code is reported via `code`, not thrown
    return this.system.execute({
      cmd: cmd,
      relax: true,
      stderr_log: false,
      stdin_log: false,
      stdout_log: false
    }, function(err, {code}) {
      this.log({
        message: `Nikita \`if_exec\`: code is "${code}"`,
        level: 'INFO',
        module: 'nikita/misc/conditions'
      });
      if (code === 0) {
        return next();
      } else {
        return skip();
      }
    });
  }).next(succeed);
}
"resource": ""
} | |
/**
 * Nikita `if_exists` condition: succeed() only if every listed file exists,
 * otherwise skip(). A boolean value defaults the list to [options.target]
 * (true) or disables the condition (false).
 *
 * NOTE(review): bound to a Nikita session (`this.fs.exists`, `this.log`)
 * and to the `each` helper from the enclosing module.
 */
function({options}, succeed, skip) {
  // Default to `options.target` if "true"
  if (typeof options.if_exists === 'boolean' && options.target) {
    options.if_exists = options.if_exists ? [options.target] : null;
  }
  return each(options.if_exists).call((if_exists, next) => {
    return this.fs.exists({
      target: if_exists
    }, (err, {exists}) => {
      if (exists) {
        this.log({
          message: `File exists ${if_exists}, continuing`,
          level: 'DEBUG',
          module: 'nikita/misc/conditions'
        });
        return next();
      } else {
        this.log({
          message: `File doesnt exists ${if_exists}, skipping`,
          level: 'INFO',
          module: 'nikita/misc/conditions'
        });
        return skip();
      }
    });
  }).next(succeed);
}
"resource": ""
} | |
/**
 * Nikita `unless_exists` condition: the inverse of `if_exists` — skip() as
 * soon as any listed file exists, succeed() only if none do. A boolean
 * value defaults the list to [options.target] (true) or disables the
 * condition (false).
 *
 * NOTE(review): bound to a Nikita session (`this.fs.exists`, `this.log`)
 * and to the `each` helper from the enclosing module.
 */
function({options}, succeed, skip) {
  // Default to `options.target` if "true"
  if (typeof options.unless_exists === 'boolean' && options.target) {
    options.unless_exists = options.unless_exists ? [options.target] : null;
  }
  return each(options.unless_exists).call((unless_exists, next) => {
    return this.fs.exists({
      target: unless_exists
    }, (err, {exists}) => {
      if (exists) {
        this.log({
          message: `File exists ${unless_exists}, skipping`,
          level: 'INFO',
          module: 'nikita/misc/conditions'
        });
        return skip();
      } else {
        this.log({
          message: `File doesnt exists ${unless_exists}, continuing`,
          level: 'DEBUG',
          module: 'nikita/misc/conditions'
        });
        return next();
      }
    });
  }).next(succeed);
}
"resource": ""
} | |
/**
 * Nikita `should_exist` condition: verifies that every file listed in
 * options.should_exist exists, checking through the session's SSH
 * connection (presumably null for local execution — confirm). The first
 * missing file routes the flow to skip() with an error; otherwise
 * succeed() is called.
 *
 * NOTE(review): depends on the module-level `fs` and `each` helpers.
 */
function({options}, succeed, skip) {
  var ssh;
  // SSH connection
  ssh = this.ssh(options.ssh);
  return each(options.should_exist).call(function(should_exist, next) {
    return fs.exists(ssh, should_exist, function(err, exists) {
      if (exists) {
        return next();
      } else {
        return next(Error(`File does not exist: ${should_exist}`));
      }
    });
  }).error(skip).next(succeed);
}
"resource": ""
} | |
q25099 | train | function(content, undefinedOnly) {
var k, v;
for (k in content) {
v = content[k];
if (v && typeof v === 'object') {
content[k] = module.exports.clean(v, undefinedOnly);
continue;
}
if (typeof v === 'undefined') {
delete content[k];
}
if (!undefinedOnly && v === null) {
delete content[k];
}
}
return content;
} | javascript | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.