text stringlengths 2 1.04M | meta dict |
|---|---|
import sys
from typing import TYPE_CHECKING

# Lazy-import shim used by plotly's generated validator packages.
# On Python >= 3.7 the validator classes are resolved on first attribute
# access via a module-level __getattr__ (PEP 562); older interpreters --
# and static type checkers, which need names resolvable at import time --
# fall back to eager imports.
if sys.version_info < (3, 7) or TYPE_CHECKING:
    from ._up import UpValidator
    from ._projection import ProjectionValidator
    from ._eye import EyeValidator
    from ._center import CenterValidator
else:
    from _plotly_utils.importers import relative_import

    # relative_import supplies __all__ plus the __getattr__/__dir__ hooks
    # that import each "._module.ClassName" path on demand.
    __all__, __getattr__, __dir__ = relative_import(
        __name__,
        [],  # no eagerly re-exported submodules
        [
            "._up.UpValidator",
            "._projection.ProjectionValidator",
            "._eye.EyeValidator",
            "._center.CenterValidator",
        ],
    )
| {
"content_hash": "b9fa3c242fee83081c705505db6fae0e",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 55,
"avg_line_length": 27.476190476190474,
"alnum_prop": 0.5961871750433275,
"repo_name": "plotly/plotly.py",
"id": "6fda571b1ed2506974b823cd4f390f9217cceb7e",
"size": "577",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/layout/scene/camera/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
} |
'use strict';

// Grunt build configuration for the loglevel library: lint with JSHint,
// run Jasmine specs (locally, in a browser via connect, or cross-browser
// on Sauce Labs), then concat + minify into dist/.
module.exports = function (grunt) {
  // Project configuration.
  grunt.initConfig({
    // Metadata.
    pkg: grunt.file.readJSON('package.json'),
    // Banner prepended to built files; intentionally just a newline here.
    banner: '\n',
    // Task configuration.
    // Copy the library source into dist/, stripping any existing banner.
    concat: {
      options: {
        banner: '<%= banner %>',
        stripBanners: true
      },
      dist: {
        src: ['lib/<%= pkg.name %>.js'],
        dest: 'dist/<%= pkg.name %>.js'
      },
    },
    // Minify the concatenated build into dist/<name>.min.js.
    uglify: {
      options: {
        banner: '<%= banner %>'
      },
      dist: {
        src: '<%= concat.dist.dest %>',
        dest: 'dist/<%= pkg.name %>.min.js'
      },
    },
    // Jasmine specs, built with the RequireJS spec-runner template.
    jasmine: {
      src: 'src/**/*.js',
      options: {
        specs: 'test/*-test.js',
        vendor: 'test/vendor/*.js',
        template: require('grunt-template-jasmine-requirejs')
      }
    },
    // Open the generated spec runner in a browser (integration-test task).
    open: {
      jasmine: {
        path: 'http://127.0.0.1:8000/_SpecRunner.html'
      }
    },
    // Static server that serves the spec runner on port 8000.
    connect: {
      test: {
        port: 8000,
        keepalive: true
      }
    },
    // Cross-browser Jasmine matrix on Sauce Labs.
    // NOTE(review): `key` is clearly a placeholder -- credentials are
    // normally injected via environment variables; confirm before running.
    'saucelabs-jasmine': {
      all: {
        username: 'pimterry',
        key: 'KEY',
        urls: ['http://localhost:8000/_SpecRunner.html'],
        browsers: [
          {"browserName": "iehta", "platform": "Windows 2008", "version": "9"},
          // {"browserName": "firefox", "platform": "Windows 2003", "version": "3.0"},
          // {"browserName": "firefox", "platform": "Windows 2003", "version": "3.5"},
          {"browserName": "firefox", "platform": "Windows 2003", "version": "3.6"},
          {"browserName": "firefox", "platform": "Windows 2003", "version": "4"},
          {"browserName": "firefox", "platform": "Windows 2003", "version": "19"},
          {"browserName": "safari", "platform": "Mac 10.6", "version": "5"},
          {"browserName": "safari", "platform": "Mac 10.8", "version": "6"},
          {"browserName": "googlechrome", "platform": "Windows 2003"},
          {"browserName": "opera", "platform": "Windows 2003", "version": "12"},
          {"browserName": "iehta", "platform": "Windows 2003", "version": "6"},
          {"browserName": "iehta", "platform": "Windows 2003", "version": "7"},
          {"browserName": "iehta", "platform": "Windows 2008", "version": "8"},
        ],
        concurrency: 3,
        detailedError: true,
        testTimeout:10000,
        testInterval:1000,
        testReadyTimeout:2000,
        testname: 'loglevel jasmine test',
        // Tag runs with the Travis repo/commit when available.
        tags: [process.env.TRAVIS_REPO_SLUG || "local", process.env.TRAVIS_COMMIT || "manual"]
      }
    },
    // Lint, with per-directory .jshintrc overrides for lib/ and test/.
    jshint: {
      options: {
        jshintrc: '.jshintrc'
      },
      gruntfile: {
        src: 'Gruntfile.js'
      },
      lib: {
        options: {
          jshintrc: 'lib/.jshintrc'
        },
        src: ['lib/**/*.js']
      },
      test: {
        options: {
          jshintrc: 'test/.jshintrc'
        },
        src: ['test/*.js']
      },
    },
    // Re-lint (and re-test lib/test changes) on file change during development.
    watch: {
      gruntfile: {
        files: '<%= jshint.gruntfile.src %>',
        tasks: ['jshint:gruntfile']
      },
      lib: {
        files: '<%= jshint.lib.src %>',
        tasks: ['jshint:lib', 'jasmine']
      },
      test: {
        files: '<%= jshint.test.src %>',
        tasks: ['jshint:test', 'jasmine']
      },
    },
  });

  // These plugins provide necessary tasks.
  grunt.loadNpmTasks('grunt-contrib-concat');
  grunt.loadNpmTasks('grunt-contrib-uglify');
  grunt.loadNpmTasks('grunt-contrib-jasmine');
  grunt.loadNpmTasks('grunt-contrib-jshint');
  grunt.loadNpmTasks('grunt-contrib-watch');
  grunt.loadNpmTasks('grunt-contrib-connect');
  grunt.loadNpmTasks('grunt-open');
  grunt.loadNpmTasks('grunt-saucelabs');

  // Default task.
  grunt.registerTask('default', ['jshint', 'jasmine', 'concat', 'uglify']);
  // Just tests
  grunt.registerTask('test', ['jshint', 'jasmine']);
  // Test with a live server and an actual browser
  grunt.registerTask('integration-test', ['jasmine:src:build', 'connect:test:keepalive', 'open:jasmine']);
  // Test with lots of browsers on saucelabs
  grunt.registerTask('saucelabs', ['jasmine:src:build', 'connect:test', 'saucelabs-jasmine']);
};
| {
"content_hash": "1c407a3c1ba3a89a2fc663fc060e94aa",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 108,
"avg_line_length": 36.518248175182485,
"alnum_prop": 0.43154107535478714,
"repo_name": "deanius/cdnjs",
"id": "7d1ded4a6142a84d543d2848749a2161be12791f",
"size": "5351",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "temp/loglevel/0.1.0/package/Gruntfile.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "25832900"
},
{
"name": "CoffeeScript",
"bytes": "25217"
},
{
"name": "Gosu",
"bytes": "56300"
},
{
"name": "JavaScript",
"bytes": "681712452"
},
{
"name": "LiveScript",
"bytes": "97648"
},
{
"name": "PHP",
"bytes": "735"
},
{
"name": "Ruby",
"bytes": "168"
},
{
"name": "Shell",
"bytes": "6802"
}
],
"symlink_target": ""
} |
/* globals PDFJS */
'use strict';
(function (root, factory) {
if (typeof define === 'function' && define.amd) {
define('pdfjs/core/evaluator', ['exports', 'pdfjs/shared/util',
'pdfjs/core/primitives', 'pdfjs/core/stream', 'pdfjs/core/parser',
'pdfjs/core/image', 'pdfjs/core/colorspace', 'pdfjs/core/murmurhash3',
'pdfjs/core/fonts', 'pdfjs/core/function', 'pdfjs/core/pattern',
'pdfjs/core/cmap', 'pdfjs/core/metrics', 'pdfjs/core/bidi',
'pdfjs/core/encodings', 'pdfjs/core/standard_fonts',
'pdfjs/core/unicode'], factory);
} else if (typeof exports !== 'undefined') {
factory(exports, require('../shared/util.js'), require('./primitives.js'),
require('./stream.js'), require('./parser.js'), require('./image.js'),
require('./colorspace.js'), require('./murmurhash3.js'),
require('./fonts.js'), require('./function.js'), require('./pattern.js'),
require('./cmap.js'), require('./metrics.js'), require('./bidi.js'),
require('./encodings.js'), require('./standard_fonts.js'),
require('./unicode.js'));
} else {
factory((root.pdfjsCoreEvaluator = {}), root.pdfjsSharedUtil,
root.pdfjsCorePrimitives, root.pdfjsCoreStream, root.pdfjsCoreParser,
root.pdfjsCoreImage, root.pdfjsCoreColorSpace, root.pdfjsCoreMurmurHash3,
root.pdfjsCoreFonts, root.pdfjsCoreFunction, root.pdfjsCorePattern,
root.pdfjsCoreCMap, root.pdfjsCoreMetrics, root.pdfjsCoreBidi,
root.pdfjsCoreEncodings, root.pdfjsCoreStandardFonts,
root.pdfjsCoreUnicode);
}
}(this, function (exports, sharedUtil, corePrimitives, coreStream, coreParser,
coreImage, coreColorSpace, coreMurmurHash3, coreFonts,
coreFunction, corePattern, coreCMap, coreMetrics, coreBidi,
coreEncodings, coreStandardFonts, coreUnicode) {
var FONT_IDENTITY_MATRIX = sharedUtil.FONT_IDENTITY_MATRIX;
var IDENTITY_MATRIX = sharedUtil.IDENTITY_MATRIX;
var UNSUPPORTED_FEATURES = sharedUtil.UNSUPPORTED_FEATURES;
var ImageKind = sharedUtil.ImageKind;
var OPS = sharedUtil.OPS;
var TextRenderingMode = sharedUtil.TextRenderingMode;
var Util = sharedUtil.Util;
var assert = sharedUtil.assert;
var createPromiseCapability = sharedUtil.createPromiseCapability;
var error = sharedUtil.error;
var info = sharedUtil.info;
var isArray = sharedUtil.isArray;
var isNum = sharedUtil.isNum;
var isString = sharedUtil.isString;
var getLookupTableFactory = sharedUtil.getLookupTableFactory;
var warn = sharedUtil.warn;
var Dict = corePrimitives.Dict;
var Name = corePrimitives.Name;
var isCmd = corePrimitives.isCmd;
var isDict = corePrimitives.isDict;
var isName = corePrimitives.isName;
var isRef = corePrimitives.isRef;
var isStream = corePrimitives.isStream;
var DecodeStream = coreStream.DecodeStream;
var JpegStream = coreStream.JpegStream;
var Lexer = coreParser.Lexer;
var Parser = coreParser.Parser;
var isEOF = coreParser.isEOF;
var PDFImage = coreImage.PDFImage;
var ColorSpace = coreColorSpace.ColorSpace;
var MurmurHash3_64 = coreMurmurHash3.MurmurHash3_64;
var ErrorFont = coreFonts.ErrorFont;
var FontFlags = coreFonts.FontFlags;
var Font = coreFonts.Font;
var IdentityToUnicodeMap = coreFonts.IdentityToUnicodeMap;
var ToUnicodeMap = coreFonts.ToUnicodeMap;
var getFontType = coreFonts.getFontType;
var isPDFFunction = coreFunction.isPDFFunction;
var PDFFunction = coreFunction.PDFFunction;
var Pattern = corePattern.Pattern;
var getTilingPatternIR = corePattern.getTilingPatternIR;
var CMapFactory = coreCMap.CMapFactory;
var IdentityCMap = coreCMap.IdentityCMap;
var getMetrics = coreMetrics.getMetrics;
var bidi = coreBidi.bidi;
var WinAnsiEncoding = coreEncodings.WinAnsiEncoding;
var StandardEncoding = coreEncodings.StandardEncoding;
var MacRomanEncoding = coreEncodings.MacRomanEncoding;
var SymbolSetEncoding = coreEncodings.SymbolSetEncoding;
var ZapfDingbatsEncoding = coreEncodings.ZapfDingbatsEncoding;
var getEncoding = coreEncodings.getEncoding;
var getStdFontMap = coreStandardFonts.getStdFontMap;
var getSerifFonts = coreStandardFonts.getSerifFonts;
var getSymbolsFonts = coreStandardFonts.getSymbolsFonts;
var getNormalizedUnicodes = coreUnicode.getNormalizedUnicodes;
var reverseIfRtl = coreUnicode.reverseIfRtl;
var PartialEvaluator = (function PartialEvaluatorClosure() {
/**
 * Evaluates PDF content streams: translates them into operator lists and
 * resolves the resources (fonts, images, patterns, graphics states) they
 * reference. All dependencies are injected by the caller.
 */
function PartialEvaluator(pdfManager, xref, handler, pageIndex,
                          uniquePrefix, idCounters, fontCache) {
  this.pdfManager = pdfManager;     // document-level manager (provides docId)
  this.xref = xref;                 // cross-reference table, fetches Refs
  this.handler = handler;           // message handler ('obj'/'commonobj' sends)
  this.pageIndex = pageIndex;       // index of the page being evaluated
  this.uniquePrefix = uniquePrefix; // optional prefix for generated object ids
  this.idCounters = idCounters;     // shared counters for unique object ids
  this.fontCache = fontCache;       // cache of translated fonts, keyed by Ref
}
// Trying to minimize Date.now() usage: the clock is only consulted every
// CHECK_TIME_EVERY calls to check().
var TIME_SLOT_DURATION_MS = 20;
var CHECK_TIME_EVERY = 100;

// Cooperative time-slicing helper: check() returns true once the current
// 20 ms slot has elapsed, signalling long-running evaluation loops to yield.
function TimeSlotManager() {
  this.reset();
}
TimeSlotManager.prototype = {
  // True when the slot is exhausted. The Date.now() cost is amortized by
  // short-circuiting for the first CHECK_TIME_EVERY - 1 calls.
  check: function TimeSlotManager_check() {
    if (++this.checked < CHECK_TIME_EVERY) {
      return false;
    }
    this.checked = 0;
    return this.endTime <= Date.now();
  },
  // Starts a fresh TIME_SLOT_DURATION_MS slot from "now".
  reset: function TimeSlotManager_reset() {
    this.endTime = Date.now() + TIME_SLOT_DURATION_MS;
    this.checked = 0;
  }
};

// Pre-resolved promise -- presumably used to defer continuations to a
// microtask later in this closure; usage not visible in this chunk.
var deferred = Promise.resolve();

// Values of the /PatternType entry: tiling vs. shading patterns.
var TILING_PATTERN = 1, SHADING_PATTERN = 2;
PartialEvaluator.prototype = {
// Returns true if any ExtGState reachable from `resources` -- directly or
// through nested XObject /Resources -- sets a blend mode (/BM) other than
// 'Normal'. Returns false otherwise, or when `resources` is not a Dict.
// Visited objIds are recorded in `processed` so shared subtrees are only
// fetched once and reference cycles terminate.
hasBlendModes: function PartialEvaluator_hasBlendModes(resources) {
  if (!isDict(resources)) {
    return false;
  }

  // objIds already visited (dedup + cycle guard).
  var processed = Object.create(null);
  if (resources.objId) {
    processed[resources.objId] = true;
  }

  // Breadth-first walk over resource dictionaries.
  var nodes = [resources], xref = this.xref;
  while (nodes.length) {
    var key, i, ii;
    var node = nodes.shift();
    // First check the current resources for blend modes.
    var graphicStates = node.get('ExtGState');
    if (isDict(graphicStates)) {
      var graphicStatesKeys = graphicStates.getKeys();
      for (i = 0, ii = graphicStatesKeys.length; i < ii; i++) {
        key = graphicStatesKeys[i];

        var graphicState = graphicStates.get(key);
        var bm = graphicState.get('BM');
        if (isName(bm) && bm.name !== 'Normal') {
          return true;
        }
      }
    }
    // Descend into the XObjects to look for more resources and blend modes.
    var xObjects = node.get('XObject');
    if (!isDict(xObjects)) {
      continue;
    }
    var xObjectsKeys = xObjects.getKeys();
    for (i = 0, ii = xObjectsKeys.length; i < ii; i++) {
      key = xObjectsKeys[i];

      var xObject = xObjects.getRaw(key);
      if (isRef(xObject)) {
        if (processed[xObject.toString()]) {
          // The XObject has already been processed, and by avoiding a
          // redundant `xref.fetch` we can *significantly* reduce the load
          // time for badly generated PDF files (fixes issue6961.pdf).
          continue;
        }
        xObject = xref.fetch(xObject);
      }
      if (!isStream(xObject)) {
        continue;
      }
      if (xObject.dict.objId) {
        if (processed[xObject.dict.objId]) {
          // stream has objId and is processed already
          continue;
        }
        processed[xObject.dict.objId] = true;
      }
      var xResources = xObject.dict.get('Resources');
      // Checking objId to detect an infinite loop.
      if (isDict(xResources) &&
          (!xResources.objId || !processed[xResources.objId])) {
        nodes.push(xResources);

        if (xResources.objId) {
          processed[xResources.objId] = true;
        }
      }
    }
  }
  return false;
},
// Evaluates a Form XObject into `operatorList`. When the form declares a
// /Group (transparency group) the content is wrapped in beginGroup/endGroup
// ops; the form's operator stream itself is wrapped in
// paintFormXObjectBegin/End and evaluated recursively with the form's own
// /Resources (falling back to the inherited `resources`). Returns the
// promise from the recursive getOperatorList call.
buildFormXObject: function PartialEvaluator_buildFormXObject(resources,
                                                             xobj, smask,
                                                             operatorList,
                                                             task,
                                                             initialState) {
  var matrix = xobj.dict.getArray('Matrix');
  var bbox = xobj.dict.getArray('BBox');
  var group = xobj.dict.get('Group');
  if (group) {
    var groupOptions = {
      matrix: matrix,
      bbox: bbox,
      smask: smask,
      isolated: false,
      knockout: false
    };

    var groupSubtype = group.get('S');
    var colorSpace;
    if (isName(groupSubtype) && groupSubtype.name === 'Transparency') {
      // /I (isolated) and /K (knockout) default to false when absent.
      groupOptions.isolated = (group.get('I') || false);
      groupOptions.knockout = (group.get('K') || false);
      colorSpace = (group.has('CS') ?
        ColorSpace.parse(group.get('CS'), this.xref, resources) : null);
    }

    // Convert a soft-mask backdrop color to RGB in the group color space
    // (DeviceRGB when the group declared none).
    if (smask && smask.backdrop) {
      colorSpace = colorSpace || ColorSpace.singletons.rgb;
      smask.backdrop = colorSpace.getRgb(smask.backdrop, 0);
    }

    operatorList.addOp(OPS.beginGroup, [groupOptions]);
  }

  operatorList.addOp(OPS.paintFormXObjectBegin, [matrix, bbox]);

  return this.getOperatorList(xobj, task,
    (xobj.dict.get('Resources') || resources), operatorList, initialState).
    then(function () {
      operatorList.addOp(OPS.paintFormXObjectEnd, []);

      if (group) {
        operatorList.addOp(OPS.endGroup, [groupOptions]);
      }
    });
},
// Emits the paint operation for an Image XObject or inline image, choosing
// between several paths: image masks are decoded synchronously; small
// non-JPEG inline images are inlined as RGBA data; natively-decodable
// JPEGs are forwarded as-is; everything else is decoded asynchronously via
// PDFImage.buildImage, with the paint op (and a dependency on the new
// object id) queued immediately. Successfully painted ops may be recorded
// in `imageCache` under `cacheKey` for reuse.
buildPaintImageXObject:
    function PartialEvaluator_buildPaintImageXObject(resources, image,
                                                     inline, operatorList,
                                                     cacheKey, imageCache) {
  var self = this;
  var dict = image.dict;
  var w = dict.get('Width', 'W');
  var h = dict.get('Height', 'H');

  if (!(w && isNum(w)) || !(h && isNum(h))) {
    warn('Image dimensions are missing, or not numbers.');
    return;
  }
  if (PDFJS.maxImageSize !== -1 && w * h > PDFJS.maxImageSize) {
    warn('Image exceeded maximum allowed size and was removed.');
    return;
  }

  var imageMask = (dict.get('ImageMask', 'IM') || false);
  var imgData, args;
  if (imageMask) {
    // This depends on a tmpCanvas being filled with the
    // current fillStyle, such that processing the pixel
    // data can't be done here. Instead of creating a
    // complete PDFImage, only read the information needed
    // for later.

    var width = dict.get('Width', 'W');
    var height = dict.get('Height', 'H');
    // 1 bit per pixel, rows padded to a whole byte.
    var bitStrideLength = (width + 7) >> 3;
    var imgArray = image.getBytes(bitStrideLength * height);
    var decode = dict.get('Decode', 'D');
    var inverseDecode = (!!decode && decode[0] > 0);

    imgData = PDFImage.createMask(imgArray, width, height,
                                  image instanceof DecodeStream,
                                  inverseDecode);
    imgData.cached = true;
    args = [imgData];
    operatorList.addOp(OPS.paintImageMaskXObject, args);
    if (cacheKey) {
      imageCache[cacheKey] = {
        fn: OPS.paintImageMaskXObject,
        args: args
      };
    }
    return;
  }

  var softMask = (dict.get('SMask', 'SM') || false);
  var mask = (dict.get('Mask') || false);

  var SMALL_IMAGE_DIMENSIONS = 200;
  // Inlining small images into the queue as RGB data
  if (inline && !softMask && !mask && !(image instanceof JpegStream) &&
      (w + h) < SMALL_IMAGE_DIMENSIONS) {
    var imageObj = new PDFImage(this.xref, resources, image,
                                inline, null, null);
    // We force the use of RGBA_32BPP images here, because we can't handle
    // any other kind.
    imgData = imageObj.createImageData(/* forceRGBA = */ true);
    operatorList.addOp(OPS.paintInlineImageXObject, [imgData]);
    return;
  }

  // If there is no imageMask, create the PDFImage and a lot
  // of image processing can be done here.
  var uniquePrefix = (this.uniquePrefix || '');
  var objId = 'img_' + uniquePrefix + (++this.idCounters.obj);
  operatorList.addDependency(objId);
  args = [objId, w, h];

  if (!softMask && !mask && image instanceof JpegStream &&
      image.isNativelySupported(this.xref, resources)) {
    // These JPEGs don't need any more processing so we can just send it.
    operatorList.addOp(OPS.paintJpegXObject, args);
    this.handler.send('obj',
      [objId, this.pageIndex, 'JpegStream', image.getIR()]);
    return;
  }

  // Asynchronous decode: the decoded data follows as an 'obj' message
  // (transferring the pixel buffer); decode failures send a null payload
  // instead of rejecting, so page rendering can continue.
  PDFImage.buildImage(self.handler, self.xref, resources, image, inline).
    then(function(imageObj) {
      var imgData = imageObj.createImageData(/* forceRGBA = */ false);
      self.handler.send('obj', [objId, self.pageIndex, 'Image', imgData],
                        [imgData.data.buffer]);
    }).then(undefined, function (reason) {
      warn('Unable to decode image: ' + reason);
      self.handler.send('obj', [objId, self.pageIndex, 'Image', null]);
    });

  operatorList.addOp(OPS.paintImageXObject, args);
  if (cacheKey) {
    imageCache[cacheKey] = {
      fn: OPS.paintImageXObject,
      args: args
    };
  }
},
// Converts a /SMask (soft mask) dictionary into options for
// buildFormXObject: subtype (/S), backdrop color (/BC), and -- when a
// transfer function (/TR) is present -- a precomputed 256-entry byte map.
// The mask content (/G) is a Form XObject, evaluated with a clone of the
// current graphics state; returns that evaluation's promise.
handleSMask: function PartialEvaluator_handleSmask(smask, resources,
                                                   operatorList, task,
                                                   stateManager) {
  var smaskContent = smask.get('G');
  var smaskOptions = {
    subtype: smask.get('S').name,
    backdrop: smask.get('BC')
  };

  // The SMask might have a alpha/luminosity value transfer function --
  // we will build a map of integer values in range 0..255 to be fast.
  var transferObj = smask.get('TR');
  if (isPDFFunction(transferObj)) {
    var transferFn = PDFFunction.parse(this.xref, transferObj);
    var transferMap = new Uint8Array(256);
    var tmp = new Float32Array(1);
    for (var i = 0; i < 256; i++) {
      // Evaluate the function on the normalized value, then rescale the
      // result back to a byte (truncating).
      tmp[0] = i / 255;
      transferFn(tmp, 0, tmp, 0);
      transferMap[i] = (tmp[0] * 255) | 0;
    }
    smaskOptions.transferMap = transferMap;
  }

  return this.buildFormXObject(resources, smaskContent, smaskOptions,
                               operatorList, task, stateManager.state.clone());
},
// Compiles a tiling pattern: evaluates the pattern's content stream into
// its own operator list, then appends the paint operation `fn` -- carrying
// the pattern's intermediate representation -- to the parent
// `operatorList`. Returns the promise for the sub-evaluation.
handleTilingType:
    function PartialEvaluator_handleTilingType(fn, args, resources,
                                               pattern, patternDict,
                                               operatorList, task) {
  // Create an IR of the pattern code.
  var tilingOpList = new OperatorList();
  // Merge the available resources, to prevent issues when the patternDict
  // is missing some /Resources entries (fixes issue6541.pdf).
  var resourcesArray = [patternDict.get('Resources'), resources];
  var patternResources = Dict.merge(this.xref, resourcesArray);

  return this.getOperatorList(pattern, task, patternResources,
                              tilingOpList).then(function () {
    // Add the dependencies to the parent operator list so they are
    // resolved before sub operator list is executed synchronously.
    operatorList.addDependencies(tilingOpList.dependencies);
    operatorList.addOp(fn, getTilingPatternIR({
      fnArray: tilingOpList.fnArray,
      argsArray: tilingOpList.argsArray
    }, patternDict, args));
  });
},
// Loads the font named by `fontArgs` (a Tf-style [Name, size] pair) or
// referenced directly by `fontRef`, stores the translated font on
// `state.font`, sends it to the handler, and resolves with its loaded
// name. Type3 fonts additionally have their glyph data evaluated; on a
// Type3 failure an ErrorFont is substituted (with an UnsupportedFeature
// notification) rather than rejecting.
handleSetFont:
    function PartialEvaluator_handleSetFont(resources, fontArgs, fontRef,
                                            operatorList, task, state) {
  // TODO(mack): Not needed?
  var fontName;
  if (fontArgs) {
    // Copy before reading -- fontArgs is caller-owned.
    fontArgs = fontArgs.slice();
    fontName = fontArgs[0].name;
  }

  var self = this;
  return this.loadFont(fontName, fontRef, this.xref, resources).then(
      function (translated) {
    if (!translated.font.isType3Font) {
      return translated;
    }
    // Type3 glyphs are themselves content streams and need evaluation.
    return translated.loadType3Data(self, resources, operatorList, task).
      then(function () {
      return translated;
    }, function (reason) {
      // Error in the font data -- sending unsupported feature notification.
      self.handler.send('UnsupportedFeature',
                        {featureId: UNSUPPORTED_FEATURES.font});
      return new TranslatedFont('g_font_error',
        new ErrorFont('Type3 font load error: ' + reason), translated.font);
    });
  }).then(function (translated) {
    state.font = translated.font;
    translated.send(self.handler);
    return translated.loadedName;
  });
},
handleText: function PartialEvaluator_handleText(chars, state) {
var font = state.font;
var glyphs = font.charsToGlyphs(chars);
var isAddToPathSet = !!(state.textRenderingMode &
TextRenderingMode.ADD_TO_PATH_FLAG);
if (font.data && (isAddToPathSet || PDFJS.disableFontFace)) {
var buildPath = function (fontChar) {
if (!font.renderer.hasBuiltPath(fontChar)) {
var path = font.renderer.getPathJs(fontChar);
this.handler.send('commonobj', [
font.loadedName + '_path_' + fontChar,
'FontPath',
path
]);
}
}.bind(this);
for (var i = 0, ii = glyphs.length; i < ii; i++) {
var glyph = glyphs[i];
buildPath(glyph.fontChar);
// If the glyph has an accent we need to build a path for its
// fontChar too, otherwise CanvasGraphics_paintChar will fail.
var accent = glyph.accent;
if (accent && accent.fontChar) {
buildPath(accent.fontChar);
}
}
}
return glyphs;
},
setGState: function PartialEvaluator_setGState(resources, gState,
operatorList, task,
xref, stateManager) {
// This array holds the converted/processed state data.
var gStateObj = [];
var gStateKeys = gState.getKeys();
var self = this;
var promise = Promise.resolve();
for (var i = 0, ii = gStateKeys.length; i < ii; i++) {
var key = gStateKeys[i];
var value = gState.get(key);
switch (key) {
case 'Type':
break;
case 'LW':
case 'LC':
case 'LJ':
case 'ML':
case 'D':
case 'RI':
case 'FL':
case 'CA':
case 'ca':
gStateObj.push([key, value]);
break;
case 'Font':
promise = promise.then(function () {
return self.handleSetFont(resources, null, value[0], operatorList,
task, stateManager.state).
then(function (loadedName) {
operatorList.addDependency(loadedName);
gStateObj.push([key, [loadedName, value[1]]]);
});
});
break;
case 'BM':
gStateObj.push([key, value]);
break;
case 'SMask':
if (isName(value) && value.name === 'None') {
gStateObj.push([key, false]);
break;
}
if (isDict(value)) {
promise = promise.then(function (dict) {
return self.handleSMask(dict, resources, operatorList,
task, stateManager);
}.bind(this, value));
gStateObj.push([key, true]);
} else {
warn('Unsupported SMask type');
}
break;
// Only generate info log messages for the following since
// they are unlikely to have a big impact on the rendering.
case 'OP':
case 'op':
case 'OPM':
case 'BG':
case 'BG2':
case 'UCR':
case 'UCR2':
case 'TR':
case 'TR2':
case 'HT':
case 'SM':
case 'SA':
case 'AIS':
case 'TK':
// TODO implement these operators.
info('graphic state operator ' + key);
break;
default:
info('Unknown graphic state operator ' + key);
break;
}
}
return promise.then(function () {
if (gStateObj.length > 0) {
operatorList.addOp(OPS.setGState, [gStateObj]);
}
});
},
// Resolves `fontName` (looked up in /Font) or `font` (a Ref) to a promise
// for a TranslatedFont. Results are cached on this.fontCache keyed by the
// font Ref; structurally identical fonts reached through different Refs
// are deduplicated via a hash-based alias table stored on their shared
// FontDescriptor. This method never rejects: every failure path resolves
// with an ErrorFont so rendering can proceed.
loadFont: function PartialEvaluator_loadFont(fontName, font, xref,
                                             resources) {
  // Fallback used by every failure path below.
  function errorFont() {
    return Promise.resolve(new TranslatedFont('g_font_error',
      new ErrorFont('Font ' + fontName + ' is not available'), font));
  }
  var fontRef;
  if (font) { // Loading by ref.
    assert(isRef(font));
    fontRef = font;
  } else { // Loading by name.
    var fontRes = resources.get('Font');
    if (fontRes) {
      fontRef = fontRes.getRaw(fontName);
    } else {
      warn('fontRes not available');
      return errorFont();
    }
  }
  if (!fontRef) {
    warn('fontRef not available');
    return errorFont();
  }

  if (this.fontCache.has(fontRef)) {
    return this.fontCache.get(fontRef);
  }

  font = xref.fetchIfRef(fontRef);
  if (!isDict(font)) {
    return errorFont();
  }

  // We are holding font.translated references just for fontRef that are not
  // dictionaries (Dict). See explanation below.
  if (font.translated) {
    return font.translated;
  }

  var fontCapability = createPromiseCapability();

  var preEvaluatedFont = this.preEvaluateFont(font, xref);
  var descriptor = preEvaluatedFont.descriptor;
  var fontID = fontRef.num + '_' + fontRef.gen;
  if (isDict(descriptor)) {
    // Alias table: fonts whose pre-evaluation hash matches an earlier
    // font on the same descriptor share that font's cache entry and ID.
    if (!descriptor.fontAliases) {
      descriptor.fontAliases = Object.create(null);
    }

    var fontAliases = descriptor.fontAliases;
    var hash = preEvaluatedFont.hash;
    if (fontAliases[hash]) {
      var aliasFontRef = fontAliases[hash].aliasRef;
      if (aliasFontRef && this.fontCache.has(aliasFontRef)) {
        this.fontCache.putAlias(fontRef, aliasFontRef);
        return this.fontCache.get(fontRef);
      }
    }

    if (!fontAliases[hash]) {
      fontAliases[hash] = {
        fontID: Font.getFontID()
      };
    }

    fontAliases[hash].aliasRef = fontRef;
    fontID = fontAliases[hash].fontID;
  }

  // Workaround for bad PDF generators that don't reference fonts
  // properly, i.e. by not using an object identifier.
  // Check if the fontRef is a Dict (as opposed to a standard object),
  // in which case we don't cache the font and instead reference it by
  // fontName in font.loadedName below.
  var fontRefIsDict = isDict(fontRef);
  if (!fontRefIsDict) {
    this.fontCache.put(fontRef, fontCapability.promise);
  }

  // Keep track of each font we translated so the caller can
  // load them asynchronously before calling display on a page.
  font.loadedName = 'g_' + this.pdfManager.docId + '_f' + (fontRefIsDict ?
    fontName.replace(/\W/g, '') : fontID);

  font.translated = fontCapability.promise;

  // TODO move promises into translate font
  // Translation may throw synchronously; normalize both outcomes into a
  // promise so the handling below is uniform.
  var translatedPromise;
  try {
    translatedPromise = Promise.resolve(
      this.translateFont(preEvaluatedFont, xref));
  } catch (e) {
    translatedPromise = Promise.reject(e);
  }

  var self = this;
  translatedPromise.then(function (translatedFont) {
    if (translatedFont.fontType !== undefined) {
      var xrefFontStats = xref.stats.fontTypes;
      xrefFontStats[translatedFont.fontType] = true;
    }

    fontCapability.resolve(new TranslatedFont(font.loadedName,
      translatedFont, font));
  }, function (reason) {
    // TODO fontCapability.reject?
    // Error in the font data -- sending unsupported feature notification.
    self.handler.send('UnsupportedFeature',
                      {featureId: UNSUPPORTED_FEATURES.font});

    try {
      // error, but it's still nice to have font type reported
      var descriptor = preEvaluatedFont.descriptor;
      var fontFile3 = descriptor && descriptor.get('FontFile3');
      var subtype = fontFile3 && fontFile3.get('Subtype');
      var fontType = getFontType(preEvaluatedFont.type,
                                 subtype && subtype.name);
      var xrefFontStats = xref.stats.fontTypes;
      xrefFontStats[fontType] = true;
    } catch (ex) { }

    // Resolve (not reject) with an ErrorFont so callers always get a
    // usable TranslatedFont.
    fontCapability.resolve(new TranslatedFont(font.loadedName,
      new ErrorFont(reason instanceof Error ? reason.message : reason),
      font));
  });
  return fontCapability.promise;
},
buildPath: function PartialEvaluator_buildPath(operatorList, fn, args) {
var lastIndex = operatorList.length - 1;
if (!args) {
args = [];
}
if (lastIndex < 0 ||
operatorList.fnArray[lastIndex] !== OPS.constructPath) {
operatorList.addOp(OPS.constructPath, [[fn], args]);
} else {
var opArgs = operatorList.argsArray[lastIndex];
opArgs[0].push(fn);
Array.prototype.push.apply(opArgs[1], args);
}
},
// Handles SCN/scn operators whose last argument is a pattern Name: tiling
// patterns (/PatternType 1) are compiled via handleTilingType, shading
// patterns (/PatternType 2) are parsed to an IR and emitted directly, and
// unknown pattern types reject. If the name does not resolve to a pattern
// the operator is emitted unchanged. Always returns a promise.
handleColorN: function PartialEvaluator_handleColorN(operatorList, fn, args,
                                                     cs, patterns, resources, task, xref) {
  // compile tiling patterns
  var patternName = args[args.length - 1];
  // SCN/scn applies patterns along with normal colors
  var pattern;
  if (isName(patternName) &&
      (pattern = patterns.get(patternName.name))) {
    var dict = (isStream(pattern) ? pattern.dict : pattern);
    var typeNum = dict.get('PatternType');

    if (typeNum === TILING_PATTERN) {
      // When the pattern color space has a base space, the leading args
      // encode a color in it; otherwise no underlying color is passed.
      var color = cs.base ? cs.base.getRgb(args, 0) : null;
      return this.handleTilingType(fn, color, resources, pattern,
                                   dict, operatorList, task);
    } else if (typeNum === SHADING_PATTERN) {
      var shading = dict.get('Shading');
      var matrix = dict.get('Matrix');
      pattern = Pattern.parseShading(shading, matrix, xref, resources,
                                     this.handler);
      operatorList.addOp(fn, pattern.getIR());
      return Promise.resolve();
    } else {
      return Promise.reject('Unknown PatternType: ' + typeNum);
    }
  }
  // TODO shall we fail here?
  operatorList.addOp(fn, args);
  return Promise.resolve();
},
getOperatorList: function PartialEvaluator_getOperatorList(stream,
task,
resources,
operatorList,
initialState) {
var self = this;
var xref = this.xref;
var imageCache = Object.create(null);
assert(operatorList);
resources = (resources || Dict.empty);
var xobjs = (resources.get('XObject') || Dict.empty);
var patterns = (resources.get('Pattern') || Dict.empty);
var stateManager = new StateManager(initialState || new EvalState());
var preprocessor = new EvaluatorPreprocessor(stream, xref, stateManager);
var timeSlotManager = new TimeSlotManager();
return new Promise(function next(resolve, reject) {
task.ensureNotTerminated();
timeSlotManager.reset();
var stop, operation = {}, i, ii, cs;
while (!(stop = timeSlotManager.check())) {
// The arguments parsed by read() are used beyond this loop, so we
// cannot reuse the same array on each iteration. Therefore we pass
// in |null| as the initial value (see the comment on
// EvaluatorPreprocessor_read() for why).
operation.args = null;
if (!(preprocessor.read(operation))) {
break;
}
var args = operation.args;
var fn = operation.fn;
switch (fn | 0) {
case OPS.paintXObject:
if (args[0].code) {
break;
}
// eagerly compile XForm objects
var name = args[0].name;
if (!name) {
warn('XObject must be referred to by name.');
continue;
}
if (imageCache[name] !== undefined) {
operatorList.addOp(imageCache[name].fn, imageCache[name].args);
args = null;
continue;
}
var xobj = xobjs.get(name);
if (xobj) {
assert(isStream(xobj), 'XObject should be a stream');
var type = xobj.dict.get('Subtype');
assert(isName(type),
'XObject should have a Name subtype');
if (type.name === 'Form') {
stateManager.save();
return self.buildFormXObject(resources, xobj, null,
operatorList, task,
stateManager.state.clone()).
then(function () {
stateManager.restore();
next(resolve, reject);
}, reject);
} else if (type.name === 'Image') {
self.buildPaintImageXObject(resources, xobj, false,
operatorList, name, imageCache);
args = null;
continue;
} else if (type.name === 'PS') {
// PostScript XObjects are unused when viewing documents.
// See section 4.7.1 of Adobe's PDF reference.
info('Ignored XObject subtype PS');
continue;
} else {
error('Unhandled XObject subtype ' + type.name);
}
}
break;
case OPS.setFont:
var fontSize = args[1];
// eagerly collect all fonts
return self.handleSetFont(resources, args, null, operatorList,
task, stateManager.state).
then(function (loadedName) {
operatorList.addDependency(loadedName);
operatorList.addOp(OPS.setFont, [loadedName, fontSize]);
next(resolve, reject);
}, reject);
case OPS.endInlineImage:
var cacheKey = args[0].cacheKey;
if (cacheKey) {
var cacheEntry = imageCache[cacheKey];
if (cacheEntry !== undefined) {
operatorList.addOp(cacheEntry.fn, cacheEntry.args);
args = null;
continue;
}
}
self.buildPaintImageXObject(resources, args[0], true,
operatorList, cacheKey, imageCache);
args = null;
continue;
case OPS.showText:
args[0] = self.handleText(args[0], stateManager.state);
break;
case OPS.showSpacedText:
var arr = args[0];
var combinedGlyphs = [];
var arrLength = arr.length;
var state = stateManager.state;
for (i = 0; i < arrLength; ++i) {
var arrItem = arr[i];
if (isString(arrItem)) {
Array.prototype.push.apply(combinedGlyphs,
self.handleText(arrItem, state));
} else if (isNum(arrItem)) {
combinedGlyphs.push(arrItem);
}
}
args[0] = combinedGlyphs;
fn = OPS.showText;
break;
case OPS.nextLineShowText:
operatorList.addOp(OPS.nextLine);
args[0] = self.handleText(args[0], stateManager.state);
fn = OPS.showText;
break;
case OPS.nextLineSetSpacingShowText:
operatorList.addOp(OPS.nextLine);
operatorList.addOp(OPS.setWordSpacing, [args.shift()]);
operatorList.addOp(OPS.setCharSpacing, [args.shift()]);
args[0] = self.handleText(args[0], stateManager.state);
fn = OPS.showText;
break;
case OPS.setTextRenderingMode:
stateManager.state.textRenderingMode = args[0];
break;
case OPS.setFillColorSpace:
stateManager.state.fillColorSpace =
ColorSpace.parse(args[0], xref, resources);
continue;
case OPS.setStrokeColorSpace:
stateManager.state.strokeColorSpace =
ColorSpace.parse(args[0], xref, resources);
continue;
case OPS.setFillColor:
cs = stateManager.state.fillColorSpace;
args = cs.getRgb(args, 0);
fn = OPS.setFillRGBColor;
break;
case OPS.setStrokeColor:
cs = stateManager.state.strokeColorSpace;
args = cs.getRgb(args, 0);
fn = OPS.setStrokeRGBColor;
break;
case OPS.setFillGray:
stateManager.state.fillColorSpace = ColorSpace.singletons.gray;
args = ColorSpace.singletons.gray.getRgb(args, 0);
fn = OPS.setFillRGBColor;
break;
case OPS.setStrokeGray:
stateManager.state.strokeColorSpace = ColorSpace.singletons.gray;
args = ColorSpace.singletons.gray.getRgb(args, 0);
fn = OPS.setStrokeRGBColor;
break;
case OPS.setFillCMYKColor:
stateManager.state.fillColorSpace = ColorSpace.singletons.cmyk;
args = ColorSpace.singletons.cmyk.getRgb(args, 0);
fn = OPS.setFillRGBColor;
break;
case OPS.setStrokeCMYKColor:
stateManager.state.strokeColorSpace = ColorSpace.singletons.cmyk;
args = ColorSpace.singletons.cmyk.getRgb(args, 0);
fn = OPS.setStrokeRGBColor;
break;
case OPS.setFillRGBColor:
stateManager.state.fillColorSpace = ColorSpace.singletons.rgb;
args = ColorSpace.singletons.rgb.getRgb(args, 0);
break;
case OPS.setStrokeRGBColor:
stateManager.state.strokeColorSpace = ColorSpace.singletons.rgb;
args = ColorSpace.singletons.rgb.getRgb(args, 0);
break;
case OPS.setFillColorN:
cs = stateManager.state.fillColorSpace;
if (cs.name === 'Pattern') {
return self.handleColorN(operatorList, OPS.setFillColorN,
args, cs, patterns, resources, task, xref).then(function() {
next(resolve, reject);
}, reject);
}
args = cs.getRgb(args, 0);
fn = OPS.setFillRGBColor;
break;
case OPS.setStrokeColorN:
cs = stateManager.state.strokeColorSpace;
if (cs.name === 'Pattern') {
return self.handleColorN(operatorList, OPS.setStrokeColorN,
args, cs, patterns, resources, task, xref).then(function() {
next(resolve, reject);
}, reject);
}
args = cs.getRgb(args, 0);
fn = OPS.setStrokeRGBColor;
break;
case OPS.shadingFill:
var shadingRes = resources.get('Shading');
if (!shadingRes) {
error('No shading resource found');
}
var shading = shadingRes.get(args[0].name);
if (!shading) {
error('No shading object found');
}
var shadingFill = Pattern.parseShading(shading, null, xref,
resources, self.handler);
var patternIR = shadingFill.getIR();
args = [patternIR];
fn = OPS.shadingFill;
break;
case OPS.setGState:
var dictName = args[0];
var extGState = resources.get('ExtGState');
if (!isDict(extGState) || !extGState.has(dictName.name)) {
break;
}
var gState = extGState.get(dictName.name);
return self.setGState(resources, gState, operatorList, task,
xref, stateManager).then(function() {
next(resolve, reject);
}, reject);
case OPS.moveTo:
case OPS.lineTo:
case OPS.curveTo:
case OPS.curveTo2:
case OPS.curveTo3:
case OPS.closePath:
self.buildPath(operatorList, fn, args);
continue;
case OPS.rectangle:
self.buildPath(operatorList, fn, args);
continue;
case OPS.markPoint:
case OPS.markPointProps:
case OPS.beginMarkedContent:
case OPS.beginMarkedContentProps:
case OPS.endMarkedContent:
case OPS.beginCompat:
case OPS.endCompat:
// Ignore operators where the corresponding handlers are known to
// be no-op in CanvasGraphics (display/canvas.js). This prevents
// serialization errors and is also a bit more efficient.
// We could also try to serialize all objects in a general way,
// e.g. as done in https://github.com/mozilla/pdf.js/pull/6266,
// but doing so is meaningless without knowing the semantics.
continue;
default:
// Note: Let's hope that the ignored operator does not have any
// non-serializable arguments, otherwise postMessage will throw
// "An object could not be cloned.".
}
operatorList.addOp(fn, args);
}
if (stop) {
deferred.then(function () {
next(resolve, reject);
}, reject);
return;
}
// Some PDFs don't close all restores inside object/form.
// Closing those for them.
for (i = 0, ii = preprocessor.savedStatesDepth; i < ii; i++) {
operatorList.addOp(OPS.restore, []);
}
resolve();
});
},
    /**
     * Extracts the text content of a content stream: an array of positioned
     * text-run items plus the font styles they reference.
     *
     * @param stream - the content stream to parse.
     * @param task - worker task, checked so parsing aborts on termination.
     * @param resources - Resources dict (or Ref to one) for the stream.
     * @param stateManager - optional StateManager carrying graphics/text
     *   state; a fresh one is created when omitted. A caller-supplied one is
     *   used for recursive Form XObject parsing below.
     * @param normalizeWhitespace - when true, ASCII whitespace in the output
     *   strings is replaced with plain spaces (0x20).
     * @returns {Promise} resolved with { items, styles }.
     */
    getTextContent:
      function PartialEvaluator_getTextContent(stream, task, resources,
                                               stateManager,
                                               normalizeWhitespace) {
        stateManager = (stateManager || new StateManager(new TextState()));
        var WhitespaceRegexp = /\s/g;
        var textContent = {
          items: [],
          styles: Object.create(null)
        };
        // Accumulator for the text run currently being built; it is pushed
        // onto textContent.items (via flushTextContentItem) whenever the run
        // is broken by a font change, large movement, XObject, etc.
        var textContentItem = {
          initialized: false,
          str: [],
          width: 0,
          height: 0,
          vertical: false,
          lastAdvanceWidth: 0,
          lastAdvanceHeight: 0,
          textAdvanceScale: 0,
          spaceWidth: 0,
          fakeSpaceMin: Infinity,
          fakeMultiSpaceMin: Infinity,
          fakeMultiSpaceMax: -0,
          textRunBreakAllowed: false,
          transform: null,
          fontName: null
        };
        // Heuristic thresholds, expressed as multiples of the font's space
        // width, that decide when an inter-glyph gap becomes one or more
        // "fake" space characters in the extracted text.
        var SPACE_FACTOR = 0.3;
        var MULTI_SPACE_FACTOR = 1.5;
        var MULTI_SPACE_FACTOR_MAX = 4;
        var self = this;
        var xref = this.xref;
        resources = (xref.fetchIfRef(resources) || Dict.empty);
        // The xobj is parsed iff it's needed, e.g. if there is a `DO` cmd.
        var xobjs = null;
        var xobjsCache = Object.create(null);
        var preprocessor = new EvaluatorPreprocessor(stream, xref, stateManager);
        var textState;
        // Lazily (re)initializes textContentItem from the current text state,
        // registering the active font's style the first time it is seen.
        function ensureTextContentItem() {
          if (textContentItem.initialized) {
            return textContentItem;
          }
          var font = textState.font;
          if (!(font.loadedName in textContent.styles)) {
            textContent.styles[font.loadedName] = {
              fontFamily: font.fallbackName,
              ascent: font.ascent,
              descent: font.descent,
              vertical: font.vertical
            };
          }
          textContentItem.fontName = font.loadedName;
          // 9.4.4 Text Space Details
          var tsm = [textState.fontSize * textState.textHScale, 0,
                     0, textState.fontSize,
                     0, textState.textRise];
          if (font.isType3Font &&
              textState.fontMatrix !== FONT_IDENTITY_MATRIX &&
              textState.fontSize === 1) {
            var glyphHeight = font.bbox[3] - font.bbox[1];
            if (glyphHeight > 0) {
              glyphHeight = glyphHeight * textState.fontMatrix[3];
              tsm[3] *= glyphHeight;
            }
          }
          var trm = Util.transform(textState.ctm,
                                   Util.transform(textState.textMatrix, tsm));
          textContentItem.transform = trm;
          // For horizontal text the height is fixed by the transform and the
          // width accumulates per glyph; vertical text is the mirror case.
          if (!font.vertical) {
            textContentItem.width = 0;
            textContentItem.height = Math.sqrt(trm[2] * trm[2] + trm[3] * trm[3]);
            textContentItem.vertical = false;
          } else {
            textContentItem.width = Math.sqrt(trm[0] * trm[0] + trm[1] * trm[1]);
            textContentItem.height = 0;
            textContentItem.vertical = true;
          }
          var a = textState.textLineMatrix[0];
          var b = textState.textLineMatrix[1];
          var scaleLineX = Math.sqrt(a * a + b * b);
          a = textState.ctm[0];
          b = textState.ctm[1];
          var scaleCtmX = Math.sqrt(a * a + b * b);
          textContentItem.textAdvanceScale = scaleCtmX * scaleLineX;
          textContentItem.lastAdvanceWidth = 0;
          textContentItem.lastAdvanceHeight = 0;
          var spaceWidth = font.spaceWidth / 1000 * textState.fontSize;
          if (spaceWidth) {
            textContentItem.spaceWidth = spaceWidth;
            textContentItem.fakeSpaceMin = spaceWidth * SPACE_FACTOR;
            textContentItem.fakeMultiSpaceMin = spaceWidth * MULTI_SPACE_FACTOR;
            textContentItem.fakeMultiSpaceMax =
              spaceWidth * MULTI_SPACE_FACTOR_MAX;
            // It's okay for monospace fonts to fake as much space as needed.
            textContentItem.textRunBreakAllowed = !font.isMonospace;
          } else {
            textContentItem.spaceWidth = 0;
            textContentItem.fakeSpaceMin = Infinity;
            textContentItem.fakeMultiSpaceMin = Infinity;
            textContentItem.fakeMultiSpaceMax = 0;
            textContentItem.textRunBreakAllowed = false;
          }
          textContentItem.initialized = true;
          return textContentItem;
        }
        function replaceWhitespace(str) {
          // Replaces all whitespaces with standard spaces (0x20), to avoid
          // alignment issues between the textLayer and the canvas if the text
          // contains e.g. tabs (fixes issue6612.pdf).
          var i = 0, ii = str.length, code;
          while (i < ii && (code = str.charCodeAt(i)) >= 0x20 && code <= 0x7F) {
            i++;
          }
          return (i < ii ? str.replace(WhitespaceRegexp, ' ') : str);
        }
        // Finalizes one chunk: joins its glyphs, applies the bidi algorithm
        // and returns the plain serializable text item.
        function runBidiTransform(textChunk) {
          var str = textChunk.str.join('');
          var bidiResult = PDFJS.bidi(str, -1, textChunk.vertical);
          return {
            str: (normalizeWhitespace ? replaceWhitespace(bidiResult.str) :
                                        bidiResult.str),
            dir: bidiResult.dir,
            width: textChunk.width,
            height: textChunk.height,
            transform: textChunk.transform,
            fontName: textChunk.fontName
          };
        }
        // Loads/translates the given font and installs it in the text state.
        function handleSetFont(fontName, fontRef) {
          return self.loadFont(fontName, fontRef, xref, resources).
            then(function (translated) {
              textState.font = translated.font;
              textState.fontMatrix = translated.font.fontMatrix ||
                FONT_IDENTITY_MATRIX;
            });
        }
        // Appends the glyphs of `chars` to the current chunk, advancing the
        // text matrix and accumulating the run's width (or height, for
        // vertical fonts).
        function buildTextContentItem(chars) {
          var font = textState.font;
          var textChunk = ensureTextContentItem();
          var width = 0;
          var height = 0;
          var glyphs = font.charsToGlyphs(chars);
          var defaultVMetrics = font.defaultVMetrics;
          for (var i = 0; i < glyphs.length; i++) {
            var glyph = glyphs[i];
            var vMetricX = null;
            var vMetricY = null;
            var glyphWidth = null;
            if (font.vertical) {
              if (glyph.vmetric) {
                glyphWidth = glyph.vmetric[0];
                vMetricX = glyph.vmetric[1];
                vMetricY = glyph.vmetric[2];
              } else {
                glyphWidth = glyph.width;
                vMetricX = glyph.width * 0.5;
                vMetricY = defaultVMetrics[2];
              }
            } else {
              glyphWidth = glyph.width;
            }
            var glyphUnicode = glyph.unicode;
            var NormalizedUnicodes = getNormalizedUnicodes();
            if (NormalizedUnicodes[glyphUnicode] !== undefined) {
              glyphUnicode = NormalizedUnicodes[glyphUnicode];
            }
            glyphUnicode = reverseIfRtl(glyphUnicode);
            // The following will calculate the x and y of the individual glyphs.
            // if (font.vertical) {
            //   tsm[4] -= vMetricX * Math.abs(textState.fontSize) *
            //             textState.fontMatrix[0];
            //   tsm[5] -= vMetricY * textState.fontSize *
            //             textState.fontMatrix[0];
            // }
            // var trm = Util.transform(textState.textMatrix, tsm);
            // var pt = Util.applyTransform([trm[4], trm[5]], textState.ctm);
            // var x = pt[0];
            // var y = pt[1];
            var charSpacing = textState.charSpacing;
            if (glyph.isSpace) {
              var wordSpacing = textState.wordSpacing;
              charSpacing += wordSpacing;
              if (wordSpacing > 0) {
                addFakeSpaces(wordSpacing, textChunk.str);
              }
            }
            var tx = 0;
            var ty = 0;
            if (!font.vertical) {
              var w0 = glyphWidth * textState.fontMatrix[0];
              tx = (w0 * textState.fontSize + charSpacing) *
                textState.textHScale;
              width += tx;
            } else {
              var w1 = glyphWidth * textState.fontMatrix[0];
              ty = w1 * textState.fontSize + charSpacing;
              height += ty;
            }
            textState.translateTextMatrix(tx, ty);
            textChunk.str.push(glyphUnicode);
          }
          if (!font.vertical) {
            textChunk.lastAdvanceWidth = width;
            textChunk.width += width * textChunk.textAdvanceScale;
          } else {
            textChunk.lastAdvanceHeight = height;
            textChunk.height += Math.abs(height * textChunk.textAdvanceScale);
          }
          return textChunk;
        }
        // Emits space characters approximating a positional gap of `width`
        // (in text-space units) using the heuristic thresholds above.
        function addFakeSpaces(width, strBuf) {
          if (width < textContentItem.fakeSpaceMin) {
            return;
          }
          if (width < textContentItem.fakeMultiSpaceMin) {
            strBuf.push(' ');
            return;
          }
          var fakeSpaces = Math.round(width / textContentItem.spaceWidth);
          while (fakeSpaces-- > 0) {
            strBuf.push(' ');
          }
        }
        // Pushes the pending chunk (if any) onto textContent.items and
        // resets the accumulator.
        function flushTextContentItem() {
          if (!textContentItem.initialized) {
            return;
          }
          textContent.items.push(runBidiTransform(textContentItem));
          textContentItem.initialized = false;
          textContentItem.str.length = 0;
        }
        // Parse the stream in time slices so long content streams do not
        // block the worker; `next` re-enters the loop after each pause and
        // after every asynchronous step (font load, form XObject, gState).
        var timeSlotManager = new TimeSlotManager();
        return new Promise(function next(resolve, reject) {
          task.ensureNotTerminated();
          timeSlotManager.reset();
          var stop, operation = {}, args = [];
          while (!(stop = timeSlotManager.check())) {
            // The arguments parsed by read() are not used beyond this loop, so
            // we can reuse the same array on every iteration, thus avoiding
            // unnecessary allocations.
            args.length = 0;
            operation.args = args;
            if (!(preprocessor.read(operation))) {
              break;
            }
            textState = stateManager.state;
            var fn = operation.fn;
            args = operation.args;
            var advance;
            switch (fn | 0) {
              case OPS.setFont:
                flushTextContentItem();
                textState.fontSize = args[1];
                return handleSetFont(args[0].name).then(function() {
                  next(resolve, reject);
                }, reject);
              case OPS.setTextRise:
                flushTextContentItem();
                textState.textRise = args[0];
                break;
              case OPS.setHScale:
                flushTextContentItem();
                textState.textHScale = args[0] / 100;
                break;
              case OPS.setLeading:
                flushTextContentItem();
                textState.leading = args[0];
                break;
              case OPS.moveText:
                // Optimization to treat same line movement as advance
                var isSameTextLine = !textState.font ? false :
                  ((textState.font.vertical ? args[0] : args[1]) === 0);
                advance = args[0] - args[1];
                if (isSameTextLine && textContentItem.initialized &&
                    advance > 0 &&
                    advance <= textContentItem.fakeMultiSpaceMax) {
                  textState.translateTextLineMatrix(args[0], args[1]);
                  textContentItem.width +=
                    (args[0] - textContentItem.lastAdvanceWidth);
                  textContentItem.height +=
                    (args[1] - textContentItem.lastAdvanceHeight);
                  var diff = (args[0] - textContentItem.lastAdvanceWidth) -
                             (args[1] - textContentItem.lastAdvanceHeight);
                  addFakeSpaces(diff, textContentItem.str);
                  break;
                }
                flushTextContentItem();
                textState.translateTextLineMatrix(args[0], args[1]);
                textState.textMatrix = textState.textLineMatrix.slice();
                break;
              case OPS.setLeadingMoveText:
                flushTextContentItem();
                textState.leading = -args[1];
                textState.translateTextLineMatrix(args[0], args[1]);
                textState.textMatrix = textState.textLineMatrix.slice();
                break;
              case OPS.nextLine:
                flushTextContentItem();
                textState.carriageReturn();
                break;
              case OPS.setTextMatrix:
                flushTextContentItem();
                textState.setTextMatrix(args[0], args[1], args[2], args[3],
                                        args[4], args[5]);
                textState.setTextLineMatrix(args[0], args[1], args[2], args[3],
                                            args[4], args[5]);
                break;
              case OPS.setCharSpacing:
                textState.charSpacing = args[0];
                break;
              case OPS.setWordSpacing:
                textState.wordSpacing = args[0];
                break;
              case OPS.beginText:
                flushTextContentItem();
                textState.textMatrix = IDENTITY_MATRIX.slice();
                textState.textLineMatrix = IDENTITY_MATRIX.slice();
                break;
              case OPS.showSpacedText:
                var items = args[0];
                var offset;
                for (var j = 0, jj = items.length; j < jj; j++) {
                  if (typeof items[j] === 'string') {
                    buildTextContentItem(items[j]);
                  } else {
                    ensureTextContentItem();
                    // PDF Specification 5.3.2 states:
                    // The number is expressed in thousandths of a unit of text
                    // space.
                    // This amount is subtracted from the current horizontal or
                    // vertical coordinate, depending on the writing mode.
                    // In the default coordinate system, a positive adjustment
                    // has the effect of moving the next glyph painted either to
                    // the left or down by the given amount.
                    advance = items[j] * textState.fontSize / 1000;
                    var breakTextRun = false;
                    if (textState.font.vertical) {
                      offset = advance *
                        (textState.textHScale * textState.textMatrix[2] +
                         textState.textMatrix[3]);
                      textState.translateTextMatrix(0, advance);
                      breakTextRun = textContentItem.textRunBreakAllowed &&
                                     advance > textContentItem.fakeMultiSpaceMax;
                      if (!breakTextRun) {
                        // Value needs to be added to height to paint down.
                        textContentItem.height += offset;
                      }
                    } else {
                      advance = -advance;
                      offset = advance * (
                        textState.textHScale * textState.textMatrix[0] +
                        textState.textMatrix[1]);
                      textState.translateTextMatrix(advance, 0);
                      breakTextRun = textContentItem.textRunBreakAllowed &&
                                     advance > textContentItem.fakeMultiSpaceMax;
                      if (!breakTextRun) {
                        // Value needs to be subtracted from width to paint left.
                        textContentItem.width += offset;
                      }
                    }
                    if (breakTextRun) {
                      flushTextContentItem();
                    } else if (advance > 0) {
                      addFakeSpaces(advance, textContentItem.str);
                    }
                  }
                }
                break;
              case OPS.showText:
                buildTextContentItem(args[0]);
                break;
              case OPS.nextLineShowText:
                flushTextContentItem();
                textState.carriageReturn();
                buildTextContentItem(args[0]);
                break;
              case OPS.nextLineSetSpacingShowText:
                flushTextContentItem();
                textState.wordSpacing = args[0];
                textState.charSpacing = args[1];
                textState.carriageReturn();
                buildTextContentItem(args[2]);
                break;
              case OPS.paintXObject:
                flushTextContentItem();
                if (args[0].code) {
                  break;
                }
                if (!xobjs) {
                  xobjs = (resources.get('XObject') || Dict.empty);
                }
                var name = args[0].name;
                // Single-entry cache: repeated paints of the same XObject
                // reuse the previously extracted text.
                if (xobjsCache.key === name) {
                  if (xobjsCache.texts) {
                    Util.appendToArray(textContent.items, xobjsCache.texts.items);
                    Util.extendObj(textContent.styles, xobjsCache.texts.styles);
                  }
                  break;
                }
                var xobj = xobjs.get(name);
                if (!xobj) {
                  break;
                }
                assert(isStream(xobj), 'XObject should be a stream');
                var type = xobj.dict.get('Subtype');
                assert(isName(type),
                       'XObject should have a Name subtype');
                if ('Form' !== type.name) {
                  xobjsCache.key = name;
                  xobjsCache.texts = null;
                  break;
                }
                stateManager.save();
                var matrix = xobj.dict.get('Matrix');
                if (isArray(matrix) && matrix.length === 6) {
                  stateManager.transform(matrix);
                }
                // Recurse into the form with the shared stateManager, then
                // merge its text back and resume parsing this stream.
                return self.getTextContent(xobj, task,
                  xobj.dict.get('Resources') || resources, stateManager,
                  normalizeWhitespace).then(function (formTextContent) {
                    Util.appendToArray(textContent.items, formTextContent.items);
                    Util.extendObj(textContent.styles, formTextContent.styles);
                    stateManager.restore();
                    xobjsCache.key = name;
                    xobjsCache.texts = formTextContent;
                    next(resolve, reject);
                  }, reject);
              case OPS.setGState:
                flushTextContentItem();
                var dictName = args[0];
                var extGState = resources.get('ExtGState');
                if (!isDict(extGState) || !extGState.has(dictName.name)) {
                  break;
                }
                // Only the Font entry of the gState matters for text
                // extraction; everything else is ignored here.
                var gsStateMap = extGState.get(dictName.name);
                var gsStateFont = null;
                for (var key in gsStateMap) {
                  if (key === 'Font') {
                    assert(!gsStateFont);
                    gsStateFont = gsStateMap[key];
                  }
                }
                if (gsStateFont) {
                  textState.fontSize = gsStateFont[1];
                  return handleSetFont(gsStateFont[0]).then(function() {
                    next(resolve, reject);
                  }, reject);
                }
                break;
            } // switch
          } // while
          if (stop) {
            // Time slice exhausted: yield to the event loop via `deferred`
            // (presumably a module-level resolved promise defined earlier in
            // the file) and continue parsing afterwards.
            deferred.then(function () {
              next(resolve, reject);
            }, reject);
            return;
          }
          flushTextContentItem();
          resolve(textContent);
        });
      },
    /**
     * Populates `properties` with the font's auxiliary data structures:
     * the ToUnicode map, CID system info / CIDToGIDMap (composite fonts),
     * and the default encoding plus Differences array.
     *
     * @param dict - the (descendant, for Type0) font dict.
     * @param baseDict - the top-level font dict (same as `dict` for
     *   non-composite fonts).
     * @param xref - XRef used to resolve indirect Differences entries.
     * @param properties - the font properties object being built (mutated).
     */
    extractDataStructures: function
      partialEvaluatorExtractDataStructures(dict, baseDict,
                                            xref, properties) {
      // 9.10.2
      var toUnicode = (dict.get('ToUnicode') || baseDict.get('ToUnicode'));
      if (toUnicode) {
        properties.toUnicode = this.readToUnicode(toUnicode);
      }
      if (properties.composite) {
        // CIDSystemInfo helps to match CID to glyphs
        var cidSystemInfo = dict.get('CIDSystemInfo');
        if (isDict(cidSystemInfo)) {
          properties.cidSystemInfo = {
            registry: cidSystemInfo.get('Registry'),
            ordering: cidSystemInfo.get('Ordering'),
            supplement: cidSystemInfo.get('Supplement')
          };
        }
        var cidToGidMap = dict.get('CIDToGIDMap');
        if (isStream(cidToGidMap)) {
          properties.cidToGidMap = this.readCidToGidMap(cidToGidMap);
        }
      }
      // Based on 9.6.6 of the spec the encoding can come from multiple places
      // and depends on the font type. The base encoding and differences are
      // read here, but the encoding that is actually used is chosen during
      // glyph mapping in the font.
      // TODO: Loading the built in encoding in the font would allow the
      // differences to be merged in here not require us to hold on to it.
      var differences = [];
      var baseEncodingName = null;
      var encoding;
      if (dict.has('Encoding')) {
        encoding = dict.get('Encoding');
        if (isDict(encoding)) {
          baseEncodingName = encoding.get('BaseEncoding');
          baseEncodingName = (isName(baseEncodingName) ?
            baseEncodingName.name : null);
          // Load the differences between the base and original
          if (encoding.has('Differences')) {
            var diffEncoding = encoding.get('Differences');
            var index = 0;
            // Differences is a run-length style array: a number sets the
            // next char code, each following name maps successive codes.
            for (var j = 0, jj = diffEncoding.length; j < jj; j++) {
              var data = xref.fetchIfRef(diffEncoding[j]);
              if (isNum(data)) {
                index = data;
              } else if (isName(data)) {
                differences[index++] = data.name;
              } else {
                error('Invalid entry in \'Differences\' array: ' + data);
              }
            }
          }
        } else if (isName(encoding)) {
          baseEncodingName = encoding.name;
        } else {
          error('Encoding is not a Name nor a Dict');
        }
        // According to table 114 if the encoding is a named encoding it must be
        // one of these predefined encodings.
        if ((baseEncodingName !== 'MacRomanEncoding' &&
             baseEncodingName !== 'MacExpertEncoding' &&
             baseEncodingName !== 'WinAnsiEncoding')) {
          baseEncodingName = null;
        }
      }
      if (baseEncodingName) {
        properties.defaultEncoding = getEncoding(baseEncodingName).slice();
      } else {
        encoding = (properties.type === 'TrueType' ?
                    WinAnsiEncoding : StandardEncoding);
        // The Symbolic attribute can be misused for regular fonts
        // Heuristic: we have to check if the font is a standard one also
        if (!!(properties.flags & FontFlags.Symbolic)) {
          encoding = MacRomanEncoding;
          if (!properties.file) {
            if (/Symbol/i.test(properties.name)) {
              encoding = SymbolSetEncoding;
            } else if (/Dingbats/i.test(properties.name)) {
              encoding = ZapfDingbatsEncoding;
            }
          }
        }
        properties.defaultEncoding = encoding;
      }
      properties.differences = differences;
      properties.baseEncodingName = baseEncodingName;
      properties.dict = dict;
    },
readToUnicode: function PartialEvaluator_readToUnicode(toUnicode) {
var cmap, cmapObj = toUnicode;
if (isName(cmapObj)) {
cmap = CMapFactory.create(cmapObj,
{ url: PDFJS.cMapUrl, packed: PDFJS.cMapPacked }, null);
if (cmap instanceof IdentityCMap) {
return new IdentityToUnicodeMap(0, 0xFFFF);
}
return new ToUnicodeMap(cmap.getMap());
} else if (isStream(cmapObj)) {
cmap = CMapFactory.create(cmapObj,
{ url: PDFJS.cMapUrl, packed: PDFJS.cMapPacked }, null);
if (cmap instanceof IdentityCMap) {
return new IdentityToUnicodeMap(0, 0xFFFF);
}
var map = new Array(cmap.length);
// Convert UTF-16BE
// NOTE: cmap can be a sparse array, so use forEach instead of for(;;)
// to iterate over all keys.
cmap.forEach(function(charCode, token) {
var str = [];
for (var k = 0; k < token.length; k += 2) {
var w1 = (token.charCodeAt(k) << 8) | token.charCodeAt(k + 1);
if ((w1 & 0xF800) !== 0xD800) { // w1 < 0xD800 || w1 > 0xDFFF
str.push(w1);
continue;
}
k += 2;
var w2 = (token.charCodeAt(k) << 8) | token.charCodeAt(k + 1);
str.push(((w1 & 0x3ff) << 10) + (w2 & 0x3ff) + 0x10000);
}
map[charCode] = String.fromCharCode.apply(String, str);
});
return new ToUnicodeMap(map);
}
return null;
},
readCidToGidMap: function PartialEvaluator_readCidToGidMap(cidToGidStream) {
// Extract the encoding from the CIDToGIDMap
var glyphsData = cidToGidStream.getBytes();
// Set encoding 0 to later verify the font has an encoding
var result = [];
for (var j = 0, jj = glyphsData.length; j < jj; j++) {
var glyphID = (glyphsData[j++] << 8) | glyphsData[j];
if (glyphID === 0) {
continue;
}
var code = j >> 1;
result[code] = glyphID;
}
return result;
},
    /**
     * Fills `properties` with glyph widths (and vertical metrics for
     * vertical composite fonts), then runs a monospace-detection heuristic.
     *
     * Composite fonts use the W/DW (and W2/DW2) arrays; simple fonts use
     * Widths + FirstChar, falling back to the base-14 metrics by BaseFont.
     *
     * @param dict - the (descendant, for Type0) font dict.
     * @param xref - XRef used to resolve indirect width entries.
     * @param descriptor - the FontDescriptor dict (for MissingWidth).
     * @param properties - the font properties object being built (mutated).
     */
    extractWidths: function PartialEvaluator_extractWidths(dict, xref,
                                                           descriptor,
                                                           properties) {
      var glyphsWidths = [];
      var defaultWidth = 0;
      var glyphsVMetrics = [];
      var defaultVMetrics;
      var i, ii, j, jj, start, code, widths;
      if (properties.composite) {
        defaultWidth = dict.get('DW') || 1000;
        widths = dict.get('W');
        if (widths) {
          // W array entries come in two forms (PDF 32000-1, 9.7.4.3):
          //   start [w1 w2 ...]  — consecutive CIDs from `start`, or
          //   first last w       — the range first..last all get width w.
          // Note `i` is advanced inside the loop body to consume each form.
          for (i = 0, ii = widths.length; i < ii; i++) {
            start = widths[i++];
            code = xref.fetchIfRef(widths[i]);
            if (isArray(code)) {
              for (j = 0, jj = code.length; j < jj; j++) {
                glyphsWidths[start++] = code[j];
              }
            } else {
              var width = widths[++i];
              for (j = start; j <= code; j++) {
                glyphsWidths[j] = width;
              }
            }
          }
        }
        if (properties.vertical) {
          var vmetrics = (dict.get('DW2') || [880, -1000]);
          defaultVMetrics = [vmetrics[1], defaultWidth * 0.5, vmetrics[0]];
          vmetrics = dict.get('W2');
          if (vmetrics) {
            // W2 uses the same two forms as W, but with metric triplets
            // [w1y, v1x, v1y] per CID instead of a single width.
            for (i = 0, ii = vmetrics.length; i < ii; i++) {
              start = vmetrics[i++];
              code = xref.fetchIfRef(vmetrics[i]);
              if (isArray(code)) {
                for (j = 0, jj = code.length; j < jj; j++) {
                  glyphsVMetrics[start++] = [code[j++], code[j++], code[j]];
                }
              } else {
                var vmetric = [vmetrics[++i], vmetrics[++i], vmetrics[++i]];
                for (j = start; j <= code; j++) {
                  glyphsVMetrics[j] = vmetric;
                }
              }
            }
          }
        }
      } else {
        var firstChar = properties.firstChar;
        widths = dict.get('Widths');
        if (widths) {
          j = firstChar;
          for (i = 0, ii = widths.length; i < ii; i++) {
            glyphsWidths[j++] = widths[i];
          }
          defaultWidth = (parseFloat(descriptor.get('MissingWidth')) || 0);
        } else {
          // Trying get the BaseFont metrics (see comment above).
          var baseFontName = dict.get('BaseFont');
          if (isName(baseFontName)) {
            var metrics = this.getBaseFontMetrics(baseFontName.name);
            glyphsWidths = this.buildCharCodeToWidth(metrics.widths,
                                                     properties);
            defaultWidth = metrics.defaultWidth;
          }
        }
      }
      // Heuristic: detection of monospace font by checking all non-zero widths
      var isMonospace = true;
      var firstWidth = defaultWidth;
      for (var glyph in glyphsWidths) {
        var glyphWidth = glyphsWidths[glyph];
        if (!glyphWidth) {
          continue;
        }
        if (!firstWidth) {
          firstWidth = glyphWidth;
          continue;
        }
        if (firstWidth !== glyphWidth) {
          isMonospace = false;
          break;
        }
      }
      if (isMonospace) {
        properties.flags |= FontFlags.FixedPitch;
      }
      properties.defaultWidth = defaultWidth;
      properties.widths = glyphsWidths;
      properties.defaultVMetrics = defaultVMetrics;
      properties.vmetrics = glyphsVMetrics;
    },
isSerifFont: function PartialEvaluator_isSerifFont(baseFontName) {
// Simulating descriptor flags attribute
var fontNameWoStyle = baseFontName.split('-')[0];
return (fontNameWoStyle in getSerifFonts()) ||
(fontNameWoStyle.search(/serif/gi) !== -1);
},
getBaseFontMetrics: function PartialEvaluator_getBaseFontMetrics(name) {
var defaultWidth = 0;
var widths = [];
var monospace = false;
var stdFontMap = getStdFontMap();
var lookupName = (stdFontMap[name] || name);
var Metrics = getMetrics();
if (!(lookupName in Metrics)) {
// Use default fonts for looking up font metrics if the passed
// font is not a base font
if (this.isSerifFont(name)) {
lookupName = 'Times-Roman';
} else {
lookupName = 'Helvetica';
}
}
var glyphWidths = Metrics[lookupName];
if (isNum(glyphWidths)) {
defaultWidth = glyphWidths;
monospace = true;
} else {
widths = glyphWidths(); // expand lazy widths array
}
return {
defaultWidth: defaultWidth,
monospace: monospace,
widths: widths
};
},
buildCharCodeToWidth:
function PartialEvaluator_bulildCharCodeToWidth(widthsByGlyphName,
properties) {
var widths = Object.create(null);
var differences = properties.differences;
var encoding = properties.defaultEncoding;
for (var charCode = 0; charCode < 256; charCode++) {
if (charCode in differences &&
widthsByGlyphName[differences[charCode]]) {
widths[charCode] = widthsByGlyphName[differences[charCode]];
continue;
}
if (charCode in encoding && widthsByGlyphName[encoding[charCode]]) {
widths[charCode] = widthsByGlyphName[encoding[charCode]];
continue;
}
}
return widths;
},
    /**
     * Performs a light pre-parse of a font dict: resolves the descendant
     * font of Type0 (composite) fonts and, when a FontDescriptor is present,
     * computes a hash over Encoding/ToUnicode/Widths so identical fonts can
     * be deduplicated before the expensive translateFont step.
     *
     * @param dict - the font dict.
     * @param xref - XRef used to resolve the descendant font reference.
     * @returns {Object} { descriptor, dict, baseDict, composite, type, hash }
     *   — `dict` is the descendant font for composite fonts, `baseDict` the
     *   original top-level dict.
     */
    preEvaluateFont: function PartialEvaluator_preEvaluateFont(dict, xref) {
      var baseDict = dict;
      var type = dict.get('Subtype');
      assert(isName(type), 'invalid font Subtype');
      var composite = false;
      var uint8array;
      if (type.name === 'Type0') {
        // If font is a composite
        //  - get the descendant font
        //  - set the type according to the descendant font
        //  - get the FontDescriptor from the descendant font
        var df = dict.get('DescendantFonts');
        if (!df) {
          error('Descendant fonts are not specified');
        }
        dict = (isArray(df) ? xref.fetchIfRef(df[0]) : df);
        type = dict.get('Subtype');
        assert(isName(type), 'invalid font Subtype');
        composite = true;
      }
      var descriptor = dict.get('FontDescriptor');
      if (descriptor) {
        // `var hash` is hoisted to function scope: when there is no
        // descriptor it stays undefined and the return below yields ''.
        var hash = new MurmurHash3_64();
        var encoding = baseDict.getRaw('Encoding');
        if (isName(encoding)) {
          hash.update(encoding.name);
        } else if (isRef(encoding)) {
          hash.update(encoding.num + '_' + encoding.gen);
        } else if (isDict(encoding)) {
          var keys = encoding.getKeys();
          for (var i = 0, ii = keys.length; i < ii; i++) {
            var entry = encoding.getRaw(keys[i]);
            if (isName(entry)) {
              hash.update(entry.name);
            } else if (isRef(entry)) {
              hash.update(entry.num + '_' + entry.gen);
            } else if (isArray(entry)) { // 'Differences' entry.
              // Ideally we should check the contents of the array, but to avoid
              // parsing it here and then again in |extractDataStructures|,
              // we only use the array length for now (fixes bug1157493.pdf).
              hash.update(entry.length.toString());
            }
          }
        }
        var toUnicode = dict.get('ToUnicode') || baseDict.get('ToUnicode');
        if (isStream(toUnicode)) {
          // Hash the raw stream bytes without decoding the full stream.
          var stream = toUnicode.str || toUnicode;
          uint8array = stream.buffer ?
            new Uint8Array(stream.buffer.buffer, 0, stream.bufferLength) :
            new Uint8Array(stream.bytes.buffer,
                           stream.start, stream.end - stream.start);
          hash.update(uint8array);
        } else if (isName(toUnicode)) {
          hash.update(toUnicode.name);
        }
        var widths = dict.get('Widths') || baseDict.get('Widths');
        if (widths) {
          uint8array = new Uint8Array(new Uint32Array(widths).buffer);
          hash.update(uint8array);
        }
      }
      return {
        descriptor: descriptor,
        dict: dict,
        baseDict: baseDict,
        composite: composite,
        type: type.name,
        hash: hash ? hash.hexdigest() : ''
      };
    },
    /**
     * Builds a Font object from the result of preEvaluateFont: synthesizes a
     * descriptor for descriptor-less base-14/Type3 fonts, reconciles
     * FontName vs BaseFont, gathers descriptor metrics, loads the CMap for
     * composite fonts, and extracts encodings and widths.
     *
     * @param preEvaluatedFont - object produced by preEvaluateFont.
     * @param xref - XRef for resolving indirect entries.
     * @returns {Font} the translated font.
     */
    translateFont: function PartialEvaluator_translateFont(preEvaluatedFont,
                                                           xref) {
      var baseDict = preEvaluatedFont.baseDict;
      var dict = preEvaluatedFont.dict;
      var composite = preEvaluatedFont.composite;
      var descriptor = preEvaluatedFont.descriptor;
      var type = preEvaluatedFont.type;
      var maxCharIndex = (composite ? 0xFFFF : 0xFF);
      var properties;
      if (!descriptor) {
        if (type === 'Type3') {
          // FontDescriptor is only required for Type3 fonts when the document
          // is a tagged pdf. Create a barbebones one to get by.
          descriptor = new Dict(null);
          descriptor.set('FontName', Name.get(type));
          descriptor.set('FontBBox', dict.get('FontBBox'));
        } else {
          // Before PDF 1.5 if the font was one of the base 14 fonts, having a
          // FontDescriptor was not required.
          // This case is here for compatibility.
          var baseFontName = dict.get('BaseFont');
          if (!isName(baseFontName)) {
            error('Base font is not specified');
          }
          // Using base font name as a font name.
          baseFontName = baseFontName.name.replace(/[,_]/g, '-');
          var metrics = this.getBaseFontMetrics(baseFontName);
          // Simulating descriptor flags attribute
          var fontNameWoStyle = baseFontName.split('-')[0];
          var flags =
            (this.isSerifFont(fontNameWoStyle) ? FontFlags.Serif : 0) |
            (metrics.monospace ? FontFlags.FixedPitch : 0) |
            (getSymbolsFonts()[fontNameWoStyle] ? FontFlags.Symbolic :
                                                  FontFlags.Nonsymbolic);
          properties = {
            type: type,
            name: baseFontName,
            widths: metrics.widths,
            defaultWidth: metrics.defaultWidth,
            flags: flags,
            firstChar: 0,
            lastChar: maxCharIndex
          };
          this.extractDataStructures(dict, dict, xref, properties);
          properties.widths = this.buildCharCodeToWidth(metrics.widths,
                                                        properties);
          // Base-14 fallback: no font file is embedded (second arg null).
          return new Font(baseFontName, null, properties);
        }
      }
      // According to the spec if 'FontDescriptor' is declared, 'FirstChar',
      // 'LastChar' and 'Widths' should exist too, but some PDF encoders seem
      // to ignore this rule when a variant of a standart font is used.
      // TODO Fill the width array depending on which of the base font this is
      // a variant.
      var firstChar = (dict.get('FirstChar') || 0);
      var lastChar = (dict.get('LastChar') || maxCharIndex);
      var fontName = descriptor.get('FontName');
      var baseFont = dict.get('BaseFont');
      // Some bad PDFs have a string as the font name.
      if (isString(fontName)) {
        fontName = Name.get(fontName);
      }
      if (isString(baseFont)) {
        baseFont = Name.get(baseFont);
      }
      if (type !== 'Type3') {
        var fontNameStr = fontName && fontName.name;
        var baseFontStr = baseFont && baseFont.name;
        if (fontNameStr !== baseFontStr) {
          info('The FontDescriptor\'s FontName is "' + fontNameStr +
               '" but should be the same as the Font\'s BaseFont "' +
               baseFontStr + '"');
          // Workaround for cases where e.g. fontNameStr = 'Arial' and
          // baseFontStr = 'Arial,Bold' (needed when no font file is embedded).
          if (fontNameStr && baseFontStr &&
              baseFontStr.indexOf(fontNameStr) === 0) {
            fontName = baseFont;
          }
        }
      }
      fontName = (fontName || baseFont);
      assert(isName(fontName), 'invalid font name');
      var fontFile = descriptor.get('FontFile', 'FontFile2', 'FontFile3');
      if (fontFile) {
        if (fontFile.dict) {
          // `subtype`, `length1` and `length2` are function-scoped (var
          // hoisting); when no font file is embedded they stay undefined in
          // the properties object below.
          var subtype = fontFile.dict.get('Subtype');
          if (subtype) {
            subtype = subtype.name;
          }
          var length1 = fontFile.dict.get('Length1');
          var length2 = fontFile.dict.get('Length2');
        }
      }
      properties = {
        type: type,
        name: fontName.name,
        subtype: subtype,
        file: fontFile,
        length1: length1,
        length2: length2,
        loadedName: baseDict.loadedName,
        composite: composite,
        wideChars: composite,
        fixedPitch: false,
        fontMatrix: (dict.get('FontMatrix') || FONT_IDENTITY_MATRIX),
        firstChar: firstChar || 0,
        lastChar: (lastChar || maxCharIndex),
        bbox: descriptor.get('FontBBox'),
        ascent: descriptor.get('Ascent'),
        descent: descriptor.get('Descent'),
        xHeight: descriptor.get('XHeight'),
        capHeight: descriptor.get('CapHeight'),
        flags: descriptor.get('Flags'),
        italicAngle: descriptor.get('ItalicAngle'),
        coded: false
      };
      if (composite) {
        // Composite fonts map char codes through a CMap (possibly vertical).
        var cidEncoding = baseDict.get('Encoding');
        if (isName(cidEncoding)) {
          properties.cidEncoding = cidEncoding.name;
        }
        properties.cMap = CMapFactory.create(cidEncoding,
          { url: PDFJS.cMapUrl, packed: PDFJS.cMapPacked }, null);
        properties.vertical = properties.cMap.vertical;
      }
      this.extractDataStructures(dict, baseDict, xref, properties);
      this.extractWidths(dict, xref, descriptor, properties);
      if (type === 'Type3') {
        properties.isType3Font = true;
      }
      return new Font(fontName.name, fontFile, properties);
    }
};
return PartialEvaluator;
})();
var TranslatedFont = (function TranslatedFontClosure() {
  /**
   * Wraps a translated Font together with its originating dict, tracking
   * whether it has been sent to the main thread and (for Type3 fonts)
   * whether its glyph procedures have been compiled.
   */
  function TranslatedFont(loadedName, font, dict) {
    this.loadedName = loadedName;
    this.font = font;
    this.dict = dict;
    // Promise once loadType3Data has started; null until then.
    this.type3Loaded = null;
    this.sent = false;
  }
  TranslatedFont.prototype = {
    // Transfers the font data to the main thread exactly once.
    send: function (handler) {
      if (this.sent) {
        return;
      }
      var fontData = this.font.exportData();
      handler.send('commonobj', [
        this.loadedName,
        'Font',
        fontData
      ]);
      this.sent = true;
    },
    // Compiles every CharProcs glyph stream of a Type3 font into an
    // operator list, sequentially, and stores the result on the font.
    // Returns a promise that is cached so the work happens only once.
    loadType3Data: function (evaluator, resources, parentOperatorList, task) {
      assert(this.font.isType3Font);
      if (this.type3Loaded) {
        return this.type3Loaded;
      }
      var translatedFont = this.font;
      var loadCharProcsPromise = Promise.resolve();
      var charProcs = this.dict.get('CharProcs');
      var fontResources = this.dict.get('Resources') || resources;
      var charProcKeys = charProcs.getKeys();
      var charProcOperatorList = Object.create(null);
      for (var i = 0, n = charProcKeys.length; i < n; ++i) {
        // Chain the glyphs one after another; `.bind` freezes the current
        // key so each link of the chain processes its own glyph.
        loadCharProcsPromise = loadCharProcsPromise.then(function (key) {
          var glyphStream = charProcs.get(key);
          var operatorList = new OperatorList();
          return evaluator.getOperatorList(glyphStream, task, fontResources,
                                           operatorList).then(function () {
            charProcOperatorList[key] = operatorList.getIR();
            // Add the dependencies to the parent operator list so they are
            // resolved before sub operator list is executed synchronously.
            parentOperatorList.addDependencies(operatorList.dependencies);
          }, function (reason) {
            // A missing/broken glyph degrades to an empty operator list
            // rather than failing the whole font.
            warn('Type3 font resource \"' + key + '\" is not available');
            var operatorList = new OperatorList();
            charProcOperatorList[key] = operatorList.getIR();
          });
        }.bind(this, charProcKeys[i]));
      }
      this.type3Loaded = loadCharProcsPromise.then(function () {
        translatedFont.charProcOperatorList = charProcOperatorList;
      });
      return this.type3Loaded;
    }
  };
  return TranslatedFont;
})();
var OperatorList = (function OperatorListClosure() {
var CHUNK_SIZE = 1000;
var CHUNK_SIZE_ABOUT = CHUNK_SIZE - 5; // close to chunk size
function getTransfers(queue) {
var transfers = [];
var fnArray = queue.fnArray, argsArray = queue.argsArray;
for (var i = 0, ii = queue.length; i < ii; i++) {
switch (fnArray[i]) {
case OPS.paintInlineImageXObject:
case OPS.paintInlineImageXObjectGroup:
case OPS.paintImageMaskXObject:
var arg = argsArray[i][0]; // first param in imgData
if (!arg.cached) {
transfers.push(arg.data.buffer);
}
break;
}
}
return transfers;
}
function OperatorList(intent, messageHandler, pageIndex) {
this.messageHandler = messageHandler;
this.fnArray = [];
this.argsArray = [];
this.dependencies = Object.create(null);
this._totalLength = 0;
this.pageIndex = pageIndex;
this.intent = intent;
}
OperatorList.prototype = {
get length() {
return this.argsArray.length;
},
/**
* @returns {number} The total length of the entire operator list,
* since `this.length === 0` after flushing.
*/
get totalLength() {
return (this._totalLength + this.length);
},
addOp: function(fn, args) {
this.fnArray.push(fn);
this.argsArray.push(args);
if (this.messageHandler) {
if (this.fnArray.length >= CHUNK_SIZE) {
this.flush();
} else if (this.fnArray.length >= CHUNK_SIZE_ABOUT &&
(fn === OPS.restore || fn === OPS.endText)) {
// heuristic to flush on boundary of restore or endText
this.flush();
}
}
},
addDependency: function(dependency) {
if (dependency in this.dependencies) {
return;
}
this.dependencies[dependency] = true;
this.addOp(OPS.dependency, [dependency]);
},
addDependencies: function(dependencies) {
for (var key in dependencies) {
this.addDependency(key);
}
},
addOpList: function(opList) {
Util.extendObj(this.dependencies, opList.dependencies);
for (var i = 0, ii = opList.length; i < ii; i++) {
this.addOp(opList.fnArray[i], opList.argsArray[i]);
}
},
getIR: function() {
return {
fnArray: this.fnArray,
argsArray: this.argsArray,
length: this.length
};
},
flush: function(lastChunk) {
if (this.intent !== 'oplist') {
new QueueOptimizer().optimize(this);
}
var transfers = getTransfers(this);
var length = this.length;
this._totalLength += length;
this.messageHandler.send('RenderPageChunk', {
operatorList: {
fnArray: this.fnArray,
argsArray: this.argsArray,
lastChunk: lastChunk,
length: length
},
pageIndex: this.pageIndex,
intent: this.intent
}, transfers);
this.dependencies = Object.create(null);
this.fnArray.length = 0;
this.argsArray.length = 0;
}
};
return OperatorList;
})();
var StateManager = (function StateManagerClosure() {
function StateManager(initialState) {
this.state = initialState;
this.stateStack = [];
}
StateManager.prototype = {
save: function () {
var old = this.state;
this.stateStack.push(this.state);
this.state = old.clone();
},
restore: function () {
var prev = this.stateStack.pop();
if (prev) {
this.state = prev;
}
},
transform: function (args) {
this.state.ctm = Util.transform(this.state.ctm, args);
}
};
return StateManager;
})();
var TextState = (function TextStateClosure() {
function TextState() {
this.ctm = new Float32Array(IDENTITY_MATRIX);
this.fontSize = 0;
this.font = null;
this.fontMatrix = FONT_IDENTITY_MATRIX;
this.textMatrix = IDENTITY_MATRIX.slice();
this.textLineMatrix = IDENTITY_MATRIX.slice();
this.charSpacing = 0;
this.wordSpacing = 0;
this.leading = 0;
this.textHScale = 1;
this.textRise = 0;
}
TextState.prototype = {
setTextMatrix: function TextState_setTextMatrix(a, b, c, d, e, f) {
var m = this.textMatrix;
m[0] = a; m[1] = b; m[2] = c; m[3] = d; m[4] = e; m[5] = f;
},
setTextLineMatrix: function TextState_setTextMatrix(a, b, c, d, e, f) {
var m = this.textLineMatrix;
m[0] = a; m[1] = b; m[2] = c; m[3] = d; m[4] = e; m[5] = f;
},
translateTextMatrix: function TextState_translateTextMatrix(x, y) {
var m = this.textMatrix;
m[4] = m[0] * x + m[2] * y + m[4];
m[5] = m[1] * x + m[3] * y + m[5];
},
translateTextLineMatrix: function TextState_translateTextMatrix(x, y) {
var m = this.textLineMatrix;
m[4] = m[0] * x + m[2] * y + m[4];
m[5] = m[1] * x + m[3] * y + m[5];
},
calcRenderMatrix: function TextState_calcRendeMatrix(ctm) {
// 9.4.4 Text Space Details
var tsm = [this.fontSize * this.textHScale, 0,
0, this.fontSize,
0, this.textRise];
return Util.transform(ctm, Util.transform(this.textMatrix, tsm));
},
carriageReturn: function TextState_carriageReturn() {
this.translateTextLineMatrix(0, -this.leading);
this.textMatrix = this.textLineMatrix.slice();
},
clone: function TextState_clone() {
var clone = Object.create(this);
clone.textMatrix = this.textMatrix.slice();
clone.textLineMatrix = this.textLineMatrix.slice();
clone.fontMatrix = this.fontMatrix.slice();
return clone;
}
};
return TextState;
})();
var EvalState = (function EvalStateClosure() {
function EvalState() {
this.ctm = new Float32Array(IDENTITY_MATRIX);
this.font = null;
this.textRenderingMode = TextRenderingMode.FILL;
this.fillColorSpace = ColorSpace.singletons.gray;
this.strokeColorSpace = ColorSpace.singletons.gray;
}
EvalState.prototype = {
clone: function CanvasExtraState_clone() {
return Object.create(this);
},
};
return EvalState;
})();
var EvaluatorPreprocessor = (function EvaluatorPreprocessorClosure() {
  // Specifies properties for each command
  //
  // If variableArgs === true: [0, `numArgs`] expected
  // If variableArgs === false: exactly `numArgs` expected
  var getOPMap = getLookupTableFactory(function (t) {
    // Graphic state
    t['w'] = { id: OPS.setLineWidth, numArgs: 1, variableArgs: false };
    t['J'] = { id: OPS.setLineCap, numArgs: 1, variableArgs: false };
    t['j'] = { id: OPS.setLineJoin, numArgs: 1, variableArgs: false };
    t['M'] = { id: OPS.setMiterLimit, numArgs: 1, variableArgs: false };
    t['d'] = { id: OPS.setDash, numArgs: 2, variableArgs: false };
    t['ri'] = { id: OPS.setRenderingIntent, numArgs: 1, variableArgs: false };
    t['i'] = { id: OPS.setFlatness, numArgs: 1, variableArgs: false };
    t['gs'] = { id: OPS.setGState, numArgs: 1, variableArgs: false };
    t['q'] = { id: OPS.save, numArgs: 0, variableArgs: false };
    t['Q'] = { id: OPS.restore, numArgs: 0, variableArgs: false };
    t['cm'] = { id: OPS.transform, numArgs: 6, variableArgs: false };
    // Path
    t['m'] = { id: OPS.moveTo, numArgs: 2, variableArgs: false };
    t['l'] = { id: OPS.lineTo, numArgs: 2, variableArgs: false };
    t['c'] = { id: OPS.curveTo, numArgs: 6, variableArgs: false };
    t['v'] = { id: OPS.curveTo2, numArgs: 4, variableArgs: false };
    t['y'] = { id: OPS.curveTo3, numArgs: 4, variableArgs: false };
    t['h'] = { id: OPS.closePath, numArgs: 0, variableArgs: false };
    t['re'] = { id: OPS.rectangle, numArgs: 4, variableArgs: false };
    t['S'] = { id: OPS.stroke, numArgs: 0, variableArgs: false };
    t['s'] = { id: OPS.closeStroke, numArgs: 0, variableArgs: false };
    t['f'] = { id: OPS.fill, numArgs: 0, variableArgs: false };
    t['F'] = { id: OPS.fill, numArgs: 0, variableArgs: false };
    t['f*'] = { id: OPS.eoFill, numArgs: 0, variableArgs: false };
    t['B'] = { id: OPS.fillStroke, numArgs: 0, variableArgs: false };
    t['B*'] = { id: OPS.eoFillStroke, numArgs: 0, variableArgs: false };
    t['b'] = { id: OPS.closeFillStroke, numArgs: 0, variableArgs: false };
    t['b*'] = { id: OPS.closeEOFillStroke, numArgs: 0, variableArgs: false };
    t['n'] = { id: OPS.endPath, numArgs: 0, variableArgs: false };
    // Clipping
    t['W'] = { id: OPS.clip, numArgs: 0, variableArgs: false };
    t['W*'] = { id: OPS.eoClip, numArgs: 0, variableArgs: false };
    // Text
    t['BT'] = { id: OPS.beginText, numArgs: 0, variableArgs: false };
    t['ET'] = { id: OPS.endText, numArgs: 0, variableArgs: false };
    t['Tc'] = { id: OPS.setCharSpacing, numArgs: 1, variableArgs: false };
    t['Tw'] = { id: OPS.setWordSpacing, numArgs: 1, variableArgs: false };
    t['Tz'] = { id: OPS.setHScale, numArgs: 1, variableArgs: false };
    t['TL'] = { id: OPS.setLeading, numArgs: 1, variableArgs: false };
    t['Tf'] = { id: OPS.setFont, numArgs: 2, variableArgs: false };
    t['Tr'] = { id: OPS.setTextRenderingMode, numArgs: 1, variableArgs: false };
    t['Ts'] = { id: OPS.setTextRise, numArgs: 1, variableArgs: false };
    t['Td'] = { id: OPS.moveText, numArgs: 2, variableArgs: false };
    t['TD'] = { id: OPS.setLeadingMoveText, numArgs: 2, variableArgs: false };
    t['Tm'] = { id: OPS.setTextMatrix, numArgs: 6, variableArgs: false };
    t['T*'] = { id: OPS.nextLine, numArgs: 0, variableArgs: false };
    t['Tj'] = { id: OPS.showText, numArgs: 1, variableArgs: false };
    t['TJ'] = { id: OPS.showSpacedText, numArgs: 1, variableArgs: false };
    t['\''] = { id: OPS.nextLineShowText, numArgs: 1, variableArgs: false };
    t['"'] = { id: OPS.nextLineSetSpacingShowText, numArgs: 3,
               variableArgs: false };
    // Type3 fonts
    t['d0'] = { id: OPS.setCharWidth, numArgs: 2, variableArgs: false };
    t['d1'] = { id: OPS.setCharWidthAndBounds, numArgs: 6,
                variableArgs: false };
    // Color
    t['CS'] = { id: OPS.setStrokeColorSpace, numArgs: 1, variableArgs: false };
    t['cs'] = { id: OPS.setFillColorSpace, numArgs: 1, variableArgs: false };
    t['SC'] = { id: OPS.setStrokeColor, numArgs: 4, variableArgs: true };
    t['SCN'] = { id: OPS.setStrokeColorN, numArgs: 33, variableArgs: true };
    t['sc'] = { id: OPS.setFillColor, numArgs: 4, variableArgs: true };
    t['scn'] = { id: OPS.setFillColorN, numArgs: 33, variableArgs: true };
    t['G'] = { id: OPS.setStrokeGray, numArgs: 1, variableArgs: false };
    t['g'] = { id: OPS.setFillGray, numArgs: 1, variableArgs: false };
    t['RG'] = { id: OPS.setStrokeRGBColor, numArgs: 3, variableArgs: false };
    t['rg'] = { id: OPS.setFillRGBColor, numArgs: 3, variableArgs: false };
    t['K'] = { id: OPS.setStrokeCMYKColor, numArgs: 4, variableArgs: false };
    t['k'] = { id: OPS.setFillCMYKColor, numArgs: 4, variableArgs: false };
    // Shading
    t['sh'] = { id: OPS.shadingFill, numArgs: 1, variableArgs: false };
    // Images
    t['BI'] = { id: OPS.beginInlineImage, numArgs: 0, variableArgs: false };
    t['ID'] = { id: OPS.beginImageData, numArgs: 0, variableArgs: false };
    t['EI'] = { id: OPS.endInlineImage, numArgs: 1, variableArgs: false };
    // XObjects
    t['Do'] = { id: OPS.paintXObject, numArgs: 1, variableArgs: false };
    t['MP'] = { id: OPS.markPoint, numArgs: 1, variableArgs: false };
    t['DP'] = { id: OPS.markPointProps, numArgs: 2, variableArgs: false };
    t['BMC'] = { id: OPS.beginMarkedContent, numArgs: 1, variableArgs: false };
    t['BDC'] = { id: OPS.beginMarkedContentProps, numArgs: 2,
                 variableArgs: false };
    t['EMC'] = { id: OPS.endMarkedContent, numArgs: 0, variableArgs: false };
    // Compatibility
    t['BX'] = { id: OPS.beginCompat, numArgs: 0, variableArgs: false };
    t['EX'] = { id: OPS.endCompat, numArgs: 0, variableArgs: false };
    // (reserved partial commands for the lexer)
    t['BM'] = null;
    t['BD'] = null;
    t['true'] = null;
    t['fa'] = null;
    t['fal'] = null;
    t['fals'] = null;
    t['false'] = null;
    t['nu'] = null;
    t['nul'] = null;
    t['null'] = null;
  });

  /**
   * Streams (operator, arguments) pairs out of a content stream: operands
   * are accumulated until an operator keyword is parsed, the keyword is
   * mapped to its `OPS` id via the table above, and the q / Q / cm
   * operators are mirrored into the supplied state manager.
   */
  function EvaluatorPreprocessor(stream, xref, stateManager) {
    this.opMap = getOPMap();
    // TODO(mduan): pass array of knownCommands rather than this.opMap
    // dictionary
    this.parser = new Parser(new Lexer(stream, this.opMap), false, xref);
    this.stateManager = stateManager;
    // Surplus operands from a previous malformed command, kept so a later
    // command that is short of arguments can reclaim them.
    this.nonProcessedArgs = [];
  }
  EvaluatorPreprocessor.prototype = {
    get savedStatesDepth() {
      return this.stateManager.stateStack.length;
    },
    // |operation| is an object with two fields:
    //
    // - |fn| is an out param.
    //
    // - |args| is an inout param. On entry, it should have one of two values.
    //
    //   - An empty array. This indicates that the caller is providing the
    //     array in which the args will be stored in. The caller should use
    //     this value if it can reuse a single array for each call to read().
    //
    //   - |null|. This indicates that the caller needs this function to create
    //     the array in which any args are stored in. If there are zero args,
    //     this function will leave |operation.args| as |null| (thus avoiding
    //     allocations that would occur if we used an empty array to represent
    //     zero arguments). Otherwise, it will replace |null| with a new array
    //     containing the arguments. The caller should use this value if it
    //     cannot reuse an array for each call to read().
    //
    // These two modes are present because this function is very hot and so
    // avoiding allocations where possible is worthwhile.
    //
    // Returns true when an operation was read and false at end of stream.
    read: function EvaluatorPreprocessor_read(operation) {
      var args = operation.args;
      while (true) {
        var obj = this.parser.getObj();
        if (isCmd(obj)) {
          var cmd = obj.cmd;
          // Check that the command is valid
          var opSpec = this.opMap[cmd];
          if (!opSpec) {
            warn('Unknown command "' + cmd + '"');
            continue;
          }

          var fn = opSpec.id;
          var numArgs = opSpec.numArgs;
          var argsLength = args !== null ? args.length : 0;

          if (!opSpec.variableArgs) {
            // Postscript commands can be nested, e.g. /F2 /GS2 gs 5.711 Tf
            if (argsLength !== numArgs) {
              // Rebalance: stash surplus operands, or reclaim previously
              // stashed ones if this command came up short.
              var nonProcessedArgs = this.nonProcessedArgs;
              while (argsLength > numArgs) {
                nonProcessedArgs.push(args.shift());
                argsLength--;
              }
              while (argsLength < numArgs && nonProcessedArgs.length !== 0) {
                if (!args) {
                  args = [];
                }
                args.unshift(nonProcessedArgs.pop());
                argsLength++;
              }
            }

            if (argsLength < numArgs) {
              // If we receive too few arguments, it's not possible to
              // execute the command, so skip it.
              info('Command ' + fn + ': because expected ' +
                   numArgs + ' args, but received ' + argsLength +
                   ' args; skipping');
              args = null;
              continue;
            }
          } else if (argsLength > numArgs) {
            info('Command ' + fn + ': expected [0,' + numArgs +
                 '] args, but received ' + argsLength + ' args');
          }

          // TODO figure out how to type-check vararg functions
          this.preprocessCommand(fn, args);

          operation.fn = fn;
          operation.args = args;
          return true;
        } else {
          if (isEOF(obj)) {
            return false; // no more commands
          }
          // argument
          if (obj !== null) {
            if (!args) {
              args = [];
            }
            // Dict operands (e.g. for BDC/DP) are expanded to plain objects.
            args.push((obj instanceof Dict ? obj.getAll() : obj));
            assert(args.length <= 33, 'Too many arguments');
          }
        }
      }
    },

    // Mirrors the state-changing operators (q, Q, cm) into the state
    // manager so the evaluator always sees an up-to-date CTM.
    preprocessCommand:
        function EvaluatorPreprocessor_preprocessCommand(fn, args) {
      switch (fn | 0) {
        case OPS.save:
          this.stateManager.save();
          break;
        case OPS.restore:
          this.stateManager.restore();
          break;
        case OPS.transform:
          this.stateManager.transform(args);
          break;
      }
    }
  };
  return EvaluatorPreprocessor;
})();
var QueueOptimizer = (function QueueOptimizerClosure() {
  // Registers `fn` in the state-machine table: pattern[0..n-2] descend into
  // (or create) nested tables, and the pattern's final op maps to the
  // handler function.
  function addState(parentState, pattern, fn) {
    var state = parentState;
    for (var i = 0, ii = pattern.length - 1; i < ii; i++) {
      var item = pattern[i];
      state = (state[item] || (state[item] = []));
    }
    state[pattern[pattern.length - 1]] = fn;
  }

  // Converts leading 1x1 empty image masks into paintSolidColorImageMask
  // ops and returns how many of the `count` quartets were NOT converted.
  // (`i` is readable after the loop thanks to `var` hoisting.)
  function handlePaintSolidColorImageMask(iFirstSave, count, fnArray,
                                          argsArray) {
    // Handles special case of mainly LaTeX documents which use image masks to
    // draw lines with the current fill style.
    // 'count' groups of (save, transform, paintImageMaskXObject, restore)+
    // have been found at iFirstSave.
    var iFirstPIMXO = iFirstSave + 2;
    for (var i = 0; i < count; i++) {
      var arg = argsArray[iFirstPIMXO + 4 * i];
      var imageMask = arg.length === 1 && arg[0];
      if (imageMask && imageMask.width === 1 && imageMask.height === 1 &&
          (!imageMask.data.length ||
           (imageMask.data.length === 1 && imageMask.data[0] === 0))) {
        fnArray[iFirstPIMXO + 4 * i] = OPS.paintSolidColorImageMask;
        continue;
      }
      break;
    }
    return count - i;
  }

  // Root of the pattern table; handlers below are registered against it.
  var InitialState = [];

  // This replaces (save, transform, paintInlineImageXObject, restore)+
  // sequences with one |paintInlineImageXObjectGroup| operation.
  addState(InitialState,
    [OPS.save, OPS.transform, OPS.paintInlineImageXObject, OPS.restore],
    function foundInlineImageGroup(context) {
      var MIN_IMAGES_IN_INLINE_IMAGES_BLOCK = 10;
      var MAX_IMAGES_IN_INLINE_IMAGES_BLOCK = 200;
      var MAX_WIDTH = 1000;
      var IMAGE_PADDING = 1;
      var fnArray = context.fnArray, argsArray = context.argsArray;
      var curr = context.iCurr;
      var iFirstSave = curr - 3;
      var iFirstTransform = curr - 2;
      var iFirstPIIXO = curr - 1;
      // Look for the quartets.
      var i = iFirstSave + 4;
      var ii = fnArray.length;
      while (i + 3 < ii) {
        if (fnArray[i] !== OPS.save ||
            fnArray[i + 1] !== OPS.transform ||
            fnArray[i + 2] !== OPS.paintInlineImageXObject ||
            fnArray[i + 3] !== OPS.restore) {
          break; // ops don't match
        }
        i += 4;
      }
      // At this point, i is the index of the first op past the last valid
      // quartet.
      var count = Math.min((i - iFirstSave) / 4,
                           MAX_IMAGES_IN_INLINE_IMAGES_BLOCK);
      if (count < MIN_IMAGES_IN_INLINE_IMAGES_BLOCK) {
        return i;
      }
      // assuming that heights of those image is too small (~1 pixel)
      // packing as much as possible by lines
      var maxX = 0;
      var map = [], maxLineHeight = 0;
      var currentX = IMAGE_PADDING, currentY = IMAGE_PADDING;
      var q;
      for (q = 0; q < count; q++) {
        var transform = argsArray[iFirstTransform + (q << 2)];
        var img = argsArray[iFirstPIIXO + (q << 2)][0];
        if (currentX + img.width > MAX_WIDTH) {
          // starting new line
          maxX = Math.max(maxX, currentX);
          currentY += maxLineHeight + 2 * IMAGE_PADDING;
          currentX = 0;
          maxLineHeight = 0;
        }
        map.push({
          transform: transform,
          x: currentX, y: currentY,
          w: img.width, h: img.height
        });
        currentX += img.width + 2 * IMAGE_PADDING;
        maxLineHeight = Math.max(maxLineHeight, img.height);
      }
      var imgWidth = Math.max(maxX, currentX) + IMAGE_PADDING;
      var imgHeight = currentY + maxLineHeight + IMAGE_PADDING;
      var imgData = new Uint8Array(imgWidth * imgHeight * 4);
      var imgRowSize = imgWidth << 2;
      for (q = 0; q < count; q++) {
        var data = argsArray[iFirstPIIXO + (q << 2)][0].data;
        // Copy image by lines and extends pixels into padding.
        var rowSize = map[q].w << 2;
        var dataOffset = 0;
        var offset = (map[q].x + map[q].y * imgWidth) << 2;
        // Duplicate the first row into the top padding row.
        imgData.set(data.subarray(0, rowSize), offset - imgRowSize);
        for (var k = 0, kk = map[q].h; k < kk; k++) {
          imgData.set(data.subarray(dataOffset, dataOffset + rowSize), offset);
          dataOffset += rowSize;
          offset += imgRowSize;
        }
        // Duplicate the last row into the bottom padding row.
        imgData.set(data.subarray(dataOffset - rowSize, dataOffset), offset);
        // Extend the left/right edge pixels into the side padding, walking
        // back up one row per iteration.
        // NOTE(review): these edge writes target `data` (the source image)
        // using offsets computed in `imgData` coordinates; presumably
        // `imgData` was intended — confirm against upstream before changing.
        while (offset >= 0) {
          data[offset - 4] = data[offset];
          data[offset - 3] = data[offset + 1];
          data[offset - 2] = data[offset + 2];
          data[offset - 1] = data[offset + 3];
          data[offset + rowSize] = data[offset + rowSize - 4];
          data[offset + rowSize + 1] = data[offset + rowSize - 3];
          data[offset + rowSize + 2] = data[offset + rowSize - 2];
          data[offset + rowSize + 3] = data[offset + rowSize - 1];
          offset -= imgRowSize;
        }
      }
      // Replace queue items.
      fnArray.splice(iFirstSave, count * 4, OPS.paintInlineImageXObjectGroup);
      argsArray.splice(iFirstSave, count * 4,
        [{ width: imgWidth, height: imgHeight, kind: ImageKind.RGBA_32BPP,
           data: imgData }, map]);
      return iFirstSave + 1;
    });

  // This replaces (save, transform, paintImageMaskXObject, restore)+
  // sequences with one |paintImageMaskXObjectGroup| or one
  // |paintImageMaskXObjectRepeat| operation.
  addState(InitialState,
    [OPS.save, OPS.transform, OPS.paintImageMaskXObject, OPS.restore],
    function foundImageMaskGroup(context) {
      var MIN_IMAGES_IN_MASKS_BLOCK = 10;
      var MAX_IMAGES_IN_MASKS_BLOCK = 100;
      var MAX_SAME_IMAGES_IN_MASKS_BLOCK = 1000;
      var fnArray = context.fnArray, argsArray = context.argsArray;
      var curr = context.iCurr;
      var iFirstSave = curr - 3;
      var iFirstTransform = curr - 2;
      var iFirstPIMXO = curr - 1;
      // Look for the quartets.
      var i = iFirstSave + 4;
      var ii = fnArray.length;
      while (i + 3 < ii) {
        if (fnArray[i] !== OPS.save ||
            fnArray[i + 1] !== OPS.transform ||
            fnArray[i + 2] !== OPS.paintImageMaskXObject ||
            fnArray[i + 3] !== OPS.restore) {
          break; // ops don't match
        }
        i += 4;
      }
      // At this point, i is the index of the first op past the last valid
      // quartet.
      var count = (i - iFirstSave) / 4;
      count = handlePaintSolidColorImageMask(iFirstSave, count, fnArray,
                                             argsArray);
      if (count < MIN_IMAGES_IN_MASKS_BLOCK) {
        return i;
      }
      var q;
      var isSameImage = false;
      var iTransform, transformArgs;
      var firstPIMXOArg0 = argsArray[iFirstPIMXO][0];
      // A translation-only transform makes the cheap "repeat" form possible:
      // check whether every quartet repeats the same mask and scale.
      if (argsArray[iFirstTransform][1] === 0 &&
          argsArray[iFirstTransform][2] === 0) {
        isSameImage = true;
        var firstTransformArg0 = argsArray[iFirstTransform][0];
        var firstTransformArg3 = argsArray[iFirstTransform][3];
        iTransform = iFirstTransform + 4;
        var iPIMXO = iFirstPIMXO + 4;
        for (q = 1; q < count; q++, iTransform += 4, iPIMXO += 4) {
          transformArgs = argsArray[iTransform];
          if (argsArray[iPIMXO][0] !== firstPIMXOArg0 ||
              transformArgs[0] !== firstTransformArg0 ||
              transformArgs[1] !== 0 ||
              transformArgs[2] !== 0 ||
              transformArgs[3] !== firstTransformArg3) {
            if (q < MIN_IMAGES_IN_MASKS_BLOCK) {
              isSameImage = false;
            } else {
              count = q;
            }
            break; // different image or transform
          }
        }
      }
      if (isSameImage) {
        count = Math.min(count, MAX_SAME_IMAGES_IN_MASKS_BLOCK);
        // Only the (x, y) offsets vary; collect them for the repeat op.
        var positions = new Float32Array(count * 2);
        iTransform = iFirstTransform;
        for (q = 0; q < count; q++, iTransform += 4) {
          transformArgs = argsArray[iTransform];
          positions[(q << 1)] = transformArgs[4];
          positions[(q << 1) + 1] = transformArgs[5];
        }
        // Replace queue items.
        fnArray.splice(iFirstSave, count * 4, OPS.paintImageMaskXObjectRepeat);
        argsArray.splice(iFirstSave, count * 4,
          [firstPIMXOArg0, firstTransformArg0, firstTransformArg3, positions]);
      } else {
        count = Math.min(count, MAX_IMAGES_IN_MASKS_BLOCK);
        var images = [];
        for (q = 0; q < count; q++) {
          transformArgs = argsArray[iFirstTransform + (q << 2)];
          var maskParams = argsArray[iFirstPIMXO + (q << 2)][0];
          images.push({ data: maskParams.data, width: maskParams.width,
                        height: maskParams.height,
                        transform: transformArgs });
        }
        // Replace queue items.
        fnArray.splice(iFirstSave, count * 4, OPS.paintImageMaskXObjectGroup);
        argsArray.splice(iFirstSave, count * 4, [images]);
      }
      return iFirstSave + 1;
    });

  // This replaces (save, transform, paintImageXObject, restore)+ sequences
  // with one paintImageXObjectRepeat operation, if the |transform| and
  // |paintImageXObjectRepeat| ops are appropriate.
  addState(InitialState,
    [OPS.save, OPS.transform, OPS.paintImageXObject, OPS.restore],
    function (context) {
      var MIN_IMAGES_IN_BLOCK = 3;
      var MAX_IMAGES_IN_BLOCK = 1000;
      var fnArray = context.fnArray, argsArray = context.argsArray;
      var curr = context.iCurr;
      var iFirstSave = curr - 3;
      var iFirstTransform = curr - 2;
      var iFirstPIXO = curr - 1;
      var iFirstRestore = curr;
      if (argsArray[iFirstTransform][1] !== 0 ||
          argsArray[iFirstTransform][2] !== 0) {
        return iFirstRestore + 1; // transform has the wrong form
      }
      // Look for the quartets.
      var firstPIXOArg0 = argsArray[iFirstPIXO][0];
      var firstTransformArg0 = argsArray[iFirstTransform][0];
      var firstTransformArg3 = argsArray[iFirstTransform][3];
      var i = iFirstSave + 4;
      var ii = fnArray.length;
      while (i + 3 < ii) {
        if (fnArray[i] !== OPS.save ||
            fnArray[i + 1] !== OPS.transform ||
            fnArray[i + 2] !== OPS.paintImageXObject ||
            fnArray[i + 3] !== OPS.restore) {
          break; // ops don't match
        }
        if (argsArray[i + 1][0] !== firstTransformArg0 ||
            argsArray[i + 1][1] !== 0 ||
            argsArray[i + 1][2] !== 0 ||
            argsArray[i + 1][3] !== firstTransformArg3) {
          break; // transforms don't match
        }
        if (argsArray[i + 2][0] !== firstPIXOArg0) {
          break; // images don't match
        }
        i += 4;
      }
      // At this point, i is the index of the first op past the last valid
      // quartet.
      var count = Math.min((i - iFirstSave) / 4, MAX_IMAGES_IN_BLOCK);
      if (count < MIN_IMAGES_IN_BLOCK) {
        return i;
      }
      // Extract the (x,y) positions from all of the matching transforms.
      var positions = new Float32Array(count * 2);
      var iTransform = iFirstTransform;
      for (var q = 0; q < count; q++, iTransform += 4) {
        var transformArgs = argsArray[iTransform];
        positions[(q << 1)] = transformArgs[4];
        positions[(q << 1) + 1] = transformArgs[5];
      }
      // Replace queue items.
      var args = [firstPIXOArg0, firstTransformArg0, firstTransformArg3,
                  positions];
      fnArray.splice(iFirstSave, count * 4, OPS.paintImageXObjectRepeat);
      argsArray.splice(iFirstSave, count * 4, args);
      return iFirstSave + 1;
    });

  // This replaces (beginText, setFont, setTextMatrix, showText, endText)+
  // sequences with (beginText, setFont, (setTextMatrix, showText)+, endText)+
  // sequences, if the font for each one is the same.
  addState(InitialState,
    [OPS.beginText, OPS.setFont, OPS.setTextMatrix, OPS.showText, OPS.endText],
    function (context) {
      var MIN_CHARS_IN_BLOCK = 3;
      var MAX_CHARS_IN_BLOCK = 1000;
      var fnArray = context.fnArray, argsArray = context.argsArray;
      var curr = context.iCurr;
      var iFirstBeginText = curr - 4;
      var iFirstSetFont = curr - 3;
      var iFirstSetTextMatrix = curr - 2;
      var iFirstShowText = curr - 1;
      var iFirstEndText = curr;
      // Look for the quintets.
      var firstSetFontArg0 = argsArray[iFirstSetFont][0];
      var firstSetFontArg1 = argsArray[iFirstSetFont][1];
      var i = iFirstBeginText + 5;
      var ii = fnArray.length;
      while (i + 4 < ii) {
        if (fnArray[i] !== OPS.beginText ||
            fnArray[i + 1] !== OPS.setFont ||
            fnArray[i + 2] !== OPS.setTextMatrix ||
            fnArray[i + 3] !== OPS.showText ||
            fnArray[i + 4] !== OPS.endText) {
          break; // ops don't match
        }
        if (argsArray[i + 1][0] !== firstSetFontArg0 ||
            argsArray[i + 1][1] !== firstSetFontArg1) {
          break; // fonts don't match
        }
        i += 5;
      }
      // At this point, i is the index of the first op past the last valid
      // quintet.
      var count = Math.min(((i - iFirstBeginText) / 5), MAX_CHARS_IN_BLOCK);
      if (count < MIN_CHARS_IN_BLOCK) {
        return i;
      }
      // If the preceding quintet is (<something>, setFont, setTextMatrix,
      // showText, endText), include that as well. (E.g. <something> might be
      // |dependency|.)
      var iFirst = iFirstBeginText;
      if (iFirstBeginText >= 4 &&
          fnArray[iFirstBeginText - 4] === fnArray[iFirstSetFont] &&
          fnArray[iFirstBeginText - 3] === fnArray[iFirstSetTextMatrix] &&
          fnArray[iFirstBeginText - 2] === fnArray[iFirstShowText] &&
          fnArray[iFirstBeginText - 1] === fnArray[iFirstEndText] &&
          argsArray[iFirstBeginText - 4][0] === firstSetFontArg0 &&
          argsArray[iFirstBeginText - 4][1] === firstSetFontArg1) {
        count++;
        iFirst -= 5;
      }
      // Remove (endText, beginText, setFont) trios.
      var iEndText = iFirst + 4;
      for (var q = 1; q < count; q++) {
        fnArray.splice(iEndText, 3);
        argsArray.splice(iEndText, 3);
        iEndText += 2;
      }
      return iEndText + 1;
    });

  function QueueOptimizer() {}

  QueueOptimizer.prototype = {
    // Runs the peephole state machine over the queue: scans the ops, and
    // when a registered pattern's final op is reached, invokes its handler,
    // which may splice the arrays in place and returns the index at which
    // scanning should resume.
    optimize: function QueueOptimizer_optimize(queue) {
      var fnArray = queue.fnArray, argsArray = queue.argsArray;
      var context = {
        iCurr: 0,
        fnArray: fnArray,
        argsArray: argsArray
      };
      var state;
      var i = 0, ii = fnArray.length;
      while (i < ii) {
        state = (state || InitialState)[fnArray[i]];
        if (typeof state === 'function') { // we found some handler
          context.iCurr = i;
          // state() returns the index of the first non-matching op (if we
          // didn't match) or the first op past the modified ops (if we did
          // match and replace).
          i = state(context);
          state = undefined; // reset the state machine
          ii = context.fnArray.length;
        } else {
          i++;
        }
      }
    }
  };
  return QueueOptimizer;
})();
exports.OperatorList = OperatorList;
exports.PartialEvaluator = PartialEvaluator;
}));
| {
"content_hash": "f089fb7453d16d1b73cdd80545c1a281",
"timestamp": "",
"source": "github",
"line_count": 2976,
"max_line_length": 80,
"avg_line_length": 37.53595430107527,
"alnum_prop": 0.5549338895503415,
"repo_name": "thepulkitagarwal/pdf.js",
"id": "2fa42d7b6474407b28217638544cbc89732c95f8",
"size": "112305",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/core/evaluator.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "61734"
},
{
"name": "HTML",
"bytes": "55526"
},
{
"name": "JavaScript",
"bytes": "2540113"
}
],
"symlink_target": ""
} |
<!--suppress XmlUnboundNsPrefix -->
<idea-plugin version="2">
<id>com.basisjs</id>
<name>Basis.js framework plugin</name>
<version>0.1.5</version>
<vendor email="s.melukov@gmail.com" url="http://basisjs.com">Basis.js</vendor>
<description><![CDATA[
<a href="http://basisjs.com">Basis.js</a> integration plugin for JetBrains IntelliJ platform.<br/>
        <b>Features (there are not many features <i>for now</i>):</b><br/>
<i>- remote inspector integration:</i><br/>
        Allows you to inspect basis.js apps from your favorite IntelliJ-based IDE<br/>
<a href="https://youtu.be/JBPkF5beq0g">Video demo</a><br/>
<br/>
Feel free to contribute!
]]></description>
<!-- please see http://www.jetbrains.org/intellij/sdk/docs/basics/getting_started/build_number_ranges.html for description -->
<idea-version since-build="162.0"/>
<!-- please see http://www.jetbrains.org/intellij/sdk/docs/basics/getting_started/plugin_compatibility.html
on how to target different products -->
<depends>com.intellij.modules.lang</depends>
<extensions defaultExtensionNs="com.intellij">
<!-- Add your extensions here -->
<applicationService serviceImplementation="com.basisjs.services.Storage"/>
<applicationConfigurable instance="com.basisjs.ui.windows.Configuration" groupId="language"/>
<fileTypeFactory implementation="com.basisjs.fileTypes.L10nFileTypeFactory"/>
<fileTypeFactory implementation="com.basisjs.fileTypes.TmplFileTypeFactory"/>
<fileTemplateGroup implementation="com.basisjs.fileTypes.FileTemplateGroup"/>
</extensions>
<project-components>
<component>
<implementation-class>com.basisjs.components.RemoteInspector</implementation-class>
</component>
</project-components>
<actions>
<!-- Add your actions here -->
<group id="BasisJS" text="Basis.js" description="Basis.js framework">
<add-to-group group-id="MainMenu" relative-to-action="WindowMenu" anchor="before"/>
<action class="com.basisjs.actions.BasisJSShowInspectorAction" id="BasisJS.ShowInspector"
text="Show Inspector"/>
</group>
<group id="BasisJS.Create" text="_Create" description="Create new Basis.js file" popup="true">
<add-to-group group-id="BasisJS" anchor="first"/>
<action id="BasisJS.Create.Template" class="com.basisjs.actions.BasisJSNewTmplAction" text="_Template"
description="Create new Basis.js template" icon="AllIcons.FileTypes.Html"/>
<action id="BasisJS.Create.L10n" class="com.basisjs.actions.BasisJSNewL10nAction" text="_L10n"
description="Create new Basis.js localization dictionary" icon="AllIcons.FileTypes.Json"/>
</group>
<group id="BasisJS.New" text="_Basis.js" description="Create new Basis.js file" popup="true"
icon="/icons/basisjs.png">
<add-to-group group-id="NewGroup" anchor="last"/>
<action id="BasisJS.New.Template" class="com.basisjs.actions.BasisJSNewTmplAction" text="_Template"
description="Create new Basis.js template" icon="AllIcons.FileTypes.Html"/>
<action id="BasisJS.New.L10n" class="com.basisjs.actions.BasisJSNewL10nAction" text="_L10n"
description="Create new Basis.js localization dictionary" icon="AllIcons.FileTypes.Json"/>
</group>
</actions>
</idea-plugin> | {
"content_hash": "3e1a3fb8373e7dc6f1eaf3c13f56c0df",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 130,
"avg_line_length": 53.09090909090909,
"alnum_prop": 0.6686643835616438,
"repo_name": "smelukov/intellij-basisjs-plugin",
"id": "f3f3d61ae8e85e814a6cd765cba593ef89bae244",
"size": "3504",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "resources/META-INF/plugin.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "18417"
}
],
"symlink_target": ""
} |
package config
// ScrapeConfig describes a single BitTorrent tracker to scrape: the
// tracker's scrape URL, a human-readable name, and the polling interval
// in seconds (per its `interval` JSON key).
type ScrapeConfig struct {
	URL             string `json:"scrape_url"`
	Name            string `json:"name"`
	IntervalSeconds int64  `json:"interval"`
}
// ScraperConfig configures the scraper service: the address to bind to,
// the worker-pool size, a default interval in seconds (presumably applied
// when a tracker omits its own `interval` — confirm in the scraper code),
// and the list of trackers to scrape.
type ScraperConfig struct {
	Addr            string         `json:"bind"`
	NumWorkers      int            `json:"workers"`
	IntervalSeconds int64          `json:"default_interval"`
	Trackers        []ScrapeConfig `json:"trackers"`
}
// DefaultScraperConfig is the default config for bittorrent scraping:
// bind to :9999, four workers, an hourly default interval, and
// coppersurfer.tk as the only preconfigured tracker.
var DefaultScraperConfig = ScraperConfig{
	Addr: ":9999",
	// TODO: derive the pool size from the host (e.g. runtime.NumCPU)?
	NumWorkers: 4,
	// every hour
	IntervalSeconds: 60 * 60,
	Trackers: []ScrapeConfig{
		{
			URL:  "udp://tracker.coppersurfer.tk:6969/",
			Name: "coppersurfer.tk",
		},
	},
}
| {
"content_hash": "4a68ff965470b7be9c92293e644efb9f",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 69,
"avg_line_length": 26.548387096774192,
"alnum_prop": 0.6549210206561361,
"repo_name": "steindornatorinn/nyaa",
"id": "38b28b5c294c03492cf715733fef1a78c99f42b9",
"size": "823",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "config/scrape.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "22666"
},
{
"name": "Go",
"bytes": "327208"
},
{
"name": "HTML",
"bytes": "78127"
},
{
"name": "JavaScript",
"bytes": "7291"
},
{
"name": "Python",
"bytes": "1827"
},
{
"name": "Shell",
"bytes": "2723"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "77230adc8f4f24e3f81dba0711ff6a0c",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "5188d2432bd405db68731e5cbfe0ae293dec1079",
"size": "172",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Cirsium/Cirsium domonii/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package algolia.dsl
import algolia.definitions._
import algolia.objects.Rule
import algolia.responses.{SearchRuleResult, Task}
import algolia.{AlgoliaClient, AlgoliaClientException, Executable}
import org.json4s.Formats
import scala.concurrent.{ExecutionContext, Future}
/**
  * DSL executables for the query-rules endpoints (get, delete, clear,
  * save, batch and search). Each implicit object wires one request
  * definition to the HTTP call that executes it against the API client.
  */
trait RulesDsl {

  implicit val formats: Formats

  /** Fetches a single rule. */
  implicit object GetRuleDefinitionExecutable
      extends Executable[GetRuleDefinition, Rule] {
    override def apply(client: AlgoliaClient, query: GetRuleDefinition)(
        implicit executor: ExecutionContext
    ): Future[Rule] = {
      client.request[Rule](query.build())
    }
  }

  /** Deletes a single rule. */
  implicit object DeleteRuleDefinitionExecutable
      extends Executable[DeleteRuleDefinition, Task] {
    override def apply(client: AlgoliaClient, query: DeleteRuleDefinition)(
        implicit executor: ExecutionContext
    ): Future[Task] = {
      client.request[Task](query.build())
    }
  }

  /** Removes every rule of the target index. */
  implicit object ClearRulesDefinitionExecutable
      extends Executable[ClearRulesDefinition, Task] {
    override def apply(client: AlgoliaClient, query: ClearRulesDefinition)(
        implicit executor: ExecutionContext
    ): Future[Task] = {
      client.request[Task](query.build())
    }
  }

  /**
    * Saves a single rule. Fails fast with a failed Future when the rule
    * has no objectID, since the API cannot address a rule without one.
    */
  implicit object SaveRuleDefinitionExecutable
      extends Executable[SaveRuleDefinition, Task] {
    override def apply(client: AlgoliaClient, query: SaveRuleDefinition)(
        implicit executor: ExecutionContext
    ): Future[Task] = {
      // Use if/else rather than an early `return`: `return` inside a
      // Scala method body is non-idiomatic and error-prone in closures.
      if (query.rule.objectID.isEmpty) {
        Future.failed(
          new AlgoliaClientException(s"rule's 'objectID' cannot be empty")
        )
      } else {
        client.request[Task](query.build())
      }
    }
  }

  /** Applies a batch of rule operations in one call. */
  implicit object BatchRulesDefinitionExecutable
      extends Executable[BatchRulesDefinition, Task] {
    override def apply(client: AlgoliaClient, query: BatchRulesDefinition)(
        implicit executor: ExecutionContext
    ): Future[Task] = {
      client.request[Task](query.build())
    }
  }

  /** Searches among the rules of an index. */
  implicit object SearchRulesDefinitionExecutable
      extends Executable[SearchRulesDefinition, SearchRuleResult] {
    override def apply(client: AlgoliaClient, query: SearchRulesDefinition)(
        implicit executor: ExecutionContext
    ): Future[SearchRuleResult] = {
      client.request[SearchRuleResult](query.build())
    }
  }
}
| {
"content_hash": "a403d98c7cf05d01632467a168618bce",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 76,
"avg_line_length": 26.53409090909091,
"alnum_prop": 0.7160599571734475,
"repo_name": "algolia/algoliasearch-client-scala",
"id": "75c56adb19551c03a8df0da04b87d95a66980f3d",
"size": "3501",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/scala/algolia/dsl/RulesDsl.scala",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "321"
},
{
"name": "Scala",
"bytes": "634984"
},
{
"name": "Shell",
"bytes": "291"
}
],
"symlink_target": ""
} |
package com.perunlabs.testinjector.bind;
import java.lang.reflect.Field;
import com.google.inject.Binder;
import com.google.inject.Key;
/**
 * A {@link Binding} that resolves its key to one fixed, pre-created value.
 *
 * @param <T> type of the bound value
 */
public class ToInstanceBinding<T> extends Binding<T> {
  /** The concrete value served for every injection of this binding's key. */
  private final T boundInstance;

  /**
   * Creates a binding from {@code key} to the given instance.
   *
   * @param key the key being bound
   * @param instance the value to serve for that key
   * @param field the field this binding was derived from
   */
  public ToInstanceBinding(Key<T> key, T instance, Field field) {
    super(key, field);
    this.boundInstance = instance;
  }

  /** Registers the key-to-instance mapping on the given binder. */
  @Override
  public void install(Binder binder) {
    binder.bind(key()).toInstance(boundInstance);
  }
}
| {
"content_hash": "b9907bd97f57b6fb1d028de54603248f",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 65,
"avg_line_length": 22.45,
"alnum_prop": 0.7282850779510023,
"repo_name": "mikosik/test-injector",
"id": "6f16db03353812bf661db932b86e4f5156b06dc5",
"size": "449",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/perunlabs/testinjector/bind/ToInstanceBinding.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "42387"
},
{
"name": "Starlark",
"bytes": "1141"
}
],
"symlink_target": ""
} |
package it.quartara.boser.worker;
import java.util.Date;
import javax.ejb.ActivationConfigProperty;
import javax.ejb.MessageDriven;
import javax.ejb.TransactionManagement;
import javax.ejb.TransactionManagementType;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import it.quartara.boser.model.ExecutionState;
import it.quartara.boser.model.SearchRequest;
/**
 * Handles messages that ended up in the dead letter queue.
 * Specifically, it marks the corresponding asynchronous search request
 * as ERROR so it is not left dangling in a pending state.
 *
 * @author webny
 */
@MessageDriven(name = "DLQWorker", activationConfig = {
		@ActivationConfigProperty(propertyName = "destinationLookup", propertyValue = "queue/DLQ"),
		@ActivationConfigProperty(propertyName = "destinationType", propertyValue = "javax.jms.Queue"),
		@ActivationConfigProperty(propertyName = "acknowledgeMode", propertyValue = "Auto-acknowledge") })
@TransactionManagement(TransactionManagementType.CONTAINER)
public class DLQWorker implements MessageListener {

	private static final Logger log = LoggerFactory.getLogger(DLQWorker.class);

	@PersistenceContext(name = "BoserPU")
	private EntityManager em;

	/**
	 * Reads the {@code searchRequestId} entry from the dead-lettered map
	 * message and, when the matching {@link SearchRequest} exists, flags
	 * it as failed.
	 */
	@Override
	public void onMessage(Message message) {
		log.debug("message {}", message.toString());
		if (!(message instanceof MapMessage)) {
			// Only map messages carry a request id; anything else is ignored.
			return;
		}
		try {
			Object value = ((MapMessage) message).getObject("searchRequestId");
			// instanceof is false for null, so this also covers a missing entry.
			if (!(value instanceof Long)) {
				return;
			}
			Long searchRequestId = (Long) value;
			SearchRequest request = em.find(SearchRequest.class, searchRequestId);
			if (request == null) {
				log.warn("request null per searchRequestId {}", searchRequestId);
				return;
			}
			request.setLastUpdate(new Date());
			request.setState(ExecutionState.ERROR);
			em.merge(request);
		} catch (JMSException e) {
			log.error("errore di gestione dead letter queue", e);
		}
	}
}
| {
"content_hash": "ec81757c02be0e761851dc35b8b1d1ef",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 103,
"avg_line_length": 32.765625,
"alnum_prop": 0.748688602765856,
"repo_name": "vernyquartara/boser",
"id": "e48f110e54a8b1ce2420315040754c6d55c0c589",
"size": "2097",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "boser-crawler-web/src/main/java/it/quartara/boser/worker/DLQWorker.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "36"
},
{
"name": "Java",
"bytes": "106101"
},
{
"name": "JavaScript",
"bytes": "13199"
}
],
"symlink_target": ""
} |
package live
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// DescribeLivePullStreamConfig invokes the live.DescribeLivePullStreamConfig API synchronously.
// It blocks until the call completes and returns the parsed response or an error.
func (client *Client) DescribeLivePullStreamConfig(request *DescribeLivePullStreamConfigRequest) (response *DescribeLivePullStreamConfigResponse, err error) {
	response = CreateDescribeLivePullStreamConfigResponse()
	err = client.DoAction(request, response)
	return
}
// DescribeLivePullStreamConfigWithChan invokes the live.DescribeLivePullStreamConfig API asynchronously.
// It returns a response channel and an error channel; exactly one of them
// receives a value, after which both channels are closed.
func (client *Client) DescribeLivePullStreamConfigWithChan(request *DescribeLivePullStreamConfigRequest) (<-chan *DescribeLivePullStreamConfigResponse, <-chan error) {
	responseChan := make(chan *DescribeLivePullStreamConfigResponse, 1)
	errChan := make(chan error, 1)
	err := client.AddAsyncTask(func() {
		defer close(responseChan)
		defer close(errChan)
		response, err := client.DescribeLivePullStreamConfig(request)
		if err != nil {
			errChan <- err
		} else {
			responseChan <- response
		}
	})
	if err != nil {
		// The task could not be queued at all: report the error immediately
		// and close both channels so receivers never block.
		errChan <- err
		close(responseChan)
		close(errChan)
	}
	return responseChan, errChan
}
// DescribeLivePullStreamConfigWithCallback invokes the live.DescribeLivePullStreamConfig API asynchronously.
// The callback receives either the response or an error. The returned channel
// yields 1 once the API call and callback have run, or 0 if the task could not
// be scheduled (in which case the callback is invoked with the scheduling error).
func (client *Client) DescribeLivePullStreamConfigWithCallback(request *DescribeLivePullStreamConfigRequest, callback func(response *DescribeLivePullStreamConfigResponse, err error)) <-chan int {
	result := make(chan int, 1)
	err := client.AddAsyncTask(func() {
		var response *DescribeLivePullStreamConfigResponse
		var err error
		defer close(result)
		response, err = client.DescribeLivePullStreamConfig(request)
		callback(response, err)
		result <- 1
	})
	if err != nil {
		defer close(result)
		callback(nil, err)
		result <- 0
	}
	return result
}
// DescribeLivePullStreamConfigRequest is the request struct for api DescribeLivePullStreamConfig.
// All fields are sent as query parameters (see the position tags).
type DescribeLivePullStreamConfigRequest struct {
	*requests.RpcRequest
	LiveapiRequestFrom string           `position:"Query" name:"LiveapiRequestFrom"`
	DomainName         string           `position:"Query" name:"DomainName"`
	OwnerId            requests.Integer `position:"Query" name:"OwnerId"`
}
// DescribeLivePullStreamConfigResponse is the response struct for api DescribeLivePullStreamConfig.
type DescribeLivePullStreamConfigResponse struct {
	*responses.BaseResponse
	RequestId         string                                          `json:"RequestId" xml:"RequestId"` // server-assigned id of this request
	LiveAppRecordList LiveAppRecordListInDescribeLivePullStreamConfig `json:"LiveAppRecordList" xml:"LiveAppRecordList"`
}
// CreateDescribeLivePullStreamConfigRequest creates a request to invoke DescribeLivePullStreamConfig API.
func CreateDescribeLivePullStreamConfigRequest() (request *DescribeLivePullStreamConfigRequest) {
	request = &DescribeLivePullStreamConfigRequest{
		RpcRequest: &requests.RpcRequest{},
	}
	// Product "live", API version 2016-11-01, sent as POST in openAPI style.
	request.InitWithApiInfo("live", "2016-11-01", "DescribeLivePullStreamConfig", "live", "openAPI")
	request.Method = requests.POST
	return
}
// CreateDescribeLivePullStreamConfigResponse creates a response to parse from DescribeLivePullStreamConfig response.
func CreateDescribeLivePullStreamConfigResponse() (response *DescribeLivePullStreamConfigResponse) {
	response = &DescribeLivePullStreamConfigResponse{
		BaseResponse: &responses.BaseResponse{},
	}
	return
}
| {
"content_hash": "410d21858773dc3411c90fc4a5d5d2c8",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 195,
"avg_line_length": 40.48039215686274,
"alnum_prop": 0.7849358198110923,
"repo_name": "aliyun/alibaba-cloud-sdk-go",
"id": "4444a20d8cc52fbe9bc1e269f605618fafc90f29",
"size": "4129",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "services/live/describe_live_pull_stream_config.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "734307"
},
{
"name": "Makefile",
"bytes": "183"
}
],
"symlink_target": ""
} |
<?php
namespace Welldom;
/**
 * Minimal XML (de)serializer for nested arrays.
 *
 * @author Jérôme Tamarelle <jerome@tamarelle.net>
 */
class XmlSerializer
{
    /**
     * Serialize an array into an XML fragment.
     *
     * Each key becomes an element name wrapping its value; nested arrays
     * are serialized recursively.
     *
     * NOTE(review): values are not XML-escaped — presumably callers only
     * pass safe content; confirm before using with arbitrary input.
     *
     * @param array $array
     * @return string
     */
    public static function arrayToXml(array $array)
    {
        $fragments = array();
        foreach ($array as $tag => $value) {
            $inner = is_array($value) ? self::arrayToXml($value) : $value;
            $fragments[] = '<' . $tag . '>' . $inner . '</' . $tag . '>';
        }

        return implode('', $fragments);
    }

    /**
     * Unserialize an XML fragment into an array.
     *
     * Attributes are not deserialized. Input that does not start with a
     * well-formed element is returned unchanged (leaf value).
     *
     * NOTE(review): sibling elements sharing the same tag name overwrite
     * each other in the resulting array.
     *
     * @param string $xml
     * @return array|string
     */
    public static function xmlToArray($xml)
    {
        $pattern = '/^<(.*?)>(.*?)<\/\1>(.*)/';
        if (!preg_match($pattern, $xml)) {
            // Leaf value: no leading element, return the text as-is.
            return $xml;
        }

        $result = array();
        while ($xml && preg_match($pattern, $xml, $m)) {
            $result[$m[1]] = self::xmlToArray($m[2]);
            $xml = $m[3];
        }

        return $result;
    }
}
| {
"content_hash": "87e56d2fabe9e475f36ce9bc60aa9bc9",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 68,
"avg_line_length": 20.29824561403509,
"alnum_prop": 0.4675885911840968,
"repo_name": "LExpress/Welldom",
"id": "999ab2dd168b5f2f246b73c87ff2851b579441fc",
"size": "1374",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Welldom/XmlSerializer.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "97378"
},
{
"name": "XSLT",
"bytes": "2144"
}
],
"symlink_target": ""
} |
# Singleton utility that tracks whether the Android BOOT_COMPLETED broadcast
# has been received and notifies registered IBootStatusObserver instances
# when boot finishes. CaBootStatus$1 / CaBootStatus$2 are the anonymous
# BroadcastReceiver and Handler inner classes (see MemberClasses below).
.class public Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;
.super Ljava/lang/Object;
.source "CaBootStatus.java"

# interfaces
.implements Lcom/samsung/android/contextaware/utilbundle/IUtilManager;

# annotations
.annotation system Ldalvik/annotation/MemberClasses;
    value = {
        Lcom/samsung/android/contextaware/utilbundle/CaBootStatus$1;,
        Lcom/samsung/android/contextaware/utilbundle/CaBootStatus$2;
    }
.end annotation

# static fields
# Numeric codes; presumably message/log ids used by the inner Handler — TODO confirm in CaBootStatus$2.
.field private static final BOOT_COMPLETED:I = 0x1003

.field private static final LOG_CONTEXT_NULL:I = 0x1001

.field private static final LOG_INTENT_NULL:I = 0x1002

# Lazily-created singleton; volatile for the double-checked locking in getInstance().
.field private static volatile instance:Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;

# instance fields
# True once the BOOT_COMPLETED broadcast has been observed (set via -set0 from an inner class).
.field private mBootComplete:Z

.field private mContext:Landroid/content/Context;

.field private final mHandler:Landroid/os/Handler;

# Observers to notify on boot completion; CopyOnWriteArrayList makes
# concurrent registration and iteration safe.
.field private final mListeners:Ljava/util/concurrent/CopyOnWriteArrayList;
    .annotation system Ldalvik/annotation/Signature;
        value = {
            "Ljava/util/concurrent/CopyOnWriteArrayList",
            "<",
            "Lcom/samsung/android/contextaware/utilbundle/IBootStatusObserver;",
            ">;"
        }
    .end annotation
.end field

.field private final mReceiver:Landroid/content/BroadcastReceiver;

# direct methods
# Compiler-generated synthetic accessors so the inner classes can reach
# the private fields/methods of the outer class.
.method static synthetic -get0(Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;)Landroid/content/Context;
    .locals 1

    iget-object v0, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mContext:Landroid/content/Context;

    return-object v0
.end method

.method static synthetic -get1(Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;)Landroid/os/Handler;
    .locals 1

    iget-object v0, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mHandler:Landroid/os/Handler;

    return-object v0
.end method

.method static synthetic -get2(Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;)Landroid/content/BroadcastReceiver;
    .locals 1

    iget-object v0, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mReceiver:Landroid/content/BroadcastReceiver;

    return-object v0
.end method

.method static synthetic -set0(Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;Z)Z
    .locals 0

    iput-boolean p1, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mBootComplete:Z

    return p1
.end method

.method static synthetic -wrap0(Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;)V
    .locals 0

    invoke-direct {p0}, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->notifyObservers()V

    return-void
.end method

# Initializes mBootComplete=false, the observer list, and the inner-class
# receiver ($1) and handler ($2) instances.
.method public constructor <init>()V
    .locals 1

    invoke-direct {p0}, Ljava/lang/Object;-><init>()V

    const/4 v0, 0x0

    iput-boolean v0, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mBootComplete:Z

    new-instance v0, Ljava/util/concurrent/CopyOnWriteArrayList;

    invoke-direct {v0}, Ljava/util/concurrent/CopyOnWriteArrayList;-><init>()V

    iput-object v0, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mListeners:Ljava/util/concurrent/CopyOnWriteArrayList;

    new-instance v0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus$1;

    invoke-direct {v0, p0}, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus$1;-><init>(Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;)V

    iput-object v0, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mReceiver:Landroid/content/BroadcastReceiver;

    new-instance v0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus$2;

    invoke-direct {v0, p0}, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus$2;-><init>(Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;)V

    iput-object v0, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mHandler:Landroid/os/Handler;

    return-void
.end method

# Registers mReceiver for the BOOT_COMPLETED broadcast, but only when a
# Context has already been supplied (see initializeManager).
.method private checkBootComplete()V
    .locals 3

    new-instance v0, Landroid/content/IntentFilter;

    invoke-direct {v0}, Landroid/content/IntentFilter;-><init>()V

    const-string/jumbo v1, "android.intent.action.BOOT_COMPLETED"

    invoke-virtual {v0, v1}, Landroid/content/IntentFilter;->addAction(Ljava/lang/String;)V

    iget-object v1, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mContext:Landroid/content/Context;

    if-eqz v1, :cond_0

    iget-object v1, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mContext:Landroid/content/Context;

    iget-object v2, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mReceiver:Landroid/content/BroadcastReceiver;

    invoke-virtual {v1, v2, v0}, Landroid/content/Context;->registerReceiver(Landroid/content/BroadcastReceiver;Landroid/content/IntentFilter;)Landroid/content/Intent;

    :cond_0
    return-void
.end method

# Classic double-checked locking singleton accessor: fast unsynchronized
# null check, then a synchronized (monitor-enter) re-check before creating
# the instance.
.method public static getInstance()Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;
    .locals 2

    sget-object v0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->instance:Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;

    if-nez v0, :cond_1

    const-class v1, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;

    monitor-enter v1

    :try_start_0
    sget-object v0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->instance:Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;

    if-nez v0, :cond_0

    new-instance v0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;

    invoke-direct {v0}, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;-><init>()V

    sput-object v0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->instance:Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;
    :try_end_0
    .catchall {:try_start_0 .. :try_end_0} :catchall_0

    :cond_0
    monitor-exit v1

    :cond_1
    sget-object v0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->instance:Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;

    return-object v0

    # Release the monitor and rethrow if construction throws.
    :catchall_0
    move-exception v0

    monitor-exit v1

    throw v0
.end method

# Invokes bootCompleted() on every registered (non-null) observer.
.method private notifyObservers()V
    .locals 3

    iget-object v2, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mListeners:Ljava/util/concurrent/CopyOnWriteArrayList;

    invoke-virtual {v2}, Ljava/util/concurrent/CopyOnWriteArrayList;->iterator()Ljava/util/Iterator;

    move-result-object v0

    :cond_0
    :goto_0
    invoke-interface {v0}, Ljava/util/Iterator;->hasNext()Z

    move-result v2

    if-eqz v2, :cond_1

    invoke-interface {v0}, Ljava/util/Iterator;->next()Ljava/lang/Object;

    move-result-object v1

    check-cast v1, Lcom/samsung/android/contextaware/utilbundle/IBootStatusObserver;

    if-eqz v1, :cond_0

    invoke-interface {v1}, Lcom/samsung/android/contextaware/utilbundle/IBootStatusObserver;->bootCompleted()V

    goto :goto_0

    :cond_1
    return-void
.end method


# virtual methods
# IUtilManager: stores the Context and registers the boot receiver.
.method public initializeManager(Landroid/content/Context;)V
    .locals 0

    iput-object p1, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mContext:Landroid/content/Context;

    invoke-direct {p0}, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->checkBootComplete()V

    return-void
.end method

# Returns whether BOOT_COMPLETED has already been observed.
.method public isBootComplete()Z
    .locals 1

    iget-boolean v0, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mBootComplete:Z

    return v0
.end method

# Adds an observer unless it is already registered.
.method public final registerObserver(Lcom/samsung/android/contextaware/utilbundle/IBootStatusObserver;)V
    .locals 1

    iget-object v0, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mListeners:Ljava/util/concurrent/CopyOnWriteArrayList;

    invoke-virtual {v0, p1}, Ljava/util/concurrent/CopyOnWriteArrayList;->contains(Ljava/lang/Object;)Z

    move-result v0

    if-nez v0, :cond_0

    iget-object v0, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mListeners:Ljava/util/concurrent/CopyOnWriteArrayList;

    invoke-virtual {v0, p1}, Ljava/util/concurrent/CopyOnWriteArrayList;->add(Ljava/lang/Object;)Z

    :cond_0
    return-void
.end method

# IUtilManager: no teardown needed.
# NOTE(review): the BOOT_COMPLETED receiver is never unregistered here — confirm intentional.
.method public terminateManager()V
    .locals 0

    return-void
.end method

# Removes an observer if it is currently registered.
.method public final unregisterObserver(Lcom/samsung/android/contextaware/utilbundle/IBootStatusObserver;)V
    .locals 1

    iget-object v0, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mListeners:Ljava/util/concurrent/CopyOnWriteArrayList;

    invoke-virtual {v0, p1}, Ljava/util/concurrent/CopyOnWriteArrayList;->contains(Ljava/lang/Object;)Z

    move-result v0

    if-eqz v0, :cond_0

    iget-object v0, p0, Lcom/samsung/android/contextaware/utilbundle/CaBootStatus;->mListeners:Ljava/util/concurrent/CopyOnWriteArrayList;

    invoke-virtual {v0, p1}, Ljava/util/concurrent/CopyOnWriteArrayList;->remove(Ljava/lang/Object;)Z

    :cond_0
    return-void
.end method
| {
"content_hash": "32078d00eeacdacc53660aea90f17602",
"timestamp": "",
"source": "github",
"line_count": 280,
"max_line_length": 167,
"avg_line_length": 32.121428571428574,
"alnum_prop": 0.7630642650655993,
"repo_name": "BatMan-Rom/ModdedFiles",
"id": "ffe0c99832a513af9cf934cfe3f0c6711e014a25",
"size": "8994",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "framework.jar.out/smali_classes3/com/samsung/android/contextaware/utilbundle/CaBootStatus.smali",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "GLSL",
"bytes": "15069"
},
{
"name": "HTML",
"bytes": "139176"
},
{
"name": "Smali",
"bytes": "541934400"
}
],
"symlink_target": ""
} |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/container/v1/cluster_service.proto
package com.google.container.v1;
/**
 * Read-only accessor interface for the protobuf message
 * {@code google.container.v1.AutoUpgradeOptions}, implemented by both the
 * message class and its builder.
 *
 * <p>NOTE: this file is generated by protoc — do not hand-edit; changes will
 * be lost on regeneration.
 */
public interface AutoUpgradeOptionsOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.container.v1.AutoUpgradeOptions)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * [Output only] This field is set when upgrades are about to commence
   * with the approximate start time for the upgrades, in
   * [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format.
   * </pre>
   *
   * <code>optional string auto_upgrade_start_time = 1;</code>
   */
  java.lang.String getAutoUpgradeStartTime();
  /**
   * <pre>
   * [Output only] This field is set when upgrades are about to commence
   * with the approximate start time for the upgrades, in
   * [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format.
   * </pre>
   *
   * <code>optional string auto_upgrade_start_time = 1;</code>
   */
  com.google.protobuf.ByteString
      getAutoUpgradeStartTimeBytes();

  /**
   * <pre>
   * [Output only] This field is set when upgrades are about to commence
   * with the description of the upgrade.
   * </pre>
   *
   * <code>optional string description = 2;</code>
   */
  java.lang.String getDescription();
  /**
   * <pre>
   * [Output only] This field is set when upgrades are about to commence
   * with the description of the upgrade.
   * </pre>
   *
   * <code>optional string description = 2;</code>
   */
  com.google.protobuf.ByteString
      getDescriptionBytes();
}
| {
"content_hash": "66a1e7ce2f76fe1144e8d8784ad6ebc1",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 89,
"avg_line_length": 30.705882352941178,
"alnum_prop": 0.6787994891443168,
"repo_name": "speedycontrol/googleapis",
"id": "123f435c127aa6d40fe46baeaf94ee7d594c5ce6",
"size": "1566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "output/com/google/container/v1/AutoUpgradeOptionsOrBuilder.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1569787"
},
{
"name": "Makefile",
"bytes": "1301"
},
{
"name": "Protocol Buffer",
"bytes": "1085800"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Master Liquibase changelog for the secondary (PG) store schema.
  Changesets are applied in the order listed here, so new migrations are
  appended at the end; per Liquibase convention, already-applied changesets
  must never be edited or reordered.
-->
<databaseChangeLog
        xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
        xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-2.0.xsd">
    <include file="com/socrata/pg/store/schema/20140207-secondary-schema-bootstrap.xml"/>
    <include file="com/socrata/pg/store/schema/20140307-add-column-last-modified.xml"/>
    <include file="com/socrata/pg/store/schema/20140703-create-postgis-extension.xml"/>
    <include file="com/socrata/pg/store/schema/20140629-create-rollup-map.xml"/>
    <include file="com/socrata/pg/store/schema/20150501-add-missing-timezone.xml"/>
    <include file="com/socrata/pg/store/schema/20160107-create-computation-strategy-map.xml"/>
    <include file="com/socrata/pg/store/schema/20160111-add-field-name-columns.xml"/>
    <include file="com/socrata/pg/store/schema/20160420-add-copy-map-table-modifiers.xml"/>
    <include file="com/socrata/pg/store/schema/20161231-add-disabled.xml"/>
    <include file="com/socrata/pg/store/schema/20170402-add-column-resource-name.xml"/>
    <include file="com/socrata/pg/store/schema/20170615-add-dataset-map-resource-name-index.xml"/>
    <include file="com/socrata/pg/store/schema/20171009-add-column-map-geo-modifiers.xml"/>
    <!-- The next three tables are only used in Data Coordinator. But they are created so that cleanup code do not need to be special for pg secondary. -->
    <include file="com/socrata/pg/store/schema/20171122-add-secondary-move-jobs-table.xml"/>
    <include file="com/socrata/pg/store/schema/20171128-add-collocation-manifest.xml"/>
    <include file="com/socrata/pg/store/schema/20180202-add-secondary-metrics-table.xml"/>
    <include file="com/socrata/pg/store/schema/20190515-align-truth-instance-name.xml"/>
    <include file="com/socrata/pg/store/schema/20190522-add-dataset-map-latest-data-version.xml"/>
    <include file="com/socrata/pg/store/schema/20191224-add-some-dc-tables.xml"/>
    <include file="com/socrata/pg/store/schema/20200210-add-median-function.xml"/>
    <include file="com/socrata/pg/store/schema/20200901-create-index-directive-map.xml"/>
    <include file="com/socrata/pg/store/schema/20210420-create-single_row.xml"/>
    <include file="com/socrata/pg/store/schema/20220201-add-column-rollup-created-at.xml"/>
    <include file="com/socrata/pg/store/schema/20220208-add-data-shape-version.xml"/>
    <include file="com/socrata/pg/store/schema/20220218-add-rollup-table-name.xml"/>
    <include file="com/socrata/pg/store/schema/20220502-create-index-map.xml"/>
    <include file="com/socrata/pg/store/schema/20220718-create-dummy-resync-table.xml"/>
    <include file="com/socrata/pg/store/schema/20220907-create-rollup-relationship-map.xml"/>
    <include file="com/socrata/pg/store/schema/20221116-create-rollup-table-name-seq.xml"/>
</databaseChangeLog>
| {
"content_hash": "f05997764a919493805a5db6f2d3da6e",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 156,
"avg_line_length": 82.5,
"alnum_prop": 0.7454545454545455,
"repo_name": "socrata-platform/soql-postgres-adapter",
"id": "fda8a352b86d96764f7a35dc03bb1d5a896400f1",
"size": "2970",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "common-pg/src/main/resources/com/socrata/pg/store/schema/migrate.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2464"
},
{
"name": "Jinja",
"bytes": "7204"
},
{
"name": "Scala",
"bytes": "671760"
},
{
"name": "Shell",
"bytes": "10218"
}
],
"symlink_target": ""
} |
package com.jonathancolt.nicity.view.threeD;
/*
* #%L
* nicity-view
* %%
* Copyright (C) 2013 Jonathan Colt
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.jonathancolt.nicity.core.memory.struct.XYZ_D;
/**
 * A 3-D object represented as a set of points with a position
 * ({@code center}), an orientation (pitch / yaw / roll) and a display
 * color.
 *
 * <p>NOTE(review): the unit of the angle values (radians vs degrees) is not
 * specified by this interface — confirm in the implementations.
 *
 * @author Administrator
 */
public interface IObject3D {
    /**
     * Sets the object's center point.
     *
     * @param _center the new center
     */
    public void setCenter(XYZ_D _center);

    /**
     * @return the object's current center point
     */
    public XYZ_D getCenter();

    /**
     * Sets the object's pitch angle.
     *
     * @param _pitch the new pitch
     */
    public void setPitch(double _pitch);

    /**
     * @return the object's current pitch angle
     */
    public double getPitch();

    /**
     * Sets the object's yaw angle.
     *
     * @param _yaw the new yaw
     */
    public void setYaw(double _yaw);

    /**
     * @return the object's current yaw angle
     */
    public double getYaw();

    /**
     * Sets the object's roll angle.
     *
     * @param _roll the new roll
     */
    public void setRoll(double _roll);

    /**
     * @return the object's current roll angle
     */
    public double getRoll();

    /**
     * Replaces the object's point set.
     *
     * @param _points the points that make up the object
     */
    public void setPoints(XYZ_D[] _points);

    /**
     * Appends additional points to the object's point set.
     *
     * @param _points the points to add
     */
    public void addPoints(XYZ_D[] _points);

    /**
     * @return the object's current point set
     */
    public XYZ_D[] getPoints();

    /**
     * Adjusts position and orientation; presumably applies the given values
     * as deltas to the current state (contrast with {@link #set}) —
     * confirm against the implementations.
     *
     * @param _x  x offset
     * @param _y  y offset
     * @param _z  z offset
     * @param _rx rotation about x
     * @param _ry rotation about y
     * @param _rz rotation about z
     */
    public void change(
        double _x, double _y, double _z,
        double _rx, double _ry, double _rz
    );

    /**
     * Sets position and orientation; presumably absolute values (contrast
     * with {@link #change}) — confirm against the implementations.
     *
     * @param _x  x position
     * @param _y  y position
     * @param _z  z position
     * @param _rx rotation about x
     * @param _ry rotation about y
     * @param _rz rotation about z
     */
    public void set(
        double _x, double _y, double _z,
        double _rx, double _ry, double _rz
    );

    /**
     * Maps this object's points into the given array; exact contract is
     * implementation-defined — confirm against the implementations.
     *
     * @param _points destination for the mapped points
     */
    public void mapPoints(
        Object[] _points
    );

    /**
     * Sets the object's display color (toolkit-specific opaque value).
     *
     * @param _color the new color
     */
    public void setColor(Object _color);

    /**
     * @return the object's current display color
     */
    public Object getColor();
}
| {
"content_hash": "311190ccd89795c819bbb7cc50098e74",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 75,
"avg_line_length": 20.022222222222222,
"alnum_prop": 0.4583795782463929,
"repo_name": "jnthnclt/nicity",
"id": "2797e0fe70ae50abd76d392dfca2c37ff6c71787",
"size": "3361",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nicity-view/src/main/java/com/jonathancolt/nicity/view/threeD/IObject3D.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "26281"
},
{
"name": "Java",
"bytes": "3596700"
},
{
"name": "Ruby",
"bytes": "80"
}
],
"symlink_target": ""
} |
Title: Commands
Slug: docs/commands/index
# XXX Commands (CLI)
XXX is controlled via a very easy to use command-line interface (CLI).
XXX is only a single command-line application: `XXX`. This application
then takes a subcommand such as "agent" or "members". The complete list of
subcommands is in the navigation to the left.
The `XXX` CLI is a well-behaved command-line application: in error
cases it returns a non-zero exit status, and it responds to `-h` and `--help`
as you would expect. Some commands that expect input also accept
"-" as a parameter, which tells XXX to read the input from stdin.
To view a list of the available commands at any time, just run `XXX` with
no arguments:
```
$ XXX
usage: XXX [--version] [--help] <command> [<args>]
Available commands are:
agent Runs a XXX agent
force-leave Forces a member of the cluster to enter the "left" state
info Provides debugging information for operators
join Tell XXX agent to join cluster
keygen Generates a new encryption key
leave Gracefully leaves the XXX cluster and shuts down
members Lists the members of a XXX cluster
monitor Stream logs from a XXX agent
reload Triggers the agent to reload configuration files
version Prints the XXX version
```
To get help for any specific command, pass the `-h` flag to the relevant
subcommand. For example, to see help about the `join` subcommand:
```
$ XXX join -h
Usage: XXX join [options] address ...
Tells a running XXX agent (with "XXX agent") to join the cluster
by specifying at least one existing member.
Options:
-rpc-addr=127.0.0.1:8400 RPC address of the XXX agent.
-wan Joins a server to another server in the WAN pool
```
| {
"content_hash": "e0cf3a46cf0ccd96dfe138a29025fb11",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 85,
"avg_line_length": 34.96153846153846,
"alnum_prop": 0.6974697469746974,
"repo_name": "naparuba/kunai",
"id": "9ab1be1da4f2ff2bf79df9083d8663c496dd47eb",
"size": "1818",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "doc/content/pages/docs/commands/index.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "487"
},
{
"name": "C",
"bytes": "345490"
},
{
"name": "C++",
"bytes": "29298"
},
{
"name": "CSS",
"bytes": "12718"
},
{
"name": "HTML",
"bytes": "12328"
},
{
"name": "JavaScript",
"bytes": "558040"
},
{
"name": "Makefile",
"bytes": "8523"
},
{
"name": "Python",
"bytes": "2180654"
},
{
"name": "Shell",
"bytes": "18255"
},
{
"name": "Smarty",
"bytes": "133"
}
],
"symlink_target": ""
} |
package multierr
import "bytes"
// Multierr struct allows to have multiple errors together
type Multierr struct {
errors []error
}
// NewMultierr builds a *Multierr pre-populated with the given errors.
func NewMultierr(errs ...error) *Multierr {
	m := &Multierr{}
	m.errors = append(m.errors, errs...)
	return m
}
// Add appends err to the collection of accumulated errors.
func (m *Multierr) Add(err error) {
	m.errors = append(m.errors, err)
}
// Len returns the number of errors currently held in the collection.
func (m *Multierr) Len() int {
	return len(m.errors)
}
// Error implements the error interface by concatenating every collected
// error's message, each terminated with a newline.
func (m *Multierr) Error() string {
	var out bytes.Buffer
	for _, e := range m.errors {
		out.WriteString(e.Error() + "\n")
	}
	return out.String()
}
| {
"content_hash": "09501eee4efa3c9580fcea8303dfdf7a",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 58,
"avg_line_length": 19.710526315789473,
"alnum_prop": 0.7022696929238985,
"repo_name": "cristaloleg/funf",
"id": "019db50e5e4349d6f10ec1423d6c47c6c37fc8e9",
"size": "749",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "multerr/multierr.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "28504"
},
{
"name": "Makefile",
"bytes": "939"
}
],
"symlink_target": ""
} |
import re
from django.conf import settings
from django.contrib.syndication.views import Feed
from django.utils.feedgenerator import Atom1Feed
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from .models import FoiRequest
CONTROLCHARS_RE = re.compile(r'[\x00-\x08\x0B-\x0C\x0E-\x1F]')
def clean(val):
    """Return *val* with the control characters matched by CONTROLCHARS_RE removed."""
    return re.sub(CONTROLCHARS_RE, '', val)
class LatestFoiRequestsFeed(Feed):
    """RSS feed of the latest public FOI requests, optionally filtered by
    topic, jurisdiction, public body, tag and/or status.

    NOTE(review): ``__init__`` stores the queryset on ``self.items``, which
    shadows the ``items()`` method on instances — Django's Feed machinery
    resolves attributes via ``getattr`` and only calls them when callable,
    so the ``order_by(...)[:15]`` in ``items()`` below likely never runs.
    Confirm and consider renaming the attribute.
    """

    def __init__(self, items, topic=None, jurisdiction=None, public_body=None, tag=None, status=None):
        self.items = items
        self.topic = topic
        self.jurisdiction = jurisdiction
        self.tag = tag
        self.status = status
        self.public_body = public_body
        super(LatestFoiRequestsFeed, self).__init__()

    def get_filter_string(self):
        """Build a human-readable, translated description of the active filters."""
        by = []
        if self.topic:
            by.append(_('by topic %(topic)s') % {'topic': self.topic.name})
        if self.status:
            by.append(_('by status %(status)s') % {
                'status': FoiRequest.get_readable_status(
                    FoiRequest.get_status_from_url(self.status)[1])
            })
        if self.tag:
            by.append(_('by tag %(tag)s') % {'tag': self.tag.name})
        if self.jurisdiction:
            by.append(_('for %(juris)s') % {'juris': self.jurisdiction.name})
        if self.public_body:
            by.append(_('to %(public_body)s') % {'public_body': self.public_body.name})
        return ' '.join(by)

    def title(self, obj):
        # Include the filter description in the title when filters are active.
        by = self.get_filter_string()
        if by:
            return clean(_("Freedom of Information Requests %(by)s on %(sitename)s") % {
                "sitename": settings.SITE_NAME,
                'by': by
            })
        return clean(_("Freedom of Information Requests on %(sitename)s") % {
            "sitename": settings.SITE_NAME
        })

    def description(self, obj):
        by = self.get_filter_string()
        if by:
            return clean(_("This feed contains the Freedom of Information requests %(by)s"
                " that have been made through %(sitename)s.") % {
                    "sitename": settings.SITE_NAME,
                    'by': by
                })
        return clean(_("This feed contains the latest Freedom of Information requests"
            " that have been made through %(sitename)s.") % {
                "sitename": settings.SITE_NAME
            })

    def get_link_kwargs(self):
        """URL kwargs mirroring the active filters, for reverse()."""
        kwargs = {}
        if self.topic:
            kwargs['topic'] = self.topic.slug
        if self.jurisdiction:
            kwargs['jurisdiction'] = self.jurisdiction.slug
        if self.status:
            kwargs['status'] = self.status
        if self.tag:
            kwargs['tag'] = self.tag.slug
        if self.public_body:
            kwargs['public_body'] = self.public_body.slug
        return kwargs

    def link(self):
        return reverse('foirequest-list_feed', kwargs=self.get_link_kwargs())

    def items(self):
        # Latest 15 requests, newest first (but see the shadowing note above).
        return self.items.order_by("-first_message")[:15]

    def item_title(self, item):
        if item.public_body:
            pb_name = item.public_body.name
        else:
            pb_name = _("Not yet known")
        return clean(_("'%(title)s' to %(publicbody)s") % {
            "title": item.title,
            "publicbody": pb_name
        })

    def item_description(self, item):
        return clean(item.description)

    def item_pubdate(self, item):
        # Publication date is the request's first message timestamp.
        return item.first_message
class LatestFoiRequestsFeedAtom(LatestFoiRequestsFeed):
    """Atom variant of :class:`LatestFoiRequestsFeed`."""
    feed_type = Atom1Feed
    # Atom uses a subtitle where RSS uses a description.
    subtitle = LatestFoiRequestsFeed.description

    def link(self):
        return reverse('foirequest-list_feed_atom', kwargs=self.get_link_kwargs())
class FoiRequestFeed(Feed):
    """RSS feed of the event history of a single, public FOI request."""

    def get_object(self, request, slug):
        # Only public requests are exposed via feeds.
        return get_object_or_404(FoiRequest, slug=slug, public=True)

    def title(self, obj):
        return clean(obj.title)

    def link(self, obj):
        return reverse('foirequest-feed', kwargs={"slug": obj.slug})

    def description(self, obj):
        return clean(obj.description)

    def items(self, obj):
        # Latest 15 events for this request, newest first.
        return obj.foievent_set.order_by("-timestamp")[:15]

    def item_title(self, item):
        return clean(item.as_text())

    def item_description(self, item):
        return clean(item.as_text())

    def item_pubdate(self, item):
        return item.timestamp
class FoiRequestFeedAtom(FoiRequestFeed):
    """Atom variant of :class:`FoiRequestFeed`."""
    feed_type = Atom1Feed
    # Atom uses a subtitle where RSS uses a description.
    subtitle = FoiRequestFeed.description

    def link(self, obj):
        return reverse('foirequest-feed_atom', kwargs={"slug": obj.slug})
| {
"content_hash": "ec140324de4971662b84ac7d9554af92",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 102,
"avg_line_length": 31.938356164383563,
"alnum_prop": 0.5933948102080205,
"repo_name": "CodeforHawaii/froide",
"id": "0ec0a91dd556c670ab48eeb54e22c7fc3ef0e314",
"size": "4663",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "froide/foirequest/feeds.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17807"
},
{
"name": "HTML",
"bytes": "160131"
},
{
"name": "Java",
"bytes": "18275"
},
{
"name": "JavaScript",
"bytes": "407029"
},
{
"name": "Makefile",
"bytes": "329"
},
{
"name": "Python",
"bytes": "1641429"
},
{
"name": "Shell",
"bytes": "1621"
}
],
"symlink_target": ""
} |
<?php
namespace fieldwork\tests\sanitizers;
use fieldwork\sanitizers\NumberSanitizer;
/**
 * Unit tests for the NumberSanitizer field sanitizer.
 */
class NumberSanitizerTest extends FieldSanitizer_TestCase
{

    /**
     * Default sanitizer: non-numeric characters are stripped and the value
     * is normalised to a decimal rounded to two fraction digits.
     */
    public function testSanitization ()
    {
        $this->setSanitizer(new NumberSanitizer());
        $this->assertOutcome('0.a1', '0.1');
        $this->assertOutcome('.1', '0.1');
        // A comma acts as a decimal separator; letters are dropped.
        $this->assertOutcome('a,777', '0.78');
        $this->assertOutcome('', '');
        $this->assertOutcome('hello world', '');
        $this->assertOutcome('.', '');
        $this->assertOutcome('0.565464', '0.57');
    }

    /**
     * Second constructor argument: '7' => '5' and '18' => '20' suggest
     * rounding to the nearest multiple of 5 — NOTE(review): confirm the
     * parameter's exact semantics against NumberSanitizer itself.
     */
    public function testPrecision ()
    {
        $this->setSanitizer(new NumberSanitizer(null, '5'));
        $this->assertOutcome('7', '5');
        $this->assertOutcome('1-8!!%', '20');
    }
} | {
"content_hash": "bcd5967c538c965bb109a3e6354fb46c",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 60,
"avg_line_length": 24.70967741935484,
"alnum_prop": 0.587467362924282,
"repo_name": "jmversteeg/fieldwork",
"id": "c18846c5eaa8f4e81bc0471b638acf14cdfbbcb2",
"size": "941",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/fieldwork/tests/sanitizers/NumberSanitizerTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "78428"
}
],
"symlink_target": ""
} |
/* Problem: declare two string variables holding the value
       The "use" of quotations causes difficulties.
   using two different quoting techniques, then print both to verify them. */
using System;

class QuotesInStrings
{
    static void Main()
    {
        // Technique 1: backslash-escaped quotes in a regular string literal.
        string escapedQuotes = "The \"use\" of quotations causes difficulties.";
        // Technique 2: doubled quotes in a verbatim (@) string literal.
        string verbatimQuotes = @"The ""use"" of quotations causes difficulties.";

        Console.WriteLine("First string with \"\" is: {0} ", escapedQuotes);
        Console.WriteLine("Second string with @ is: {0} ", verbatimQuotes);
    }
}
| {
"content_hash": "3432deaa8e31e57c9bd203f00f09eb72",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 80,
"avg_line_length": 35.05263157894737,
"alnum_prop": 0.6816816816816816,
"repo_name": "Andro0/TelerikAcademy",
"id": "5aa7ffb57920bab6f649bd4780e2d874754f869d",
"size": "668",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "C#1/Homework_Primitive Data Types and Variables/07.QuotesInStrings/QuotesInStrings.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "683413"
},
{
"name": "CSS",
"bytes": "9105"
},
{
"name": "HTML",
"bytes": "41273"
}
],
"symlink_target": ""
} |
namespace content {
// Constructs a capturer bound to an existing capture session |id|.
// Device interaction is delegated to a new RtcVideoCaptureDelegate.
RtcVideoCapturer::RtcVideoCapturer(
    const media::VideoCaptureSessionId id,
    VideoCaptureImplManager* vc_manager,
    bool is_screencast)
    : is_screencast_(is_screencast),
      delegate_(new RtcVideoCaptureDelegate(id, vc_manager)),
      state_(VIDEO_CAPTURE_STATE_STOPPED) {
}
// Destructor: capture must already have been stopped before destruction.
RtcVideoCapturer::~RtcVideoCapturer() {
  // Bug fix: the previous DCHECK(VIDEO_CAPTURE_STATE_STOPPED) evaluated the
  // enum constant itself, not |state_|, so it never validated anything.
  DCHECK_EQ(state_, VIDEO_CAPTURE_STATE_STOPPED);
  DVLOG(3) << " RtcVideoCapturer::dtor";
}
// Starts capture with the requested format. Returns CS_STARTING on success;
// the final state is reported asynchronously through OnStateChange().
cricket::CaptureState RtcVideoCapturer::Start(
    const cricket::VideoFormat& capture_format) {
  DVLOG(3) << " RtcVideoCapturer::Start ";
  if (state_ == VIDEO_CAPTURE_STATE_STARTED) {
    DVLOG(1) << "Got a StartCapture when already started!!! ";
    return cricket::CS_FAILED;
  }

  // Translate libjingle's format request into a media capture capability.
  media::VideoCaptureCapability cap;
  cap.width = capture_format.width;
  cap.height = capture_format.height;
  cap.frame_rate = capture_format.framerate();
  cap.color = media::VideoCaptureCapability::kI420;

  state_ = VIDEO_CAPTURE_STATE_STARTED;
  start_time_ = base::Time::Now();
  // NOTE(review): base::Unretained(this) assumes the delegate cancels pending
  // callbacks before |this| is destroyed — confirm in RtcVideoCaptureDelegate.
  delegate_->StartCapture(cap,
      base::Bind(&RtcVideoCapturer::OnFrameCaptured, base::Unretained(this)),
      base::Bind(&RtcVideoCapturer::OnStateChange, base::Unretained(this)));
  return cricket::CS_STARTING;
}
// Stops capture and synchronously signals CS_STOPPED to observers.
void RtcVideoCapturer::Stop() {
  DVLOG(3) << " RtcVideoCapturer::Stop ";
  if (state_ == VIDEO_CAPTURE_STATE_STOPPED) {
    DVLOG(1) << "Got a StopCapture while not started.";
    return;
  }

  // Update state before notifying so IsRunning() is consistent for observers.
  state_ = VIDEO_CAPTURE_STATE_STOPPED;
  delegate_->StopCapture();
  SignalStateChange(this, cricket::CS_STOPPED);
}
// Returns true while a capture session is active.
bool RtcVideoCapturer::IsRunning() {
  return state_ == VIDEO_CAPTURE_STATE_STARTED;
}
bool RtcVideoCapturer::GetPreferredFourccs(std::vector<uint32>* fourccs) {
if (!fourccs)
return false;
fourccs->push_back(cricket::FOURCC_I420);
return true;
}
// Returns true when this capturer was created for screen sharing rather
// than a camera.
bool RtcVideoCapturer::IsScreencast() {
  return is_screencast_;
}
// Chooses the closest supported format to |desired|. Chrome does not support
// capability enumeration, so the desired format is mirrored back verbatim
// (with I420 as the pixel format). Returns false when |best_format| is null.
bool RtcVideoCapturer::GetBestCaptureFormat(const cricket::VideoFormat& desired,
                                            cricket::VideoFormat* best_format) {
  if (best_format == NULL)
    return false;

  best_format->width = desired.width;
  best_format->height = desired.height;
  best_format->interval = desired.interval;
  best_format->fourcc = cricket::FOURCC_I420;
  return true;
}
// Wraps an incoming I420 buffer in a cricket::CapturedFrame and forwards it
// to libjingle. The frame borrows |buf|'s memory; no copy is made here.
void RtcVideoCapturer::OnFrameCaptured(
    const media::VideoCapture::VideoFrameBuffer& buf) {
  // Currently, |fourcc| is always I420.
  cricket::CapturedFrame frame;
  frame.width = buf.width;
  frame.height = buf.height;
  frame.fourcc = cricket::FOURCC_I420;
  frame.data_size = buf.buffer_size;
  // cricket::CapturedFrame time is in nanoseconds.
  frame.elapsed_time = (buf.timestamp - start_time_).InMicroseconds() *
      base::Time::kNanosecondsPerMicrosecond;
  frame.time_stamp = frame.elapsed_time;
  frame.data = buf.memory_pointer;
  // Square pixels (1:1 pixel aspect ratio).
  frame.pixel_height = 1;
  frame.pixel_width = 1;

  // This signals to libJingle that a new VideoFrame is available.
  // libJingle have no assumptions on what thread this signal come from.
  SignalFrameCaptured(this, &frame);
}
// Translates the delegate's capture state into libjingle's CaptureState and
// notifies observers.
void RtcVideoCapturer::OnStateChange(
    RtcVideoCaptureDelegate::CaptureState state) {
  cricket::CaptureState new_state = cricket::CS_FAILED;
  if (state == RtcVideoCaptureDelegate::CAPTURE_STOPPED) {
    new_state = cricket::CS_STOPPED;
  } else if (state == RtcVideoCaptureDelegate::CAPTURE_RUNNING) {
    new_state = cricket::CS_RUNNING;
  } else if (state == RtcVideoCaptureDelegate::CAPTURE_FAILED) {
    new_state = cricket::CS_FAILED;
  } else {
    // Unknown delegate state: keep CS_FAILED and flag the logic error.
    NOTREACHED();
  }
  SignalStateChange(this, new_state);
}
} // namespace content
| {
"content_hash": "0467b6b482afc21cd4b8375e2f379917",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 80,
"avg_line_length": 31.024793388429753,
"alnum_prop": 0.7048481619605754,
"repo_name": "leighpauls/k2cro4",
"id": "950640fedc2618691a3da1d966bbeee778059111",
"size": "4004",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "content/renderer/media/rtc_video_capturer.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "3062"
},
{
"name": "AppleScript",
"bytes": "25392"
},
{
"name": "Arduino",
"bytes": "464"
},
{
"name": "Assembly",
"bytes": "68131038"
},
{
"name": "C",
"bytes": "242794338"
},
{
"name": "C#",
"bytes": "11024"
},
{
"name": "C++",
"bytes": "353525184"
},
{
"name": "Common Lisp",
"bytes": "3721"
},
{
"name": "D",
"bytes": "1931"
},
{
"name": "Emacs Lisp",
"bytes": "1639"
},
{
"name": "F#",
"bytes": "4992"
},
{
"name": "FORTRAN",
"bytes": "10404"
},
{
"name": "Java",
"bytes": "3845159"
},
{
"name": "JavaScript",
"bytes": "39146656"
},
{
"name": "Lua",
"bytes": "13768"
},
{
"name": "Matlab",
"bytes": "22373"
},
{
"name": "Objective-C",
"bytes": "21887598"
},
{
"name": "PHP",
"bytes": "2344144"
},
{
"name": "Perl",
"bytes": "49033099"
},
{
"name": "Prolog",
"bytes": "2926122"
},
{
"name": "Python",
"bytes": "39863959"
},
{
"name": "R",
"bytes": "262"
},
{
"name": "Racket",
"bytes": "359"
},
{
"name": "Ruby",
"bytes": "304063"
},
{
"name": "Scheme",
"bytes": "14853"
},
{
"name": "Shell",
"bytes": "9195117"
},
{
"name": "Tcl",
"bytes": "1919771"
},
{
"name": "Verilog",
"bytes": "3092"
},
{
"name": "Visual Basic",
"bytes": "1430"
},
{
"name": "eC",
"bytes": "5079"
}
],
"symlink_target": ""
} |
#ifndef MICROPY_INCLUDED_SHARED_MODULE_AUDIOIO_WAVEFILE_H
#define MICROPY_INCLUDED_SHARED_MODULE_AUDIOIO_WAVEFILE_H
#include "extmod/vfs_fat.h"
#include "py/obj.h"
#include "shared-module/audiocore/__init__.h"
// Decoder state for a PCM WAVE file read through the FAT VFS.
// NOTE(review): the two buffers suggest double-buffered streaming playback —
// confirm against WaveFile.c.
typedef struct {
    mp_obj_base_t base;
    uint8_t *buffer;
    uint32_t buffer_length;          // length of buffer, in bytes
    uint8_t *second_buffer;
    uint32_t second_buffer_length;   // length of second_buffer, in bytes
    uint32_t file_length; // In bytes
    uint16_t data_start; // Where the data values start
    uint8_t bits_per_sample;         // sample width from the WAVE header
    uint16_t buffer_index;
    uint32_t bytes_remaining;        // sample data left to read, in bytes
    uint8_t channel_count;           // 1 = mono, 2 = stereo (from the header)
    uint32_t sample_rate;
    uint32_t len;
    pyb_file_obj_t *file;            // underlying open file handle
    uint32_t read_count;
    uint32_t left_read_count;
    uint32_t right_read_count;
} audioio_wavefile_obj_t;
// These are not available from Python because it may be called in an interrupt.

// Resets the decoder's buffering state; with single_channel_output, only the
// given channel's cursor is affected (see WaveFile.c for details).
void audioio_wavefile_reset_buffer(audioio_wavefile_obj_t *self,
    bool single_channel_output,
    uint8_t channel);

// Fills *buffer/*buffer_length with the next chunk of decoded samples for
// the requested channel; the result code reports more-data/done/error state.
audioio_get_buffer_result_t audioio_wavefile_get_buffer(audioio_wavefile_obj_t *self,
    bool single_channel_output,
    uint8_t channel,
    uint8_t **buffer,
    uint32_t *buffer_length);  // length in bytes

// Describes the sample layout (buffer count, signedness, max length,
// inter-sample spacing) so the mixer/DAC can consume the stream correctly.
void audioio_wavefile_get_buffer_structure(audioio_wavefile_obj_t *self, bool single_channel_output,
    bool *single_buffer, bool *samples_signed,
    uint32_t *max_buffer_length, uint8_t *spacing);
#endif // MICROPY_INCLUDED_SHARED_MODULE_AUDIOIO_WAVEFILE_H
| {
"content_hash": "742b8cbae509609c39b378f0b5ebcbd7",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 100,
"avg_line_length": 31.574468085106382,
"alnum_prop": 0.6893530997304582,
"repo_name": "adafruit/circuitpython",
"id": "986359e16ecdd0fe9abeddcd5bff565716d9ec5a",
"size": "2707",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "shared-module/audiocore/WaveFile.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "10241"
},
{
"name": "C",
"bytes": "18450191"
},
{
"name": "C++",
"bytes": "476"
},
{
"name": "CMake",
"bytes": "18203"
},
{
"name": "CSS",
"bytes": "316"
},
{
"name": "HTML",
"bytes": "10126"
},
{
"name": "JavaScript",
"bytes": "13854"
},
{
"name": "Jinja",
"bytes": "11034"
},
{
"name": "Makefile",
"bytes": "330832"
},
{
"name": "Python",
"bytes": "1423935"
},
{
"name": "Shell",
"bytes": "18681"
}
],
"symlink_target": ""
} |
SBEP = SBEP or {}

-- When a player leaves a vehicle: if the vehicle has a valid, nearby
-- ExitPoint entity, teleport the player just above it and start that
-- point's cooldown; also clear any camera-view override on the player.
function SetExPoint(player, vehicle)
	local exitPoint = vehicle.ExitPoint
	if exitPoint and exitPoint:IsValid() then
		-- Only use the exit point when it is within 500 units of the vehicle.
		if exitPoint:GetPos():Distance(vehicle:GetPos()) <= 500 then
			player:SetPos(exitPoint:GetPos() + exitPoint:GetUp() * 10)
			exitPoint.CDown = CurTime() + 0.5
		end
	end
	if player.CamCon then
		player.CamCon = false
		player:SetViewEntity()
	end
end
hook.Add("PlayerLeaveVehicle", "PlayerRepositioning", SetExPoint)
SBEP.SetExPoint = SetExPoint
-- Delegates the leave-vehicle event to the vehicle's controller, when the
-- controller exists and implements ExitFighter.
function SBEP.ExitFighter(player, vehicle)
	local controller = vehicle.Cont
	if controller and controller.ExitFighter then
		controller:ExitFighter(player, vehicle)
	end
end
hook.Add("PlayerLeaveVehicle", "SBEP.ExitFighter", SBEP.ExitFighter)
-- For controlling certain entities: caches the player's current command
-- mouse X/Y values every movement tick so SBEP entities can read them later.
function SBEPCCC(ply, data)
	local cmd = ply:GetCurrentCommand()
	ply.SBEPYaw = cmd:GetMouseX()
	ply.SBEPPitch = cmd:GetMouseY()
end
hook.Add("SetupMove", "SBEPControls", SBEPCCC)
SBEP.CCC = SBEPCCC
-- Hardpointing: mounts weapon `weap` onto pod `pod` using the first free,
-- type-compatible hardpoint slot declared by controller `cont`.
-- Returns true on success, false if the weapon is already mounted, the
-- controller has no hardpoints, or no compatible slot is free.
function HPLink( cont, pod, weap )
	if weap.Mounted then return false end
	if not cont.HPC then return false end
	for i = 1, cont.HPC do
		-- A slot is free when it has no entity or its entity has been removed.
		if not cont.HP[i]["Ent"] or not cont.HP[i]["Ent"]:IsValid() then
			-- Both the slot's type and the weapon's HPType may be a single
			-- string or a table of strings; any pairwise match is compatible.
			local TypeMatch = false
			if type(cont.HP[i]["Type"]) == "string" then
				if type(weap.HPType) == "string" then
					--print("Double String")
					if cont.HP[i]["Type"] == weap.HPType then
						TypeMatch = true
					end
				elseif type(weap.HPType) == "table" then
					--print("String - Table")
					if table.HasValue( weap.HPType, cont.HP[i]["Type"] ) then
						TypeMatch = true
					end
				end
			elseif type(cont.HP[i]["Type"]) == "table" then
				if type(weap.HPType) == "string" then
					--print("Table - String")
					if table.HasValue( cont.HP[i]["Type"], weap.HPType ) then
						TypeMatch = true
					end
				elseif type(weap.HPType) == "table" then
					--print("Double Table")
					for _,v in pairs(cont.HP[i]["Type"]) do
						if table.HasValue( weap.HPType, v ) then
							TypeMatch = true
						end
					end
				end
			end
			if TypeMatch then
				------Fishface60's new code-------
				-- Orient the weapon: hardpoint angle relative to the pod,
				-- then the weapon's own attachment-point angle offset.
				local APAng = weap.APAng or Angle(0,0,0)
				local HPAng = cont.HP[i]["Angle"] or Angle(0,0,0)
				weap:SetAngles(pod:LocalToWorldAngles(HPAng))
				weap:SetAngles(weap:LocalToWorldAngles(APAng))
				local APPos = weap.APPos or Vector(0,0,0)
				local HPPos = cont.HP[i]["Pos"] or Vector(0,0,0)
				--copy the vector in case we change it
				HPPos = Vector(HPPos.x,HPPos.y,HPPos.z)
				-- Skewed controllers declare hardpoint offsets in a rotated
				-- frame: `true` means a fixed -90 yaw, an Angle is applied as-is.
				if cont.Skewed then
					if (type(cont.Skewed) == "boolean" and cont.Skewed == true) then
						HPPos:Rotate(Angle(0,-90,0))
					elseif type(cont.Skewed) == "angle" then
						HPPos:Rotate(cont.Skewed)
					end
				end
				weap:SetPos(pod:LocalToWorld(HPPos))
				weap:SetPos(weap:LocalToWorld(APPos))
				-- Attach: disable collisions, then nocollide + weld + parent
				-- so the weapon rides the pod rigidly.
				weap:GetPhysicsObject():EnableCollisions(false)
				weap.HPNoc = constraint.NoCollide(pod, weap, 0, 0, 0, true)
				weap.HPWeld = constraint.Weld(pod, weap, 0, 0, 0, true)
				weap:SetParent( pod )
				-- Publish the mounted weapon so clients (and the pod's seat)
				-- can find it by slot number.
				pod:SetNetworkedEntity( "HPW_"..i, weap )
				if pod.Pod and pod.Pod:IsValid() then
					pod.Pod:SetNetworkedEntity( "HPW_"..i, weap )
				end
				cont.HP[i]["Ent"] = weap
				weap.Pod = pod
				weap.HPN = i
				weap.Mounted = true
				weap:GetPhysicsObject():EnableGravity(false)
				if cont.OnHPLink then cont:OnHPLink(weap) end
				return true
			end
		end
	end
	return false
end
SBEP.HPLink = HPLink
--This is basically a customized version of the standard GCombat explosion. Thanks to Q42 for making the system so malleable :)
--The only reason this is necessary is to stop certain projectiles from blowing each other up:
--entities whose class appears in `filter` are skipped entirely.
function SBGCSplash( position, radius, damage, pierce, filter )
	local targets = ents.FindInSphere( position, radius )
	-- Entities hugging the blast origin are excluded from the line-of-sight
	-- trace filter below so they cannot shadow targets behind them.
	local tooclose = ents.FindInSphere( position, 5 )
	for _,i in pairs(targets) do
		--print(filter)
		--print(i:GetClass())
		if not table.HasValue( filter, i:GetClass() ) then
			--print("Not Matching")
			-- Damage is only applied when the blast has line of sight to the
			-- target's bounding-box centre.
			local tracedata = {}
			tracedata.start = position
			tracedata.endpos = i:LocalToWorld( i:OBBCenter( ) )
			tracedata.filter = tooclose
			tracedata.mask = MASK_SOLID
			local trace = util.TraceLine(tracedata)
			if trace.Entity == i then
				local hitat = trace.HitPos
				cbt_dealhcghit( i, damage, pierce, hitat, hitat)
			end
		end
	end
end
-- Console command handler: sets and persists the player's SBEP weapon colour.
-- Usage: SBEP_Weapon_Color r g b  (each component clamped to 0-255).
-- The chosen colour is stored via PData and echoed back to the client.
function SetPlayerSPEBWeaponColor(ply,cmd,args)
	args[1],args[2],args[3] = tonumber(args[1]),tonumber(args[2]),tonumber(args[3])
	if (not args[1]) or (not args[2]) or (not args[3]) then
		ply:PrintMessage(HUD_PRINTTALK,"Arguments for color invalid, proper format is \"SBEP_Weapon_Color r g b\"")
		return
	end
	ply.SBEPWeaponColor = Color(math.Clamp(args[1],0,255),math.Clamp(args[2],0,255),math.Clamp(args[3],0,255),255)
	-- Persist each channel separately so it survives reconnects.
	ply:SetPData("SBEP_Weapon_Color_Red",ply.SBEPWeaponColor.r)
	ply:SetPData("SBEP_Weapon_Color_Green",ply.SBEPWeaponColor.g)
	ply:SetPData("SBEP_Weapon_Color_Blue",ply.SBEPWeaponColor.b)
	-- Notify the owning client of the (clamped) colour that was applied.
	umsg.Start("IdLikeToRecieveMyOwnColorNow",ply)
	umsg.Short(ply.SBEPWeaponColor.r)
	umsg.Short(ply.SBEPWeaponColor.g)
	umsg.Short(ply.SBEPWeaponColor.b)
	umsg.End()
end
concommand.Add("SBEP_Weapon_Color",SetPlayerSPEBWeaponColor)
| {
"content_hash": "653075834c47fdbcf8034c0dfc1f9e48",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 126,
"avg_line_length": 32.74233128834356,
"alnum_prop": 0.6874648679033165,
"repo_name": "X-Coder/SBEP-Weapons",
"id": "633e25a245b0c05886d9b37754df7cd808098ef1",
"size": "5383",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lua/autorun/server/sbstuff.lua",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Lua",
"bytes": "1419593"
}
],
"symlink_target": ""
} |
package org.apache.activemq.apollo.util;
import java.util.ArrayList;
/**
 * A fixed-capacity list that, once full, overwrites its oldest entries in
 * ring-buffer fashion. Elements about to be overwritten are reported through
 * {@link #onEvicted(Object)}.
 *
 * @author <a href="http://www.christianposta.com/blog">Christian Posta</a>
 */
public class CircularBuffer<E> extends ArrayList<E>{

    private int maxSize;
    // Index of the next slot to overwrite once the buffer is full.
    private int pos = 0;

    public CircularBuffer(int maxSize) {
        super(maxSize);
        this.maxSize = maxSize;
    }

    /**
     * Appends the element; once {@code maxSize} entries are held, the oldest
     * unreplaced entry is evicted and overwritten instead.
     *
     * @param e the element to add
     * @return always {@code true}, per the {@link java.util.List#add} contract
     */
    @Override
    public boolean add(E e) {
        if (this.size() < this.maxSize) {
            return super.add(e);
        }
        else {
            onEvicted(this.get(pos));
            super.set(pos, e);
            // Bug fix: wrap with modulo. The old check (pos > maxSize) let
            // pos reach maxSize, so a later set(pos, e) threw
            // IndexOutOfBoundsException once the buffer had cycled.
            pos = (pos + 1) % maxSize;
            return true;
        }
    }

    /** @return the fixed capacity of this buffer */
    public int getMaxSize() {
        return maxSize;
    }

    /**
     * Hook invoked just before an element is overwritten; default is a no-op.
     *
     * @param elem the element being evicted
     */
    protected void onEvicted(E elem) {
    }
}
| {
"content_hash": "0b972456f0c30c7a950ec842cf9e74c0",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 75,
"avg_line_length": 19.523809523809526,
"alnum_prop": 0.524390243902439,
"repo_name": "christian-posta/activemq-apollo-java-port",
"id": "75c0c69e948b49cd2ed5d671b90f1042305f5cbc",
"size": "1618",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apollo-util/src/main/java/org/apache/activemq/apollo/util/CircularBuffer.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "654215"
}
],
"symlink_target": ""
} |
from ...utils import *
def bubble_sort(values, comp_function, visual=False):
    """
    Use Bubble Sort for sorting a list of n values given a comparison function

    Args:
        values: The list to be sorted
        comp_function: A binary function indicating order for sorting
        visual: Set to True for additional screen output

    Returns:
        A sorted list based on original values

    Time:
        O(n ^ 2) worst case; O(n) best case thanks to the early-exit check

    Space:
        O(1)

    Explanation:
        For each pass i from 0 to n - 1,
            For each index j from 1 to n - i - 1 (the sorted tail is skipped):
                If value(j) and value(j - 1) are in the wrong order:
                    Swap value(j) and value(j - 1)
            If a full pass performs no swaps, the list is sorted: stop early.
    """
    if visual:
        print("Performing Bubble Sort...")
        print("Start:\t\t{}".format(values))
    n = len(values)
    swap_count = 0
    for i in range(n):
        swapped = False
        # After pass i, the last i elements are already in their final
        # positions, so the inner scan can stop at n - i.
        for j in range(1, n - i):
            if not comp_function(values[j - 1], values[j]):
                values = utils.swap_index(values, j - 1, j)
                swap_count += 1
                swapped = True
                if visual:
                    print("Step {}:\t\t{} [Swapped {} and {}]".format(swap_count, values, values[j], values[j - 1]))
        if not swapped:
            # A pass with no swaps means the list is sorted already.
            break
    if visual:
        print("End:\t\t{} [{} swap(s) performed]".format(values, swap_count))
    return values
| {
"content_hash": "58596fcdd2cd4b4cf5102e6dd0685a4a",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 101,
"avg_line_length": 25.209302325581394,
"alnum_prop": 0.6439114391143912,
"repo_name": "aryrobocode/apothecary",
"id": "86d6065e842bfb503d414da5957bd811727cd7c8",
"size": "1084",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apothecary/cs/sorts/bubble_sort.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "47836"
}
],
"symlink_target": ""
} |
import { isString, isError, isObject } from 'lodash';
import { CustomError } from '../errors/custom.error';
import { Utilities, PlatformType } from '../helpers/utilities';
export class UI {
    /** Shows a basic notification at the top of the page
     * @param message - Message, either single-string or multiline (punctuated by '\n')
     */
    static notify(message: string);

    /** Shows a basic error notification at the top of the page
     * @param error - Error object
     */
    static notify(error: Error);

    /** Shows a basic notification with a custom title at the top of the page
     * @param title - Title, bolded
     * @param message - Message, either single-string or multiline (punctuated by '\n')
     */
    static notify(title: string, message: string);

    /** Shows a basic error notification with a custom title at the top of the page
     * @param title - Title, bolded
     * @param error - Error object
     */
    static notify(title: string, error: Error);

    /** Shows a basic error notification, with custom parameters, at the top of the page */
    static notify(error: Error, params: {
        title?: string;
        /** custom message in place of the error text */
        message?: string;
        moreDetailsLabel?: string;
    });

    /** Shows a basic notification at the top of the page, with a background color set based on the type parameter
     * @param title - Title, bolded
     * @param message - Message, either single-string or multiline (punctuated by '\n')
     * @param type - Type, determines the background color of the notification. Acceptable types are:
     * 'default' | 'success' | 'error' | 'warning' | 'severe-warning'
     */
    static notify(title: string, message: string, type: 'default' | 'success' | 'error' | 'warning' | 'severe-warning');

    /** Shows a basic notification at the top of the page, with custom parameters */
    static notify(params: {
        title?: string;
        message: string;
        type?: 'default' | 'success' | 'error' | 'warning' | 'severe-warning'
    });

    // Implementation behind all of the overloads above: normalizes the
    // arguments, injects an Office-Fabric-styled message bar at the top of
    // the document, and wires up the dismiss / "more details" interactions.
    static notify() {
        const params = parseNotificationParams(arguments);

        // Map notification type to the corresponding Fabric MessageBar class;
        // unknown/default types get no modifier class.
        const messageBarClasses = {
            'success': 'ms-MessageBar--success',
            'error': 'ms-MessageBar--error',
            'warning': 'ms-MessageBar--warning',
            'severe-warning': 'ms-MessageBar--severeWarning'
        };
        const messageBarTypeClass = messageBarClasses[params.type] || '';

        // Reserve space on the right for the Office host's personality menu,
        // which overlaps the page chrome on desktop hosts.
        let paddingForPersonalityMenu = '0';
        if (Utilities.platform === PlatformType.PC) {
            paddingForPersonalityMenu = '20px';
        } else if (Utilities.platform === PlatformType.MAC) {
            paddingForPersonalityMenu = '40px';
        }

        const messageBannerHtml = `
            <div class="office-js-helpers-notification ms-font-m ms-MessageBar ${messageBarTypeClass}">
                <style>
                    .office-js-helpers-notification {
                        position: fixed;
                        z-index: 2147483647;
                        top: 0;
                        left: 0;
                        right: 0;
                        width: 100%;
                        padding: 0 0 10px 0;
                    }
                    .office-js-helpers-notification > div > div {
                        padding: 10px 15px;
                        box-sizing: border-box;
                    }
                    .office-js-helpers-notification pre {
                        white-space: pre-wrap;
                        word-wrap: break-word;
                        margin: 0px;
                        font-size: smaller;
                    }
                    .office-js-helpers-notification > button {
                        height: 52px;
                        width: 40px;
                        cursor: pointer;
                        float: right;
                        background: transparent;
                        border: 0;
                        margin-left: 10px;
                        margin-right: ${paddingForPersonalityMenu}
                    }
                </style>
                <button>
                    <i class="ms-Icon ms-Icon--Clear"></i>
                </button>
            </div>`;

        // Only one notification is shown at a time: remove any existing ones.
        const existingNotifications = document.getElementsByClassName('office-js-helpers-notification');
        while (existingNotifications[0]) {
            existingNotifications[0].parentNode.removeChild(existingNotifications[0]);
        }

        document.body.insertAdjacentHTML('afterbegin', messageBannerHtml);

        const notificationDiv = document.getElementsByClassName('office-js-helpers-notification')[0];
        const messageTextArea = document.createElement('div');
        notificationDiv.insertAdjacentElement('beforeend', messageTextArea);

        if (params.title) {
            const titleDiv = document.createElement('div');
            titleDiv.textContent = params.title;
            titleDiv.classList.add('ms-fontWeight-semibold');
            messageTextArea.insertAdjacentElement('beforeend', titleDiv);
        }

        // Render each line of the (possibly multiline) message as its own div.
        params.message.split('\n').forEach(text => {
            const div = document.createElement('div');
            div.textContent = text;
            messageTextArea.insertAdjacentElement('beforeend', div);
        });

        if (params.moreDetails) {
            // "More details" starts as a link; clicking it reveals the
            // hidden <pre> with the full details and hides the link.
            const labelDiv = document.createElement('div');
            messageTextArea.insertAdjacentElement('beforeend', labelDiv);
            const label = document.createElement('a');
            label.setAttribute('href', 'javascript:void(0)');
            label.onclick = () => {
                (document.querySelector('.office-js-helpers-notification pre') as HTMLPreElement)
                    .parentElement.style.display = 'block';
                labelDiv.style.display = 'none';
            };
            label.textContent = params.moreDetailsLabel;
            labelDiv.insertAdjacentElement('beforeend', label);

            const preDiv = document.createElement('div');
            preDiv.style.display = 'none';
            messageTextArea.insertAdjacentElement('beforeend', preDiv);

            const detailsDiv = document.createElement('pre');
            detailsDiv.textContent = params.moreDetails;
            preDiv.insertAdjacentElement('beforeend', detailsDiv);
        }

        // Dismiss button removes the whole notification.
        (document.querySelector('.office-js-helpers-notification > button') as HTMLButtonElement)
            .onclick = () => {
                notificationDiv.parentNode.removeChild(notificationDiv);
            };
    }
}
/**
 * Normalize the several `notify(...)` call signatures into one canonical
 * params object.
 *
 * Accepted overloads (dispatched on `params.length`):
 *   1 arg : Error | message string | { title?, message, type? }
 *   2 args: (title, Error) | (title, message) | (Error, { title?, message?, moreDetailsLabel? })
 *   3 args: (title, message, type)
 *
 * Any shape that does not match one of the above throws a single uniform
 * Error ("Invalid parameters...") — the inner bare `throw new Error()` calls
 * are deliberately caught by the outer try/catch and re-thrown with that
 * message.
 */
function parseNotificationParams(params: IArguments): {
    title: string;
    message: string;
    type: 'default' | 'success' | 'error' | 'warning' | 'severe-warning';
    moreDetails: string | null;
    moreDetailsLabel: string;
} {
    try {
        // Baseline values; each branch spreads these first and then overrides.
        const defaults = {
            title: null,
            type: 'default' as ('default' | 'success' | 'error' | 'warning' | 'severe-warning'),
            moreDetails: null,
            moreDetailsLabel: 'Additional details...'
        };
        switch (params.length) {
            case 1: {
                // Single Error: title/type are fixed, details come from the error.
                if (isError(params[0])) {
                    return {
                        ...defaults,
                        title: 'Error',
                        type: 'error',
                        ...getErrorDetails(params[0])
                    };
                }
                // Single string: it is the message.
                if (isString(params[0])) {
                    return {
                        ...defaults,
                        message: params[0]
                    };
                }
                // Single options object: must at least carry a string `message`.
                if (isObject(params[0])) {
                    const customParams: {
                        title?: string;
                        message: string;
                        type?: 'default' | 'success' | 'error' | 'warning' | 'severe-warning'
                    } = params[0];
                    if (!isString(customParams.message)) {
                        throw new Error();
                    }
                    return {
                        ...defaults,
                        title: customParams.title || defaults.title,
                        message: customParams.message,
                        type: customParams.type || defaults.type,
                    };
                }
                throw new Error();
            }
            case 2: {
                if (isString(params[0])) {
                    // (title, Error): error details fill message/type/moreDetails.
                    if (isError(params[1])) {
                        return {
                            ...defaults,
                            title: params[0],
                            ...getErrorDetails(params[1])
                        };
                    }
                    // (title, message)
                    if (isString(params[1])) {
                        return {
                            ...defaults,
                            title: params[0],
                            message: params[1]
                        };
                    }
                } else if (isError(params[0]) && isObject(params[1])) {
                    // (Error, options): options may override the error-derived
                    // title/message and customize the "more details" label.
                    const customParams: {
                        title?: string;
                        /** custom message in place of the error text */
                        message?: string;
                        moreDetailsLabel?: string;
                    } = params[1];
                    const result = {
                        ...defaults,
                        ...getErrorDetails(params[0]),
                        moreDetailsLabel: customParams.moreDetailsLabel || defaults.moreDetailsLabel
                    };
                    result.title = customParams.title || result.title;
                    result.message = customParams.message || result.message;
                    return result;
                }
                throw new Error();
            }
            case 3: {
                // (title, message, type) — all three must be strings.
                if (!(isString(params[0]) && isString(params[2]))) {
                    throw new Error();
                }
                if (!isString(params[1])) {
                    throw new Error();
                }
                return {
                    ...defaults,
                    title: params[0],
                    message: params[1],
                    type: params[2]
                };
            }
            default:
                throw new Error();
        }
    } catch (e) {
        // Collapse every validation failure into one caller-facing error.
        throw new Error('Invalid parameters passed to "notify" function');
    }
}
function getErrorDetails(error: Error): {
type: 'error'
message: string,
moreDetails: string;
} {
let moreDetails: string;
let innerException = error;
if (error instanceof CustomError) {
innerException = error.innerError;
}
if ((window as any).OfficeExtension && innerException instanceof OfficeExtension.Error) {
moreDetails = JSON.stringify((error as OfficeExtension.Error).debugInfo, null, 4);
}
return {
type: 'error',
message: error.toString(),
moreDetails
};
}
| {
"content_hash": "463a4b947799bb92f901d54ece8ae628",
"timestamp": "",
"source": "github",
"line_count": 295,
"max_line_length": 120,
"avg_line_length": 37.49491525423729,
"alnum_prop": 0.49769460265798754,
"repo_name": "nujhong/office-js-helpers",
"id": "23922b8080e293275fdf3d1d7757c2750c235ce2",
"size": "11144",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ui/ui.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "3106"
},
{
"name": "TypeScript",
"bytes": "64200"
}
],
"symlink_target": ""
} |
import Menu from 'ant-design-vue/es/menu'
import Icon from 'ant-design-vue/es/icon'
const { Item, SubMenu } = Menu
// Programmatic sidebar menu: renders an Ant Design Vue <Menu> tree (via JSX)
// from a router-derived `menu` array and keeps open/selected keys in sync with
// the current route and the layout's collapsed state.
export default {
  name: 'SMenu',
  props: {
    // Menu tree; each node carries path/name/meta (and optionally children).
    menu: {
      type: Array,
      required: true
    },
    // Ant Design menu theme.
    theme: {
      type: String,
      required: false,
      default: 'dark'
    },
    // Ant Design menu mode ('inline', 'horizontal', ...).
    mode: {
      type: String,
      required: false,
      default: 'inline'
    },
    // Whether the surrounding sider is collapsed.
    collapsed: {
      type: Boolean,
      required: false,
      default: false
    }
  },
  data () {
    return {
      openKeys: [],        // currently expanded submenu paths
      selectedKeys: [],    // currently highlighted item path(s)
      cachedOpenKeys: [],  // openKeys stashed while collapsed, restored on expand
      cachedPath: null     // last selected/redirected path (guards duplicate navigation)
    }
  },
  computed: {
    // Paths of the top-level menu entries; used to enforce single-open-submenu
    // behaviour in onOpenChange.
    rootSubmenuKeys: vm => {
      const keys = []
      vm.menu.forEach(item => keys.push(item.path))
      return keys
    }
  },
  created () {
    this.updateMenu()
  },
  watch: {
    // Collapsing hides submenus but remembers which were open so they can be
    // restored when the sider expands again.
    collapsed (val) {
      if (val) {
        this.cachedOpenKeys = this.openKeys.concat()
        this.openKeys = []
      } else {
        this.openKeys = this.cachedOpenKeys
      }
    },
    // Re-derive selection/open state on every navigation.
    $route: function () {
      this.updateMenu()
    }
  },
  methods: {
    // select menu item
    // Keep at most one root submenu open in inline mode; horizontal mode
    // accepts whatever the Menu component reports.
    onOpenChange (openKeys) {
      if (this.mode === 'horizontal') {
        this.openKeys = openKeys
        return
      }
      const latestOpenKey = openKeys.find(key => !this.openKeys.includes(key))
      if (!this.rootSubmenuKeys.includes(latestOpenKey)) {
        this.openKeys = openKeys
      } else {
        this.openKeys = latestOpenKey ? [latestOpenKey] : []
      }
    },
    // Derive selectedKeys/openKeys from the matched route records.
    updateMenu () {
      const routes = this.$route.matched.concat()
      // Deeply nested hidden routes select their parent entry instead.
      // NOTE(review): assumes hidden routes only appear at depth >= 4 and that
      // index 2 is the visible ancestor — confirm against the router config.
      if (routes.length >= 4 && this.$route.meta.hidden) {
        routes.pop()
        this.selectedKeys = [routes[2].path]
      } else {
        this.selectedKeys = [routes.pop().path]
      }
      const openKeys = []
      if (this.mode === 'inline') {
        routes.forEach(item => {
          openKeys.push(item.path)
        })
      }
      this.cachedPath = this.selectedKeys[0]
      // While collapsed, stash the keys instead of opening submenus.
      this.collapsed ? (this.cachedOpenKeys = openKeys) : (this.openKeys = openKeys)
    },
    // render
    // Render one node: submenu when it has (non-hidden) children, item otherwise.
    renderItem (menu) {
      if (!menu.hidden) {
        return menu.children && !menu.hideChildrenInMenu ? this.renderSubMenu(menu) : this.renderMenuItem(menu)
      }
      return null
    },
    // Leaf item: a router-link to the route's name, honouring meta.target.
    renderMenuItem (menu) {
      const target = menu.meta.target || null
      const props = {
        to: { name: menu.name },
        target: target
      }
      return (
        <Item {...{ key: menu.path }}>
          <router-link {...{ props }}>
            {this.renderIcon(menu.meta.icon, menu)}
            <span>{this.$t(menu.meta.title)}</span>
          </router-link>
        </Item>
      )
    },
    // Submenu: title (icon + translated label) plus recursively rendered children.
    // Clicking the title text navigates via handleClickParentMenu.
    renderSubMenu (menu) {
      const itemArr = []
      const on = {
        click: () => {
          this.handleClickParentMenu(menu)
        }
      }
      if (!menu.hideChildrenInMenu) {
        menu.children.forEach(item => itemArr.push(this.renderItem(item)))
      }
      return (
        <SubMenu {...{ key: menu.path }}>
          <span slot="title">
            {this.renderIcon(menu.meta.icon, menu)}
            <span {...{ on: on }}>{this.$t(menu.meta.title)}</span>
          </span>
          {itemArr}
        </SubMenu>
      )
    },
    // Icon may be a component (object) or an Ant icon type string; 'none' or
    // undefined renders nothing. Clicking the icon also navigates.
    renderIcon (icon, menuItem) {
      if (icon === 'none' || icon === undefined) {
        return null
      }
      const props = {}
      const on = {
        click: () => {
          this.handleClickParentMenu(menuItem)
        }
      }
      typeof (icon) === 'object' ? props.component = icon : props.type = icon
      return (
        <Icon {... { props, on } } />
      )
    },
    // Navigate to a parent entry's redirect target, skipping a no-op when the
    // redirect is already the cached path.
    handleClickParentMenu (menuItem) {
      if (this.cachedPath === menuItem.redirect) {
        return
      }
      if (menuItem.redirect) {
        this.cachedPath = menuItem.redirect
        // setTimeout defers navigation until after the menu's own click handling.
        setTimeout(() => this.$router.push({ path: menuItem.path }))
      }
    }
  },
  render () {
    const { mode, theme, menu } = this
    const props = {
      mode: mode,
      theme: theme,
      openKeys: this.openKeys
    }
    const on = {
      // Mirror the Menu's selection into local state and re-emit for parents.
      select: obj => {
        this.selectedKeys = obj.selectedKeys
        this.$emit('select', obj)
      },
      openChange: this.onOpenChange
    }
    const menuTree = menu.map(item => {
      if (item.hidden) {
        return null
      }
      return this.renderItem(item)
    })
    // {...{ props, on: on }}
    return (
      <Menu vModel={this.selectedKeys} {...{ props, on: on }}>
        {menuTree}
      </Menu>
    )
  }
}
| {
"content_hash": "938b937c9f1a0a7671b2b81acd9dbe68",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 111,
"avg_line_length": 23.652631578947368,
"alnum_prop": 0.5113484646194927,
"repo_name": "GabrielBrascher/cloudstack",
"id": "7cb1a0133552a4e2d5bfb329115eeef71549a6c1",
"size": "5296",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ui/src/components/menu/menu.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "9979"
},
{
"name": "C#",
"bytes": "2356211"
},
{
"name": "CSS",
"bytes": "42504"
},
{
"name": "Dockerfile",
"bytes": "4189"
},
{
"name": "FreeMarker",
"bytes": "4887"
},
{
"name": "Groovy",
"bytes": "146420"
},
{
"name": "HTML",
"bytes": "53626"
},
{
"name": "Java",
"bytes": "38859783"
},
{
"name": "JavaScript",
"bytes": "995137"
},
{
"name": "Less",
"bytes": "28250"
},
{
"name": "Makefile",
"bytes": "871"
},
{
"name": "Python",
"bytes": "12977377"
},
{
"name": "Ruby",
"bytes": "22732"
},
{
"name": "Shell",
"bytes": "744445"
},
{
"name": "Vue",
"bytes": "2012353"
},
{
"name": "XSLT",
"bytes": "57835"
}
],
"symlink_target": ""
} |
/**
* Given metadata on a tile source, a GeoJS tileLayer, and a set of options,
* add a function to the layer `setFrameQuad(<frame>)` that will, if possible,
* set the baseQuad to a cropped section of an image that contains excerpts of
* all frames.
*
* @param {object} tileinfo The metadata of the source image. This expects
* ``sizeX`` and ``sizeY`` to be the width and height of the image and
* ``frames`` to contain a list of the frames of the image or be undefined if
* there is only one frame.
* @param {geo.tileLayer} layer The GeoJS layer to add the function to. This
* is also used to get a maximal texture size if the layer is a webGL
* layer.
* @param {object} options Additional options for the function. This must
* minimally include ``baseUrl``.
* @param {string} options.baseUrl The reference to the tile endpoint, e.g.,
* <url>/api/v1/item/<item id>/tiles.
* @param {string} options.restRequest A backbone-like ajax handler function.
* @param {string} options.restUrl A reference to the tile endpoint as used by
* the restRequest function, e.g., item/<item id>/tiles.
* @param {string} [options.format='encoding=JPEG&jpegQuality=85&jpegSubsampling=1']
* The compression and format for the texture.
* @param {string} [options.query] Additional query options to add to the
* tile_frames endpoint, e.g. 'style={"min":"min","max":"max"}'. Do not
* include framesAcross or frameList. You must specify 'cache=true' if
* that is desired.
* @param {number} [options.frameBase=0] Starting frame number used.
* @param {number} [options.frameStride=1] Only use every ``frameStride`` frame
* of the image.
* @param {number} [options.frameGroup=1] If above 1 and multiple textures are
* used, each texture will have an even multiple of the group size number of
* frames. This helps control where texture loading transitions occur.
* @param {number} [options.frameGroupFactor=4] If ``frameGroup`` would reduce
* the size of the tile images beyond this factor, don't use it.
* @param {number} [options.frameGroupStride=1] If ``frameGroup`` is above 1
* and multiple textures are used, then the frames are reordered based on this
* stride value.
* @param {number} [options.maxTextureSize] Limit the maximum texture size to a
* square of this size. The size is also limited by the WebGL maximum
* size for webgl-based layers or 8192 for canvas-based layers.
* @param {number} [options.maxTextures=1] If more than one, allow multiple
* textures to increase the size of the individual frames. The number of
* textures will be capped by ``maxTotalTexturePixels`` as well as this
* number.
* @param {number} [options.maxTotalTexturePixels=1073741824] Limit the
* maximum texture size and maximum number of textures so that the combined
* set does not exceed this number of pixels.
* @param {number} [options.alignment=16] Individual frames are buffer to an
* alignment of this maxy pixels. If JPEG compression is used, this should
* be 8 for monochrome images or jpegs without subsampling, or 16 for jpegs
* with moderate subsampling to avoid compression artifacts from leaking
* between frames.
* @param {number} [options.adjustMinLevel=true] If truthy, adjust the tile
* layer's minLevel after the quads are loaded.
* @param {number} [options.maxFrameSize] If set, limit the maximum width and
* height of an individual frame to this value.
* @param {string} [options.crossOrigin] If specified, use this as the
* crossOrigin policy for images.
* @param {string} [options.progress] If specified, a function to call whenever
* a texture image is loaded. This is also called before the first load.
* @param {boolean} [options.redrawOnFirstLoad=true] If truthy, redraw the
* layer after the base quad is first loaded if a frame value has been set.
*/
function setFrameQuad(tileinfo, layer, options) {
  // Install a no-op immediately so callers can always call setFrameQuad(frame).
  layer.setFrameQuad = function () { };
  if (!tileinfo || !tileinfo.sizeX || !tileinfo.sizeY || !options || !options.baseUrl) {
    return;
  }
  let maxTextureSize;
  try {
    // webgl renderers expose a max texture size; canvas renderers throw here
    // and fall through with maxTextureSize undefined.
    maxTextureSize = layer.renderer()._maxTextureSize || layer.renderer().constructor._maxTextureSize;
  } catch (err) { }
  // Caller-supplied options win over the computed maxTextureSize default.
  options = Object.assign({}, {maxTextureSize: Math.min(16384, maxTextureSize)}, options);
  // Shared mutable state, also exposed as layer.setFrameQuad.status below.
  const status = {
    tileinfo: tileinfo,
    options: options,
    images: [],
    src: [],
    quads: [],
    frames: ['placeholder'],
    framesToIdx: {},
    loadedCount: 0
  };
  // quad_info takes only the layout options; strip transport/UI-only keys.
  let qiOptions = Object.assign({}, options);
  ['restRequest', 'restUrl', 'baseUrl', 'crossOrigin', 'progress', 'redrawOnFirstLoad'].forEach((k) => delete qiOptions[k]);
  options.restRequest({
    type: 'GET',
    url: `${options.restUrl}/tile_frames/quad_info`,
    data: qiOptions
  }).then((data) => {
    status.quads = data.quads;
    status.frames = data.frames;
    status.framesToIdx = data.framesToIdx;
    for (let idx = 0; idx < data.src.length; idx += 1) {
      const img = new Image();
      // Point every quad that belongs to this texture at the shared Image.
      for (let qidx = 0; qidx < data.quads.length; qidx += 1) {
        if (data.quadsToIdx[qidx] === idx) {
          status.quads[qidx].image = img;
        }
      }
      // Absolute URL (has "scheme://") implies a cross-origin request.
      if (options.baseUrl.indexOf(':') >= 0 && options.baseUrl.indexOf('/') === options.baseUrl.indexOf(':') + 1) {
        img.crossOrigin = options.crossOrigin || 'anonymous';
      }
      let params = Object.keys(data.src[idx]).map((k) => encodeURIComponent(k) + '=' + encodeURIComponent(data.src[idx][k])).join('&');
      let src = `${options.baseUrl}/tile_frames?` + params;
      status.src.push(src);
      if (idx === data.src.length - 1) {
        // Last texture: completing it marks the whole set loaded, optionally
        // raises the layer's minLevel, and applies any frame requested while
        // loading was still in progress.
        img.onload = function () {
          status.loadedCount += 1;
          status.loaded = true;
          if (layer._options && layer._options.minLevel !== undefined && (options.adjustMinLevel === undefined || options.adjustMinLevel) && status.minLevel && status.minLevel > layer._options.minLevel) {
            layer._options.minLevel = Math.min(layer._options.maxLevel, status.minLevel);
          }
          if (options.progress) {
            try {
              options.progress(status);
            } catch (err) {}
          }
          if (status.frame !== undefined) {
            layer.baseQuad = Object.assign({}, status.quads[status.framesToIdx[status.frame]]);
            if (options.redrawOnFirstLoad || options.redrawOnFirstLoad === undefined) {
              layer.draw();
            }
          }
        };
      } else {
        // Intermediate textures chain-load: finishing image idx kicks off
        // image idx + 1. The IIFE pins idx for the closure. This is safe
        // because the whole loop runs before any onload can fire, so
        // status.images/src are fully populated by then.
        ((idx) => {
          img.onload = function () {
            status.loadedCount += 1;
            status.images[idx + 1].src = status.src[idx + 1];
            if (options.progress) {
              try {
                options.progress(status);
              } catch (err) {}
            }
          };
        })(idx);
      }
      status.images.push(img);
    }
    // Start the chain with the first texture.
    status.images[0].src = status.src[0];
    if (options.progress) {
      try {
        options.progress(status);
      } catch (err) {}
    }
    return status;
  });
  // Real implementation: apply the quad for `frame` once textures are loaded;
  // before that, just remember the frame for the last-image onload to apply.
  layer.setFrameQuad = function (frame) {
    if (status.framesToIdx[frame] !== undefined && status.loaded) {
      layer.baseQuad = Object.assign({}, status.quads[status.framesToIdx[frame]]);
    }
    status.frame = frame;
  };
  // Expose internals for inspection/debugging.
  layer.setFrameQuad.status = status;
}
export default setFrameQuad;
| {
"content_hash": "492cda5c8298c2b3bef6d3e77a67580f",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 214,
"avg_line_length": 50.063291139240505,
"alnum_prop": 0.6032869785082174,
"repo_name": "girder/large_image",
"id": "0e5d318cecf9582c5086958f74db4218a1c9e711",
"size": "7910",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "girder/girder_large_image/web_client/views/imageViewerWidget/setFrameQuad.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "7114"
},
{
"name": "JavaScript",
"bytes": "307859"
},
{
"name": "Pug",
"bytes": "21406"
},
{
"name": "Python",
"bytes": "1371949"
},
{
"name": "Shell",
"bytes": "5500"
},
{
"name": "Stylus",
"bytes": "4261"
}
],
"symlink_target": ""
} |
"""Scan the first 28 BCM GPIO pins and report which are pulled low.

Each pin is configured as an input with the internal pull-up enabled, so an
unconnected pin reads 1 (inactive) and a grounded pin reads 0 (active).
"""
from RPi import GPIO

GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)

NUM_PINS = 28
all_pins = range(NUM_PINS)

# Configure every pin as an input with its internal pull-up resistor enabled.
for p in all_pins:
    GPIO.setup(p, GPIO.IN, GPIO.PUD_UP)

# Snapshot the current level of each pin (1 = high/inactive, 0 = low/active).
levels = {p: GPIO.input(p) for p in all_pins}

print()
for p, level in levels.items():
    print(f"{p:2d}: {level}")

low_pins = [p for p, level in levels.items() if not level]
high_pins = [p for p, level in levels.items() if level]

print()
print(f"Total active: {len(low_pins)}")
print(f"Total inactive: {len(high_pins)}")
print()
print(f"Active pins: {low_pins}")
print(f"Inactive pins: {high_pins}")
| {
"content_hash": "a6268e032c5eab02e585a690c4f05d62",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 64,
"avg_line_length": 22.035714285714285,
"alnum_prop": 0.6709886547811994,
"repo_name": "RPi-Distro/python-rpi-dots",
"id": "f0b8b66f8ad2f0b69aff7ea8d70ea58946ff907f",
"size": "617",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "28760"
}
],
"symlink_target": ""
} |
using GeckoUBL.Ubl21.Udt;
namespace GeckoUBL.Ubl21.Cac
{
    /// <summary>
    /// UBL 2.1 "EventComment" common aggregate component: a textual comment
    /// together with the date and time it was issued. Generated from the OASIS
    /// UBL XSDs by xsd.exe; the serialization attributes define the XML
    /// namespaces and root element and must not be hand-edited.
    /// </summary>
    [System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.33440")]
    [System.SerializableAttribute()]
    [System.Diagnostics.DebuggerStepThroughAttribute()]
    [System.ComponentModel.DesignerCategoryAttribute("code")]
    [System.Xml.Serialization.XmlTypeAttribute(Namespace="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2")]
    [System.Xml.Serialization.XmlRootAttribute("EventComment", Namespace="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2", IsNullable=false)]
    public class EventCommentType {
        /// <summary>The text of the comment.</summary>
        [System.Xml.Serialization.XmlElementAttribute(Namespace="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2")]
        public TextType Comment { get; set; }
        /// <summary>The date on which the comment was made.</summary>
        [System.Xml.Serialization.XmlElementAttribute(Namespace="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2")]
        public DateType IssueDate { get; set; }
        /// <summary>The time at which the comment was made.</summary>
        [System.Xml.Serialization.XmlElementAttribute(Namespace="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2")]
        public TimeType IssueTime { get; set; }
    }
}
"content_hash": "2ac2e6caf12cd4a33060319a870db91f",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 164,
"avg_line_length": 47.96,
"alnum_prop": 0.7873227689741451,
"repo_name": "JohnGrekso/GeckoUBL",
"id": "8b07b1e3d12f5a11f234bb3bef95ab79d8dedb4a",
"size": "1199",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/GeckoUBL/Ubl21/Cac/EventCommentType.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5794"
},
{
"name": "C#",
"bytes": "1000435"
}
],
"symlink_target": ""
} |
#pragma once
#include "Animation/AnimationTrack.h"
#include "Base/BaseTypes.h"
#include "Base/BaseMath.h"
#include "Debug/DVAssert.h"
#include "Entity/Component.h"
#include "Math/AABBox3.h"
#include "Reflection/Reflection.h"
#include "Scene3D/Entity.h"
#include "Scene3D/SceneFile/SerializationContext.h"
#include "Scene3D/SkeletonAnimation/JointTransform.h"
#include "Scene3D/SkeletonAnimation/SkeletonPose.h"
namespace DAVA
{
class AnimationClip;
class Entity;
class SkeletonSystem;
// Entity component holding a skeleton: the static joint hierarchy (names,
// parents, bind transforms) plus the runtime per-joint transforms that
// SkeletonSystem updates each frame.
class SkeletonComponent : public Component
{
    friend class SkeletonSystem;

public:
    // Sentinel for "no joint"; deliberately equal to INFO_PARENT_MASK below.
    const static uint32 INVALID_JOINT_INDEX = 0xffffff; //same as INFO_PARENT_MASK

    // Static description of one joint in the hierarchy.
    struct Joint : public InspBase
    {
        uint32 parentIndex = INVALID_JOINT_INDEX; // parent joint index; INVALID_JOINT_INDEX for a root
        FastName name;
        FastName uid; // unique id used by GetJointIndex() lookups
        AABBox3 bbox;
        Matrix4 bindTransform;
        Matrix4 bindTransformInv;

        bool operator==(const Joint& other) const;

        DAVA_VIRTUAL_REFLECTION(Joint, InspBase);
    };

    SkeletonComponent() = default;
    ~SkeletonComponent() = default;

    // Joint lookup and configuration.
    uint32 GetJointIndex(const FastName& uid) const;
    uint32 GetJointsCount() const;
    const Joint& GetJoint(uint32 jointIndex) const;
    void SetJoints(const Vector<Joint>& config);

    // Runtime transform access (local space unless noted otherwise).
    const JointTransform& GetJointTransform(uint32 jointIndex) const;
    const JointTransform& GetJointObjectSpaceTransform(uint32 jointIndex) const;

    const SkeletonPose& GetDefaultPose() const;
    void ApplyPose(const SkeletonPose& pose);

    // Mutators: each marks the joint dirty so SkeletonSystem recomputes it.
    void SetJointTransform(uint32 jointIndex, const JointTransform& transform);
    void SetJointPosition(uint32 jointIndex, const Vector3& position);
    void SetJointOrientation(uint32 jointIndex, const Quaternion& orientation);
    void SetJointScale(uint32 jointIndex, float32 scale);

    Component* Clone(Entity* toEntity) override;
    void Serialize(KeyedArchive* archive, SerializationContext* serializationContext) override;
    void Deserialize(KeyedArchive* archive, SerializationContext* serializationContext) override;

private:
    void UpdateJointsMap();
    void SetJointUpdated(uint32 jointIndex);
    void UpdateDefaultPose();

    /*config time*/
    Vector<Joint> jointsArray;
    SkeletonPose defaultPose;

    /*runtime*/
    // jointInfo packs the parent index in the low 24 bits and per-frame flags above.
    const static uint32 INFO_PARENT_MASK = 0xffffff;
    const static uint32 INFO_FLAG_BASE = 0x1000000;
    const static uint32 FLAG_UPDATED_THIS_FRAME = INFO_FLAG_BASE << 0;
    const static uint32 FLAG_MARKED_FOR_UPDATED = INFO_FLAG_BASE << 1;
    Vector<uint32> jointInfo; //flags and parent
    //transforms info
    Vector<JointTransform> localSpaceTransforms;
    Vector<JointTransform> objectSpaceTransforms;
    Vector<JointTransform> finalTransforms;
    //bind pose
    Vector<JointTransform> inverseBindTransforms;
    //bounding boxes
    Vector<AABBox3> objectSpaceBoxes;

    // uid -> joint index, rebuilt by UpdateJointsMap().
    UnorderedMap<FastName, uint32> jointMap;

    uint32 startJoint = 0u; //first joint in the list that was updated this frame - cache this value to optimize processing
    bool configUpdated = true;
    bool drawSkeleton = false;

    DAVA_VIRTUAL_REFLECTION(SkeletonComponent, Component);

    friend class SkeletonSystem;
};
// Resolve a joint's unique id to its index; INVALID_JOINT_INDEX when absent.
inline uint32 SkeletonComponent::GetJointIndex(const FastName& uid) const
{
    const auto it = jointMap.find(uid);
    return (it != jointMap.end()) ? it->second : INVALID_JOINT_INDEX;
}
// Number of joints currently configured on this skeleton.
inline uint32 SkeletonComponent::GetJointsCount() const
{
    return static_cast<uint32>(jointsArray.size());
}
// Read-only access to a joint's static description; asserts on a bad index.
inline const SkeletonComponent::Joint& SkeletonComponent::GetJoint(uint32 jointIndex) const
{
    DVASSERT(jointIndex < GetJointsCount());
    return jointsArray[jointIndex];
}
// Current local-space transform of a joint; asserts on a bad index.
inline const JointTransform& SkeletonComponent::GetJointTransform(uint32 jointIndex) const
{
    DVASSERT(jointIndex < GetJointsCount());
    return localSpaceTransforms[jointIndex];
}
// Current object-space transform of a joint; asserts on a bad index.
inline const JointTransform& SkeletonComponent::GetJointObjectSpaceTransform(uint32 jointIndex) const
{
    DVASSERT(jointIndex < objectSpaceTransforms.size());
    return objectSpaceTransforms[jointIndex];
}
// Replace a joint's whole local-space transform and flag it for reprocessing.
inline void SkeletonComponent::SetJointTransform(uint32 jointIndex, const JointTransform& transform)
{
    SetJointUpdated(jointIndex);
    localSpaceTransforms[jointIndex] = transform;
}
// Overwrite only the translation of a joint's local-space transform.
inline void SkeletonComponent::SetJointPosition(uint32 jointIndex, const Vector3& position)
{
    SetJointUpdated(jointIndex);
    localSpaceTransforms[jointIndex].SetPosition(position);
}
// Overwrite only the rotation of a joint's local-space transform.
inline void SkeletonComponent::SetJointOrientation(uint32 jointIndex, const Quaternion& orientation)
{
    SetJointUpdated(jointIndex);
    localSpaceTransforms[jointIndex].SetOrientation(orientation);
}
// Overwrite only the (uniform) scale of a joint's local-space transform.
inline void SkeletonComponent::SetJointScale(uint32 jointIndex, float32 scale)
{
    SetJointUpdated(jointIndex);
    localSpaceTransforms[jointIndex].SetScale(scale);
}
// Mark a joint dirty so SkeletonSystem reprocesses it on the next update;
// startJoint tracks the smallest dirty index so processing can skip everything
// before it.
inline void SkeletonComponent::SetJointUpdated(uint32 jointIndex)
{
    DVASSERT(jointIndex < GetJointsCount());
    jointInfo[jointIndex] |= FLAG_MARKED_FOR_UPDATED;
    startJoint = Min(startJoint, jointIndex);
}
template <>
bool AnyCompare<SkeletonComponent::Joint>::IsEqual(const Any& v1, const Any& v2);
extern template struct AnyCompare<SkeletonComponent::Joint>;
} //ns
| {
"content_hash": "95261ab887cf35d8fcbe7115be145dbb",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 123,
"avg_line_length": 31.58235294117647,
"alnum_prop": 0.7316073756751723,
"repo_name": "dava/dava.engine",
"id": "37664fe16e31bfcf8b3dc9ad89b09f898c6759fe",
"size": "5369",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "Sources/Internal/Scene3D/Components/SkeletonComponent.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Ada",
"bytes": "89080"
},
{
"name": "Assembly",
"bytes": "166572"
},
{
"name": "Batchfile",
"bytes": "18562"
},
{
"name": "C",
"bytes": "61621347"
},
{
"name": "C#",
"bytes": "574524"
},
{
"name": "C++",
"bytes": "50229645"
},
{
"name": "CLIPS",
"bytes": "5291"
},
{
"name": "CMake",
"bytes": "11439187"
},
{
"name": "CSS",
"bytes": "32773"
},
{
"name": "Cuda",
"bytes": "37073"
},
{
"name": "DIGITAL Command Language",
"bytes": "27303"
},
{
"name": "Emacs Lisp",
"bytes": "44259"
},
{
"name": "Fortran",
"bytes": "8835"
},
{
"name": "GLSL",
"bytes": "3726"
},
{
"name": "Go",
"bytes": "1235"
},
{
"name": "HTML",
"bytes": "8621333"
},
{
"name": "Java",
"bytes": "232072"
},
{
"name": "JavaScript",
"bytes": "2560"
},
{
"name": "Lua",
"bytes": "43080"
},
{
"name": "M4",
"bytes": "165145"
},
{
"name": "Makefile",
"bytes": "1349214"
},
{
"name": "Mathematica",
"bytes": "4633"
},
{
"name": "Module Management System",
"bytes": "15224"
},
{
"name": "Objective-C",
"bytes": "1909821"
},
{
"name": "Objective-C++",
"bytes": "498191"
},
{
"name": "Pascal",
"bytes": "99390"
},
{
"name": "Perl",
"bytes": "396608"
},
{
"name": "Python",
"bytes": "782784"
},
{
"name": "QML",
"bytes": "43105"
},
{
"name": "QMake",
"bytes": "156"
},
{
"name": "Roff",
"bytes": "71083"
},
{
"name": "Ruby",
"bytes": "22742"
},
{
"name": "SAS",
"bytes": "16030"
},
{
"name": "Shell",
"bytes": "2482394"
},
{
"name": "Slash",
"bytes": "117430"
},
{
"name": "Smalltalk",
"bytes": "5908"
},
{
"name": "TeX",
"bytes": "428489"
},
{
"name": "Vim script",
"bytes": "133255"
},
{
"name": "Visual Basic",
"bytes": "54056"
},
{
"name": "WebAssembly",
"bytes": "13987"
}
],
"symlink_target": ""
} |
module DiabloApi
  module Data
    # Namespace placeholder for follower data models. Intentionally empty here;
    # it only establishes the DiabloApi::Data::Follower constant.
    module Follower
    end
  end
end
| {
"content_hash": "69b97e644108824bea3082c21670dde1",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 19,
"avg_line_length": 11.5,
"alnum_prop": 0.6956521739130435,
"repo_name": "SwamiRama/diablo_api",
"id": "a10f87a90249aadbb6b39e489f0561080931029c",
"size": "69",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/diablo_api/models/data/follower.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "25891"
},
{
"name": "Shell",
"bytes": "115"
}
],
"symlink_target": ""
} |
<!-- saved from url=(0022)http://internet.e-mail -->
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=windows-1250">
<meta name="GENERATOR" content="Microsoft FrontPage 4.0">
<title>Main page - ECM ECO Monitoring</title>
</head>
<body bgcolor="#FFFFFF">
<p align="center"><b><font color="#000080" size="7"><a name="ECM ECO Monitoring">ECM ECO Monitoring</a></font></b></p>
<p align="center"><strong><font size="3">Instrumenty
pomiarowe i systemy w ochronie rodowiska.</font></strong></p>
<p align="center"><font size="3"><strong>41-506
Chorzów, ul. Maronia 44, tel./fax: +(48)-32-246 2471, e-mail : </strong><a href="mailto:ecmpol@demo.pl"><strong>ecmpol@demo.pl</strong></a></font></p>
<ul type="disc">
<li><a href="#Informacje o firmie"><strong><font size="3">Informacje o
Firmie</font> </strong></a></li>
<li><a href="#Podstawowy zakres dzia³ania ECM obejmuje :"><strong><font size="3">Zakres
dzia³ania</font></strong></a> </li>
<li><a href="#Informacje kontaktowe :"><strong><font size="3">Informacje
kontaktowe</font></strong></a> </li>
<li><a href="#Wiêcej informacii"><strong><font size="3">Wiêcej informacji</font></strong></a></li>
<li><a href="#Lista ³¹czy do partnerów ECM"><strong><font size="3">Lista
³¹czy do partnerów ECM</font></strong></a> </li>
<li><a href="#BIULETYNY"><strong><font size="3">Biuletyny</font></strong></a></li>
</ul>
<table border="0" cellpadding="2" width="100%">
<tr>
<td bgcolor="#004080">
<p><strong><i><font color="#FFFFFF" size="4"><a name="Informacje o firmie">Informacje
o firmie</a></font></i></strong></p>
</td>
</tr>
</table>
<p><font size="3"><b>ECO Monitoring (ECM) </b>jest
producentem sprzêtu do monitorowania rodowiska, integratorem
systemów zbierania i analizy danych pomiarowych oraz firm¹
konsultingowo-eksperck¹ w projektach dotycz¹cych ochrony rodowiska.</font></p>
<p><font size="3">Holding ECM ECO Monitoring jest
czêci¹ amerykañskiego koncernu <a href="http://www.tsi.com">TSI</a> - <a href="http://www.envirosys.com">Environmental
Systems Corporation</a>.</font></p>
<p><a href="produkty/produkty.html"><font face="Times New Roman CE" size="3">(Wiêcej
o ECM ECO Monitoring)</font></a></p>
<h5><a href="#ECM ECO Monitoring"><font size="3">Powrót do pocz¹tku</font></a></h5>
<table border="0" cellpadding="2" width="100%">
<tr>
<td bgcolor="#004080">
<p><strong><i><font color="#FFFFFF" size="4"><a name="Podstawowy zakres dzia³ania ECM obejmuje :">Podstawowy zakres dzia³ania
ECM obejmuje :</a></font></i> </strong></p>
</td>
</tr>
</table>
<p><b><i><font size="3">Monitorowanie
gazów procesowych.</font></i></b></p>
<p><font size="3">ECM oferuje szerok¹ gam¹
analizatorów do kontroli procesów przemys³owych. Do najwa¿niejszych
nale¿¹ analizatory O2 i wszelkich gazów wystêpuj¹cych w
procesie spalania dla przemys³ów takich jak metalurgia, energetyka, przemys³ cementowy,
spo¿ywczy, petrochemia, chemia</font></p>
<p><em><strong>Monitorowanie spalin - emisje.</strong></em></p>
<p><font size="3">ECM jest integratorem z³o¿onych
systemów monitoringu przy u¿yciu wszelkich znanych technik.
Obok stacjonarnych zestawów dla monitorowania przemys³owych róde³
emisji dostêpne s¹ tak¿e ruchome stanowiska pomiarowe
zainstalowane na kompletnie wyposa¿onych samochodach.</font></p>
<p><font size="3">ECM produkuje najwy¿szej
jakoci kontenery z izolacj¹ ciepln¹ - niezbêdne dla
zainstalowania pracuj¹cych w trudnych warunkach zestawów pomiarowych.</font></p>
<p><em><strong><font size="3">Monitorowanie
zanieczyszczen powietrza - imisje.</font></strong></em></p>
<p><font size="3">ECM integruje systemy
zbierania danych z³o¿one z czujników pomiarowych, analizatorów
i komputerowych centrów przetwarzania i obróbki informacji
wyposa¿one w specjalistyczne oprogramowanie.</font></p>
<p><em><strong><font size="3">Monitorowanie
jakoci wody</font></strong></em></p>
<p><font size="3">ECM oferuje kompletne
zestawy analizatorów dla wody pitnej, przemys³owej, cieków,
a tak¿e dla specjalnych zastosowañ. Dostarczane s¹ tak¿e
niezbêdne pompy, filtry oraz systemy obróbki danych pomiarowych.</font></p>
<p><em><strong><font size="3">Meteorologia</font></strong></em></p>
<p><font size="3">ECM dostarcza
profesjonalne stacje meteorologiczne do pomiarów wszelkich
parametrów meteorologicznych.</font></p>
<p><em><strong><font size="3">Odpady</font></strong></em></p>
<p><font size="3">ECM dysponuje dowiadczeniem
w zakresie projektowania i wykonawstwa obiektów utylizacji odpadów.</font></p>
<h5><a href="#ECM ECO Monitoring"><font size="3">Powrót do pocz¹tku</font></a></h5>
<hr>
<table border="0" cellpadding="2" width="100%">
<tr>
<td bgcolor="#004080">
<p><strong><i><font color="#FFFFFF" size="4"><a name="Informacje kontaktowe :">Informacje
kontaktowe :</a></font></i></strong></p>
</td>
</tr>
</table>
<p><font size="3">Adres : ECO Monitoring, Maronia 44, 41-506 Chorzów</font></p>
<p><font size="3">tel./fax : (32) 246 2471 wewn. 362</font></p>
<p><font size="3"><strong>Adres poczty elektronicznej :</strong> <a href="mailto:ecmpol@demo.pl">ecmpol@demo.pl</a><br>
<br>
<a href="#ECM ECO Monitoring"><strong>Powrót do
pocz¹tku</strong></a></font></p>
<hr>
<table border="0" cellpadding="2" width="100%">
<tr>
<td bgcolor="#004080">
<strong><i><font color="#FFFFFF" size="4"><a name="Wiêcej informacii">Wi</a></font></i></strong><b><i><a name="Wiêcej informacii"><font color="#FFFFFF" size="4">êcej
informacji</font></a></i></b>
</td>
</tr>
</table>
<p><font size="3">Chc¹c otrzymaæ wiêcej
informacji na interesuj¹cy Ciê temat przylij na nasz adres
poczt¹, faxem lub poczt¹ elektroniczn¹ wype³niony <a href="http://www.???.sk">Kwestionariusz.</a></font></p>
<p><b><a href="#ECM ECO Monitoring"><font size="3">Powrót do pocz¹tku </font></a></b></p>
<hr>
<table border="0" cellpadding="2" width="100%">
<tr>
<td bgcolor="#004080">
<p><b><i><font size="4" color="#FFFFFF"><a name="BIULETYNY">BIULETYNY</a></font></i></b></p>
</td>
</tr>
</table>
<p> </p>
<div align="center">
<center>
<table border="0" cellpadding="2" width="587" height="30">
<tr>
<td width="587" height="30">
<p><font size="3">Biuletyn 1 / 2000 </font></p>
</td>
</tr>
</table>
</center>
</div>
<p><b><a href="#ECM ECO Monitoring"><font size="3">Powrót do pocz¹tku </font></a></b></p>
<hr>
<table border="0" cellpadding="2" width="100%">
<tr>
<td bgcolor="#004080">
<b><i><font size="4" color="#FFFFFF"><a name="Lista ³¹czy do partnerów ECM">Lista
</a></font></i></b><i><b><a name="Lista ³¹czy do partnerów ECM"><font color="#FFFFFF" size="4"> ³¹czy
do partnerów ECM</font></a></b></i>
</td>
</tr>
</table>
<p><font size="3"><strong>Apel</strong> <a href="http://www.apel.cz">www.apel.cz</a> <br>
<strong>Balston-Whatman</strong> <a href="http://www.balston.co.uk">www.balston.co.uk</a> <br>
<strong>BAS Elektra </strong><a href="http://www.bas-elektra.cz">www.bas-elektra.cz</a> <br>
<strong>DataQua </strong><a href="http://www.dataqua.hu">www.dataqua.hu</a> <br>
<strong>EG&G Chandler</strong> <a href="http://www.trigas.net">www.trigas.net</a> <br>
<strong>EPM </strong><a href="http://www.epmenvironmental.com">www.epmenvironmental.com</a>
<br>
<strong>ESC</strong> <a href="http://www.envirosys.com">www.envirosys.com</a> <br>
<strong>Gastech</strong> <a href="http://www.gastech-inc.com">www.gastech-inc.com</a> <br>
<strong>Hydrolab</strong> <a href="http://www.hydrolab.com">www.hydrolab.com</a> <br>
<strong>J.U.M.</strong> <a href="http://www.jum.com">www.jum.com</a> <br>
<strong>Kalman System</strong> <a href="http://www.ns.kfkipark.hu/kalman">www.ns.kfkipark.hu/kalman</a>
<br>
<strong>Monitor Europe</strong> <a href="http://www.monitoreurope.com">www.monitoreurope.com</a> <br>
<strong>PCME</strong> <a href="http://www.pcme.co.uk">www.pcme.co.uk</a> <br>
<strong>Shaw Moisture</strong> <a href="http://www.shawmeters.com">www.shawmeters.com</a> <br>
<strong>TSI </strong><a href="http://www.tsi.com">www.tsi.com</a> <br>
<strong>Turner Designs</strong> <a href="http://www.turnerdesigns.com">www.turnerdesigns.com</a> <br>
<strong>Verewa</strong> <a href="http://www.durag.de">www.durag.de</a></font></p>
<h5><a href="#ECM ECO Monitoring"><font size="3">Powrót do pocz¹tku</font></a></h5>
<hr>
<h5><font size="3" color="#004080"><i>ECM ECO Monitoring<br>
Ostatnia modyfikacja: <!--webbot bot="TimeStamp" s-type="EDITED" s-format="%B %d, %Y" startspan -->máj 02, 2000<!--webbot bot="TimeStamp" endspan i-checksum="14347" -->.</i></font></h5>
</body>
</html>
| {
"content_hash": "885b01140f6aab349e54519b59db0be5",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 185,
"avg_line_length": 37.8296943231441,
"alnum_prop": 0.6749393974373773,
"repo_name": "RadoBuransky/ancient-code-of-mine",
"id": "1e77c94dd354306c30c3e048e9d123354a808efc",
"size": "8663",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "www/Ecm/POLSKO/index.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "7426"
},
{
"name": "C",
"bytes": "7757"
},
{
"name": "CSS",
"bytes": "14509"
},
{
"name": "Dylan",
"bytes": "86"
},
{
"name": "HTML",
"bytes": "3924122"
},
{
"name": "JavaScript",
"bytes": "42139"
},
{
"name": "PHP",
"bytes": "244567"
},
{
"name": "PLSQL",
"bytes": "1170620"
},
{
"name": "Pascal",
"bytes": "6863809"
},
{
"name": "TeX",
"bytes": "231"
}
],
"symlink_target": ""
} |
'use strict';
var
_ = require('underscore'),
// test dependencies
mocha = require('mocha'),
expect = require('chai').expect,
sinon = require('sinon'),
requireHelper = require('test/_util/require_helper'),
// other
ExpandsURLMap = require('src/modules/api/util/ExpandsURLMap'),
// file to test
Response = requireHelper('modules/api/util/Response');
describe('modules/api/util/Response', function() {

  var model, spy;

  before(function(done) {
    // mock the model: `deepPopulate` just echoes the data back through the
    // callback, and the sinon spy lets each test assert whether it ran
    var constr = {
      deepPopulate : function(data, paths, populationOpts, callback) {
        callback(null, data);
      }
    };
    spy = sinon.spy(constr, 'deepPopulate');
    model = { constructor: constr };
    done();
  });

  beforeEach(function(done) {
    // clear recorded calls so every test starts from a clean spy
    spy.reset();
    done();
  });

  it('should allow setting the pagination params', function(done) {
    var response = new Response(null, new ExpandsURLMap());
    var
      pageCount = 4,
      itemCount = 20;

    var resp = response.setPaginationParams(pageCount, itemCount);

    // the method is chainable (it returns the instance)
    expect(response).to.deep.equal(resp);

    expect(response).to.have.property('paginationParams');
    expect(response.paginationParams).to.not.be.null;
    expect(response.paginationParams.pageCount).to.equal(pageCount);
    expect(response.paginationParams.itemCount).to.equal(itemCount);
    done();
  });

  it('should format the response', function(done) {
    // mock the request obj
    var request = {
      getExpands: function() { return ['a', 'b']; }
    };
    var response = new Response(request, new ExpandsURLMap());

    // formatted output is a meta/data envelope
    response.formatOutput(model, function(error, formattedOutput) {
      expect(error).to.be.null;
      expect(formattedOutput).to.have.property('meta');
      expect(formattedOutput).to.have.property('data');
      done();
    });
  });

  describe('_expandData', function() {

    it('should accept a single model', function(done) {
      var response = new Response();
      response._expandData(model,{foo:{}}, function(err, data) {
        expect(err).to.be.null;
        expect(spy.called).to.be.true;
        done();
      });
    });

    it('should accept a an empty array', function(done) {
      var response = new Response();
      response._expandData([],{foo:{}}, function(err, data) {
        expect(err).to.be.null;
        // nothing to expand, so deepPopulate must not have been invoked
        expect(spy.called).to.be.false;
        done();
      });
    });

    it('should accept a an array of models', function(done) {
      var response = new Response();
      response._expandData([model, model],{foo:{}}, function(err, data) {
        expect(err).to.be.null;
        expect(spy.called).to.be.true;
        done();
      });
    });

    it('should not call the `deepPopulate` method on the model if there are no expands', function(done) {
      var response = new Response();
      response._expandData([],{}, function(err, data) {
        expect(err).to.be.null;
        expect(spy.called).to.be.false;
        done();
      });
    });

  });

});
| {
"content_hash": "4188b8fdc177892907cee061844b58e6",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 105,
"avg_line_length": 24.991935483870968,
"alnum_prop": 0.6018070345272669,
"repo_name": "mosaiqo/frontend-devServer",
"id": "37f9acb7cf3606c6bde9b4d9a38adfed7b2a313f",
"size": "3099",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/unit/modules/api/util/Response.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CoffeeScript",
"bytes": "7722"
},
{
"name": "JavaScript",
"bytes": "264096"
}
],
"symlink_target": ""
} |
package com.facebook.litho.specmodels.processor;
import static com.facebook.litho.specmodels.internal.ImmutableList.copyOf;
import static com.facebook.litho.specmodels.processor.DelegateMethodExtractor.getPermittedMethodParamAnnotations;
import static com.facebook.litho.specmodels.processor.PsiMethodExtractorUtils.getMethodParams;
import com.facebook.litho.annotations.OnUpdateState;
import com.facebook.litho.annotations.OnUpdateStateWithTransition;
import com.facebook.litho.specmodels.internal.ImmutableList;
import com.facebook.litho.specmodels.model.MethodParamModel;
import com.facebook.litho.specmodels.model.SpecMethodModel;
import com.facebook.litho.specmodels.model.UpdateStateMethod;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiMethod;
import com.squareup.javapoet.TypeVariableName;
import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.List;
public class PsiUpdateStateMethodExtractor {

  /**
   * Collects every method of the given spec class annotated with
   * {@code @OnUpdateState} (or {@code @OnUpdateStateWithTransition} when
   * {@code isTransition} is true) and converts each into a
   * {@link SpecMethodModel} describing its annotations, modifiers, name,
   * return type and parameters.
   */
  public static ImmutableList<SpecMethodModel<UpdateStateMethod, Void>> getOnUpdateStateMethods(
      PsiClass psiClass,
      List<Class<? extends Annotation>> permittedInterStageInputAnnotations,
      List<Class<? extends Annotation>> permittedPrepareInterStageInputAnnotations,
      boolean isTransition) {
    // Which annotation flavour marks the methods we are interested in.
    final Class<? extends Annotation> targetAnnotationType =
        isTransition ? OnUpdateStateWithTransition.class : OnUpdateState.class;

    final List<SpecMethodModel<UpdateStateMethod, Void>> extracted = new ArrayList<>();
    for (PsiMethod method : psiClass.getMethods()) {
      final Annotation updateStateAnnotation =
          PsiAnnotationProxyUtils.findAnnotationInHierarchy(method, targetAnnotationType);
      if (updateStateAnnotation == null) {
        continue; // not an update-state method, skip it
      }

      final List<MethodParamModel> params =
          getMethodParams(
              method,
              getPermittedMethodParamAnnotations(
                  permittedInterStageInputAnnotations,
                  permittedPrepareInterStageInputAnnotations),
              permittedInterStageInputAnnotations,
              permittedPrepareInterStageInputAnnotations,
              ImmutableList.<Class<? extends Annotation>>of());

      extracted.add(
          SpecMethodModel.<UpdateStateMethod, Void>builder()
              .annotations(ImmutableList.<Annotation>of(updateStateAnnotation))
              .modifiers(PsiModifierExtractor.extractModifiers(method.getModifierList()))
              .name(method.getName())
              .returnTypeSpec(PsiTypeUtils.generateTypeSpec(method.getReturnType()))
              .typeVariables(ImmutableList.<TypeVariableName>of())
              .methodParams(copyOf(params))
              .representedObject(method)
              .build());
    }
    return copyOf(extracted);
  }
}
| {
"content_hash": "83053fa1cb75ea693760eac40f2a4baa",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 113,
"avg_line_length": 45.707692307692305,
"alnum_prop": 0.7367889599461461,
"repo_name": "facebook/litho",
"id": "aba6d98ced7680691f624c51c430e911a75cd6b8",
"size": "3589",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "litho-intellij-plugin/src/main/java/com/facebook/litho/specmodels/processor/PsiUpdateStateMethodExtractor.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "3065"
},
{
"name": "C++",
"bytes": "382935"
},
{
"name": "CMake",
"bytes": "1490"
},
{
"name": "Haskell",
"bytes": "7720"
},
{
"name": "Java",
"bytes": "9413537"
},
{
"name": "JavaScript",
"bytes": "28046"
},
{
"name": "Kotlin",
"bytes": "1326831"
},
{
"name": "SCSS",
"bytes": "5360"
},
{
"name": "Shell",
"bytes": "12115"
},
{
"name": "Starlark",
"bytes": "241912"
}
],
"symlink_target": ""
} |
<!--
Copyright 2017 Next Century Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!doctype html>
<html>
<head>
<meta name="viewport" content="width=device-width, minimum-scale=1.0, initial-scale=1.0, user-scalable=yes">
<title>send-search</title>
<script src="../bower_components/webcomponentsjs/webcomponents-lite.js"></script>
<script src="../bower_components/web-component-tester/browser.js"></script>
<link rel="import" href="../elements/send-search/send-search.html">
</head>
<body>
<test-fixture id="send-search-fixture">
<template>
<send-search
search='{"name": "query #1", "createdBy": "testUser"}'
current-user="testUser"
user-index="dig-users"
user-type="user">
</send-search>
</template>
</test-fixture>
<script>
/* globals suite, test, assert, setup, fixture */
/* jshint -W030 */
suite('send-search tests', function() {
  var element;

  // fresh element instance from the fixture for every test
  setup(function() {
    element = fixture('send-search-fixture');
  });

  test('properties are correct values', function() {
    assert.equal(element.currentUser, 'testUser');
    assert.equal(element.userIndex, 'dig-users');
    assert.equal(element.userType, 'user');
    assert.deepEqual(element.search, {name: 'query #1', createdBy: 'testUser'});
    assert.isUndefined(element.sendError);
    assert.isFalse(element.disableSend);
    assert.isUndefined(element.sendInProgress);
    assert.isUndefined(element._sendingThisQuery);
    assert.isUndefined(element._queryToSend);
    assert.isUndefined(element._sentUserSearchResult);
    assert.isUndefined(element._noUserRecordFound);
    assert.isUndefined(element._queryAlreadySent);
    assert.isUndefined(element._checkForUser);
    assert.isUndefined(element._sendUserUpdateBody);
    assert.isUndefined(element._userIdToSendQuery);
    assert.isUndefined(element._querySentSuccessfully);
    assert.equal(element._userToSendQuery, '');
    assert.equal(element._messageToSend, '');
  });

  test('_isSendDisabled returns correct value', function() {
    // sending is disabled when either flag is true
    assert.isTrue(element._isSendDisabled(true, true));
    assert.isTrue(element._isSendDisabled(true, false));
    assert.isTrue(element._isSendDisabled(false, true));
    assert.isFalse(element._isSendDisabled(false, false));
  });
test('_checkIfQuerySent updates appropriate values when needed', function() {
element._checkIfQuerySent({_version: 2});
setTimeout(function() {
assert.isTrue(element._querySentSuccessfully);
assert.isFalse(element._sendingThisQuery);
assert.isFalse(element.sendInProgress);
}, 1000);
});
  test('_createSendSearchListener returns an object with a function', function() {
    var listener = element._createSendSearchListener();
    assert.isObject(listener);
    assert.isFunction(listener.onClick);
  });

  test('_createSendSearchListener.onClick sends the saved search', function() {
    // pre-populate state from a previous send so we can verify it is reset
    element._noUserRecordFound = true;
    element._queryAlreadySent = false;
    element._querySentSuccessfully = false;
    element._userIdToSendQuery = 1;
    /* jscs:disable requireCamelCaseOrUpperCaseIdentifiers */
    element._set_sendUserUpdateBody({key: 'test'});
    /* jscs:enable requireCamelCaseOrUpperCaseIdentifiers */
    element._userToSendQuery = 'someUser';
    element._createSendSearchListener().onClick();
    // all transient send state must be cleared before the new send starts
    assert.isUndefined(element._querySentSuccessfully);
    assert.isUndefined(element._noUserRecordFound);
    assert.isUndefined(element._queryAlreadySent);
    assert.isUndefined(element._userIdToSendQuery);
    assert.isUndefined(element._sendUserUpdateBody);
    assert.deepEqual(element._queryToSend, element.search);
    assert.equal(element._userToSendQuery, element._checkForUser);
  });

  test('_createSendSearchListener.onClick does nothing if _userToSendQuery matches username', function() {
    element.search = {
      name: 'myText',
      _userToSendQuery: 'testUser'
    };
    element._createSendSearchListener().onClick();
    assert.isUndefined(element._queryToSend);
    assert.isUndefined(element._checkForUser);
  });

  test('_createSendSearchListener.onClick does nothing if no _userToSendQuery specified', function() {
    element.search = {
      name: 'myText'
    };
    element._createSendSearchListener().onClick();
    assert.isUndefined(element._queryToSend);
    assert.isUndefined(element._checkForUser);
  });

  test('_createSendSearchListener.onClick deletes notification related fields before query is sent', function() {
    element.search = {
      name: 'myText',
      lastRunDate: new Date(),
      notificationDate: new Date(),
      notificationHasRun: false,
      sendEmailNotification: true
    };
    element._userToSendQuery = 'someUser';
    element._createSendSearchListener(0).onClick();
    // only the bare query survives; notification bookkeeping is stripped
    assert.deepEqual(element._queryToSend, {name: 'myText'});
  });

  test('_createSendSearchListener.onClick populates _queryToSend.message if _messageToSend is defined', function() {
    element._messageToSend = 'some note';
    element.search = {
      name: 'myText'
    };
    element._userToSendQuery = 'someUser';
    element._createSendSearchListener(0).onClick();
    assert.deepEqual(element._queryToSend, {name: 'myText', message: 'some note'});
  });

  test('_transformSentUserSearchResult if no results found', function() {
    // empty elasticsearch hits -> the target user does not exist
    element._sentUserSearchResult = {
      hits: {
        hits: []
      }
    };
    element._transformSentUserSearchResult();
    assert.isTrue(element._noUserRecordFound);
  });

  test('_transformSentUserSearchResult if user result found and query to send exists', function() {
    element._sentUserSearchResult = {
      hits: {
        hits: [{
          _id: 2,
          _source: {
            username: 'mockUser',
            blurImages: true,
            emailAddress: 'test@test.com',
            receivedQueries: []
          }
        }]
      }
    };
    element._queryToSend = {name: 'query #1', createdBy: 'testUser'};
    element._transformSentUserSearchResult();
    assert.isFalse(element._noUserRecordFound);
    assert.isUndefined(element._queryAlreadySent);
    assert.equal(element._userIdToSendQuery, 2);
    assert.isTrue(element._sendingThisQuery);
    assert.isTrue(element.sendInProgress);
    // the query is appended to the recipient's receivedQueries
    assert.deepEqual(element._sendUserUpdateBody, {doc: {
      receivedQueries: [{name: 'query #1', createdBy: 'testUser'}]
    }});
  });

  test('_transformSentUserSearchResult if query was already sent', function() {
    element._sentUserSearchResult = {
      hits: {
        hits: [{
          _id: 2,
          _source: {
            username: 'mockUser',
            blurImages: true,
            emailAddress: 'test@test.com',
            receivedQueries: [{name: 'query #1', createdBy: 'testUser'}]
          }
        }]
      }
    };
    element._queryToSend = {name: 'query #1', createdBy: 'testUser'};
    element._transformSentUserSearchResult();
    assert.isFalse(element._noUserRecordFound);
    // duplicate detected: no update body is built and no send starts
    assert.isTrue(element._queryAlreadySent);
    assert.equal(element._userIdToSendQuery, 2);
    assert.isUndefined(element._sendUserUpdateBody);
    assert.isUndefined(element._sendingThisQuery);
    assert.isUndefined(element.sendInProgress);
  });
});
</script>
</body>
</html>
| {
"content_hash": "3979407421510b65689d02da76fa7b89",
"timestamp": "",
"source": "github",
"line_count": 221,
"max_line_length": 118,
"avg_line_length": 36.23981900452489,
"alnum_prop": 0.6635035584966912,
"repo_name": "NextCenturyCorporation/digapp-ht",
"id": "0fa234738023991ec1e225d8b77b6aee8a0c2872",
"size": "8009",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/test/send-search-basic.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "882"
},
{
"name": "HTML",
"bytes": "1302897"
},
{
"name": "JavaScript",
"bytes": "132158"
},
{
"name": "Perl",
"bytes": "17341"
},
{
"name": "Shell",
"bytes": "5416"
}
],
"symlink_target": ""
} |
package api
import (
"net/http"
"github.com/keydotcat/keycatd/managers"
"github.com/keydotcat/keycatd/models"
"github.com/keydotcat/keycatd/util"
)
// /team/:tid/secret
// teamSecretRoot dispatches /team/:tid/secret requests: only a GET on the
// collection itself is supported; everything else is a 404-style error.
func (ah apiHandler) teamSecretRoot(w http.ResponseWriter, r *http.Request, t *models.Team) error {
	head, remainder := shiftPath(r.URL.Path)
	r.URL.Path = remainder
	if head == "" && r.Method == "GET" {
		return ah.teamSecretGetAll(w, r, t)
	}
	return util.NewErrorFrom(ErrNotFound)
}
// teamSecretListWrap is the JSON envelope used by every handler in this
// file that returns a collection of secrets.
type teamSecretListWrap struct {
	Secrets []*models.Secret `json:"secrets"`
}
// GET /team/:tid/secret
// teamSecretGetAll returns, as JSON, every secret the authenticated user
// can see across the whole team.
func (ah apiHandler) teamSecretGetAll(w http.ResponseWriter, r *http.Request, t *models.Team) error {
	ctx := r.Context()
	currentUser := ctxGetUser(ctx)
	secrets, err := t.GetSecretsForUser(ctx, currentUser)
	if err != nil {
		return err
	}
	return jsonResponse(w, teamSecretListWrap{Secrets: secrets})
}
// /team/:tid/vault/:vid/secret
// validVaultSecretRoot routes /team/:tid/vault/:vid/secret requests.
// An empty path tail addresses the collection (list/create); a non-empty
// tail addresses a single secret by id (delete/update).
func (ah apiHandler) validVaultSecretRoot(w http.ResponseWriter, r *http.Request, t *models.Team, v *models.Vault) error {
	head, remainder := shiftPath(r.URL.Path)
	r.URL.Path = remainder

	if head == "" {
		// Collection-level operations.
		if r.Method == "GET" {
			return ah.vaultGetSecrets(w, r, t, v)
		}
		if r.Method == "POST" {
			return ah.vaultCreateSecret(w, r, t, v)
		}
		return util.NewErrorFrom(ErrNotFound)
	}

	// Item-level operations on the secret identified by `head`.
	switch r.Method {
	case "DELETE":
		return ah.vaultDeleteSecret(w, r, t, v, head)
	case "PATCH", "PUT":
		return ah.vaultUpdateSecret(w, r, t, v, head)
	}
	return util.NewErrorFrom(ErrNotFound)
}
// vaultGetSecrets lists every secret stored in the vault as JSON.
func (ah apiHandler) vaultGetSecrets(w http.ResponseWriter, r *http.Request, t *models.Team, v *models.Vault) error {
	secretList, err := v.GetSecrets(r.Context())
	if err != nil {
		return err
	}
	return jsonResponse(w, teamSecretListWrap{Secrets: secretList})
}
// vaultCreateSecretRequest is the JSON body accepted when creating or
// updating a secret. Team and Vault are only consulted on update, where a
// differing pair requests a move to another vault (see vaultUpdateSecret).
type vaultCreateSecretRequest struct {
	Team string `json:"team"`
	Vault string `json:"vault"`
	Data []byte `json:"data"`
}
// vaultCreateSecret stores a new secret in the vault, broadcasts its
// creation to connected clients and echoes it back as JSON.
func (ah apiHandler) vaultCreateSecret(w http.ResponseWriter, r *http.Request, t *models.Team, v *models.Vault) error {
	req := &vaultCreateSecretRequest{}
	if err := jsonDecode(w, r, 16*1024, req); err != nil {
		return err
	}
	secret := &models.Secret{Data: req.Data}
	if err := v.AddSecret(r.Context(), secret); err != nil {
		return err
	}
	ah.bcast.Send(v.Team, v.Id, managers.BCAST_ACTION_SECRET_NEW, secret)
	return jsonResponse(w, secret)
}
// vaultDeleteSecret removes the secret with the given id, broadcasts the
// removal and returns the vault as JSON.
func (ah apiHandler) vaultDeleteSecret(w http.ResponseWriter, r *http.Request, t *models.Team, v *models.Vault, sid string) error {
	if err := v.DeleteSecret(r.Context(), sid); err != nil {
		return err
	}
	ah.bcast.Send(v.Team, v.Id, managers.BCAST_ACTION_SECRET_REMOVE, &models.Secret{Id: sid})
	return jsonResponse(w, v)
}
// vaultUpdateSecret handles PATCH/PUT on a single secret. Two cases:
//   - the request targets the same team/vault (or names no vault): the
//     secret's data is updated in place (only if new data was sent);
//   - the request names a different team/vault: the secret is moved there,
//     with a REMOVE broadcast on the old vault and a NEW one on the target.
func (ah apiHandler) vaultUpdateSecret(w http.ResponseWriter, r *http.Request, t *models.Team, v *models.Vault, sid string) error {
	ctx := r.Context()
	vscr := &vaultCreateSecretRequest{}
	if err := jsonDecode(w, r, 16*1024, vscr); err != nil {
		return err
	}
	s := &models.Secret{Id: sid, Data: vscr.Data}
	if len(vscr.Vault) == 0 || (t.Id == vscr.Team && v.Id == vscr.Vault) {
		//Modify secret
		// Empty Data means "no content change": skip the write and the
		// broadcast but still answer with the (unchanged) secret.
		if len(vscr.Data) > 0 {
			if err := v.UpdateSecret(ctx, s); err != nil {
				return err
			}
			ah.bcast.Send(v.Team, v.Id, managers.BCAST_ACTION_SECRET_CHANGE, s)
		}
		return jsonResponse(w, s)
	} else {
		//Move it to a different team/vault
		u := ctxGetUser(r.Context())
		// Default to the current team unless the request names another one.
		var targetTeam = t
		if len(vscr.Team) != 0 {
			var err error
			targetTeam, err = u.GetTeam(r.Context(), vscr.Team)
			if err != nil {
				return err
			}
		}
		// The user must have access to the destination vault.
		targetVault, err := targetTeam.GetVaultForUser(r.Context(), vscr.Vault, u)
		if err != nil {
			return err
		}
		if err := models.MoveSecretToVault(ctx, s, v, targetVault); err != nil {
			return err
		}
		// Notify both sides of the move.
		ah.bcast.Send(v.Team, v.Id, managers.BCAST_ACTION_SECRET_REMOVE, &models.Secret{Id: sid})
		ah.bcast.Send(targetTeam.Id, targetVault.Id, managers.BCAST_ACTION_SECRET_NEW, s)
		return jsonResponse(w, s)
	}
}
// /team/:tid/vault/:vid/secrets
// validVaultSecretsRoot routes /team/:tid/vault/:vid/secrets requests:
// only a POST on the collection (bulk create) is supported.
func (ah apiHandler) validVaultSecretsRoot(w http.ResponseWriter, r *http.Request, t *models.Team, v *models.Vault) error {
	head, remainder := shiftPath(r.URL.Path)
	r.URL.Path = remainder
	if head == "" && r.Method == "POST" {
		return ah.vaultCreateSecretList(w, r, t, v)
	}
	return util.NewErrorFrom(ErrNotFound)
}
// vaultCreateSecretList bulk-creates secrets in the vault from a JSON list,
// broadcasts one NEW event per created secret and returns the stored list.
func (ah apiHandler) vaultCreateSecretList(w http.ResponseWriter, r *http.Request, t *models.Team, v *models.Vault) error {
	var payload teamSecretListWrap
	if err := jsonDecode(w, r, 1024*1024, &payload); err != nil {
		return err
	}
	// Only the Data field of each incoming secret is honoured.
	created := make([]*models.Secret, 0, len(payload.Secrets))
	for _, incoming := range payload.Secrets {
		created = append(created, &models.Secret{Data: incoming.Data})
	}
	if err := v.AddSecretList(r.Context(), created); err != nil {
		return err
	}
	for _, secret := range created {
		ah.bcast.Send(v.Team, v.Id, managers.BCAST_ACTION_SECRET_NEW, secret)
	}
	return jsonResponse(w, teamSecretListWrap{Secrets: created})
}
| {
"content_hash": "c7f57f8fe44e2324aa956b51a4e61315",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 131,
"avg_line_length": 28.323529411764707,
"alnum_prop": 0.6747663551401869,
"repo_name": "keydotcat/backend",
"id": "0e2b029e62c924db005a2058ba57de789fa8d8c7",
"size": "4815",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/secret.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "145162"
},
{
"name": "Makefile",
"bytes": "1897"
}
],
"symlink_target": ""
} |
#ifndef Magnum_Shaders_Implementation_CreateCompatibilityShader_h
#define Magnum_Shaders_Implementation_CreateCompatibilityShader_h
#include <Corrade/Utility/Resource.h>
#include "Magnum/Context.h"
#include "Magnum/Extensions.h"
#include "Magnum/Shader.h"
/* Enable only when compiling Shaders library and thus work around
"static symbol not used" warning when using this file for TextureTools */
#if defined(MAGNUM_BUILD_STATIC) && defined(MAGNUM_SHADERS_EXPORT)
static void importShaderResources() {
    /* Pulls the MagnumShaders_RCS resource group into the final binary;
       without this call a static-library build could drop the embedded
       shader sources (see the comment above) */
    CORRADE_RESOURCE_INITIALIZE(MagnumShaders_RCS)
}
#endif
namespace Magnum { namespace Shaders { namespace Implementation {
/* Creates a Shader of the given version/type, prepending #define lines that
   disable GLSL extensions unavailable (or explicitly disabled) in the
   current GL context, and appending the shared compatibility.glsl preamble
   loaded from the passed resource group. */
inline Shader createCompatibilityShader(const Utility::Resource& rs, Version version, Shader::Type type) {
    Shader shader(version, type);

    #ifndef MAGNUM_TARGET_GLES
    /* Each disabled desktop-GL extension gets a matching DISABLE_* define
       that compatibility.glsl reacts to */
    if(Context::current().isExtensionDisabled<Extensions::GL::ARB::explicit_attrib_location>(version))
        shader.addSource("#define DISABLE_GL_ARB_explicit_attrib_location\n");
    if(Context::current().isExtensionDisabled<Extensions::GL::ARB::shading_language_420pack>(version))
        shader.addSource("#define DISABLE_GL_ARB_shading_language_420pack\n");
    if(Context::current().isExtensionDisabled<Extensions::GL::ARB::explicit_uniform_location>(version))
        shader.addSource("#define DISABLE_GL_ARB_explicit_uniform_location\n");
    #endif

    /* My Android emulator (running on NVidia) doesn't define GL_ES
       preprocessor macro, thus *all* the stock shaders fail to compile */
    /** @todo remove this when Android emulator is sane */
    #ifdef CORRADE_TARGET_ANDROID
    shader.addSource("#ifndef GL_ES\n#define GL_ES 1\n#endif\n");
    #endif

    shader.addSource(rs.get("compatibility.glsl"));
    return shader;
}
}}}
#endif
| {
"content_hash": "dcc8e88b57b8cc2579ac6803947faf1b",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 106,
"avg_line_length": 38.47826086956522,
"alnum_prop": 0.7451977401129943,
"repo_name": "MiUishadow/magnum",
"id": "5c7f08fbb0ef9e06f475f79bcdf6645c41e17c88",
"size": "3003",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Magnum/Shaders/Implementation/CreateCompatibilityShader.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "7448"
},
{
"name": "C",
"bytes": "720028"
},
{
"name": "C++",
"bytes": "6125526"
},
{
"name": "CMake",
"bytes": "307663"
},
{
"name": "CSS",
"bytes": "805"
},
{
"name": "GLSL",
"bytes": "47956"
},
{
"name": "HTML",
"bytes": "1181"
},
{
"name": "JavaScript",
"bytes": "2571"
},
{
"name": "Makefile",
"bytes": "852"
},
{
"name": "Objective-C",
"bytes": "11702"
},
{
"name": "Objective-C++",
"bytes": "3863"
},
{
"name": "Ruby",
"bytes": "935"
},
{
"name": "Shell",
"bytes": "12459"
}
],
"symlink_target": ""
} |
To contribute your custom controls to UI5Lab you need to transform them into a UI5Lab library.
A simple example set up according to our best practices can be found in project [UI5Lab-library-simple](https://github.com/UI5Lab/UI5Lab-library-simple).
**Note:** Please use the new [OpenUI5 npm modules](https://www.npmjs.com/org/openui5) and the [UI5 tooling](https://github.com/SAP/ui5-tooling) when creating new community libraries - we do not recommend to use bower and grunt anymore. If you like to switch your library to our latest recommendations, see the [migration guide](https://blogs.sap.com/2018/11/02/migrate-your-ui5lab-library-to-the-new-ui5-tooling/) for more details.
Apart from this documentation, you can follow these hands-on tutorials to create your own UI5Lab library and add it to UI5Lab:
* [Create a UI5 Library for UI5Lab](https://blogs.sap.com/2018/03/02/create-your-own-ui5-library-for-ui5lab/)
* [Add your UI5 Library to UI5Lab](https://blogs.sap.com/2018/03/02/add-your-ui5-library-to-ui5lab/)
* [Migrate your UI5Lab library to the new UI5 tooling](https://blogs.sap.com/2018/11/02/migrate-your-ui5lab-library-to-the-new-ui5-tooling/)
## Instructions
#### 1. Get Your Custom Control Ready
Custom controls need to follow a certain file and library structure to be integrated into UI5Lab.
This blog post describes all the steps in detail with a practical example:
[Custom Control 101 by @stermi](https://medium.com/@stermi/custom-control-101-sapui5-openui5-tipoftheday-customcontrol-fd51a85bbed3)
When you are done with your control implementation you can add metadata to integrate it with UI5Lab browser. Each Custom Control library has its own `index.json`. This is a small file with descriptions about the custom control library.
#### 2. Edit Your index.json
```json
{
"the.library.namespace": {
"icon": "a SAP-Icon e.g. sap-icon://database",
"name": "the Library name",
"description": "a short description",
"source" : "a link to the source code on GitHub",
"documentation": "a link to the documentation",
"demo": "a link to the demonstration webpage",
"license": "the license you chose for your Custom Control Library e.g. Apache 2.0",
"version": 1.0,
"content": {
"Your First Custom Control": {
"id": "Custom Control ID",
"name": "Custom Control name",
"type": "control",
"version": 1.0,
"description": "a short description",
"samples": [
{
"id": "ID of first sample",
"name": "sample name 1",
"description": "a short sample description"
},
{
"id": "ID of second sample",
"name": "sample name 2",
"description": "a short sample description"
}
]
}
}
}
}
```
`content` is a substructure describing 1 or more custom controls included in the library.
Good examples are the `index.json` of published UI5lab libraries like [UI5Lab-library-simple](https://github.com/UI5Lab/UI5Lab-library-simple/blob/master/test/ui5lab/geometry/index.json) or [openui5-qrcode](https://github.com/StErMi/openui5-qrcode/blob/master/test/index.json).
#### 3. Generate a Pull Request
Once you are ready to publish your custom control library, simply fork [UI5Lab-central](https://github.com/UI5Lab/UI5Lab-central), edit the `package.json` file, and create a pull request. When this pull request is merged into the repository, a Travis job will automatically generate an updated version of UI5Lab in which your library is listed among all the others.
The `combineProjects.js` script copies files from the `node_modules` folder to the resources folder of this project. Test your library, provide a sample, and create a pull request with a meaningful description to list your project.
| {
"content_hash": "3dcb15973e9783c43a9ed3e5b524211d",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 431,
"avg_line_length": 63.5,
"alnum_prop": 0.6523502743975185,
"repo_name": "UI5Lab/UI5Lab",
"id": "eb5277167aee3c2671b93ab70228d8b2412ecce6",
"size": "4258",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "docs/how_to/contribute_custom_control_library.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "4297"
}
],
"symlink_target": ""
} |
require "spec_helper"
describe CategoriesController do
  describe "routing" do
    it "routes to #show" do
      # Use the expect syntax: the one-liner `.should` form on a bare
      # object is deprecated in RSpec 2.11+ and disabled by default in
      # rspec-expectations 3.
      expect(get("/categories/1/page/1")).to \
        route_to("categories#show", id: "1", page: "1")
    end
  end
end
| {
"content_hash": "d3bb3a200b6d3861cffde009c1c7545a",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 55,
"avg_line_length": 19.25,
"alnum_prop": 0.6190476190476191,
"repo_name": "cihad/ilan",
"id": "6df3146c497ab2a80803ed5505dcd8c01d80bac4",
"size": "231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/routing/categories_routing_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5070"
},
{
"name": "CoffeeScript",
"bytes": "422"
},
{
"name": "JavaScript",
"bytes": "8318"
},
{
"name": "Ruby",
"bytes": "100819"
}
],
"symlink_target": ""
} |
namespace MusicStore.Models
{
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using Common.Constants;
public class Album
{
private ICollection<Artist> artists;
private ICollection<Song> songs;
public Album()
{
this.artists = new HashSet<Artist>();
this.songs = new HashSet<Song>();
}
[Key]
public int AlbumId { get; set; }
[Required]
[MaxLength(ValidationConstants.AlbumTitleMaxLength)]
[MinLength(ValidationConstants.AlbumTitleMinLength)]
public string Title { get; set; }
[Required]
public int Year { get; set; }
[Required]
[MaxLength(ValidationConstants.ProducerNameMaxLength)]
[MinLength(ValidationConstants.ProducerNameMinLength)]
public string Producer { get; set; }
public virtual ICollection<Artist> Artists
{
get
{
return this.artists;
}
set
{
this.artists = value;
}
}
public virtual ICollection<Song> Songs
{
get
{
return this.songs;
}
set
{
this.songs = value;
}
}
}
}
| {
"content_hash": "3182f42c4fe4d6c3e98e678b4c80f642",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 62,
"avg_line_length": 22.491803278688526,
"alnum_prop": 0.5116618075801749,
"repo_name": "DimitarSD/Telerik-Academy",
"id": "f9e7f149beadd6229d56a5eaeb3e211951846ec7",
"size": "1374",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "03. Software Technologies/02. Web Services and Cloud/01. Introduction To ASP.NET Web API/MusicStore/Data/MusicStore.Models/Album.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "324"
},
{
"name": "C#",
"bytes": "1637823"
},
{
"name": "CSS",
"bytes": "121271"
},
{
"name": "CoffeeScript",
"bytes": "4643"
},
{
"name": "HTML",
"bytes": "665465"
},
{
"name": "Java",
"bytes": "7842"
},
{
"name": "JavaScript",
"bytes": "1453926"
},
{
"name": "Objective-C",
"bytes": "8928"
},
{
"name": "SQLPL",
"bytes": "941"
},
{
"name": "XSLT",
"bytes": "3924"
}
],
"symlink_target": ""
} |
<?php
use Illuminate\Database\Seeder;
use Illuminate\Support\Facades\DB;
class AccessTableSeeder extends Seeder
{
    /**
     * Seed the users, roles and permissions tables.
     *
     * On MySQL, foreign key checks are temporarily disabled so the tables
     * can be seeded regardless of constraint ordering.
     */
    public function run()
    {
        // Read the connection from config instead of env(): once the
        // configuration is cached (`php artisan config:cache`), env()
        // returns null and the FK toggle would silently be skipped.
        $isMysql = config('database.default') == 'mysql';

        if ($isMysql) {
            DB::statement('SET FOREIGN_KEY_CHECKS=0;');
        }

        $this->call(UserTableSeeder::class);
        $this->call(RoleTableSeeder::class);
        $this->call(UserRoleSeeder::class);
        $this->call(PermissionGroupTableSeeder::class);
        $this->call(PermissionTableSeeder::class);
        $this->call(PermissionDependencyTableSeeder::class);

        if ($isMysql) {
            DB::statement('SET FOREIGN_KEY_CHECKS=1;');
        }
    }
}
"content_hash": "51e9ed4a9fb84cb75754a35471aa2543",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 60,
"avg_line_length": 25.444444444444443,
"alnum_prop": 0.6069868995633187,
"repo_name": "tyb917/Lshop",
"id": "2676a55729e450a5ce5e380f8fcb88882bc3dc76",
"size": "687",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "database/seeds/AccessTableSeeder.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "39553"
},
{
"name": "CSS",
"bytes": "362834"
},
{
"name": "HTML",
"bytes": "2837555"
},
{
"name": "JavaScript",
"bytes": "4903393"
},
{
"name": "PHP",
"bytes": "1358617"
}
],
"symlink_target": ""
} |
<?php
// Bootstrap shared CBSD web helpers.
require('cbsd.php');
?>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<title>CBSD Project</title>
<style>
body {
	font-size:14px;
}
</style>
</head>
<body>
<form action="addnode-fromargs.php" method="post">
<div class="main">
<div class="field">
	<label for="address">Node IP or Hostname:</label>
	<!-- id added so the label's for="" attribute actually targets the input -->
	<input type="text" name="address" id="address" />
</div>
<div class="field">
	<label for="password">CBSD User Password:</label>
	<!-- type="password" masks the credential on screen (was type="text") -->
	<input type="password" name="password" id="password" />
</div>
<div class="field">
	<label for="sshport">SSH Port:</label>
	<input type="text" name="sshport" id="sshport" value="22222" maxlength="5" size="5" />
</div>
<p><input type="submit" name="create" value="Connect" ></p>
</div>
</form>
</body>
</html>
| {
"content_hash": "a708d039a394460647befc9424cb92dd",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 76,
"avg_line_length": 21.61111111111111,
"alnum_prop": 0.6169665809768637,
"repo_name": "olevole/cbsd-websample",
"id": "ab0deb1955fe268fe4bfbe00909a98871f4bbac8",
"size": "778",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jaction/addnode.php",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "1745"
},
{
"name": "CSS",
"bytes": "36457"
},
{
"name": "HTML",
"bytes": "4161"
},
{
"name": "JavaScript",
"bytes": "114187"
},
{
"name": "Nginx",
"bytes": "1320"
},
{
"name": "PHP",
"bytes": "61613"
},
{
"name": "Shell",
"bytes": "248"
}
],
"symlink_target": ""
} |
require 'spec_helper'
RSpec.describe Duralex::User do
  # Minimal model double: Duralex::Model invokes `before_create` and
  # `validates` on the including class, so both are stubbed as no-ops.
  class FakeUser
    def self.before_create(*args)
    end

    def self.validates(*args)
    end
    include Duralex::Model
  end

  before do
    # Global TOS definitions shared by all examples:
    # admins get no documents, B2B users one, everyone else two;
    # validation fails only when the user refused the document.
    allow(Duralex).to receive(:definitions).and_return({
      documents: Proc.new do |user|
        if user.admin?
          nil
        else
          if user.b2b?
            'some_tos'
          else
            ['some_tos', 'another_tos']
          end
        end
      end,
      validation: Proc.new do |user, doc|
        !user.refused_doc?
      end
    })
  end

  describe '#[]' do
    subject { Duralex[user] }

    context 'with regular user' do
      let(:user) { FakeUser.new }
      it { expect(subject).to be_kind_of Duralex::User }
    end

    context 'with nil user' do
      # nil is wrapped in the Guest null-object instead of User
      let(:user) { nil }
      it { expect(subject).to be_kind_of Duralex::Guest }
    end
  end

  describe '#documents' do
    subject { Duralex[user].documents }

    context 'with nil user' do
      let(:user) { nil }
      it { expect(subject).to eq [] }
    end

    context 'with regular user' do
      context 'no tos' do
        let(:user) { double(accept_tos!: true, admin?: true) }
        it 'returns an empty array' do
          # nil from the :documents proc is normalized to []
          expect(subject).to eq []
        end
      end

      context 'one tos' do
        let(:user) { double(accept_tos!: true, admin?: false, b2b?: true) }
        it 'returns an array representation of the TOS definition' do
          # a single String definition is wrapped in an Array
          expect(subject).to eq ['some_tos']
        end
      end

      context 'more tos' do
        let(:user) { double(accept_tos!: true, admin?: false, b2b?: false) }
        it 'returns an array representation of the TOS definition' do
          expect(subject).to eq ['some_tos', 'another_tos']
        end
      end
    end
  end

  describe '#up_to_date?' do
    subject { Duralex[user].up_to_date? }

    context 'nil user' do
      # guests have nothing to accept, so they are always up to date
      let(:user) { nil }
      it { expect(subject).to be_truthy }
    end
  end
end
| {
"content_hash": "7d6306a88ddd7bcfc1487318c0cef419",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 76,
"avg_line_length": 23.511904761904763,
"alnum_prop": 0.5594936708860759,
"repo_name": "demental/duralex",
"id": "f2d5d903f42005d4f5f8829fda166557e64d623b",
"size": "1975",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/user_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "10709"
}
],
"symlink_target": ""
} |
require 'forwardable'
require 'rubygems'
gem 'oauth', '~> 0.4.1'
require 'oauth'
gem 'hashie'
require 'hashie'
gem 'httparty', '>= 0.5.2'
require 'httparty'
module Weibo
  # Base class for Weibo API errors; carries the raw response +data+ that
  # triggered the failure.
  class WeiboError < StandardError
    attr_reader :data

    def initialize(data)
      @data = data
      super
    end
  end

  # Specific API failures (all expose #data via WeiboError).
  class RepeatedWeiboText < WeiboError; end
  class RateLimitExceeded < WeiboError; end
  class Unauthorized < WeiboError; end
  class General < WeiboError; end

  # NOTE(review): the following three inherit StandardError rather than
  # WeiboError, so `rescue WeiboError` will NOT catch them and they carry
  # no #data -- presumably intentional; confirm.
  class Unavailable < StandardError; end
  class InformWeibo < StandardError; end
  class NotFound < StandardError; end
end
module Hashie
  class Mash
    # Recursively snake_cases every key of this Mash in place: each key is
    # stringified and run through String#underscore (an ActiveSupport-style
    # extension that must be available at runtime -- TODO confirm where it
    # is loaded from). Nested hash values, and hashes inside array values,
    # are rewritten as well. Returns self.
    #
    # NOTE(review): nested values are recursed via v.rubyify_keys! whenever
    # v.is_a?(Hash); a plain Hash (not a Mash) would raise NoMethodError
    # here -- presumably all nested hashes are Mashes in practice.
    def rubyify_keys!
      keys.each{|k|
        v = delete(k)
        new_key = k.to_s.underscore
        self[new_key] = v
        v.rubyify_keys! if v.is_a?(Hash)
        v.each{|p| p.rubyify_keys! if p.is_a?(Hash)} if v.is_a?(Array)
      }
      self
    end
  end
end
directory = File.expand_path(File.dirname(__FILE__))
require File.join(directory, 'weibo', 'oauth')
require File.join(directory, 'weibo', 'oauth_hack')
require File.join(directory, 'weibo', 'httpauth')
require File.join(directory, 'weibo', 'request')
require File.join(directory, 'weibo', 'config')
require File.join(directory, 'weibo', 'base')
# code is an adaptation of the twitter gem by John Nunemaker
# http://github.com/jnunemaker/twitter
# Copyright (c) 2009 John Nunemaker
#
# made to work with china's leading twitter service, 新浪微博
| {
"content_hash": "95b8b08cc2405b21e612e89f0782e10c",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 73,
"avg_line_length": 24.203125,
"alnum_prop": 0.6655907036797934,
"repo_name": "encoreshao/weibo",
"id": "fcc4da1a7efbc71921481190623e7f806df66d30",
"size": "1557",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/weibo.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "25493"
}
],
"symlink_target": ""
} |
- Initial scaffolding and objects
- Adding dice rolling base process. JPG
- hold selected dice. JPG
| {
"content_hash": "97fed912f3966652f87215293f0098d0",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 39,
"avg_line_length": 33.333333333333336,
"alnum_prop": 0.78,
"repo_name": "YachtZoes/yachtzoe_project",
"id": "893151118e1d4cdd5c032e1c4eba916f17db03ee",
"size": "140",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8980"
},
{
"name": "HTML",
"bytes": "12652"
},
{
"name": "JavaScript",
"bytes": "27780"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "94c5ab7a9166b08c33a970b28137f541",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "8f6c74277ffd1c12c641618b824172a151bd3509",
"size": "175",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Sapindales/Anacardiaceae/Trujanoa/Trujanoa pinnata/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
==ArrayList==
- ArrayList#new - O(1). Technically, the process is really O(n), since I have to create the array itself, and that would require initializing each 'index' in this array.
- ArrayList#add - O(1), amortized constant time. The worst-case scenario is O(n), when we call ArrayList#resize_array.
- ArrayList#get - O(1).
- ArrayList#set - O(1).
- ArrayList#size - O(1).
- ArrayList#insert - O(n). Even if we do not need to call ArrayList#resize_array, we do have to spend some time shifting the other elements one index over.
- ArrayList#resize_array - O(n). We need to recreate a brand new array and then copy everything from the old array into the new one.
==LinkedList==
ArrayList Approach:
- LinkedList#new - O(1)
- LinkedList#insert_first - O(n). This requires inserting an item into the ArrayList, and since inserting is O(n), the LinkedList#insert_first is also O(n) as well.
- LinkedList#insert_last - O(1)
- LinkedList#set - O(1)
- LinkedList#get - O(1)
- LinkedList#remove_first - O(n). This method requires creating a new array to shift everything back one index.
- LinkedList#remove_last - O(1). This only requires deleting the last node in the array and reassigning the tail to the previous node.
Inserting a value in a LinkedList is faster than inserting a value in an ArrayList since you only need to change two elements (the one before and after the inserted Node) instead of having to change the entire array.
Node-Based Approach:
- LinkedList#new - O(1)
- LinkedList#insert_first - O(1).
- LinkedList#insert_last - O(1).
- LinkedList#set - O(n)
- LinkedList#get - O(n)
- LinkedList#remove_first - O(1).
- LinkedList#remove_last - O(1).
A node-based approach to linked lists is great for adding and removing nodes, but is pretty terrible at retrieving and inserting in nodes.
In both the ArrayList Approach and the Node-Based Approach, I have kept track of the "tail" of a Linked List, which was NOT a good idea.
1. First of all, the specs never mentioned keeping track of tails. This is why they mentioned LinkedList#insert_last should originally NOT be O(1)...because, after all, you don't have a pointer to the ending and have to traverse the Linked List to go to the 'tail'.
2. Secondly, it means I have to do some maintenance work to make sure that LinkedList#set doesn't accidentally insert in a node that should be a new tail. It's not entirely horrible.
It seems that it might be better to move onto the next lesson in the Algos unit than to worry about some silliness like this.
==Stack==
ArrayList Approach
- Stack#new - O(1). Constant initial size for the ArrayList.
- Stack#top - O(1). Uses ArrayList#get.
- Stack#push - O(n). Uses ArrayList#insert.
- Stack#pop - O(n). Need to duplicate the array without the 'zeroth' element. This method is a copycat of the ArrayList version of "Linked Lists".
- Stack#empty? - O(1).
LinkedList Approach
- Stack#new - O(1).
- Stack#top - O(1).
- Stack#push - O(1).
- Stack#pop - O(1).
- Stack#empty? - O(1).
In conclusion, the LinkedList approach is slightly better than the ArrayList approach simply because there is no need to resize the array or worry about moving elements around after an insertion/deletion. | {
"content_hash": "88c4c9419afd0db9cb85695412f1d542",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 265,
"avg_line_length": 51.645161290322584,
"alnum_prop": 0.7435977514053717,
"repo_name": "tra38/algorithms-phase4",
"id": "5b99757d68143daf79155d42b0526a2bef61df72",
"size": "3202",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "complexity.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "52520"
}
],
"symlink_target": ""
} |
"""
Mercurial
=========
This module provides high-level tools for managing `Mercurial`_ repositories.
.. _Mercurial: http://mercurial.selenic.com/
"""
from fabric.api import run
from fabtools import mercurial
from fabtools.files import is_dir
from fabtools.system import UnsupportedFamily, distrib_family
def command():
    """
    Require the ``hg`` command-line tool.

    Installs Mercurial through the distribution's package manager when the
    ``hg`` executable is not already available on the remote host.

    Example::

        from fabric.api import run
        from fabtools import require

        require.mercurial.command()
        run('hg --help')

    """
    from fabtools.require.deb import package as require_deb_package
    from fabtools.require.portage import package as require_portage_package
    from fabtools.require.rpm import package as require_rpm_package

    # Nothing to do when ``hg`` already runs.
    if not run('hg --version', quiet=True).failed:
        return

    # Dispatch the install on the detected distribution family.
    installers = {
        'debian': require_deb_package,
        'gentoo': require_portage_package,
        'redhat': require_rpm_package,
    }
    installer = installers.get(distrib_family())
    if installer is None:
        raise UnsupportedFamily(supported=['debian', 'redhat', 'gentoo'])
    installer('mercurial')
def working_copy(remote_url, path=None, branch="default", update=True,
                 use_sudo=False, user=None):
    """
    Require a working copy of the repository from the ``remote_url``.

    The ``path`` is optional, and defaults to the last segment of the
    remote repository URL.

    If the ``path`` does not exist, this will clone the remote
    repository and check out the specified branch.

    If the ``path`` exists and ``update`` is ``True``, it will pull
    changes from the remote repository, check out the specified branch,
    then update the working copy.

    If the ``path`` exists and ``update`` is ``False``, it will only
    check out the specified branch, without pulling remote changesets.

    :param remote_url: URL of the remote repository
    :type remote_url: str

    :param path: Absolute or relative path of the working copy on the
                 filesystem.  If this directory doesn't exist yet, a new
                 working copy is created through ``hg clone``.  If the
                 directory does exist *and* ``update == True``, a
                 ``hg pull && hg up`` is issued.  If ``path is None`` the
                 ``hg clone`` is issued in the current working directory and
                 the directory name of the working copy is created by ``hg``.
    :type path: str

    :param branch: Branch or tag to check out. If the given value is a tag
                   name, update must be ``False`` or consecutive calls will
                   fail.
    :type branch: str

    :param update: Whether or not to pull and update remote changesets.
    :type update: bool

    :param use_sudo: If ``True`` execute ``hg`` with
                     :func:`fabric.operations.sudo`, else with
                     :func:`fabric.operations.run`.
    :type use_sudo: bool

    :param user: If ``use_sudo is True``, run :func:`fabric.operations.sudo`
                 with the given user.  If ``use_sudo is False`` this parameter
                 has no effect.
    :type user: str
    """
    command()

    if path is None:
        path = remote_url.split('/')[-1]

    # Fix: the previous implementation tested is_dir() twice (``if`` then
    # ``elif not``) and ended with an unreachable ``else`` branch raising
    # ValueError("Invalid combination of parameters."); a single check with
    # a plain if/else is equivalent and avoids the extra remote call.
    if is_dir(path, use_sudo=use_sudo):
        # NOTE(review): ``hg pull`` runs even when ``update`` is False,
        # although the docstring promises no remote changesets are pulled in
        # that case -- behavior kept as-is; confirm which is intended.
        mercurial.pull(path, use_sudo=use_sudo, user=user)
        if update:
            mercurial.update(path=path, branch=branch, use_sudo=use_sudo,
                             user=user)
    else:
        mercurial.clone(remote_url, path=path, use_sudo=use_sudo, user=user)
        mercurial.update(path=path, branch=branch, use_sudo=use_sudo, user=user)
| {
"content_hash": "2d8cbc646ac83ffe7b4480144e63245a",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 80,
"avg_line_length": 34.24324324324324,
"alnum_prop": 0.6224677716390423,
"repo_name": "n0n0x/fabtools-python",
"id": "a863473c24ec63c946cf46545ef2d39a49f33429",
"size": "3801",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "fabtools/require/mercurial.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "314684"
}
],
"symlink_target": ""
} |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.tests.hive;
import io.prestodb.tempto.fulfillment.table.TableDefinitionsRepository;
import io.prestodb.tempto.fulfillment.table.hive.HiveDataSource;
import io.prestodb.tempto.fulfillment.table.hive.HiveTableDefinition;
import io.prestodb.tempto.query.QueryExecutor;
import java.util.Locale;
import java.util.Optional;
import java.util.concurrent.ThreadLocalRandom;
import static io.prestodb.tempto.context.ThreadLocalTestContextHolder.testContext;
import static io.prestodb.tempto.fulfillment.table.hive.InlineDataSource.createResourceDataSource;
import static java.lang.String.format;
/**
 * Tempto table definitions that expose an "all simple Hive types" table in
 * several storage formats (TEXTFILE, PARQUET, AVRO, ORC, RCFILE).
 *
 * Only the TEXTFILE variant is backed by bundled data; the other formats are
 * declared with no data and can be filled from the textfile table via
 * {@link #populateDataToHiveTable(String)}.
 */
public final class AllSimpleTypesTableDefinitions
{
    // Static-only holder; never instantiated.
    private AllSimpleTypesTableDefinitions()
    {
    }

    // Naming pattern for every variant: "<fileformat>_all_types".
    private static String tableNameFormat = "%s_all_types";

    // TEXTFILE variant: pipe-delimited rows, loaded from a bundled resource.
    @TableDefinitionsRepository.RepositoryTableDefinition
    public static final HiveTableDefinition ALL_HIVE_SIMPLE_TYPES_TEXTFILE = tableDefinitionBuilder("TEXTFILE", Optional.of("DELIMITED FIELDS TERMINATED BY '|'"))
            .setDataSource(getTextFileDataSource())
            .build();

    // PARQUET variant: declared empty (no c_date column -- see builder below).
    @TableDefinitionsRepository.RepositoryTableDefinition
    public static final HiveTableDefinition ALL_HIVE_SIMPLE_TYPES_PARQUET = parquetTableDefinitionBuilder()
            .setNoData()
            .build();

    // AVRO variant: declared empty (no c_tinyint/c_smallint -- see builder below).
    @TableDefinitionsRepository.RepositoryTableDefinition
    public static final HiveTableDefinition ALL_HIVE_SIMPLE_TYPES_AVRO = avroTableDefinitionBuilder()
            .setNoData()
            .build();

    // ORC variant: full column set, declared empty.
    @TableDefinitionsRepository.RepositoryTableDefinition
    public static final HiveTableDefinition ALL_HIVE_SIMPLE_TYPES_ORC = tableDefinitionBuilder("ORC", Optional.empty())
            .setNoData()
            .build();

    // RCFILE variant: full column set with an explicit columnar SerDe, declared empty.
    @TableDefinitionsRepository.RepositoryTableDefinition
    public static final HiveTableDefinition ALL_HIVE_SIMPLE_TYPES_RCFILE = tableDefinitionBuilder("RCFILE", Optional.of("SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'"))
            .setNoData()
            .build();

    /**
     * Builds the full-column-set table definition for the given storage
     * format, optionally inserting a ROW FORMAT clause into the DDL.
     */
    private static HiveTableDefinition.HiveTableDefinitionBuilder tableDefinitionBuilder(String fileFormat, Optional<String> rowFormat)
    {
        String tableName = format(tableNameFormat, fileFormat.toLowerCase(Locale.ENGLISH));
        return HiveTableDefinition.builder(tableName)
                .setCreateTableDDLTemplate("" +
                    "CREATE %EXTERNAL% TABLE %NAME%(" +
                    "   c_tinyint            TINYINT," +
                    "   c_smallint           SMALLINT," +
                    "   c_int                INT," +
                    "   c_bigint             BIGINT," +
                    "   c_float              FLOAT," +
                    "   c_double             DOUBLE," +
                    "   c_decimal            DECIMAL," +
                    "   c_decimal_w_params   DECIMAL(10,5)," +
                    "   c_timestamp          TIMESTAMP," +
                    "   c_date               DATE," +
                    "   c_string             STRING," +
                    "   c_varchar            VARCHAR(10)," +
                    "   c_char               CHAR(10)," +
                    "   c_boolean            BOOLEAN," +
                    "   c_binary             BINARY" +
                    ") " +
                    (rowFormat.isPresent() ? "ROW FORMAT " + rowFormat.get() + " " : " ") +
                    "STORED AS " + fileFormat);
    }

    /**
     * AVRO variant of the DDL: omits c_tinyint and c_smallint, which the
     * other formats include.
     */
    private static HiveTableDefinition.HiveTableDefinitionBuilder avroTableDefinitionBuilder()
    {
        return HiveTableDefinition.builder("avro_all_types")
                .setCreateTableDDLTemplate("" +
                    "CREATE %EXTERNAL% TABLE %NAME%(" +
                    "   c_int                INT," +
                    "   c_bigint             BIGINT," +
                    "   c_float              FLOAT," +
                    "   c_double             DOUBLE," +
                    "   c_decimal            DECIMAL," +
                    "   c_decimal_w_params   DECIMAL(10,5)," +
                    "   c_timestamp          TIMESTAMP," +
                    "   c_date               DATE," +
                    "   c_string             STRING," +
                    "   c_varchar            VARCHAR(10)," +
                    "   c_char               CHAR(10)," +
                    "   c_boolean            BOOLEAN," +
                    "   c_binary             BINARY" +
                    ") " +
                    "STORED AS AVRO");
    }

    /**
     * PARQUET variant of the DDL: omits c_date, which the other formats
     * include.
     */
    private static HiveTableDefinition.HiveTableDefinitionBuilder parquetTableDefinitionBuilder()
    {
        return HiveTableDefinition.builder("parquet_all_types")
                .setCreateTableDDLTemplate("" +
                    "CREATE %EXTERNAL% TABLE %NAME%(" +
                    "   c_tinyint            TINYINT," +
                    "   c_smallint           SMALLINT," +
                    "   c_int                INT," +
                    "   c_bigint             BIGINT," +
                    "   c_float              FLOAT," +
                    "   c_double             DOUBLE," +
                    "   c_decimal            DECIMAL," +
                    "   c_decimal_w_params   DECIMAL(10,5)," +
                    "   c_timestamp          TIMESTAMP," +
                    "   c_string             STRING," +
                    "   c_varchar            VARCHAR(10)," +
                    "   c_char               CHAR(10)," +
                    "   c_boolean            BOOLEAN," +
                    "   c_binary             BINARY" +
                    ") " +
                    "STORED AS PARQUET");
    }

    // Data for the textfile table, read from the bundled classpath resource;
    // the random revision marker forces tempto to treat each run as fresh data.
    private static HiveDataSource getTextFileDataSource()
    {
        return createResourceDataSource(format(tableNameFormat, "textfile"), String.valueOf(ThreadLocalRandom.current().nextLong(Long.MAX_VALUE)), "com/facebook/presto/tests/hive/data/all_types/data.textfile");
    }

    /**
     * Copies all rows from the textfile variant into the given table via Hive
     * (used to fill the formats declared with setNoData()).
     */
    public static void populateDataToHiveTable(String tableName)
    {
        onHive().executeQuery(format("INSERT INTO TABLE %s SELECT * FROM %s",
                tableName,
                format(tableNameFormat, "textfile")));
    }

    // Query executor bound to the "hive" connection of the current test context.
    public static QueryExecutor onHive()
    {
        return testContext().getDependency(QueryExecutor.class, "hive");
    }
}
| {
"content_hash": "cc8159869c3d4765fad6e4c952a31648",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 210,
"avg_line_length": 47.59060402684564,
"alnum_prop": 0.5257368495275702,
"repo_name": "Teradata/presto",
"id": "e33be839934db70e243bdf5573aff32ea45fe306",
"size": "7091",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "presto-product-tests/src/main/java/com/facebook/presto/tests/hive/AllSimpleTypesTableDefinitions.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "26770"
},
{
"name": "CSS",
"bytes": "12676"
},
{
"name": "HTML",
"bytes": "55873"
},
{
"name": "Java",
"bytes": "26866232"
},
{
"name": "JavaScript",
"bytes": "201964"
},
{
"name": "Makefile",
"bytes": "6822"
},
{
"name": "PLSQL",
"bytes": "7831"
},
{
"name": "Python",
"bytes": "6568"
},
{
"name": "SQLPL",
"bytes": "8247"
},
{
"name": "Shell",
"bytes": "26950"
},
{
"name": "Thrift",
"bytes": "12038"
}
],
"symlink_target": ""
} |
using System;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Web.Http.Controllers;
using System.Web.Http.Description;
using System.Xml.XPath;
using DataStoreService.Areas.HelpPage.ModelDescriptions;
namespace DataStoreService.Areas.HelpPage
{
/// <summary>
/// A custom <see cref="IDocumentationProvider"/> that reads the API documentation from an XML documentation file.
/// </summary>
public class XmlDocumentationProvider : IDocumentationProvider, IModelDocumentationProvider
{
private XPathNavigator _documentNavigator;
private const string TypeExpression = "/doc/members/member[@name='T:{0}']";
private const string MethodExpression = "/doc/members/member[@name='M:{0}']";
private const string PropertyExpression = "/doc/members/member[@name='P:{0}']";
private const string FieldExpression = "/doc/members/member[@name='F:{0}']";
private const string ParameterExpression = "param[@name='{0}']";
/// <summary>
/// Initializes a new instance of the <see cref="XmlDocumentationProvider"/> class.
/// </summary>
/// <param name="documentPath">The physical path to XML document.</param>
public XmlDocumentationProvider(string documentPath)
{
if (documentPath == null)
{
throw new ArgumentNullException("documentPath");
}
XPathDocument xpath = new XPathDocument(documentPath);
_documentNavigator = xpath.CreateNavigator();
}
public string GetDocumentation(HttpControllerDescriptor controllerDescriptor)
{
XPathNavigator typeNode = GetTypeNode(controllerDescriptor.ControllerType);
return GetTagValue(typeNode, "summary");
}
public virtual string GetDocumentation(HttpActionDescriptor actionDescriptor)
{
XPathNavigator methodNode = GetMethodNode(actionDescriptor);
return GetTagValue(methodNode, "summary");
}
public virtual string GetDocumentation(HttpParameterDescriptor parameterDescriptor)
{
ReflectedHttpParameterDescriptor reflectedParameterDescriptor = parameterDescriptor as ReflectedHttpParameterDescriptor;
if (reflectedParameterDescriptor != null)
{
XPathNavigator methodNode = GetMethodNode(reflectedParameterDescriptor.ActionDescriptor);
if (methodNode != null)
{
string parameterName = reflectedParameterDescriptor.ParameterInfo.Name;
XPathNavigator parameterNode = methodNode.SelectSingleNode(String.Format(CultureInfo.InvariantCulture, ParameterExpression, parameterName));
if (parameterNode != null)
{
return parameterNode.Value.Trim();
}
}
}
return null;
}
public string GetResponseDocumentation(HttpActionDescriptor actionDescriptor)
{
XPathNavigator methodNode = GetMethodNode(actionDescriptor);
return GetTagValue(methodNode, "returns");
}
public string GetDocumentation(MemberInfo member)
{
string memberName = String.Format(CultureInfo.InvariantCulture, "{0}.{1}", GetTypeName(member.DeclaringType), member.Name);
string expression = member.MemberType == MemberTypes.Field ? FieldExpression : PropertyExpression;
string selectExpression = String.Format(CultureInfo.InvariantCulture, expression, memberName);
XPathNavigator propertyNode = _documentNavigator.SelectSingleNode(selectExpression);
return GetTagValue(propertyNode, "summary");
}
public string GetDocumentation(Type type)
{
XPathNavigator typeNode = GetTypeNode(type);
return GetTagValue(typeNode, "summary");
}
private XPathNavigator GetMethodNode(HttpActionDescriptor actionDescriptor)
{
ReflectedHttpActionDescriptor reflectedActionDescriptor = actionDescriptor as ReflectedHttpActionDescriptor;
if (reflectedActionDescriptor != null)
{
string selectExpression = String.Format(CultureInfo.InvariantCulture, MethodExpression, GetMemberName(reflectedActionDescriptor.MethodInfo));
return _documentNavigator.SelectSingleNode(selectExpression);
}
return null;
}
private static string GetMemberName(MethodInfo method)
{
string name = String.Format(CultureInfo.InvariantCulture, "{0}.{1}", GetTypeName(method.DeclaringType), method.Name);
ParameterInfo[] parameters = method.GetParameters();
if (parameters.Length != 0)
{
string[] parameterTypeNames = parameters.Select(param => GetTypeName(param.ParameterType)).ToArray();
name += String.Format(CultureInfo.InvariantCulture, "({0})", String.Join(",", parameterTypeNames));
}
return name;
}
private static string GetTagValue(XPathNavigator parentNode, string tagName)
{
if (parentNode != null)
{
XPathNavigator node = parentNode.SelectSingleNode(tagName);
if (node != null)
{
return node.Value.Trim();
}
}
return null;
}
private XPathNavigator GetTypeNode(Type type)
{
string controllerTypeName = GetTypeName(type);
string selectExpression = String.Format(CultureInfo.InvariantCulture, TypeExpression, controllerTypeName);
return _documentNavigator.SelectSingleNode(selectExpression);
}
private static string GetTypeName(Type type)
{
string name = type.FullName;
if (type.IsGenericType)
{
// Format the generic type name to something like: Generic{System.Int32,System.String}
Type genericType = type.GetGenericTypeDefinition();
Type[] genericArguments = type.GetGenericArguments();
string genericTypeName = genericType.FullName;
// Trim the generic parameter counts from the name
genericTypeName = genericTypeName.Substring(0, genericTypeName.IndexOf('`'));
string[] argumentTypeNames = genericArguments.Select(t => GetTypeName(t)).ToArray();
name = String.Format(CultureInfo.InvariantCulture, "{0}{{{1}}}", genericTypeName, String.Join(",", argumentTypeNames));
}
if (type.IsNested)
{
// Changing the nested type name from OuterType+InnerType to OuterType.InnerType to match the XML documentation syntax.
name = name.Replace("+", ".");
}
return name;
}
}
}
| {
"content_hash": "f753e317ea9b84a22a9402f92d745076",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 160,
"avg_line_length": 43.701863354037265,
"alnum_prop": 0.6323194997157476,
"repo_name": "akhleshg/datastore-service",
"id": "d055b0bdd36b0bd6fbfc86bb40a29404e576f47e",
"size": "7036",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/DataStoreService/Areas/HelpPage/XmlDocumentationProvider.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "110"
},
{
"name": "C#",
"bytes": "162080"
},
{
"name": "CSS",
"bytes": "2626"
},
{
"name": "HTML",
"bytes": "5069"
},
{
"name": "JavaScript",
"bytes": "10714"
}
],
"symlink_target": ""
} |
# End-to-end GBDT multiclass-classification demo:
# 1) convert the libsvm-format data set to the ytk-learn format
sh demo/gbdt/multiclass_classification/libsvm_convert_2_ytklearn.sh
# 2) train the model with the local optimizer
sh demo/gbdt/multiclass_classification/local_optimizer.sh
# 3) run prediction with the trained model
sh demo/gbdt/multiclass_classification/predict.sh
| {
"content_hash": "24b7c7fe0841141338812852dc7d0fe9",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 67,
"avg_line_length": 58.666666666666664,
"alnum_prop": 0.8579545454545454,
"repo_name": "yuantiku/ytk-learn",
"id": "99a27aab9c2dd17a20151402f3d95426494597c8",
"size": "244",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demo/gbdt/multiclass_classification/run.sh",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2780"
},
{
"name": "Java",
"bytes": "1091226"
},
{
"name": "Python",
"bytes": "3636"
},
{
"name": "Shell",
"bytes": "19472"
}
],
"symlink_target": ""
} |
[](https://travis-ci.org/mswiden36@gmail.com/PieChartLib)
[](http://cocoapods.org/pods/PieChartLib)
[](http://cocoapods.org/pods/PieChartLib)
[](http://cocoapods.org/pods/PieChartLib)
## Example
To run the example project, clone the repo, and run `pod install` from the Example directory first.
## Requirements
## Installation
PieChartLib is available through [CocoaPods](http://cocoapods.org). To install
it, simply add the following line to your Podfile:
```ruby
pod "PieChartLib"
```
## Author
mswiden36@gmail.com, souiden@360medlink.com
## License
PieChartLib is available under the MIT license. See the LICENSE file for more info.
| {
"content_hash": "820234af9bab67877290ebc0a2a32b03",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 146,
"avg_line_length": 35.370370370370374,
"alnum_prop": 0.7612565445026178,
"repo_name": "swimoAmin/PieChart",
"id": "badf6da67de1c16fccd27513497ee09e18c67e6f",
"size": "970",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "1416"
},
{
"name": "Ruby",
"bytes": "1813"
},
{
"name": "Shell",
"bytes": "17325"
},
{
"name": "Swift",
"bytes": "807759"
}
],
"symlink_target": ""
} |
layout: page
title: Massey Family Reunion
date: 2016-05-24
author: Sara Black
tags: weekly links, java
status: published
summary: Nullam sit amet orci sed enim.
banner: images/banner/leisure-01.jpg
booking:
startDate: 01/27/2018
endDate: 01/31/2018
ctyhocn: NYCSIHX
groupCode: MFR
published: true
---
Aliquam vulputate quis ante et egestas. Vestibulum commodo hendrerit neque, sit amet gravida est consequat id. Proin suscipit euismod commodo. Cras in molestie nulla, vel fringilla nisl. Praesent vitae eleifend purus, vel egestas ligula. Proin sed mauris id tortor vehicula mollis at in diam. Cras venenatis pharetra nulla, sed condimentum ligula. Fusce porta eros sit amet tincidunt sodales. Sed elit erat, lacinia ut magna sit amet, venenatis cursus eros.
* Donec vel velit sed risus aliquet sodales sed eu neque.
In tincidunt, quam id venenatis luctus, lectus dui vestibulum neque, ut tincidunt lorem nunc quis diam. Vivamus lobortis turpis nisl, ac faucibus lacus blandit a. Ut porta arcu et quam aliquet, in semper orci placerat. Maecenas maximus blandit magna, et dapibus mi blandit vitae. Aliquam consequat ex massa, a euismod massa laoreet vel. Donec in quam eget quam congue dignissim in ac lorem. Maecenas ultricies lectus id nisi placerat, vel viverra velit tristique. Etiam dolor diam, tempor in porttitor id, interdum rhoncus elit. Donec lacinia at diam ut lacinia. Vestibulum elementum dignissim laoreet. Maecenas euismod mattis neque dapibus pretium.
| {
"content_hash": "6eeed2b90c003924519b0280a65e2089",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 649,
"avg_line_length": 73.85,
"alnum_prop": 0.8009478672985783,
"repo_name": "KlishGroup/prose-pogs",
"id": "acabb94dddc53200e6c742ba9e6146ec07c53443",
"size": "1481",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "pogs/N/NYCSIHX/MFR/index.md",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
using System.Collections.Generic;
using System.Linq;
using GraphLib.EdgeKeeping;
using GraphLib.VertexCreation;
using GraphLib.Vertices;
using GraphLib.Visiting;
namespace GraphLib
{
public class Graph
{
        // Creates IVertexTag instances the first time a vertex name is seen.
        private readonly IVertexTagFactory _vertexTagFactory;
        // All vertices of the graph, keyed by vertex name.
        private readonly Dictionary<string, IVertex> _vertices;
        // Graph-wide edge collection.
        private readonly IEdgeKeeper _edges;
        // Factories reused when creating vertices and blank clones.
        private readonly EdgeKeepingFactory _edgeKeepingFactory;
        private readonly VertexFactory _vertexFactory;
        // Options (e.g. directed vs. undirected) this graph was built with.
        private readonly GraphOptions _graphOptions;
        /// <summary>
        /// Creates an empty graph configured by <paramref name="graphOptions"/>;
        /// uses the custom vertex-tag factory from the options when provided,
        /// otherwise a <see cref="DefaultVertexTagFactory"/>.
        /// </summary>
        public Graph(GraphOptions graphOptions) : this(graphOptions, graphOptions.CustomVertexTagFactory ?? new DefaultVertexTagFactory(), new EdgeKeepingFactory(graphOptions), new VertexFactory(graphOptions))
        {
        }
private Graph(GraphOptions graphOptions, IVertexTagFactory vertexTagFactory, EdgeKeepingFactory edgeKeepingFactory, VertexFactory vertexFactory, int? vertexCapacity = null)
{
_graphOptions = graphOptions;
_vertexTagFactory = vertexTagFactory;
_edgeKeepingFactory = edgeKeepingFactory;
_vertexFactory = vertexFactory;
if (vertexCapacity != null)
_vertices = new Dictionary<string, IVertex>(vertexCapacity.Value);
else
_vertices = new Dictionary<string, IVertex>();
_edges = edgeKeepingFactory.CreateGlobal();
}
        /// <summary>Options this graph was created with.</summary>
        public GraphOptions Options => _graphOptions;
        /// <summary>Snapshot array of all vertices.</summary>
        public IVertex[] Vertices => _vertices.Values.ToArray();
        /// <summary>Snapshot array of the tags of all vertices.</summary>
        public IVertexTag[] VertexTags => _vertices.Values.Select(v=>v.VertexTag).ToArray();
        /// <summary>Snapshot array of all edges.</summary>
        public Edge[] Edges => _edges.GetEdges().ToArray();
        /// <summary>Number of vertices currently in the graph.</summary>
        public int VertexCount => _vertices.Count;
        /// <summary>
        /// Adds an edge between the named vertices, creating the vertices on
        /// demand; <paramref name="length"/> defaults to 1.
        /// </summary>
        public void AddEdge(string tailName, string headName, double length = 1)
        {
            AddEdge(GetVertex(tailName), GetVertex(headName), length);
        }
        /// <summary>
        /// Adds an edge between the vertices identified by the given tags'
        /// names; <paramref name="length"/> defaults to 1.
        /// </summary>
        public void AddEdge(IVertexTag tail, IVertexTag head, double length = 1)
        {
            AddEdge(GetVertex(tail.Name), GetVertex(head.Name), length);
        }
        /// <summary>
        /// Core edge insertion: registers the edge in the global collection and
        /// wires it into the tail's outgoing and the head's incoming edge lists.
        /// For undirected graphs the edge is additionally registered in the
        /// reverse direction so traversal works from both endpoints.
        /// </summary>
        public void AddEdge(IVertex tail, IVertex head, double length)
        {
            Edge result = new Edge(tail, head, length);
            _edges.Add(result);
            tail.AddOutcomeEdge(result);
            head.AddIncomeEdge(result);
            if (_graphOptions.Direction == GraphDirection.Undirected)
            {
                head.AddOutcomeEdge(result);
                tail.AddIncomeEdge(result);
            }
        }
        /// <summary>
        /// Removes the edge from the global collection and detaches it from the
        /// incident vertices' edge lists; mirrors the double registration that
        /// AddEdge performs for undirected graphs.
        /// </summary>
        public void RemoveEdge(Edge edge)
        {
            _edges.Remove(edge);
            edge.Tail.RemoveOutcomeEdge(edge);
            edge.Head.RemoveIncomeEdge(edge);
            if (_graphOptions.Direction == GraphDirection.Undirected)
            {
                edge.Head.RemoveOutcomeEdge(edge);
                edge.Tail.RemoveIncomeEdge(edge);
            }
        }
        /// <summary>
        /// Removes the named vertex and all edges incident to it.
        /// Note: GetVertex creates missing vertices, so removing an unknown
        /// name first creates and then deletes an isolated vertex.
        /// </summary>
        public void RemoveVertex(string name)
        {
            RemoveVertex(GetVertex(name));
        }
        // Detaches every edge incident to the vertex from the opposite
        // endpoint and the global collection, then drops the vertex itself.
        // NOTE(review): assumes GetOutcomeEdges()/GetIncomeEdges() return
        // snapshots that are safe to iterate while removing; for undirected
        // graphs the mirror outcome/income registrations made by AddEdge do
        // not appear to be cleared here -- verify against the IVertex and
        // IEdgeKeeper implementations.
        void RemoveVertex(IVertex vertexData)
        {
            foreach (var edge in vertexData.GetOutcomeEdges())
            {
                edge.Head.RemoveIncomeEdge(edge);
                _edges.Remove(edge);
            }
            foreach (var edge in vertexData.GetIncomeEdges())
            {
                edge.Tail.RemoveOutcomeEdge(edge);
                _edges.Remove(edge);
            }
            _vertices.Remove(vertexData.Name);
        }
public IVertexTag GetVertexTag(string name)
{
return GetVertex(name).VertexTag;
}
public IVertex GetVertex(string name)
{
IVertex result;
if (!_vertices.TryGetValue(name, out result))
result = RegisterVertex(_vertexTagFactory.CreateVertex(name));
return result;
}
        // Wraps the tag in an IVertex via the vertex factory and stores it in
        // the lookup table (Dictionary.Add throws if the name already exists).
        private IVertex RegisterVertex(IVertexTag vertexTag)
        {
            var result = _vertexFactory.Create(vertexTag, _edgeKeepingFactory);
            _vertices.Add(vertexTag.Name, result);
            return result;
        }
/// <summary>
/// Creates a structural copy of the graph with the same configuration,
/// vertices (recreated by name) and edges.
/// Fix: the edge length is now forwarded to the copy; previously the
/// length argument was omitted, so every cloned edge silently fell back
/// to the default length of 1 and weights were lost.
/// </summary>
public Graph Clone()
{
    Graph result = new Graph(_graphOptions, _vertexTagFactory, _edgeKeepingFactory, _vertexFactory, VertexCount);
    foreach (var edge in _edges.GetEdges())
        result.AddEdge(edge.Tail.Name, edge.Head.Name, edge.Length);
    return result;
}
/// <summary>Creates an empty graph configured identically to this one.</summary>
internal Graph GetBlankForClone() =>
    new Graph(_graphOptions, _vertexTagFactory, _edgeKeepingFactory, _vertexFactory, VertexCount);
/// <summary>Merges the two vertices with the given names into a single vertex.</summary>
public void Merge(string vName1, string vName2) =>
    Merge(GetVertexTag(vName1), GetVertexTag(vName2));
/// <summary>Merges the two vertices identified by the given tags into a single vertex.</summary>
public void Merge(IVertexTag vertexTag1, IVertexTag vertexTag2) =>
    Merge(GetVertex(vertexTag1.Name), GetVertex(vertexTag2.Name));
// Merges v1 and v2 into a freshly created vertex named "<v1>_<v2>".
// Edges that ran between v1 and v2 themselves are dropped (see MergeInt);
// all other incident edges are re-attached to the merged vertex.
public void Merge(IVertex v1, IVertex v2)
{
var newVertex = GetVertex(v1.Name + "_" + v2.Name);
MergeInt(v1, v2, newVertex);
MergeInt(v2, v1, newVertex);
_vertices.Remove(v1.Name);
_vertices.Remove(v2.Name);
}
// Re-attaches all edges of v1 to newVertex, preserving each edge's length,
// except edges connecting v1 to v2 (those disappear with the merge). Every
// original edge of v1 is removed afterwards.
// NOTE(review): RemoveEdge is called while enumerating v1's edge lists; this
// is only safe if GetOutcomeEdges()/GetIncomeEdges() return snapshots - confirm.
private void MergeInt(IVertex v1, IVertex v2, IVertex newVertex)
{
var edges = v1.GetOutcomeEdges();
foreach (Edge edge in edges)
{
if (edge.Head != v2)
{
AddEdge(newVertex, edge.Head, edge.Length);
}
RemoveEdge(edge);
}
edges = v1.GetIncomeEdges();
foreach (Edge edge in edges)
{
if (edge.Tail != v2)
{
AddEdge(edge.Tail, newVertex, edge.Length);
}
RemoveEdge(edge);
}
}
/// <summary>Returns true when an edge from fromVertex to toVertex exists
/// (either orientation counts for undirected graphs).</summary>
public bool HasEdge(IVertex fromVertex, IVertex toVertex) =>
    GetEdge(fromVertex, toVertex) != null;
/// <summary>
/// Returns the edge connecting fromVertex to toVertex, or null when no such
/// edge exists. For undirected graphs the opposite orientation is checked too.
/// </summary>
public Edge GetEdge(IVertex fromVertex, IVertex toVertex)
{
    var found = fromVertex.GetOutcomeEdges().FirstOrDefault(e => e.Head == toVertex);
    if (found == null && _graphOptions.Direction == GraphDirection.Undirected)
        found = toVertex.GetOutcomeEdges().FirstOrDefault(e => e.Head == fromVertex);
    return found;
}
/// <summary>
/// Lazily yields all vertices reachable from fromVertex over a single edge;
/// for undirected graphs the incoming edges contribute neighbours as well.
/// </summary>
IEnumerable<IVertex> GetConnectedVertices(IVertex fromVertex)
{
    foreach (var outgoing in fromVertex.GetOutcomeEdges())
        yield return outgoing.Head;
    if (_graphOptions.Direction != GraphDirection.Undirected)
        yield break;
    foreach (var incoming in fromVertex.GetIncomeEdges())
        yield return incoming.Tail;
}
// Runs a generic traversal over the whole graph (or over the given subset of
// start vertices), with the ordering delegated to the pluggable visit
// algorithm and progress reported through the visitor callbacks.
public void Visit(IVisitAlgorithm visitAlgorithm, IGraphVisitor visitor, IEnumerable<IVertexTag> vertexTags = null)
{
IEnumerable<IVertex> verticesToObserve = _vertices.Values;
if (vertexTags != null)
verticesToObserve = vertexTags.Select(v => _vertices[v.Name]);
HashSet<IVertex> visitedVertices = new HashSet<IVertex>();
// The outer loop restarts the traversal for every still-unvisited vertex,
// so disconnected parts of the graph are covered as well.
foreach (IVertex vertexOfWholeList in verticesToObserve)
{
if (!visitedVertices.Contains(vertexOfWholeList))
{
visitor.StartVisit(vertexOfWholeList);
visitAlgorithm.EnqueueVertices(new[] {vertexOfWholeList}, null);
DequeueResult? currentVertexResult;
while ((currentVertexResult = visitAlgorithm.DequeueVertex()) != null)
{
IVertex currentVertex = currentVertexResult.Value.Vertex;
if (currentVertexResult.Value.Type == VertexDequeueType.Finishing)
visitor.FinishVertex(currentVertex);
else
{
if (!visitedVertices.Contains(currentVertex))
{
visitedVertices.Add(currentVertex);
visitor.VisitVertex(currentVertex);
// Neighbours come from the outgoing edge list; Edge.Other yields the
// endpoint opposite to currentVertex.
visitAlgorithm.EnqueueVertices(currentVertex.GetOutcomeEdges().Select(e=>e.Other(currentVertex)), currentVertex);
}
}
}
}
}
}
}
}
| {
"content_hash": "d227b3dab821cb563102e10d43612ed6",
"timestamp": "",
"source": "github",
"line_count": 251,
"max_line_length": 209,
"avg_line_length": 34.30677290836653,
"alnum_prop": 0.5594007664615027,
"repo_name": "tihilv/GraphLib",
"id": "b0e6c4f653b81233e1c52fa0c08747d485f11b15",
"size": "8613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "GraphLib/Graph.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "95703"
}
],
"symlink_target": ""
} |
#pragma once
// Include guard renamed: identifiers containing a double underscore are
// reserved to the C++ implementation ([lex.name]), so the previous
// "__H__OCULAR_..." macros invoked undefined behavior.
#ifndef OCULAR_EDITOR_EVENTS_SCENE_OBJECT_SELECTED_HPP
#define OCULAR_EDITOR_EVENTS_SCENE_OBJECT_SELECTED_HPP

#include "Events/AEvent.hpp"
#include "Scene/SceneObject.hpp"

//------------------------------------------------------------------------------------------

/**
 * \addtogroup Ocular
 * @{
 */
namespace Ocular
{
    /**
     * \addtogroup Editor
     * @{
     */
    namespace Editor
    {
        /**
         * \class SceneObjectSelectedEvent
         *
         * Event generated when a SceneObject has been selected.
         * For example, when an object is single-clicked in the SceneTree widget.
         *
         * This event should be used to notify of a selection, not to trigger a selection.
         * To set the selected object, use Ocular::Editor::setSelectedObject
         *
         * See also SceneObjectFocusedEvent
         *
         * String Descriptor: "SceneObjectSelectedEvent" <br/>
         * Event Priority: Medium
         */
        class SceneObjectSelectedEvent : public Core::AEvent
        {
        public:

            /**
             * \param[in] object Pointer to object that was selected. Set to NULL to indicate previous object was unselected.
             */
            SceneObjectSelectedEvent(Core::SceneObject* object);
            ~SceneObjectSelectedEvent();

            /// The selected object; NULL when the previous selection was cleared.
            Core::SceneObject* object;

        protected:

        private:
        };
    }
    /**
     * @} End of Doxygen Groups
     */
}
/**
 * @} End of Doxygen Groups
 */

//------------------------------------------------------------------------------------------

#endif
"content_hash": "2501c23c74b806d7f2b86d1a001e34c2",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 125,
"avg_line_length": 25.323076923076922,
"alnum_prop": 0.5078979343863913,
"repo_name": "ssell/OcularEngine",
"id": "f9e5574aa6b3159e5bb31b625124569dd3ae7563",
"size": "2274",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "OcularEditor/include/Events/SceneObjectSelectedEvent.hpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1913"
},
{
"name": "C++",
"bytes": "3134358"
},
{
"name": "GLSL",
"bytes": "8"
},
{
"name": "HLSL",
"bytes": "28813"
}
],
"symlink_target": ""
} |
package ru.job4j.strategy;
/**
 * Strategy that renders an isosceles triangle built from '*' characters.
 *
 * @author Kucykh Vasily (mailto:basil135@mail.ru)
 * @version $Id$
 * @since 14.04.2017
 */
public class Triangle implements IShape {

    /**
     * Renders the triangle as six rows of stars (widths 1, 3, 5, 7, 9, 11),
     * each row centred with space padding on both sides and terminated by a
     * newline; the widest row carries no padding.
     *
     * @return generated string
     */
    @Override
    public String pic() {
        final int rows = 6;
        StringBuilder picture = new StringBuilder();
        for (int row = 0; row < rows; row++) {
            appendRepeated(picture, ' ', rows - 1 - row);
            appendRepeated(picture, '*', 2 * row + 1);
            appendRepeated(picture, ' ', rows - 1 - row);
            picture.append('\n');
        }
        return picture.toString();
    }

    /**
     * Appends {@code ch} to the builder {@code count} times.
     *
     * @param sb builder to append to
     * @param ch character to repeat
     * @param count number of repetitions (may be zero)
     */
    private void appendRepeated(StringBuilder sb, char ch, int count) {
        for (int i = 0; i < count; i++) {
            sb.append(ch);
        }
    }
}
| {
"content_hash": "2684fe9ffce4cf6ef1a99a5296f2b397",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 50,
"avg_line_length": 20.393939393939394,
"alnum_prop": 0.5007429420505201,
"repo_name": "Basil135/vkucyh",
"id": "10ab3323e51190a566ac621e52487a6aa29c60c6",
"size": "673",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chapter_002/src/main/java/ru/job4j/strategy/Triangle.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "319527"
}
],
"symlink_target": ""
} |
// Shared declarations exercised by the diagnostic checks below: a namespace
// member, class statics, a global, and two integral constants. Do not reflow:
// the trailing verification comments are consumed by the compiler test harness.
extern int omp_default_mem_alloc;
namespace X {
int x;
};
struct B {
static int ib; // expected-note {{'B::ib' declared here}}
static int bfoo() { return 8; }
};
int bfoo() { return 4; }
int z;
const int C1 = 1;
const int C2 = 2;
// Exercises the name-qualification forms accepted (and rejected) in the
// 'linear' clause of '#pragma omp teams distribute parallel for simd'.
void test_linear_colons()
{
int B = 0;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(B:bfoo())
for (int i = 0; i < 10; ++i) ;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(B::ib:B:bfoo()) // expected-error {{unexpected ':' in nested name specifier; did you mean '::'}}
for (int i = 0; i < 10; ++i) ;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(B:ib) // expected-error {{use of undeclared identifier 'ib'; did you mean 'B::ib'}}
for (int i = 0; i < 10; ++i) ;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(z:B:ib) // expected-error {{unexpected ':' in nested name specifier; did you mean '::'?}}
for (int i = 0; i < 10; ++i) ;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(B:B::bfoo())
for (int i = 0; i < 10; ++i) ;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(X::x : ::z)
for (int i = 0; i < 10; ++i) ;
// expected-error@+2 3 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(B,::z, X::x)
for (int i = 0; i < 10; ++i) ;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(::z)
for (int i = 0; i < 10; ++i) ;
#pragma omp target
#pragma omp teams distribute parallel for simd linear(B::bfoo()) // expected-error {{expected variable name}}
for (int i = 0; i < 10; ++i) ;
// expected-error@+2 2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(B::ib,B:C1+C2)
for (int i = 0; i < 10; ++i) ;
}
// Template version: a 'linear' step given by template parameter L applied to
// a non-integral, non-pointer variable must be diagnosed at instantiation time.
template<int L, class T, class N> T test_template(T* arr, N num) {
N i;
T sum = (T)0;
T ind2 = - num * L; // expected-note {{'ind2' defined here}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(ind2:L) // expected-error {{argument of a linear clause should be of integral or pointer type}}
for (i = 0; i < num; ++i) {
T cur = arr[(int)ind2];
ind2 += L;
sum += cur;
}
return T();
}
// A zero linear step (LEN == 0 at instantiation) must produce a warning.
template<int LEN> int test_warn() {
int ind2 = 0;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(ind2:LEN) // expected-warning {{zero linear step (ind2 should probably be const)}}
for (int i = 0; i < 100; i++) {
ind2 += LEN;
}
return ind2;
}
// Helper types with assorted completeness/constructibility properties, plus a
// threadprivate global, used to probe which entities the 'linear' clause accepts.
struct S1; // expected-note 2 {{declared here}} expected-note 2 {{forward declaration of 'S1'}}
extern S1 a;
class S2 {
mutable int a;
public:
S2():a(0) { }
};
const S2 b; // expected-note 2 {{'b' defined here}}
const S2 ba[5];
class S3 {
int a;
public:
S3():a(0) { }
};
const S3 ca[5];
class S4 {
int a;
S4();
public:
S4(int v):a(v) { }
};
class S5 {
int a;
S5():a(0) {}
public:
S5(int v):a(v) { }
};
S3 h;
#pragma omp threadprivate(h) // expected-note 2 {{defined as threadprivate or thread local}}
// Template driver covering malformed clause syntax, invalid variable kinds,
// and the 'allocate' clause interaction inside 'linear'.
template<class I, class C> int foomain(I argc, C **argv) {
I e(4);
I g(5);
int i;
int &j = i;
#pragma omp target
#pragma omp teams distribute parallel for simd linear // expected-error {{expected '(' after 'linear'}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear ( // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear () // expected-error {{expected expression}}
for (int k = 0; k < argc; ++k) ++k;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear (argc // expected-error {{expected ')'}} expected-note {{to match this '('}}
for (int k = 0; k < argc; ++k) ++k;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear (argc, // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear (argc > 0 ? argv[1] : argv[2]) // expected-error {{expected variable name}}
for (int k = 0; k < argc; ++k) ++k;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear (argc : 5) allocate , allocate(, allocate(omp_default , allocate(omp_default_mem_alloc, allocate(omp_default_mem_alloc:, allocate(omp_default_mem_alloc: argc, allocate(omp_default_mem_alloc: argv), allocate(argv) // expected-error {{expected '(' after 'allocate'}} expected-error 2 {{expected expression}} expected-error 2 {{expected ')'}} expected-error {{use of undeclared identifier 'omp_default'}} expected-note 2 {{to match this '('}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear (S1) // expected-error {{'S1' does not refer to a value}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear (a, b:B::ib) // expected-error {{linear variable with incomplete type 'S1'}} expected-error {{argument of a linear clause should be of integral or pointer type, not 'S2'}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear (argv[1]) // expected-error {{expected variable name}}
for (int k = 0; k < argc; ++k) ++k;
// expected-error@+2 2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(e, g)
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear(h) // expected-error {{threadprivate or thread local variable cannot be linear}}
for (int k = 0; k < argc; ++k) ++k;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear(i)
for (int k = 0; k < argc; ++k) ++k;
return 0;
}
// A threadprivate global reachable through a using-declaration; threadprivate
// variables must be rejected by the 'linear' clause even via this alias.
namespace A {
double x;
#pragma omp threadprivate(x) // expected-note {{defined as threadprivate or thread local}}
}
namespace C {
using A::x;
}
// Non-template driver: instantiates the templates above and repeats the
// clause checks with concrete types.
int main(int argc, char **argv) {
double darr[100];
// expected-note@+1 {{in instantiation of function template specialization 'test_template<-4, double, int>' requested here}}
test_template<-4>(darr, 4);
// expected-note@+1 {{in instantiation of function template specialization 'test_warn<0>' requested here}}
test_warn<0>();
S4 e(4); // expected-note {{'e' defined here}}
S5 g(5); // expected-note {{'g' defined here}}
int i;
int &j = i;
#pragma omp target
#pragma omp teams distribute parallel for simd linear // expected-error {{expected '(' after 'linear'}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear ( // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear () // expected-error {{expected expression}}
for (int k = 0; k < argc; ++k) ++k;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear (argc // expected-error {{expected ')'}} expected-note {{to match this '('}}
for (int k = 0; k < argc; ++k) ++k;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear (argc, // expected-error {{expected expression}} expected-error {{expected ')'}} expected-note {{to match this '('}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear (argc > 0 ? argv[1] : argv[2]) // expected-error {{expected variable name}}
for (int k = 0; k < argc; ++k) ++k;
// expected-error@+2 {{only loop iteration variables are allowed in 'linear' clause in distribute directives}}
#pragma omp target
#pragma omp teams distribute parallel for simd linear (argc)
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear (S1) // expected-error {{'S1' does not refer to a value}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear (a, b) // expected-error {{linear variable with incomplete type 'S1'}} expected-error {{argument of a linear clause should be of integral or pointer type, not 'S2'}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear (argv[1]) // expected-error {{expected variable name}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear(e, g) // expected-error {{argument of a linear clause should be of integral or pointer type, not 'S4'}} expected-error {{argument of a linear clause should be of integral or pointer type, not 'S5'}}
for (int k = 0; k < argc; ++k) ++k;
#pragma omp target
#pragma omp teams distribute parallel for simd linear(h, C::x) // expected-error 2 {{threadprivate or thread local variable cannot be linear}}
for (int k = 0; k < argc; ++k) ++k;
foomain<int,char>(argc,argv); // expected-note {{in instantiation of function template specialization 'foomain<int, char>' requested here}}
return 0;
}
| {
"content_hash": "75218c3dbeaed70a0bb9cb56a891752c",
"timestamp": "",
"source": "github",
"line_count": 268,
"max_line_length": 493,
"avg_line_length": 41.115671641791046,
"alnum_prop": 0.6827298302931301,
"repo_name": "apple/swift-clang",
"id": "798aa565596717c74e30b1804278572c1cdde062",
"size": "11104",
"binary": false,
"copies": "1",
"ref": "refs/heads/stable",
"path": "test/OpenMP/teams_distribute_parallel_for_simd_linear_messages.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AppleScript",
"bytes": "1429"
},
{
"name": "Assembly",
"bytes": "48411"
},
{
"name": "Batchfile",
"bytes": "73"
},
{
"name": "C",
"bytes": "21215199"
},
{
"name": "C#",
"bytes": "27472"
},
{
"name": "C++",
"bytes": "72404163"
},
{
"name": "CMake",
"bytes": "175348"
},
{
"name": "CSS",
"bytes": "8395"
},
{
"name": "Cool",
"bytes": "22451"
},
{
"name": "Cuda",
"bytes": "413843"
},
{
"name": "Dockerfile",
"bytes": "2083"
},
{
"name": "Emacs Lisp",
"bytes": "17001"
},
{
"name": "Forth",
"bytes": "925"
},
{
"name": "Fortran",
"bytes": "8180"
},
{
"name": "HTML",
"bytes": "986812"
},
{
"name": "JavaScript",
"bytes": "42269"
},
{
"name": "LLVM",
"bytes": "27231"
},
{
"name": "M",
"bytes": "4660"
},
{
"name": "MATLAB",
"bytes": "69036"
},
{
"name": "Makefile",
"bytes": "8489"
},
{
"name": "Mathematica",
"bytes": "15066"
},
{
"name": "Mercury",
"bytes": "1193"
},
{
"name": "Objective-C",
"bytes": "3626772"
},
{
"name": "Objective-C++",
"bytes": "1033408"
},
{
"name": "Perl",
"bytes": "96256"
},
{
"name": "Python",
"bytes": "784542"
},
{
"name": "RenderScript",
"bytes": "741"
},
{
"name": "Roff",
"bytes": "10932"
},
{
"name": "Rust",
"bytes": "200"
},
{
"name": "Shell",
"bytes": "10663"
}
],
"symlink_target": ""
} |
module Azure::ServiceFabric::V6_5_0_36
  module Models
    #
    # Load Information about a Service Fabric application.
    #
    class ApplicationLoadInfo

      include MsRestAzure

      # @return [String] The identity of the application. This is an encoded
      # representation of the application name, used by the REST APIs to
      # identify the application resource.
      # Starting in version 6.0, hierarchical names are delimited with the
      # "\~" character: "fabric:/myapp/app1" becomes "myapp\~app1" in 6.0+
      # and "myapp/app1" in previous versions.
      attr_accessor :id

      # @return [Integer] The minimum number of nodes for this application.
      # Service Fabric reserves ReservedLoad * MinimumNodes capacity in the
      # cluster for this application instance. Zero for applications without
      # a defined application capacity.
      attr_accessor :minimum_nodes

      # @return [Integer] The maximum number of nodes this application is
      # allowed to span. Zero for applications without a defined application
      # capacity.
      attr_accessor :maximum_nodes

      # @return [Integer] The number of nodes on which this application is
      # instantiated. Zero for applications without a defined application
      # capacity.
      attr_accessor :node_count

      # @return [Array<ApplicationMetricDescription>] List of application
      # capacity metric description.
      attr_accessor :application_load_metric_information


      #
      # Mapper for ApplicationLoadInfo class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        # Builds the mapper fragment shared by all plain (scalar) fields.
        scalar = lambda do |wire_name, type_name|
          {
            client_side_validation: true,
            required: false,
            serialized_name: wire_name,
            type: {
              name: type_name
            }
          }
        end

        {
          client_side_validation: true,
          required: false,
          serialized_name: 'ApplicationLoadInfo',
          type: {
            name: 'Composite',
            class_name: 'ApplicationLoadInfo',
            model_properties: {
              id: scalar.call('Id', 'String'),
              minimum_nodes: scalar.call('MinimumNodes', 'Number'),
              maximum_nodes: scalar.call('MaximumNodes', 'Number'),
              node_count: scalar.call('NodeCount', 'Number'),
              application_load_metric_information: {
                client_side_validation: true,
                required: false,
                serialized_name: 'ApplicationLoadMetricInformation',
                type: {
                  name: 'Sequence',
                  element: {
                    client_side_validation: true,
                    required: false,
                    serialized_name: 'ApplicationMetricDescriptionElementType',
                    type: {
                      name: 'Composite',
                      class_name: 'ApplicationMetricDescription'
                    }
                  }
                }
              }
            }
          }
        }
      end
    end
  end
end
| {
"content_hash": "947ed682bcd7076323a50861d9ee9859",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 81,
"avg_line_length": 34.62280701754386,
"alnum_prop": 0.5348365847479098,
"repo_name": "Azure/azure-sdk-for-ruby",
"id": "0bd9569209767e1e7b417ba831e487b088123d5a",
"size": "4111",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data/azure_service_fabric/lib/6.5.0.36/generated/azure_service_fabric/models/application_load_info.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "345216400"
},
{
"name": "Shell",
"bytes": "305"
}
],
"symlink_target": ""
} |
#include <linux/kernel.h>
#include <linux/fs.h>
#include <linux/jffs2.h>
#include <linux/xattr.h>
#include <linux/mtd/mtd.h>
#include "nodelist.h"
/*
 * Read a "trusted." extended attribute of the inode behind @dentry.
 * The on-flash lookup itself is delegated to do_jffs2_getxattr().
 */
static int jffs2_trusted_getxattr(const struct xattr_handler *handler,
struct dentry *dentry, const char *name,
void *buffer, size_t size)
{
return do_jffs2_getxattr(d_inode(dentry), JFFS2_XPREFIX_TRUSTED,
name, buffer, size);
}
/*
 * Set (or replace, depending on @flags) a "trusted." extended attribute on
 * the inode behind @dentry via do_jffs2_setxattr().
 */
static int jffs2_trusted_setxattr(const struct xattr_handler *handler,
struct dentry *dentry, const char *name,
const void *buffer, size_t size, int flags)
{
return do_jffs2_setxattr(d_inode(dentry), JFFS2_XPREFIX_TRUSTED,
name, buffer, size, flags);
}
/*
 * "trusted." attributes are only listed for capable (CAP_SYS_ADMIN) callers.
 */
static bool jffs2_trusted_listxattr(struct dentry *dentry)
{
return capable(CAP_SYS_ADMIN);
}
/* Handler wiring the "trusted." xattr prefix to the jffs2_trusted_* callbacks. */
const struct xattr_handler jffs2_trusted_xattr_handler = {
.prefix = XATTR_TRUSTED_PREFIX,
.list = jffs2_trusted_listxattr,
.set = jffs2_trusted_setxattr,
.get = jffs2_trusted_getxattr
};
| {
"content_hash": "2da290f48f68b52b35fd125136d3cb1e",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 70,
"avg_line_length": 26.61111111111111,
"alnum_prop": 0.7181628392484343,
"repo_name": "mikedlowis-prototypes/albase",
"id": "b2555ef07a12b1f969f63cac461e0f06f7517533",
"size": "1190",
"binary": false,
"copies": "87",
"ref": "refs/heads/master",
"path": "source/kernel/fs/jffs2/xattr_trusted.c",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "10263145"
},
{
"name": "Awk",
"bytes": "55187"
},
{
"name": "Batchfile",
"bytes": "31438"
},
{
"name": "C",
"bytes": "551654518"
},
{
"name": "C++",
"bytes": "11818066"
},
{
"name": "CMake",
"bytes": "122998"
},
{
"name": "Clojure",
"bytes": "945"
},
{
"name": "DIGITAL Command Language",
"bytes": "232099"
},
{
"name": "GDB",
"bytes": "18113"
},
{
"name": "Gherkin",
"bytes": "5110"
},
{
"name": "HTML",
"bytes": "18291"
},
{
"name": "Lex",
"bytes": "58937"
},
{
"name": "M4",
"bytes": "561745"
},
{
"name": "Makefile",
"bytes": "7082768"
},
{
"name": "Objective-C",
"bytes": "634652"
},
{
"name": "POV-Ray SDL",
"bytes": "546"
},
{
"name": "Perl",
"bytes": "1229221"
},
{
"name": "Perl6",
"bytes": "11648"
},
{
"name": "Python",
"bytes": "316536"
},
{
"name": "Roff",
"bytes": "4201130"
},
{
"name": "Shell",
"bytes": "2436879"
},
{
"name": "SourcePawn",
"bytes": "2711"
},
{
"name": "TeX",
"bytes": "182745"
},
{
"name": "UnrealScript",
"bytes": "12824"
},
{
"name": "Visual Basic",
"bytes": "11568"
},
{
"name": "XS",
"bytes": "1239"
},
{
"name": "Yacc",
"bytes": "146537"
}
],
"symlink_target": ""
} |
package com.mathutil.sigmaTests;
import static org.junit.Assert.*;
import org.junit.Test;
import com.mathutil.operations.Sigma;
/**
 * Test case 11: verifies Sigma.sum with a negated variable term, "-x".
 *
 * @author danielxu
 *
 */
public class TestSigma11 {

    @Test
    public void test() {
        // Expected value: sum of (-x + 2^(-x)) for x = 1..3. All terms are
        // exact binary fractions, so the sum is exact and delta 0 is safe.
        double expected = 0;
        for (int x = 1; x <= 3; x++) {
            expected += -x + Math.pow(2, -x);
        }
        assertEquals(expected, Sigma.sum(1, 3, "-x+2^(-x)"), 0);
    }
}
| {
"content_hash": "7a10d354186749f56ebd986e9d4ba373",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 79,
"avg_line_length": 17.4,
"alnum_prop": 0.6275862068965518,
"repo_name": "NielXu/MathUtil",
"id": "9de2c733ed5a8006ce8bf4793bfab3d1a093847e",
"size": "435",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/java/com/mathutil/sigmaTests/TestSigma11.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "151449"
}
],
"symlink_target": ""
} |
/* API caller that requests the talks of a given event. */
@interface EventGetTalks : APICaller {
/* The event whose talks are being requested. */
EventDetailModel *event;
}
@property (nonatomic, strong) EventDetailModel *event;
/* Begins the call for the given event. */
- (void)call:(EventDetailModel *)event;
/* Invoked with the response payload. */
- (void)gotData:(NSObject *)obj;
/* Invoked when the request fails. */
- (void)gotError:(NSObject *)error;
@end
/* Factory category: creates an EventGetTalks caller bound to _delegate. */
@interface APICaller (APICaller_EventGetTalks)
+ (EventGetTalks *)EventGetTalks:(id)_delegate;
@end
/* Delegate protocol notified once the talk list (or an error) is available. */
@protocol EventGetTalksResponse
- (void)gotTalksForEvent:(TalkListModel *)tlm error:(APIError *)err;
@end
| {
"content_hash": "5421e4013342e809ea452860dbac636f",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 68,
"avg_line_length": 23.526315789473685,
"alnum_prop": 0.756152125279642,
"repo_name": "joindin/joind.in-iphone",
"id": "e0db2056f248553b9b02f82192ec57a22f926f6e",
"size": "2057",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "joindinapp/Classes/EventGetTalks.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Objective-C",
"bytes": "354395"
}
],
"symlink_target": ""
} |
import {Component, OnInit, TemplateRef, ViewEncapsulation} from '@angular/core';
import {ToastService} from "@app/services/toast.service";
import {AppNotification} from "@app/model/app-notification";
/**
 * Renders the application's queued notifications (ToastService.toasts) as
 * ngb-toast elements. Hovering a toast disables auto-hide; leaving re-enables
 * it. Note: `autohide` is a single flag shared by all currently shown toasts.
 */
@Component({
selector: 'app-toasts',
template: `
<ngb-toast
*ngFor="let toast of toastService.toasts"
[ngClass]="toast.classname"
[autohide]="autohide"
[delay]="toast.delay || 5000"
(hidden)="toastService.remove(toast); autohide=true;"
(mouseenter)="autohide = false"
(mouseleave)="autohide = true"
>
<ng-template ngbToastHeader >
<i class="fas fa-exclamation-triangle" *ngIf="toast.type=='error'" ></i>
<i class="fas fa-thumbs-up" *ngIf="toast.type=='success'" ></i>
<i class="fas fa-info" *ngIf="toast.type!='success'&&toast.type!='error'" ></i>
</ng-template>
<ng-template [ngIf]="isTemplate(toast)" [ngIfElse]="text">
<ng-template [ngTemplateOutlet]="toast.body" [ngTemplateOutletContext]="toast.contextData" ></ng-template>
</ng-template>
<ng-template #text>{{ toast.body }}</ng-template>
</ngb-toast>
`,
styleUrls:['./toast.component.scss'],
// Needed for styling the components
encapsulation: ViewEncapsulation.None
})
export class ToastComponent implements OnInit {
// Shared auto-hide flag; reset to true whenever any toast is dismissed.
autohide:boolean=true;
constructor(public toastService:ToastService) { }
ngOnInit(): void {
}
// True when the toast body is an embedded template rather than plain text.
isTemplate(toast:AppNotification) {
return toast.body instanceof TemplateRef; }
}
| {
"content_hash": "d6f3a57fc2173da8370941374237e7ec",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 114,
"avg_line_length": 32.255319148936174,
"alnum_prop": 0.6543535620052771,
"repo_name": "sadlil/archiva",
"id": "def16b0d2a8b828cdd7dad9192660a2c7605aed6",
"size": "2318",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "archiva-modules/archiva-web/archiva-webapp/src/main/archiva-web/src/app/modules/shared/toast/toast.component.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AGS Script",
"bytes": "27"
},
{
"name": "Batchfile",
"bytes": "948"
},
{
"name": "CSS",
"bytes": "193266"
},
{
"name": "Dockerfile",
"bytes": "3302"
},
{
"name": "Groovy",
"bytes": "8937"
},
{
"name": "HTML",
"bytes": "315360"
},
{
"name": "Java",
"bytes": "7001198"
},
{
"name": "JavaScript",
"bytes": "830548"
},
{
"name": "Mustache",
"bytes": "12049"
},
{
"name": "PowerShell",
"bytes": "7011"
},
{
"name": "SCSS",
"bytes": "26549"
},
{
"name": "Shell",
"bytes": "31357"
},
{
"name": "TypeScript",
"bytes": "334037"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html>
<!-- This file documents the GNU Assembler "as".
Copyright (C) 1991-2013 Free Software Foundation, Inc.
Permission is granted to copy, distribute and/or modify this document
under the terms of the GNU Free Documentation License, Version 1.3
or any later version published by the Free Software Foundation;
with no Invariant Sections, with no Front-Cover Texts, and with no
Back-Cover Texts. A copy of the license is included in the
section entitled "GNU Free Documentation License".
-->
<!-- Created by GNU Texinfo 5.2, http://www.gnu.org/software/texinfo/ -->
<head>
<title>Using as: HPPA Opcodes</title>
<meta name="description" content="Using as: HPPA Opcodes">
<meta name="keywords" content="Using as: HPPA Opcodes">
<meta name="resource-type" content="document">
<meta name="distribution" content="global">
<meta name="Generator" content="makeinfo">
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<link href="index.html#Top" rel="start" title="Top">
<link href="AS-Index.html#AS-Index" rel="index" title="AS Index">
<link href="index.html#SEC_Contents" rel="contents" title="Table of Contents">
<link href="HPPA_002dDependent.html#HPPA_002dDependent" rel="up" title="HPPA-Dependent">
<link href="ESA_002f390_002dDependent.html#ESA_002f390_002dDependent" rel="next" title="ESA/390-Dependent">
<link href="HPPA-Directives.html#HPPA-Directives" rel="prev" title="HPPA Directives">
<style type="text/css">
<!--
a.summary-letter {text-decoration: none}
blockquote.smallquotation {font-size: smaller}
div.display {margin-left: 3.2em}
div.example {margin-left: 3.2em}
div.indentedblock {margin-left: 3.2em}
div.lisp {margin-left: 3.2em}
div.smalldisplay {margin-left: 3.2em}
div.smallexample {margin-left: 3.2em}
div.smallindentedblock {margin-left: 3.2em; font-size: smaller}
div.smalllisp {margin-left: 3.2em}
kbd {font-style:oblique}
pre.display {font-family: inherit}
pre.format {font-family: inherit}
pre.menu-comment {font-family: serif}
pre.menu-preformatted {font-family: serif}
pre.smalldisplay {font-family: inherit; font-size: smaller}
pre.smallexample {font-size: smaller}
pre.smallformat {font-family: inherit; font-size: smaller}
pre.smalllisp {font-size: smaller}
span.nocodebreak {white-space:nowrap}
span.nolinebreak {white-space:nowrap}
span.roman {font-family:serif; font-weight:normal}
span.sansserif {font-family:sans-serif; font-weight:normal}
ul.no-bullet {list-style: none}
-->
</style>
</head>
<body lang="en" bgcolor="#FFFFFF" text="#000000" link="#0000FF" vlink="#800080" alink="#FF0000">
<a name="HPPA-Opcodes"></a>
<div class="header">
<p>
Previous: <a href="HPPA-Directives.html#HPPA-Directives" accesskey="p" rel="prev">HPPA Directives</a>, Up: <a href="HPPA_002dDependent.html#HPPA_002dDependent" accesskey="u" rel="up">HPPA-Dependent</a> [<a href="index.html#SEC_Contents" title="Table of contents" rel="contents">Contents</a>][<a href="AS-Index.html#AS-Index" title="Index" rel="index">Index</a>]</p>
</div>
<hr>
<a name="Opcodes-8"></a>
<h4 class="subsection">9.13.6 Opcodes</h4>
<p>For detailed information on the HPPA machine instruction set, see
<cite>PA-RISC Architecture and Instruction Set Reference Manual</cite>
(HP 09740-90039).
</p>
</body>
</html>
| {
"content_hash": "7f3338b4fed35ec9d820447810c0080f",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 372,
"avg_line_length": 42.278481012658226,
"alnum_prop": 0.7398203592814371,
"repo_name": "AlbandeCrevoisier/ldd-athens",
"id": "80cfdb317e06386a468c2a3fa158ce1ddf9526d4",
"size": "3340",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "gcc-linaro-4.9-2015.02-3-x86_64_arm-linux-gnueabihf/share/doc/gas/as.html/HPPA-Opcodes.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "10184236"
},
{
"name": "Awk",
"bytes": "40418"
},
{
"name": "Batchfile",
"bytes": "81753"
},
{
"name": "C",
"bytes": "566858455"
},
{
"name": "C++",
"bytes": "21399133"
},
{
"name": "Clojure",
"bytes": "971"
},
{
"name": "Cucumber",
"bytes": "5998"
},
{
"name": "FORTRAN",
"bytes": "11832"
},
{
"name": "GDB",
"bytes": "18113"
},
{
"name": "Groff",
"bytes": "2686457"
},
{
"name": "HTML",
"bytes": "34688334"
},
{
"name": "Lex",
"bytes": "56961"
},
{
"name": "Logos",
"bytes": "133810"
},
{
"name": "M4",
"bytes": "3325"
},
{
"name": "Makefile",
"bytes": "1685015"
},
{
"name": "Objective-C",
"bytes": "920162"
},
{
"name": "Perl",
"bytes": "752477"
},
{
"name": "Perl6",
"bytes": "3783"
},
{
"name": "Python",
"bytes": "533352"
},
{
"name": "Shell",
"bytes": "468244"
},
{
"name": "SourcePawn",
"bytes": "2711"
},
{
"name": "UnrealScript",
"bytes": "12824"
},
{
"name": "XC",
"bytes": "33970"
},
{
"name": "XS",
"bytes": "34909"
},
{
"name": "Yacc",
"bytes": "113516"
}
],
"symlink_target": ""
} |
using namespace std;

// Minimal LeetCode-style driver: constructs a Solution instance (the class is
// defined earlier in this file) and exits. No solution method is invoked here.
int main() {
    Solution s;
}
| {
"content_hash": "eafa37aea4a20df9606b76acb7d2c9cf",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 20,
"avg_line_length": 10.6,
"alnum_prop": 0.6226415094339622,
"repo_name": "leetcode-practice/leetcode",
"id": "dbffa88294f0d5471afa9183b6e92fdfd67b6344",
"size": "96",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cat/20160613/0289. Game Of Life/main.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "267707"
}
],
"symlink_target": ""
} |
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE191_Integer_Underflow__int_listen_socket_sub_83a.cpp
Label Definition File: CWE191_Integer_Underflow__int.label.xml
Template File: sources-sinks-83a.tmpl.cpp
*/
/*
* @description
* CWE: 191 Integer Underflow
* BadSource: listen_socket Read data using a listen socket (server side)
* GoodSource: Set data to a small, non-zero number (negative two)
* Sinks: sub
* GoodSink: Ensure there will not be an underflow before subtracting 1 from data
* BadSink : Subtract 1 from data, which can cause an Underflow
* Flow Variant: 83 Data flow: data passed to class constructor and destructor by declaring the class object on the stack
*
* */
#include "std_testcase.h"
#include "CWE191_Integer_Underflow__int_listen_socket_sub_83.h"
namespace CWE191_Integer_Underflow__int_listen_socket_sub_83
{
#ifndef OMITBAD
/* Flawed variant: hands data to the "bad" class; per flow variant 83 the
 * class object lives on the stack, and its constructor/destructor perform
 * the tainted source read and the underflowing subtraction. */
void bad()
{
    int data;
    /* Initialize data */
    data = 0;
    CWE191_Integer_Underflow__int_listen_socket_sub_83_bad badObject(data);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink */
static void goodG2B()
{
    int data;
    /* Initialize data */
    data = 0;
    CWE191_Integer_Underflow__int_listen_socket_sub_83_goodG2B goodG2BObject(data);
}
/* goodG2B uses the BadSource with the GoodSink */
static void goodB2G()
{
    int data;
    /* Initialize data */
    data = 0;
    CWE191_Integer_Underflow__int_listen_socket_sub_83_goodB2G goodB2GObject(data);
}
/* Entry point for the non-flawed variants: runs both good combinations. */
void good()
{
    goodG2B();
    goodB2G();
}
#endif /* OMITGOOD */
} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
its own for testing or for building a binary to use in testing binary
analysis tools. It is not used when compiling all the testcases as one
application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN

using namespace CWE191_Integer_Underflow__int_listen_socket_sub_83; /* so that we can use good and bad easily */

/* Standalone test driver: runs the good (non-flawed) and bad (flawed)
 * variants, subject to the OMITGOOD/OMITBAD build switches. */
int main(int argc, char * argv[])
{
    /* seed randomness */
    srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
    printLine("Calling good()...");
    good();
    printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
    printLine("Calling bad()...");
    bad();
    printLine("Finished bad()");
#endif /* OMITBAD */
    return 0;
}

#endif
| {
"content_hash": "622c04c2f5c1f22f75e566c6cac43619",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 121,
"avg_line_length": 26.706521739130434,
"alnum_prop": 0.6703296703296703,
"repo_name": "JianpingZeng/xcc",
"id": "4fae8a7157accba56337c6a9046296566bd62b98",
"size": "2457",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "xcc/test/juliet/testcases/CWE191_Integer_Underflow/s02/CWE191_Integer_Underflow__int_listen_socket_sub_83a.cpp",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
// Copyright 2020 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Component for position of terms editor.
*/
// Every editor directive should implement an alwaysEditable option. There
// may be additional customization options for the editor that should be passed
// in via initArgs.
import { Component, EventEmitter, Input, OnInit, Output } from '@angular/core';
import { downgradeComponent } from '@angular/upgrade/static';
import { AppConstants } from 'app.constants';
// One entry of AppConstants.POSITION_OF_TERMS_MAPPING: a machine-readable
// name plus the label displayed to the user.
// Fix: the fields were previously typed as the string-literal type 'string'
// (meaning only the exact text "string" was assignable), which contradicts
// how they are used (compared against arbitrary position names below); they
// are ordinary strings.
export interface PositionOfTerm {
  readonly name: string;
  readonly humanReadableName: string;
}
@Component({
  selector: 'position-of-terms-editor',
  templateUrl: './position-of-terms-editor.component.html',
  styleUrls: []
})
export class PositionOfTermsEditorComponent implements OnInit {
  // These properties are initialized using Angular lifecycle hooks
  // and we need to do non-null assertion, for more information see
  // https://github.com/oppia/oppia/wiki/Guide-on-defining-types#ts-7-1
  @Input() modalId!: symbol;
  @Input() value!: string;
  localValue!: PositionOfTerm;
  @Output() valueChanged = new EventEmitter();
  alwaysEditable = true;
  positionOfTerms = AppConstants.POSITION_OF_TERMS_MAPPING;

  constructor() { }

  ngOnInit(): void {
    // Start from the third mapping entry as the default, then let any entry
    // whose name matches the incoming value override it.
    this.localValue = this.positionOfTerms[2] as unknown as PositionOfTerm;
    for (const candidate of this.positionOfTerms) {
      if (candidate.name === this.value) {
        this.localValue = candidate as unknown as PositionOfTerm;
      }
    }
    // When the caller supplied no value, adopt the local default and emit it
    // so the parent component stays in sync.
    if (this.value === null || this.value === undefined) {
      this.value = this.localValue.name;
      this.valueChanged.emit(this.value);
    }
  }

  onChangePosition(name: string): void {
    // Record the newly selected position, notify listeners, and mirror the
    // matching mapping entry into localValue for the template.
    this.value = name;
    this.valueChanged.emit(this.value);
    const selected = this.positionOfTerms.find(
      (candidate) => candidate.name === this.value);
    if (selected) {
      this.localValue = selected as unknown as PositionOfTerm;
    }
  }
}
// Expose the Angular component to the legacy AngularJS side of the codebase
// via ngUpgrade's downgradeComponent bridge.
angular.module('oppia').directive('positionOfTermsEditor', downgradeComponent({
  component: PositionOfTermsEditorComponent
}));
| {
"content_hash": "814faa1a9443ac6da8cd36f05b045013",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 79,
"avg_line_length": 36.306666666666665,
"alnum_prop": 0.7124495042232831,
"repo_name": "brianrodri/oppia",
"id": "cb0b7752e81ffc9f247fb533b0297b42b0b91057",
"size": "2723",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "extensions/objects/templates/position-of-terms-editor.component.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "487903"
},
{
"name": "HTML",
"bytes": "1748056"
},
{
"name": "JavaScript",
"bytes": "1176446"
},
{
"name": "PEG.js",
"bytes": "71377"
},
{
"name": "Python",
"bytes": "14169091"
},
{
"name": "Shell",
"bytes": "2239"
},
{
"name": "TypeScript",
"bytes": "13316709"
}
],
"symlink_target": ""
} |
// Copyright (c) 2007-2017 ppy Pty Ltd <contact@ppy.sh>.
// Licensed under the MIT Licence - https://raw.githubusercontent.com/ppy/osu/master/LICENCE
using osu.Framework.Allocation;
using osu.Framework.Graphics;
using osu.Game.Beatmaps;
using osu.Game.Rulesets.Objects.Drawables;
using osu.Game.Rulesets.Objects.Types;
using osu.Game.Rulesets.Replays;
using osu.Game.Rulesets.Scoring;
using osu.Game.Rulesets.Taiko.Beatmaps;
using osu.Game.Rulesets.Taiko.Judgements;
using osu.Game.Rulesets.Taiko.Objects;
using osu.Game.Rulesets.Taiko.Objects.Drawables;
using osu.Game.Rulesets.Taiko.Scoring;
using osu.Game.Rulesets.UI;
using osu.Game.Rulesets.Taiko.Replays;
using OpenTK;
using osu.Game.Rulesets.Beatmaps;
namespace osu.Game.Rulesets.Taiko.UI
{
    /// <summary>
    /// Renderer for osu!taiko gameplay: wires the taiko-specific playfield,
    /// beatmap converter, score processor, replay handler and drawable
    /// hit-object factory together, and generates beat bar lines.
    /// </summary>
    public class TaikoHitRenderer : HitRenderer<TaikoHitObject, TaikoJudgement>
    {
        public TaikoHitRenderer(WorkingBeatmap beatmap)
            : base(beatmap)
        {
        }

        [BackgroundDependencyLoader]
        private void load()
        {
            loadBarLines();
        }

        /// <summary>
        /// Walks the timing points of the beatmap and adds a bar line to the
        /// playfield on every bar, up to just past the last hit object.
        /// Every TimeSignature-th beat gets a major (emphasised) bar line.
        /// </summary>
        private void loadBarLines()
        {
            var taikoPlayfield = Playfield as TaikoPlayfield;

            if (taikoPlayfield == null)
                return;

            TaikoHitObject lastObject = Beatmap.HitObjects[Beatmap.HitObjects.Count - 1];
            // NOTE(review): "1 +" binds before "??" here, i.e. this parses as
            // (1 + EndTime) ?? StartTime; for a null EndTime the addition
            // yields null and StartTime is used. Looks intended (one ms past
            // the end), but confirm before restructuring.
            double lastHitTime = 1 + (lastObject as IHasEndTime)?.EndTime ?? lastObject.StartTime;

            // Only timing-change control points start a new bar-line sequence.
            var timingPoints = Beatmap.TimingInfo.ControlPoints.FindAll(cp => cp.TimingChange);

            if (timingPoints.Count == 0)
                return;

            int currentIndex = 0;
            int currentBeat = 0;
            double time = timingPoints[currentIndex].Time;

            while (time <= lastHitTime)
            {
                // Advance to the next timing point once we have passed it,
                // restarting the beat count for the new section.
                int nextIndex = currentIndex + 1;
                if (nextIndex < timingPoints.Count && time > timingPoints[nextIndex].Time)
                {
                    currentIndex = nextIndex;
                    time = timingPoints[currentIndex].Time;
                    currentBeat = 0;
                }

                var currentPoint = timingPoints[currentIndex];

                var barLine = new BarLine
                {
                    StartTime = time,
                };

                barLine.ApplyDefaults(Beatmap.TimingInfo, Beatmap.BeatmapInfo.Difficulty);

                // First beat of every bar is drawn as a major bar line.
                bool isMajor = currentBeat % (int)currentPoint.TimeSignature == 0;
                taikoPlayfield.AddBarLine(isMajor ? new DrawableBarLineMajor(barLine) : new DrawableBarLine(barLine));

                // For fast sections (< 800ms per beat) step a whole bar at a
                // time instead of a single beat.
                double bl = currentPoint.BeatLength;
                if (bl < 800)
                    bl *= (int)currentPoint.TimeSignature;

                time += bl;
                currentBeat++;
            }
        }

        /// <summary>
        /// Scales the playfield to a constant height relative to a 16:9
        /// aspect ratio, clamping extreme window shapes.
        /// </summary>
        protected override Vector2 GetPlayfieldAspectAdjust()
        {
            const float default_relative_height = TaikoPlayfield.DEFAULT_PLAYFIELD_HEIGHT / 768;
            const float default_aspect = 16f / 9f;

            float aspectAdjust = MathHelper.Clamp(DrawWidth / DrawHeight, 0.4f, 4) / default_aspect;

            return new Vector2(1, default_relative_height * aspectAdjust);
        }

        public override ScoreProcessor CreateScoreProcessor() => new TaikoScoreProcessor(this);

        protected override BeatmapConverter<TaikoHitObject> CreateBeatmapConverter() => new TaikoBeatmapConverter();

        protected override Playfield<TaikoHitObject, TaikoJudgement> CreatePlayfield() => new TaikoPlayfield
        {
            Anchor = Anchor.CentreLeft,
            Origin = Anchor.CentreLeft
        };

        /// <summary>
        /// Maps each hit-object type to its drawable representation; strong
        /// hits get the *Strong drawables. Returns null for unknown types.
        /// </summary>
        protected override DrawableHitObject<TaikoHitObject, TaikoJudgement> GetVisualRepresentation(TaikoHitObject h)
        {
            var centreHit = h as CentreHit;
            if (centreHit != null)
            {
                if (h.IsStrong)
                    return new DrawableCentreHitStrong(centreHit);
                return new DrawableCentreHit(centreHit);
            }

            var rimHit = h as RimHit;
            if (rimHit != null)
            {
                if (h.IsStrong)
                    return new DrawableRimHitStrong(rimHit);
                return new DrawableRimHit(rimHit);
            }

            var drumRoll = h as DrumRoll;
            if (drumRoll != null)
            {
                return new DrawableDrumRoll(drumRoll);
            }

            var swell = h as Swell;
            if (swell != null)
                return new DrawableSwell(swell);

            return null;
        }

        protected override FramedReplayInputHandler CreateReplayInputHandler(Replay replay) => new TaikoFramedReplayInputHandler(replay);
    }
}
| {
"content_hash": "858fe288aff996200378b57d78670825",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 137,
"avg_line_length": 34.94244604316547,
"alnum_prop": 0.5818406423718344,
"repo_name": "RedNesto/osu",
"id": "db15193ce56f7a026df0db28930d5fcb532e5c7b",
"size": "4859",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "osu.Game.Rulesets.Taiko/UI/TaikoHitRenderer.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "1502128"
}
],
"symlink_target": ""
} |
/**
* A script for GCC-dehydra to analyze the Mozilla codebase and catch
* patterns that are incorrect, but which cannot be detected by a compiler. */
/**
* Activate Treehydra outparams analysis if running in Treehydra.
*/
/**
 * Detect whether we are running under Treehydra rather than Dehydra:
 * only Treehydra defines TREE_CODE on the global (this) object.
 */
function treehydra_enabled() {
  return Object.prototype.hasOwnProperty.call(this, 'TREE_CODE');
}
include('unstable/getopt.js');
[options, args] = getopt();
sys.include_path.push(options.topsrcdir);
include('string-format.js');
// Analysis modules loaded from the command line; hooks are forwarded to each.
let modules = [];

// Load each comma-separated script named in modulelist, giving every module
// its own scope object that delegates to the global scope, and remember the
// module so compiler hooks can be forwarded to it.
// (Uses the legacy SpiderMonkey 'for each' and __proto__ extensions.)
function LoadModules(modulelist)
{
  if (modulelist == "")
    return;

  let modulenames = modulelist.split(',');
  for each (let modulename in modulenames) {
    let module = { __proto__: this };
    include(modulename, module);
    modules.push(module);
  }
}

// Dehydra modules always load; Treehydra modules only under Treehydra.
LoadModules(options['dehydra-modules']);
if (treehydra_enabled())
  LoadModules(options['treehydra-modules']);
// Dehydra hook: forward each class/type the compiler sees to every loaded
// module implementing process_type.
// NOTE(review): setup_forwarding below also installs a generic 'process_type'
// on the global object, which appears to rebind and supersede this explicit
// definition once the forwarding loop runs -- confirm which is intended.
function process_type(c)
{
  for each (let module in modules)
    if (module.hasOwnProperty('process_type'))
      module.process_type(c);
}
// Return true if declaration/type c carries the GCC user attribute
// __attribute__((user("attrname"))). Returns false when c has no
// attributes at all.
function hasAttribute(c, attrname)
{
  var attr;

  if (c.attributes === undefined)
    return false;

  // 'for each' iterates values (legacy SpiderMonkey extension).
  for each (attr in c.attributes)
    if (attr.name == 'user' && attr.value[0] == attrname)
      return true;

  return false;
}
// Return true when two methods have identical signatures: same short name,
// same virtual-ness and static-ness, and pairwise-identical parameter types.
// Useful for detecting method overrides.
function signaturesMatch(m1, m2)
{
  if (m1.shortName != m2.shortName)
    return false;

  // Normalize to booleans: a missing isVirtual counts the same as false.
  if (Boolean(m1.isVirtual) != Boolean(m2.isVirtual))
    return false;

  if (m1.isStatic != m2.isStatic)
    return false;

  let params1 = m1.type.parameters;
  let params2 = m2.type.parameters;

  if (params1.length != params2.length)
    return false;

  // Parameter types must be the very same objects (identity, not equality).
  for (let i = 0; i < params1.length; ++i) {
    if (params1[i] !== params2[i])
      return false;
  }

  return true;
}
// Compiler hook names that should be forwarded to every loaded module.
const forward_functions = [
  'process_type',
  'process_tree_type',
  'process_decl',
  'process_tree_decl',
  'process_function',
  'process_tree',
  'process_cp_pre_genericize',
  'input_end'
];

// Install a global function named n that forwards its arguments to each
// loaded module implementing n.
// NOTE(review): for 'process_type' this rebinds (and thus replaces) the
// explicit process_type function defined earlier in this file.
function setup_forwarding(n)
{
  this[n] = function() {
    for each (let module in modules) {
      if (module.hasOwnProperty(n)) {
        module[n].apply(this, arguments);
      }
    }
  }
}

for each (let n in forward_functions)
  setup_forwarding(n);
| {
"content_hash": "b019edc37c8bc72283fbd62bfc26ba1c",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 78,
"avg_line_length": 19.86111111111111,
"alnum_prop": 0.6494172494172494,
"repo_name": "jubos/meguro",
"id": "7a64eeadb1ba819d4051ad40d80923f2a6af0954",
"size": "2145",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deps/spidermonkey/config/static-checking.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "4039"
},
{
"name": "C",
"bytes": "9744445"
},
{
"name": "C++",
"bytes": "6605756"
},
{
"name": "D",
"bytes": "3439"
},
{
"name": "JavaScript",
"bytes": "24155571"
},
{
"name": "Objective-C",
"bytes": "2071"
},
{
"name": "Perl",
"bytes": "291508"
},
{
"name": "Python",
"bytes": "597422"
},
{
"name": "Shell",
"bytes": "119472"
}
],
"symlink_target": ""
} |
export { default, initialize } from 'ember-es-adapter/initializers/index';
| {
"content_hash": "f96241a2f77ea73bce9a881d31ef345d",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 74,
"avg_line_length": 75,
"alnum_prop": 0.7733333333333333,
"repo_name": "seanstar12/ember-es-adapter",
"id": "8af782316c1d8a01f1cf66e69f3a153e9bdc4e38",
"size": "75",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/initializers/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1713"
},
{
"name": "JavaScript",
"bytes": "44290"
}
],
"symlink_target": ""
} |
// Thanks to Jacob Munkberg (jacob@cs.lth.se) for the shortcut of using SVD to do the equivalent of principal components analysis
// x10000000 5555.1x4 64p 2bi (30b)
#include "bits.h"
#include "tile.h"
#include "avpcl.h"
#include "nvcore/Debug.h"
#include "nvmath/Vector.inl"
#include "nvmath/Matrix.inl"
#include "nvmath/Fitting.h"
#include "utils.h"
#include "endpts.h"
#include <cstring>
#include <float.h>
#include "shapes_two.h"
using namespace nv;
using namespace AVPCL;
#define NLSBMODES 4 // number of different lsb modes per region. since we have two .1 per region, that can have 4 values
#define NINDICES 4
#define INDEXBITS 2
#define HIGH_INDEXBIT (1<<(INDEXBITS-1))
#define DENOM (NINDICES-1)
#define BIAS (DENOM/2)
// WORK: determine optimal traversal pattern to search for best shape -- what does the error curve look like?
// i.e. can we search shapes in a particular order so we can see the global error minima easily and
// stop without having to touch all shapes?
#define POS_TO_X(pos) ((pos)&3)
#define POS_TO_Y(pos) (((pos)>>2)&3)
#define NBITSIZES (NREGIONS*2)
#define ABITINDEX(region) (2*(region)+0)
#define BBITINDEX(region) (2*(region)+1)
// Per-channel endpoint bit widths: one entry per endpoint per region
// (NREGIONS regions x 2 endpoints = NBITSIZES).
struct ChanBits
{
    int nbitsizes[NBITSIZES];   // bitsizes for one channel
};

// Describes one encoding pattern of this mode.
struct Pattern
{
    ChanBits chan[NCHANNELS_RGBA];// bit patterns used per channel
    int transformed;    // if 0, deltas are unsigned and no transform; otherwise, signed and transformed
    int mode;       // associated mode value
    int modebits;   // number of mode bits
    char *encoding; // verilog description of encoding for this mode
};

#define NPATTERNS 1
#define NREGIONS 2

// Mode 7 has a single pattern: 5-bit endpoints for every channel in both
// regions, no delta transform, mode byte 0x80 (8 mode bits).
static Pattern patterns[NPATTERNS] =
{
    // red      green       blue        alpha   xfm mode  mb
    5,5,5,5,    5,5,5,5,    5,5,5,5,    5,5,5,5,    0, 0x80, 8, "",
};

// Per-region endpoint precisions for one pattern.
struct RegionPrec
{
    int endpt_a_prec[NCHANNELS_RGBA];
    int endpt_b_prec[NCHANNELS_RGBA];
};

struct PatternPrec
{
    RegionPrec region_precs[NREGIONS];
};

// this is the precision for each channel and region
// NOTE: this MUST match the corresponding data in "patterns" above -- WARNING: there is NO nvAssert to check this!
static PatternPrec pattern_precs[NPATTERNS] =
{
    5,5,5,5, 5,5,5,5, 5,5,5,5, 5,5,5,5,
};
// Return the number of bits needed to store n, including a sign bit when
// issigned is set. Zero needs no bits in either case; for negative n the
// caller must pass issigned == true.
static int nbits(int n, bool issigned)
{
    if (n == 0)
        return 0;       // no bits needed for 0, signed or not

    int count = 0;

    if (n > 0)
    {
        while (n)
        {
            ++count;
            n >>= 1;
        }
        return issigned ? count + 1 : count;
    }

    // negative values only make sense in a signed field
    nvAssert (issigned);
    while (n < -1)
    {
        ++count;
        n >>= 1;
    }
    return count + 1;
}
// Mode 7 endpoints are never delta-transformed (patterns[].transformed == 0),
// so this path must be unreachable.
static void transform_forward(IntEndptsRGBA_2 ep[NREGIONS])
{
    nvUnreachable();
}
// Inverse of transform_forward; likewise unreachable for mode 7.
static void transform_inverse(IntEndptsRGBA_2 ep[NREGIONS])
{
    nvUnreachable();
}
/*
we're using this table to assign lsbs
abgr >=2 correct
0000 0 0
0001 0 0
0010 0 0
0011 1 x1
0100 0 0
0101 1 x1
0110 1 x1
0111 1 1
1000 0 0
1001 1 x0
1010 1 x0
1011 1 1
1100 1 x0
1101 1 1
1110 1 1
1111 1 1
we need 8 0's and 8 1's. the x's can be either 0 or 1 as long as you get 8/8.
I choose to assign the lsbs so that the rgb channels are as good as possible.
*/
// 6666 ->5555.1, use the "correct" column above to assign the lsb
// Compress 6666 endpoints to 5555 plus one shared lsb per endpoint. The
// shared lsb is a majority vote over the RGB lsbs (alpha does not vote),
// matching the "correct" column of the table above.
static void compress_one(const IntEndptsRGBA& endpts, IntEndptsRGBA_2& compr_endpts)
{
    int ones;

    ones = 0;
    for (int ch = 0; ch < NCHANNELS_RGBA; ++ch)
    {
        // the alpha channel's lsb does not participate in the vote
        if (ch != CHANNEL_A)
            ones += endpts.A[ch] & 1;
        compr_endpts.A[ch] = endpts.A[ch] >> 1;
        nvAssert (compr_endpts.A[ch] < 32);
    }
    compr_endpts.a_lsb = ones >= 2;

    ones = 0;
    for (int ch = 0; ch < NCHANNELS_RGBA; ++ch)
    {
        if (ch != CHANNEL_A)
            ones += endpts.B[ch] & 1;
        compr_endpts.B[ch] = endpts.B[ch] >> 1;
        nvAssert (compr_endpts.B[ch] < 32);
    }
    compr_endpts.b_lsb = ones >= 2;
}
// Inverse of compress_one: rebuild 6666 endpoints by shifting each 5-bit
// value left and appending that endpoint's shared lsb to every channel.
static void uncompress_one(const IntEndptsRGBA_2& compr_endpts, IntEndptsRGBA& endpts)
{
    for (int ch = 0; ch < NCHANNELS_RGBA; ++ch)
        endpts.A[ch] = (compr_endpts.A[ch] << 1) | compr_endpts.a_lsb;

    for (int ch = 0; ch < NCHANNELS_RGBA; ++ch)
        endpts.B[ch] = (compr_endpts.B[ch] << 1) | compr_endpts.b_lsb;
}
// Expand every region's compressed 5555.1 endpoints back to full 6666 form.
static void uncompress_endpoints(const IntEndptsRGBA_2 compr_endpts[NREGIONS], IntEndptsRGBA endpts[NREGIONS])
{
    for (int region = 0; region < NREGIONS; ++region)
        uncompress_one(compr_endpts[region], endpts[region]);
}
// Compress every region's full 6666 endpoints down to 5555.1 form.
static void compress_endpoints(const IntEndptsRGBA endpts[NREGIONS], IntEndptsRGBA_2 compr_endpts[NREGIONS])
{
    for (int region = 0; region < NREGIONS; ++region)
        compress_one(endpts[region], compr_endpts[region]);
}
// Quantize floating-point endpoints to each region's integer precision
// (worked in uncompressed 6-bit space, hence the +1), then compress them to
// 5555 plus shared lsbs.
static void quantize_endpts(const FltEndpts endpts[NREGIONS], const PatternPrec &pattern_prec, IntEndptsRGBA_2 q_endpts[NREGIONS])
{
    IntEndptsRGBA full_endpts[NREGIONS];

    for (int region = 0; region < NREGIONS; ++region)
    {
        full_endpts[region].A[0] = Utils::quantize(endpts[region].A.x, pattern_prec.region_precs[region].endpt_a_prec[0]+1);    // +1 since we are in uncompressed space
        full_endpts[region].A[1] = Utils::quantize(endpts[region].A.y, pattern_prec.region_precs[region].endpt_a_prec[1]+1);
        full_endpts[region].A[2] = Utils::quantize(endpts[region].A.z, pattern_prec.region_precs[region].endpt_a_prec[2]+1);
        full_endpts[region].A[3] = Utils::quantize(endpts[region].A.w, pattern_prec.region_precs[region].endpt_a_prec[3]+1);
        full_endpts[region].B[0] = Utils::quantize(endpts[region].B.x, pattern_prec.region_precs[region].endpt_b_prec[0]+1);
        full_endpts[region].B[1] = Utils::quantize(endpts[region].B.y, pattern_prec.region_precs[region].endpt_b_prec[1]+1);
        full_endpts[region].B[2] = Utils::quantize(endpts[region].B.z, pattern_prec.region_precs[region].endpt_b_prec[2]+1);
        full_endpts[region].B[3] = Utils::quantize(endpts[region].B.w, pattern_prec.region_precs[region].endpt_b_prec[3]+1);
        compress_one(full_endpts[region], q_endpts[region]);
    }
}
// swap endpoints as needed to ensure that the indices at index_one and index_two have a 0 high-order bit
// index_two is 0 at x=0 y=0 and 15 at x=3 y=3 so y = (index >> 2) & 3 and x = index & 3
// Swap endpoints (including their shared lsbs) and complement indices for any
// region whose anchor texel has its high index bit set, so that the anchor's
// index can be stored with one fewer bit.
// Fix: the index-flip loop previously redeclared x and y, shadowing the
// anchor coordinates computed above; renamed to ix/iy (behavior unchanged).
static void swap_indices(IntEndptsRGBA_2 endpts[NREGIONS], int indices[Tile::TILE_H][Tile::TILE_W], int shapeindex)
{
    for (int region = 0; region < NREGIONS; ++region)
    {
        int position = SHAPEINDEX_TO_COMPRESSED_INDICES(shapeindex,region);

        int x = POS_TO_X(position);
        int y = POS_TO_Y(position);
        nvAssert(REGION(x,y,shapeindex) == region);     // double check the table

        if (indices[y][x] & HIGH_INDEXBIT)
        {
            // high bit is set, swap the endpts and indices for this region
            int t;
            for (int i=0; i<NCHANNELS_RGBA; ++i)
            {
                t = endpts[region].A[i]; endpts[region].A[i] = endpts[region].B[i]; endpts[region].B[i] = t;
            }
            t = endpts[region].a_lsb; endpts[region].a_lsb = endpts[region].b_lsb; endpts[region].b_lsb = t;

            // complement every index in this region
            for (int iy = 0; iy < Tile::TILE_H; iy++)
            for (int ix = 0; ix < Tile::TILE_W; ix++)
                if (REGION(ix,iy,shapeindex) == region)
                    indices[iy][ix] = NINDICES - 1 - indices[iy][ix];
        }
    }
}
// Mode 7 uses no delta encoding, so quantized endpoints always fit the
// pattern's bit fields; always true.
static bool endpts_fit(IntEndptsRGBA_2 endpts[NREGIONS], const Pattern &p)
{
    return true;
}
// Write the block header: mode bits, shape index, then the endpoint fields in
// channel-major order (for each channel: region 0 A,B then region 1 A,B),
// then each region's two shared lsbs. The header is exactly 98 bits, leaving
// 30 bits for indices.
static void write_header(const IntEndptsRGBA_2 endpts[NREGIONS], int shapeindex, const Pattern &p, Bits &out)
{
    out.write(p.mode, p.modebits);
    out.write(shapeindex, SHAPEBITS);

    for (int j=0; j<NCHANNELS_RGBA; ++j)
        for (int i=0; i<NREGIONS; ++i)
        {
            out.write(endpts[i].A[j], p.chan[j].nbitsizes[ABITINDEX(i)]);
            out.write(endpts[i].B[j], p.chan[j].nbitsizes[BBITINDEX(i)]);
        }

    for (int i=0; i<NREGIONS; ++i)
    {
        out.write(endpts[i].a_lsb, 1);
        out.write(endpts[i].b_lsb, 1);
    }

    nvAssert (out.getptr() == 98);
}
// Parse the 98-bit header written by write_header. pat_index is always 0
// since mode 7 defines a single pattern.
static void read_header(Bits &in, IntEndptsRGBA_2 endpts[NREGIONS], int &shapeindex, Pattern &p, int &pat_index)
{
    // consumes the mode bits from the stream; the value itself is unused here
    int mode = AVPCL::getmode(in);

    pat_index = 0;
    nvAssert (pat_index >= 0 && pat_index < NPATTERNS);
    nvAssert (in.getptr() == patterns[pat_index].modebits);

    shapeindex = in.read(SHAPEBITS);
    p = patterns[pat_index];

    for (int j=0; j<NCHANNELS_RGBA; ++j)
        for (int i=0; i<NREGIONS; ++i)
        {
            endpts[i].A[j] = in.read(p.chan[j].nbitsizes[ABITINDEX(i)]);
            endpts[i].B[j] = in.read(p.chan[j].nbitsizes[BBITINDEX(i)]);
        }

    for (int i=0; i<NREGIONS; ++i)
    {
        endpts[i].a_lsb = in.read(1);
        endpts[i].b_lsb = in.read(1);
    }

    nvAssert (in.getptr() == 98);
}
// WORK PLACEHOLDER -- keep it simple for now
// Write the per-texel palette indices in raster order. Each region's anchor
// texel is written with one fewer bit: swap_indices guaranteed its high bit
// is zero.
static void write_indices(const int indices[Tile::TILE_H][Tile::TILE_W], int shapeindex, Bits &out)
{
    int anchors[NREGIONS];

    for (int r = 0; r < NREGIONS; ++r)
        anchors[r] = SHAPEINDEX_TO_COMPRESSED_INDICES(shapeindex,r);

    for (int pos = 0; pos < Tile::TILE_TOTAL; ++pos)
    {
        bool is_anchor = false;
        for (int r = 0; r < NREGIONS && !is_anchor; ++r)
            is_anchor = (anchors[r] == pos);

        out.write(indices[POS_TO_Y(pos)][POS_TO_X(pos)], is_anchor ? INDEXBITS - 1 : INDEXBITS);
    }
}
// Read the per-texel palette indices in raster order; the anchor texel of
// each region was written with one fewer bit (implicit zero high bit).
static void read_indices(Bits &in, int shapeindex, int indices[Tile::TILE_H][Tile::TILE_W])
{
    int anchors[NREGIONS];

    for (int r = 0; r < NREGIONS; ++r)
        anchors[r] = SHAPEINDEX_TO_COMPRESSED_INDICES(shapeindex,r);

    for (int pos = 0; pos < Tile::TILE_TOTAL; ++pos)
    {
        bool is_anchor = false;
        for (int r = 0; r < NREGIONS && !is_anchor; ++r)
            is_anchor = (anchors[r] == pos);

        indices[POS_TO_Y(pos)][POS_TO_X(pos)] = in.read(is_anchor ? INDEXBITS - 1 : INDEXBITS);
    }
}
// Assemble one complete compressed block: 98-bit header followed by 30 bits
// of indices, exactly filling AVPCL::BITSIZE.
static void emit_block(const IntEndptsRGBA_2 endpts[NREGIONS], int shapeindex, const Pattern &p, const int indices[Tile::TILE_H][Tile::TILE_W], char *block)
{
    Bits out(block, AVPCL::BITSIZE);

    write_header(endpts, shapeindex, p, out);

    write_indices(indices, shapeindex, out);

    nvAssert(out.getptr() == AVPCL::BITSIZE);
}
// Build the NINDICES-entry interpolated palette for one region: uncompress
// its endpoints, unquantize each channel (+1 precision since we are back in
// uncompressed space), and lerp between the endpoints per channel.
static void generate_palette_quantized(const IntEndptsRGBA_2 &endpts_2, const RegionPrec &region_prec, Vector4 palette[NINDICES])
{
    IntEndptsRGBA endpts;

    uncompress_one(endpts_2, endpts);

    // scale endpoints
    int a, b;       // really need a IntVec4...

    a = Utils::unquantize(endpts.A[0], region_prec.endpt_a_prec[0]+1);  // +1 since we are in uncompressed space
    b = Utils::unquantize(endpts.B[0], region_prec.endpt_b_prec[0]+1);

    // interpolate
    for (int i = 0; i < NINDICES; ++i)
        palette[i].x = float(Utils::lerp(a, b, i, BIAS, DENOM));

    a = Utils::unquantize(endpts.A[1], region_prec.endpt_a_prec[1]+1);
    b = Utils::unquantize(endpts.B[1], region_prec.endpt_b_prec[1]+1);

    // interpolate
    for (int i = 0; i < NINDICES; ++i)
        palette[i].y = float(Utils::lerp(a, b, i, BIAS, DENOM));

    a = Utils::unquantize(endpts.A[2], region_prec.endpt_a_prec[2]+1);
    b = Utils::unquantize(endpts.B[2], region_prec.endpt_b_prec[2]+1);

    // interpolate
    for (int i = 0; i < NINDICES; ++i)
        palette[i].z = float(Utils::lerp(a, b, i, BIAS, DENOM));

    a = Utils::unquantize(endpts.A[3], region_prec.endpt_a_prec[3]+1);
    b = Utils::unquantize(endpts.B[3], region_prec.endpt_b_prec[3]+1);

    // interpolate
    for (int i = 0; i < NINDICES; ++i)
        palette[i].w = float(Utils::lerp(a, b, i, BIAS, DENOM));
}
// sign extend but only if it was transformed
// Mode 7 never transforms endpoints, so sign extension is never needed.
static void sign_extend(Pattern &p, IntEndptsRGBA_2 endpts[NREGIONS])
{
    nvUnreachable();
}
// Decode one mode-7 block into the tile: parse header and indices, rebuild
// each region's interpolated palette, and look up every texel's color.
void AVPCL::decompress_mode7(const char *block, Tile &t)
{
    Bits in(block, AVPCL::BITSIZE);

    Pattern p;
    IntEndptsRGBA_2 endpts[NREGIONS];
    int shapeindex, pat_index;

    read_header(in, endpts, shapeindex, p, pat_index);

    if (p.transformed)
    {
        // never taken for mode 7: patterns[].transformed == 0
        sign_extend(p, endpts);
        transform_inverse(endpts);
    }

    Vector4 palette[NREGIONS][NINDICES];
    for (int r = 0; r < NREGIONS; ++r)
        generate_palette_quantized(endpts[r], pattern_precs[pat_index].region_precs[r], &palette[r][0]);

    int indices[Tile::TILE_H][Tile::TILE_W];

    read_indices(in, shapeindex, indices);

    nvAssert(in.getptr() == AVPCL::BITSIZE);

    // lookup
    for (int y = 0; y < Tile::TILE_H; y++)
    for (int x = 0; x < Tile::TILE_W; x++)
        t.data[y][x] = palette[REGION(x,y,shapeindex)][indices[y][x]];
}
// given a collection of colors and quantized endpoints, generate a palette, choose best entries, and return a single toterr
// Given np colors and quantized endpoints for one region, build the palette,
// pick the best palette entry for each color, and return the total error.
// Early-outs with FLT_MAX (and -1-filled indices) once the running error
// exceeds current_err. Relies on the palette being monotonic per channel:
// once the per-entry error increases, the search for that color stops.
// Fix: removed an unused outer 'Vector4 err' local that was shadowed by the
// inner 'float err'.
static float map_colors(const Vector4 colors[], int np, const IntEndptsRGBA_2 &endpts, const RegionPrec &region_prec, float current_err, int indices[Tile::TILE_TOTAL])
{
    Vector4 palette[NINDICES];
    float toterr = 0;

    generate_palette_quantized(endpts, region_prec, palette);

    for (int i = 0; i < np; ++i)
    {
        float err, besterr = FLT_MAX;

        for (int j = 0; j < NINDICES && besterr > 0; ++j)
        {
            err = !AVPCL::flag_premult ? Utils::metric4(colors[i], palette[j]) :
                                         Utils::metric4premult(colors[i], palette[j]) ;

            if (err > besterr)  // error increased, so we're done searching
                break;
            if (err < besterr)
            {
                besterr = err;
                indices[i] = j;
            }
        }
        toterr += besterr;

        // check for early exit
        if (toterr > current_err)
        {
            // fill out bogus index values so it's initialized at least
            for (int k = i; k < np; ++k)
                indices[k] = -1;

            return FLT_MAX;
        }
    }
    return toterr;
}
// assign indices given a tile, shape, and quantized endpoints, return toterr for each region
// For every texel of the tile, pick the palette entry of its region with the
// least error; accumulate the total error per region in toterr[]. Like
// map_colors, stops scanning a texel's palette once the error increases.
// Fix: removed an unused 'Vector4 err' local that was shadowed by the inner
// 'float err'.
static void assign_indices(const Tile &tile, int shapeindex, IntEndptsRGBA_2 endpts[NREGIONS], const PatternPrec &pattern_prec,
                           int indices[Tile::TILE_H][Tile::TILE_W], float toterr[NREGIONS])
{
    // build list of possibles
    Vector4 palette[NREGIONS][NINDICES];

    for (int region = 0; region < NREGIONS; ++region)
    {
        generate_palette_quantized(endpts[region], pattern_prec.region_precs[region], &palette[region][0]);
        toterr[region] = 0;
    }

    for (int y = 0; y < tile.size_y; y++)
    for (int x = 0; x < tile.size_x; x++)
    {
        int region = REGION(x,y,shapeindex);
        float err, besterr = FLT_MAX;

        for (int i = 0; i < NINDICES && besterr > 0; ++i)
        {
            err = !AVPCL::flag_premult ? Utils::metric4(tile.data[y][x], palette[region][i]) :
                                         Utils::metric4premult(tile.data[y][x], palette[region][i]) ;

            if (err > besterr)  // error increased, so we're done searching
                break;
            if (err < besterr)
            {
                besterr = err;
                indices[y][x] = i;
            }
        }
        toterr[region] += besterr;
    }
}
// note: indices are valid only if the value returned is less than old_err; otherwise they contain -1's
// this function returns either old_err or a value smaller (if it was successful in improving the error)
// Perturb one endpoint (A if do_b == 0, else B) of one channel using a
// logarithmic step search, keeping any step that lowers the error. Returns
// the best error found (old_err if no improvement); indices[] are valid only
// when the returned error is < old_err, otherwise they hold -1.
static float perturb_one(const Vector4 colors[], int np, int ch, const RegionPrec &region_prec, const IntEndptsRGBA_2 &old_endpts, IntEndptsRGBA_2 &new_endpts,
                         float old_err, int do_b, int indices[Tile::TILE_TOTAL])
{
    // we have the old endpoints: old_endpts
    // we have the perturbed endpoints: new_endpts
    // we have the temporary endpoints: temp_endpts

    IntEndptsRGBA_2 temp_endpts;
    float min_err = old_err;        // start with the best current error
    int beststep;                   // only read after 'improved' has set it
    int temp_indices[Tile::TILE_TOTAL];

    for (int i=0; i<np; ++i)
        indices[i] = -1;

    // copy real endpoints so we can perturb them
    temp_endpts = new_endpts = old_endpts;

    int prec = do_b ? region_prec.endpt_b_prec[ch] : region_prec.endpt_a_prec[ch];

    // do a logarithmic search for the best error for this endpoint (which)
    for (int step = 1 << (prec-1); step; step >>= 1)
    {
        bool improved = false;
        for (int sign = -1; sign <= 1; sign += 2)
        {
            if (do_b == 0)
            {
                temp_endpts.A[ch] = new_endpts.A[ch] + sign * step;
                // skip perturbations that leave the representable range
                if (temp_endpts.A[ch] < 0 || temp_endpts.A[ch] >= (1 << prec))
                    continue;
            }
            else
            {
                temp_endpts.B[ch] = new_endpts.B[ch] + sign * step;
                if (temp_endpts.B[ch] < 0 || temp_endpts.B[ch] >= (1 << prec))
                    continue;
            }

            float err = map_colors(colors, np, temp_endpts, region_prec, min_err, temp_indices);

            if (err < min_err)
            {
                improved = true;
                min_err = err;
                beststep = sign * step;
                for (int i=0; i<np; ++i)
                    indices[i] = temp_indices[i];
            }
        }
        // if this was an improvement, move the endpoint and continue search from there
        if (improved)
        {
            if (do_b == 0)
                new_endpts.A[ch] += beststep;
            else
                new_endpts.B[ch] += beststep;
        }
    }
    return min_err;
}
// the larger the error the more time it is worth spending on an exhaustive search.
// perturb the endpoints at least -3 to 3.
// if err > 5000 perturb endpoints 50% of precision
// if err > 1000 25%
// if err > 200 12.5%
// if err > 40 6.25%
// for np = 16 -- adjust error thresholds as a function of np
// always ensure endpoint ordering is preserved (no need to overlap the scan)
// if orig_err returned from this is less than its input value, then indices[] will contain valid indices
// Exhaustively search a neighborhood of the current optimal endpoints for
// channel ch. The search radius grows with the current error (up to half the
// quantization range) and is always at least +/-3; thresholds are scaled by
// np/TILE_TOTAL since they were tuned for a full tile. The scan preserves the
// relative ordering of A and B so indices never need reordering afterwards.
// If the returned error is less than orig_err, indices[] hold valid indices;
// otherwise they are left as -1.
// Fix: 'a_le_b' was computed but never used (the condition was re-tested);
// it now drives the branch. Behavior is unchanged.
static float exhaustive(const Vector4 colors[], int np, int ch, const RegionPrec &region_prec, float orig_err, IntEndptsRGBA_2 &opt_endpts, int indices[Tile::TILE_TOTAL])
{
    IntEndptsRGBA_2 temp_endpts;
    float best_err = orig_err;
    int aprec = region_prec.endpt_a_prec[ch];
    int bprec = region_prec.endpt_b_prec[ch];
    int good_indices[Tile::TILE_TOTAL];
    int temp_indices[Tile::TILE_TOTAL];

    for (int i=0; i<np; ++i)
        indices[i] = -1;

    float thr_scale = (float)np / (float)Tile::TILE_TOTAL;

    if (orig_err == 0) return orig_err;

    int adelta = 0, bdelta = 0;
    if (orig_err > 5000.0*thr_scale)        { adelta = (1 << aprec)/2; bdelta = (1 << bprec)/2; }
    else if (orig_err > 1000.0*thr_scale)   { adelta = (1 << aprec)/4; bdelta = (1 << bprec)/4; }
    else if (orig_err > 200.0*thr_scale)    { adelta = (1 << aprec)/8; bdelta = (1 << bprec)/8; }
    else if (orig_err > 40.0*thr_scale)     { adelta = (1 << aprec)/16; bdelta = (1 << bprec)/16; }
    adelta = max(adelta, 3);
    bdelta = max(bdelta, 3);

#ifdef DISABLE_EXHAUSTIVE
    adelta = bdelta = 3;
#endif

    temp_endpts = opt_endpts;

    // ok figure out the range of A and B
    int alow = max(0, opt_endpts.A[ch] - adelta);
    int ahigh = min((1<<aprec)-1, opt_endpts.A[ch] + adelta);
    int blow = max(0, opt_endpts.B[ch] - bdelta);
    int bhigh = min((1<<bprec)-1, opt_endpts.B[ch] + bdelta);

    // now there's no need to swap the ordering of A and B
    bool a_le_b = opt_endpts.A[ch] <= opt_endpts.B[ch];

    int amin, bmin;     // written whenever best_err improves below

    // NOTE(review): the inner loops use 'b < bhigh' / 'a <= ahigh' -- the b
    // bound is exclusive while the a bound is inclusive, so b == bhigh is
    // never tried. Looks like an off-by-one that only shrinks the search
    // space (output stays valid); confirm intent before changing.
    if (a_le_b)
    {
        // keep a <= b
        for (int a = alow; a <= ahigh; ++a)
        for (int b = max(a, blow); b < bhigh; ++b)
        {
            temp_endpts.A[ch] = a;
            temp_endpts.B[ch] = b;

            float err = map_colors(colors, np, temp_endpts, region_prec, best_err, temp_indices);
            if (err < best_err)
            {
                amin = a;
                bmin = b;
                best_err = err;
                for (int i=0; i<np; ++i)
                    good_indices[i] = temp_indices[i];
            }
        }
    }
    else
    {
        // keep b <= a
        for (int b = blow; b < bhigh; ++b)
        for (int a = max(b, alow); a <= ahigh; ++a)
        {
            temp_endpts.A[ch] = a;
            temp_endpts.B[ch] = b;

            float err = map_colors(colors, np, temp_endpts, region_prec, best_err, temp_indices);
            if (err < best_err)
            {
                amin = a;
                bmin = b;
                best_err = err;
                for (int i=0; i<np; ++i)
                    good_indices[i] = temp_indices[i];
            }
        }
    }

    if (best_err < orig_err)
    {
        opt_endpts.A[ch] = amin;
        opt_endpts.B[ch] = bmin;
        orig_err = best_err;
        // if we actually improved, update the indices
        for (int i=0; i<np; ++i)
            indices[i] = good_indices[i];
    }

    return best_err;
}
// Optimize the endpoints channel-by-channel: start from whichever endpoint
// perturbation helps most, then alternate endpoints until no improvement,
// restarting from channel 0 whenever the winning index assignment changes.
// Finishes with a small exhaustive search around the best endpoints found.
// Fix: repaired HTML-entity corruption in the signature -- the fifth
// parameter is `const RegionPrec &region_prec` (was garbled as "®ion_prec").
static float optimize_one(const Vector4 colors[], int np, float orig_err, const IntEndptsRGBA_2 &orig_endpts, const RegionPrec &region_prec, IntEndptsRGBA_2 &opt_endpts)
{
    float opt_err = orig_err;

    opt_endpts = orig_endpts;

    /*
    err0 = perturb(rgb0, delta0)
    err1 = perturb(rgb1, delta1)
    if (err0 < err1)
        if (err0 >= initial_error) break
        rgb0 += delta0
        next = 1
    else
        if (err1 >= initial_error) break
        rgb1 += delta1
        next = 0
    initial_err = map()
    for (;;)
        err = perturb(next ? rgb1:rgb0, delta)
        if (err >= initial_err) break
        next? rgb1 : rgb0 += delta
        initial_err = err
    */

    IntEndptsRGBA_2 new_a, new_b;
    IntEndptsRGBA_2 new_endpt;
    int do_b;
    int orig_indices[Tile::TILE_TOTAL];
    int new_indices[Tile::TILE_TOTAL];
    int temp_indices0[Tile::TILE_TOTAL];
    int temp_indices1[Tile::TILE_TOTAL];

    // now optimize each channel separately
    // for the first error improvement, we save the indices. then, for any later improvement, we compare the indices
    // if they differ, we restart the loop (which then falls back to looking for a first improvement.)
    for (int ch = 0; ch < NCHANNELS_RGBA; ++ch)
    {
        // figure out which endpoint when perturbed gives the most improvement and start there
        // if we just alternate, we can easily end up in a local minima
        float err0 = perturb_one(colors, np, ch, region_prec, opt_endpts, new_a, opt_err, 0, temp_indices0); // perturb endpt A
        float err1 = perturb_one(colors, np, ch, region_prec, opt_endpts, new_b, opt_err, 1, temp_indices1); // perturb endpt B

        if (err0 < err1)
        {
            // neither perturbation helped this channel; move on
            if (err0 >= opt_err)
                continue;

            for (int i=0; i<np; ++i)
            {
                new_indices[i] = orig_indices[i] = temp_indices0[i];
                nvAssert (orig_indices[i] != -1);
            }

            opt_endpts.A[ch] = new_a.A[ch];
            opt_err = err0;
            do_b = 1; // do B next
        }
        else
        {
            if (err1 >= opt_err)
                continue;

            for (int i=0; i<np; ++i)
            {
                new_indices[i] = orig_indices[i] = temp_indices1[i];
                nvAssert (orig_indices[i] != -1);
            }

            opt_endpts.B[ch] = new_b.B[ch];
            opt_err = err1;
            do_b = 0; // do A next
        }

        // now alternate endpoints and keep trying until there is no improvement
        for (;;)
        {
            float err = perturb_one(colors, np, ch, region_prec, opt_endpts, new_endpt, opt_err, do_b, temp_indices0);
            if (err >= opt_err)
                break;

            for (int i=0; i<np; ++i)
            {
                new_indices[i] = temp_indices0[i];
                nvAssert (new_indices[i] != -1);
            }

            if (do_b == 0)
                opt_endpts.A[ch] = new_endpt.A[ch];
            else
                opt_endpts.B[ch] = new_endpt.B[ch];
            opt_err = err;
            do_b = 1 - do_b; // now move the other endpoint
        }

        // see if the indices have changed
        int i;
        for (i=0; i<np; ++i)
            if (orig_indices[i] != new_indices[i])
                break;

        if (i<np)
            ch = -1; // start over
    }

    // finally, do a small exhaustive search around what we think is the global minima to be sure
    // note this is independent of the above search, so we don't care about the indices from the above
    // we don't care about the above because if they differ, so what? we've already started at ch=0
    bool first = true;
    for (int ch = 0; ch < NCHANNELS_RGBA; ++ch)
    {
        float new_err = exhaustive(colors, np, ch, region_prec, opt_err, opt_endpts, temp_indices0);

        if (new_err < opt_err)
        {
            opt_err = new_err;

            if (first)
            {
                for (int i=0; i<np; ++i)
                {
                    orig_indices[i] = temp_indices0[i];
                    nvAssert (orig_indices[i] != -1);
                }
                first = false;
            }
            else
            {
                // see if the indices have changed
                int i;
                for (i=0; i<np; ++i)
                    if (orig_indices[i] != temp_indices0[i])
                        break;

                if (i<np)
                {
                    ch = -1; // start over
                    first = true;
                }
            }
        }
    }

    return opt_err;
}
// For every region of the shape: gather the region's pixels, then search all
// NLSBMODES combinations of the two endpoint lsb bits for the endpoint pair
// whose optimized mapping error is lowest. Results land in opt_err/opt_endpts.
static void optimize_endpts(const Tile &tile, int shapeindex, const float orig_err[NREGIONS],
IntEndptsRGBA_2 orig_endpts[NREGIONS], const PatternPrec &pattern_prec, float opt_err[NREGIONS], IntEndptsRGBA_2 opt_endpts[NREGIONS])
{
    Vector4 pixels[Tile::TILE_TOTAL];
    IntEndptsRGBA_2 candidate_in, candidate_out;
    int scratch_indices[Tile::TILE_TOTAL];

    for (int region=0; region<NREGIONS; ++region)
    {
        // gather the pixels belonging to this region
        int np = 0;
        for (int y = 0; y < tile.size_y; ++y)
        {
            for (int x = 0; x < tile.size_x; ++x)
            {
                if (REGION(x,y,shapeindex) != region)
                    continue;
                pixels[np] = tile.data[y][x];
                ++np;
            }
        }

        candidate_in = orig_endpts[region];
        opt_endpts[region] = candidate_in;
        opt_err[region] = orig_err[region];

        float best_err = orig_err[region];

        // try all lsb modes as we search for better endpoints
        for (int lsbmode=0; lsbmode<NLSBMODES; ++lsbmode)
        {
            candidate_in.a_lsb = lsbmode & 1;
            candidate_in.b_lsb = (lsbmode >> 1) & 1;

            // make sure we have a valid error for candidate_in: FLT_MAX disables
            // map_colors' early-out so the baseline error is exact
            float in_err = map_colors(pixels, np, candidate_in, pattern_prec.region_precs[region], FLT_MAX, scratch_indices);

            // now try to optimize these endpoints
            float out_err = optimize_one(pixels, np, in_err, candidate_in, pattern_prec.region_precs[region], candidate_out);

            // keep the best candidate seen over all lsb modes
            if (out_err < best_err)
            {
                best_err = out_err;
                opt_err[region] = out_err;
                opt_endpts[region] = candidate_out;
            }
        }
    }
}
/* optimization algorithm
for each pattern
convert endpoints using pattern precision
assign indices and get initial error
compress indices (and possibly reorder endpoints)
transform endpoints
if transformed endpoints fit pattern
get original endpoints back
optimize endpoints, get new endpoints, new indices, and new error // new error will almost always be better
compress new indices
transform new endpoints
if new endpoints fit pattern AND if error is improved
emit compressed block with new data
else
emit compressed block with original data // to try to preserve maximum endpoint precision
*/
// Try each quantization pattern in turn and emit the first candidate whose
// (transformed) endpoints fit that pattern; returns the total error summed
// over all regions for the block that was emitted.
static float refine(const Tile &tile, int shapeindex_best, const FltEndpts endpts[NREGIONS], char *block)
{
float orig_err[NREGIONS], opt_err[NREGIONS], orig_toterr, opt_toterr, expected_opt_err[NREGIONS];
IntEndptsRGBA_2 orig_endpts[NREGIONS], opt_endpts[NREGIONS];
int orig_indices[Tile::TILE_H][Tile::TILE_W], opt_indices[Tile::TILE_H][Tile::TILE_W];
for (int sp = 0; sp < NPATTERNS; ++sp)
{
quantize_endpts(endpts, pattern_precs[sp], orig_endpts);
assign_indices(tile, shapeindex_best, orig_endpts, pattern_precs[sp], orig_indices, orig_err);
swap_indices(orig_endpts, orig_indices, shapeindex_best);
if (patterns[sp].transformed)
transform_forward(orig_endpts);
// apply a heuristic here -- we check if the endpoints fit before we try to optimize them.
// the assumption made is that if they don't fit now, they won't fit after optimizing.
if (endpts_fit(orig_endpts, patterns[sp]))
{
// undo the forward transform so optimization happens in untransformed space
if (patterns[sp].transformed)
transform_inverse(orig_endpts);
optimize_endpts(tile, shapeindex_best, orig_err, orig_endpts, pattern_precs[sp], expected_opt_err, opt_endpts);
assign_indices(tile, shapeindex_best, opt_endpts, pattern_precs[sp], opt_indices, opt_err);
// (nreed) Commented out asserts because they go off all the time...not sure why
//for (int i=0; i<NREGIONS; ++i)
// nvAssert(expected_opt_err[i] == opt_err[i]);
swap_indices(opt_endpts, opt_indices, shapeindex_best);
if (patterns[sp].transformed)
transform_forward(opt_endpts);
orig_toterr = opt_toterr = 0;
for (int i=0; i < NREGIONS; ++i) { orig_toterr += orig_err[i]; opt_toterr += opt_err[i]; }
// emit optimized endpoints only if they still fit AND actually reduced the error
if (endpts_fit(opt_endpts, patterns[sp]) && opt_toterr < orig_toterr)
{
emit_block(opt_endpts, shapeindex_best, patterns[sp], opt_indices, block);
return opt_toterr;
}
else
{
// either it stopped fitting when we optimized it, or there was no improvement
// so go back to the unoptimized endpoints which we know will fit
if (patterns[sp].transformed)
transform_forward(orig_endpts);
emit_block(orig_endpts, shapeindex_best, patterns[sp], orig_indices, block);
return orig_toterr;
}
}
}
// every pattern was rejected by endpts_fit -- callers treat this as fatal
throw "No candidate found, should never happen (avpcl mode 7).";
}
// Clamp each component of v to the displayable byte range [0, 255].
// Components already inside the range (and NaNs, which fail both comparisons)
// are left untouched.
static void clamp(Vector4 &v)
{
    float *comps[4] = { &v.x, &v.y, &v.z, &v.w };
    for (int i = 0; i < 4; ++i)
    {
        if (*comps[i] < 0.0f)
            *comps[i] = 0.0f;
        else if (*comps[i] > 255.0f)
            *comps[i] = 255.0f;
    }
}
// Build, for each region, the full palette of NINDICES colors interpolated
// between that region's unquantized endpoints A and B.
static void generate_palette_unquantized(const FltEndpts endpts[NREGIONS], Vector4 palette[NREGIONS][NINDICES])
{
    for (int r = 0; r < NREGIONS; ++r)
    {
        const FltEndpts &e = endpts[r];
        for (int idx = 0; idx < NINDICES; ++idx)
            palette[r][idx] = Utils::lerp(e.A, e.B, idx, 0, DENOM);
    }
}
// generate a palette from unquantized endpoints, then pick best palette color for all pixels in each region, return toterr for all regions combined
// Fix: removed an unused `Vector4 err;` local that was immediately shadowed
// by the per-pixel `float err` below (dead declaration).
static float map_colors(const Tile &tile, int shapeindex, const FltEndpts endpts[NREGIONS])
{
    // build list of possibles
    Vector4 palette[NREGIONS][NINDICES];

    generate_palette_unquantized(endpts, palette);

    float toterr = 0;

    for (int y = 0; y < tile.size_y; y++)
    for (int x = 0; x < tile.size_x; x++)
    {
        int region = REGION(x,y,shapeindex);
        float err, besterr = FLT_MAX;

        // stop early on an exact match (besterr == 0)
        for (int i = 0; i < NINDICES && besterr > 0; ++i)
        {
            err = Utils::metric4(tile.data[y][x], palette[region][i]);

            if (err > besterr) // error increased, so we're done searching. this works for most norms.
                break;
            if (err < besterr)
                besterr = err;
        }
        toterr += besterr;
    }
    return toterr;
}
// Compute a fast, rough endpoint estimate for every region of the given
// shape (principal-component fit of each region's color cloud), then return
// the resulting mapping error for the whole tile via map_colors().
static float rough(const Tile &tile, int shapeindex, FltEndpts endpts[NREGIONS])
{
for (int region=0; region<NREGIONS; ++region)
{
// gather this region's pixels and accumulate their mean
int np = 0;
Vector4 colors[Tile::TILE_TOTAL];
Vector4 mean(0,0,0,0);
for (int y = 0; y < tile.size_y; y++)
for (int x = 0; x < tile.size_x; x++)
if (REGION(x,y,shapeindex) == region)
{
colors[np] = tile.data[y][x];
mean += tile.data[y][x];
++np;
}
// handle simple cases
if (np == 0)
{
// empty region: use opaque black for both endpoints
Vector4 zero(0,0,0,255.0f);
endpts[region].A = zero;
endpts[region].B = zero;
continue;
}
else if (np == 1)
{
endpts[region].A = colors[0];
endpts[region].B = colors[0];
continue;
}
else if (np == 2)
{
endpts[region].A = colors[0];
endpts[region].B = colors[1];
continue;
}
mean /= float(np);
// principal axis of the region's color cloud
Vector4 direction = Fit::computePrincipalComponent_EigenSolver(np, colors);
// project each pixel value along the principal direction
float minp = FLT_MAX, maxp = -FLT_MAX;
for (int i = 0; i < np; i++)
{
float dp = dot(colors[i]-mean, direction);
if (dp < minp) minp = dp;
if (dp > maxp) maxp = dp;
}
// choose as endpoints 2 points along the principal direction that span the projections of all of the pixel values
endpts[region].A = mean + minp*direction;
endpts[region].B = mean + maxp*direction;
// clamp endpoints
// the argument for clamping is that the actual endpoints need to be clamped and thus we need to choose the best
// shape based on endpoints being clamped
clamp(endpts[region].A);
clamp(endpts[region].B);
}
return map_colors(tile, shapeindex, endpts);
}
// Swap entry i with entry j in two parallel arrays (sort keys in list1,
// associated payload in list2), keeping the pairs aligned.
static void swap(float *list1, int *list2, int i, int j)
{
    const float fkeep = list1[j];
    const int ikeep = list2[j];
    list1[j] = list1[i];
    list2[j] = list2[i];
    list1[i] = fkeep;
    list2[i] = ikeep;
}
float AVPCL::compress_mode7(const Tile &t, char *block)
{
// number of rough cases to look at. reasonable values of this are 1, NSHAPES/4, and NSHAPES
// NSHAPES/4 gets nearly all the cases; you can increase that a bit (say by 3 or 4) if you really want to squeeze the last bit out
const int NITEMS=NSHAPES/4;
// pick the best NITEMS shapes and refine these.
struct {
FltEndpts endpts[NREGIONS];
} all[NSHAPES];
float roughmse[NSHAPES];
int index[NSHAPES];
char tempblock[AVPCL::BLOCKSIZE];
float msebest = FLT_MAX;
for (int i=0; i<NSHAPES; ++i)
{
roughmse[i] = rough(t, i, &all[i].endpts[0]);
index[i] = i;
}
// bubble sort -- only need to bubble up the first NITEMS items
for (int i=0; i<NITEMS; ++i)
for (int j=i+1; j<NSHAPES; ++j)
if (roughmse[i] > roughmse[j])
swap(roughmse, index, i, j);
for (int i=0; i<NITEMS && msebest>0; ++i)
{
int shape = index[i];
float mse = refine(t, shape, &all[shape].endpts[0], tempblock);
if (mse < msebest)
{
memcpy(block, tempblock, sizeof(tempblock));
msebest = mse;
}
}
return msebest;
}
| {
"content_hash": "55df8d24a4f07e66e6a85a3eb1210fdc",
"timestamp": "",
"source": "github",
"line_count": 1077,
"max_line_length": 170,
"avg_line_length": 30.202414113277623,
"alnum_prop": 0.6302877520905067,
"repo_name": "salamanderrake/nvidia-texture-tools",
"id": "fe72d51c354e50bbdb11149c0e5da9d971cd68e0",
"size": "33096",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/nvtt/bc7/avpcl_mode7.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "55378"
},
{
"name": "C#",
"bytes": "56520"
},
{
"name": "C++",
"bytes": "1896574"
},
{
"name": "Cuda",
"bytes": "65176"
},
{
"name": "Objective-C",
"bytes": "47313"
},
{
"name": "Shell",
"bytes": "339"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<ProgressBar xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/bt_loading_progress_bar"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:indeterminateDuration="512" />
<!-- From: file:/Users/vidsouza/Documents/work/AndroidWorkSpace/BT_Andriod/braintree_android/Drop-In/src/main/res/layout/bt_secure_loading_progress_bar.xml --> | {
"content_hash": "77c4a754cad8b4fb9494c7bdb53be1b8",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 159,
"avg_line_length": 62.857142857142854,
"alnum_prop": 0.7454545454545455,
"repo_name": "vinucondev/BTBeckAndroidDemo",
"id": "fcec2826814b9baafa7272d7491935aa65bdba48",
"size": "440",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Demo/build/intermediates/exploded-aar/braintree_android/Drop-In/unspecified/res/layout/bt_secure_loading_progress_bar.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "559712"
},
{
"name": "Ruby",
"bytes": "6658"
},
{
"name": "Shell",
"bytes": "3264"
}
],
"symlink_target": ""
} |
#ifndef __EXIF_H
#define __EXIF_H
#include <string>
#include <stdio.h>
#include <algorithm>
using std::string;
//
// Class responsible for storing and parsing EXIF information from a JPEG blob
//
// All parse* functions return PARSE_EXIF_SUCCESS (0) on success or one of
// the PARSE_EXIF_ERROR_* codes defined below this class.
class EXIFInfo {
public:
// Parsing function for an entire JPEG image buffer.
//
// PARAM 'data': A pointer to a JPEG image.
// PARAM 'length': The length of the JPEG image.
// RETURN: PARSE_EXIF_SUCCESS (0) on success with 'result' filled out
// error code otherwise, as defined by the PARSE_EXIF_ERROR_* macros
int parseFrom(const unsigned char *data, unsigned length);
int parseFrom(const std::string &data);
// Parsing function for an EXIF segment. This is used internally by parseFrom()
// but can be called for special cases where only the EXIF section is
// available (i.e., a blob starting with the bytes "Exif\0\0").
int parseFromEXIFSegment(const unsigned char *buf, unsigned len);
// Set all data members to default values.
void clear();
// Data fields filled out by parseFrom()
char ByteAlign; // 0 = Motorola byte alignment, 1 = Intel
std::string ImageDescription; // Image description
std::string Make; // Camera manufacturer's name
std::string Model; // Camera model
unsigned short Orientation; // Image orientation, start of data corresponds to
// 0: unspecified in EXIF data
// 1: upper left of image
// 3: lower right of image
// 6: upper right of image
// 8: lower left of image
// 9: undefined
unsigned short BitsPerSample; // Number of bits per component
std::string Software; // Software used
std::string DateTime; // File change date and time
std::string DateTimeOriginal; // Original file date and time (may not exist)
std::string DateTimeDigitized; // Digitization date and time (may not exist)
std::string SubSecTimeOriginal; // Sub-second time that original picture was taken
std::string Copyright; // File copyright information
double ExposureTime; // Exposure time in seconds
double FNumber; // F/stop
unsigned short ISOSpeedRatings; // ISO speed
double ShutterSpeedValue; // Shutter speed (reciprocal of exposure time)
double ExposureBiasValue; // Exposure bias value in EV
double SubjectDistance; // Distance to focus point in meters
double FocalLength; // Focal length of lens in millimeters
unsigned short FocalLengthIn35mm; // Focal length in 35mm film
double FocalPlaneXResolution; // Indicates the number of pixels in the image width (X) direction per FocalPlaneResolutionUnit on the camera focal plane (may not exist)
double FocalPlaneYResolution; // Indicates the number of pixels in the image width (Y) direction per FocalPlaneResolutionUnit on the camera focal plane (may not exist)
unsigned short FocalPlaneResolutionUnit;// Indicates the unit for measuring FocalPlaneXResolution and FocalPlaneYResolution (may not exist)
// 0: unspecified in EXIF data
// 1: no absolute unit of measurement
// 2: inch
// 3: centimeter
char Flash; // 0 = no flash, 1 = flash used
unsigned short MeteringMode; // Metering mode
// 1: average
// 2: center weighted average
// 3: spot
// 4: multi-spot
// 5: multi-segment
unsigned ImageWidth; // Image width reported in EXIF data
unsigned ImageHeight; // Image height reported in EXIF data
struct Geolocation_t { // GPS information embedded in file
double Latitude; // Image latitude expressed as decimal
double Longitude; // Image longitude expressed as decimal
double Altitude; // Altitude in meters, relative to sea level
char AltitudeRef; // 0 = above sea level, -1 = below sea level
struct Coord_t {
double degrees;
double minutes;
double seconds;
char direction;
} LatComponents, LonComponents; // Latitude, Longitude expressed in deg/min/sec
} GeoLocation;
// Constructor resets every field via clear()
EXIFInfo() {
clear();
}
};
// Return codes shared by EXIFInfo::parseFrom() and parseFromEXIFSegment().
// Parse was successful
#define PARSE_EXIF_SUCCESS 0
// No JPEG markers found in buffer, possibly invalid JPEG file
#define PARSE_EXIF_ERROR_NO_JPEG 1982
// No EXIF header found in JPEG file.
#define PARSE_EXIF_ERROR_NO_EXIF 1983
// Byte alignment specified in EXIF file was unknown (not Motorola or Intel).
#define PARSE_EXIF_ERROR_UNKNOWN_BYTEALIGN 1984
// EXIF header was found, but data was corrupted.
#define PARSE_EXIF_ERROR_CORRUPT 1985
// IF Entry
// One decoded TIFF/EXIF IFD (Image File Directory) entry. The raw fields
// mirror the 12-byte on-disk entry layout; exactly one of the val_* members
// is filled in by parseIFEntry(), selected by `format`.
struct IFEntry {
// Raw fields
unsigned short tag;
unsigned short format;
unsigned data;
unsigned length;
// Parsed fields
string val_string;
unsigned short val_16;
unsigned val_32;
double val_rational;
unsigned char val_byte;
};
// Helper functions
// Read a 32-bit unsigned integer from buf, honoring the requested byte order
// (intel == true means little-endian; otherwise big-endian / Motorola order).
unsigned int parse32(const unsigned char *buf, bool intel) {
  unsigned int result = 0;
  if (intel) {
    // little-endian: most significant byte last
    for (int i = 3; i >= 0; --i)
      result = (result << 8) | buf[i];
  } else {
    // big-endian: most significant byte first
    for (int i = 0; i < 4; ++i)
      result = (result << 8) | buf[i];
  }
  return result;
}
// Read a 16-bit unsigned integer from buf, honoring the requested byte order
// (intel == true means little-endian; otherwise big-endian / Motorola order).
unsigned short parse16(const unsigned char *buf, bool intel) {
  const unsigned hi = intel ? buf[1] : buf[0];
  const unsigned lo = intel ? buf[0] : buf[1];
  return (unsigned short)((hi << 8) | lo);
}
// Extract an EXIF ASCII value. Per the TIFF layout, values of 4 bytes or
// fewer are stored inline in the 32-bit `data` field itself; longer values
// live at offset `data` from `base` inside `buf`. Returns an empty string
// when an out-of-line value would run past `len` bytes.
string parseEXIFString(const unsigned char *buf,
const unsigned num_components,
const unsigned data,
const unsigned base,
const unsigned len) {
string value;
if (num_components <= 4)
// inline case: reinterpret the in-memory bytes of `data` as characters.
// NOTE(review): this depends on the host CPU's endianness matching the
// order the bytes were packed into `data` -- confirm on big-endian targets.
value.assign( (const char*)&data, num_components );
else {
// out-of-line case: bounds-checked read from the TIFF body
if (base+data+num_components <= len)
value.assign( (const char*)(buf+base+data), num_components );
}
return value;
}
// Decode an EXIF RATIONAL (two consecutive 32-bit values: numerator then
// denominator) into a double. Returns 0 for a (near-)zero denominator to
// avoid dividing by zero.
double parseEXIFRational(const unsigned char *buf, bool intel) {
  const double num = (double) parse32(buf, intel);
  const double den = (double) parse32(buf+4, intel);
  return (den < 1e-20) ? 0 : num / den;
}
// Decode one 12-byte IFD directory entry starting at byte `offs` of `buf`,
// resolving out-of-line values relative to `base` (the TIFF header start).
// Entries with an unsupported format code are invalidated by setting tag = 0xFF.
IFEntry parseIFEntry(const unsigned char *buf,
const unsigned offs,
const bool alignIntel,
const unsigned base,
const unsigned len) {
IFEntry result;
// Each directory entry is composed of:
// 2 bytes: tag number (data field)
// 2 bytes: data format
// 4 bytes: number of components
// 4 bytes: data value or offset to data value
result.tag = parse16(buf + offs, alignIntel);
result.format = parse16(buf + offs + 2, alignIntel);
result.length = parse32(buf + offs + 4, alignIntel);
result.data = parse32(buf + offs + 8, alignIntel);
// Parse value in specified format
switch (result.format) {
case 1:
// format 1: unsigned byte, stored inline
result.val_byte = (unsigned char) *(buf + offs + 8);
break;
case 2:
// format 2: ASCII string (inline when short, otherwise out-of-line)
result.val_string = parseEXIFString(buf, result.length, result.data, base, len);
break;
case 3:
// format 3: unsigned short, stored inline
result.val_16 = parse16((const unsigned char *) buf + offs + 8, alignIntel);
break;
case 4:
// format 4: unsigned long, stored inline
result.val_32 = result.data;
break;
case 5:
// format 5: unsigned rational, stored out-of-line (8 bytes), bounds-checked
if (base + result.data + 8 <= len)
result.val_rational = parseEXIFRational(buf + base + result.data, alignIntel);
break;
case 7:
case 9:
case 10:
// formats 7 (undefined), 9 (signed long), 10 (signed rational): accepted but not decoded
break;
default:
// unknown format code: flag the entry as invalid
result.tag = 0xFF;
}
return result;
}
// NOTE(review): this closing brace has no matching opening brace anywhere in
// this file; it most likely closed an anonymous `namespace {` whose opening
// line was lost when these helpers were extracted. Confirm against the
// original source -- as-is the file will not compile.
}
//
// Locates the EXIF segment and parses it using parseFromEXIFSegment
//
int EXIFInfo::parseFrom(const unsigned char *buf, unsigned len) {
// Sanity check: all JPEG files start with 0xFFD8 and end with 0xFFD9
// This check also ensures that the user has supplied a correct value for len.
if (!buf || len < 4)
return PARSE_EXIF_ERROR_NO_EXIF;
// JPEG file start check
if (buf[0] != 0xFF || buf[1] != 0xD8)
return PARSE_EXIF_ERROR_NO_JPEG;
// JPEG file end check
// if (buf[len-2] != 0xFF || buf[len-1] != 0xD9)
// return PARSE_EXIF_ERROR_NO_JPEG;
// reset all previously-parsed fields before a fresh parse
clear();
// Scan for EXIF header (bytes 0xFF 0xE1) and do a sanity check by
// looking for bytes "Exif\0\0". The marker length data is in Motorola
// byte order, which results in the 'false' parameter to parse16().
// The marker has to contain at least the TIFF header, otherwise the
// EXIF data is corrupt. So the minimum length specified here has to be:
// 2 bytes: section size
// 6 bytes: "Exif\0\0" string
// 2 bytes: TIFF header (either "II" or "MM" string)
// 2 bytes: TIFF magic (short 0x2a00 in Motorola byte order)
// 4 bytes: Offset to first IFD
// =========
// 16 bytes
unsigned offs = 0; // current offset into buffer
for (offs = 0; offs < len-1; offs++)
if (buf[offs] == 0xFF && buf[offs+1] == 0xE1)
break;
// if no APP1 marker was found, offs stopped at len-1 and fails this test
if (offs + 4 > len)
return PARSE_EXIF_ERROR_NO_EXIF;
offs += 2;
unsigned short section_length = parse16(buf + offs, false);
if (offs + section_length > len || section_length < 16)
return PARSE_EXIF_ERROR_CORRUPT;
offs += 2;
// hand the rest of the buffer (starting at "Exif\0\0") to the segment parser
return parseFromEXIFSegment(buf + offs, len - offs);
}
// Convenience overload: parse a JPEG blob held in a std::string by
// forwarding its raw bytes to the pointer/length overload.
int EXIFInfo::parseFrom(const string &data) {
  const unsigned char *bytes = (const unsigned char *) data.data();
  return parseFrom(bytes, data.length());
}
//
// Main parsing function for an EXIF segment.
//
// PARAM: 'buf' start of the EXIF TIFF, which must be the bytes "Exif\0\0".
// PARAM: 'len' length of buffer
//
// Walks IFD0 for basic camera/image tags, then the EXIF SubIFD for exposure
// data and the GPS SubIFD for location data. All multi-byte reads honor the
// byte order declared in the TIFF header.
int EXIFInfo::parseFromEXIFSegment(const unsigned char *buf, unsigned len) {
bool alignIntel = true; // byte alignment (defined in EXIF header)
unsigned offs = 0; // current offset into buffer
if (!buf || len < 6)
return PARSE_EXIF_ERROR_NO_EXIF;
if (!std::equal(buf, buf+6, "Exif\0\0"))
return PARSE_EXIF_ERROR_NO_EXIF;
offs += 6;
// Now parsing the TIFF header. The first two bytes are either "II" or
// "MM" for Intel or Motorola byte alignment. Sanity check by parsing
// the unsigned short that follows, making sure it equals 0x2a. The
// last 4 bytes are an offset into the first IFD, which are added to
// the global offset counter. For this block, we expect the following
// minimum size:
// 2 bytes: 'II' or 'MM'
// 2 bytes: 0x002a
// 4 bytes: offset to first IDF
// -----------------------------
// 8 bytes
if (offs + 8 > len)
return PARSE_EXIF_ERROR_CORRUPT;
unsigned tiff_header_start = offs;
if (buf[offs] == 'I' && buf[offs+1] == 'I')
alignIntel = true;
else {
if(buf[offs] == 'M' && buf[offs+1] == 'M')
alignIntel = false;
else
return PARSE_EXIF_ERROR_UNKNOWN_BYTEALIGN;
}
this->ByteAlign = alignIntel;
offs += 2;
if (0x2a != parse16(buf+offs, alignIntel))
return PARSE_EXIF_ERROR_CORRUPT;
offs += 2;
unsigned first_ifd_offset = parse32(buf + offs, alignIntel);
// offs is tiff_header_start+4 here, so subtracting 4 lands exactly at
// tiff_header_start + first_ifd_offset (the offset is TIFF-relative)
offs += first_ifd_offset - 4;
if (offs >= len)
return PARSE_EXIF_ERROR_CORRUPT;
// Now parsing the first Image File Directory (IFD0, for the main image).
// An IFD consists of a variable number of 12-byte directory entries. The
// first two bytes of the IFD section contain the number of directory
// entries in the section. The last 4 bytes of the IFD contain an offset
// to the next IFD, which means this IFD must contain exactly 6 + 12 * num
// bytes of data.
if (offs + 2 > len)
return PARSE_EXIF_ERROR_CORRUPT;
int num_entries = parse16(buf + offs, alignIntel);
if (offs + 6 + 12 * num_entries > len)
return PARSE_EXIF_ERROR_CORRUPT;
offs += 2;
// default the sub-IFD offsets to len so the "exists" checks below fail
// unless the corresponding pointer tags are actually present
unsigned exif_sub_ifd_offset = len;
unsigned gps_sub_ifd_offset = len;
while (--num_entries >= 0) {
IFEntry result = parseIFEntry(buf, offs, alignIntel, tiff_header_start, len);
offs += 12;
switch(result.tag) {
case 0x102:
// Bits per sample
if (result.format == 3)
this->BitsPerSample = result.val_16;
break;
case 0x10E:
// Image description
if (result.format == 2)
this->ImageDescription = result.val_string;
break;
case 0x10F:
// Digicam make
if (result.format == 2)
this->Make = result.val_string;
break;
case 0x110:
// Digicam model
if (result.format == 2)
this->Model = result.val_string;
break;
case 0x112:
// Orientation of image
if (result.format == 3)
this->Orientation = result.val_16;
break;
case 0x131:
// Software used for image
if (result.format == 2)
this->Software = result.val_string;
break;
case 0x132:
// EXIF/TIFF date/time of image modification
if (result.format == 2)
this->DateTime = result.val_string;
break;
case 0x8298:
// Copyright information
if (result.format == 2)
this->Copyright = result.val_string;
break;
case 0x8825:
// GPS IFS offset
gps_sub_ifd_offset = tiff_header_start + result.data;
break;
case 0x8769:
// EXIF SubIFD offset
exif_sub_ifd_offset = tiff_header_start + result.data;
break;
}
}
// Jump to the EXIF SubIFD if it exists and parse all the information
// there. Note that it's possible that the EXIF SubIFD doesn't exist.
// The EXIF SubIFD contains most of the interesting information that a
// typical user might want.
if (exif_sub_ifd_offset + 4 <= len) {
offs = exif_sub_ifd_offset;
int num_entries = parse16(buf + offs, alignIntel);
if (offs + 6 + 12 * num_entries > len)
return PARSE_EXIF_ERROR_CORRUPT;
offs += 2;
while (--num_entries >= 0) {
IFEntry result = parseIFEntry(buf, offs, alignIntel, tiff_header_start, len);
switch(result.tag) {
case 0x829a:
// Exposure time in seconds
if (result.format == 5)
this->ExposureTime = result.val_rational;
break;
case 0x829d:
// FNumber
if (result.format == 5)
this->FNumber = result.val_rational;
break;
case 0x8827:
// ISO Speed Rating
if (result.format == 3)
this->ISOSpeedRatings = result.val_16;
break;
case 0x9003:
// Original date and time
if (result.format == 2)
this->DateTimeOriginal = result.val_string;
break;
case 0x9004:
// Digitization date and time
if (result.format == 2)
this->DateTimeDigitized = result.val_string;
break;
case 0x9201:
// Shutter speed value
if (result.format == 5)
this->ShutterSpeedValue = result.val_rational;
break;
case 0x9204:
// Exposure bias value
if (result.format == 5)
this->ExposureBiasValue = result.val_rational;
break;
case 0x9206:
// Subject distance
if (result.format == 5)
this->SubjectDistance = result.val_rational;
break;
case 0x9209:
// Flash used
if (result.format == 3)
this->Flash = result.data ? 1 : 0;
break;
case 0x920a:
// Focal length
if (result.format == 5)
this->FocalLength = result.val_rational;
break;
case 0x9207:
// Metering mode
if (result.format == 3)
this->MeteringMode = result.val_16;
break;
case 0x9291:
// Subsecond original time
if (result.format == 2)
this->SubSecTimeOriginal = result.val_string;
break;
case 0xa002:
// EXIF Image width
if (result.format == 4)
this->ImageWidth = result.val_32;
else
if (result.format == 3)
this->ImageWidth = result.val_16;
else
this->ImageWidth = result.data;
break;
case 0xa003:
// EXIF Image height
if (result.format == 4)
this->ImageHeight = result.val_32;
else
if (result.format == 3)
this->ImageHeight = result.val_16;
else
this->ImageHeight = result.data;
break;
case 0xa405:
// Focal length in 35mm film
if (result.format == 3)
this->FocalLengthIn35mm = result.val_16;
break;
}
offs += 12;
}
}
// Jump to the GPS SubIFD if it exists and parse all the information
// there. Note that it's possible that the GPS SubIFD doesn't exist.
// NOTE(review): unlike parseIFEntry's format-5 path, the out-of-line
// parseEXIFRational reads below (cases 2, 4, 6) are NOT bounds-checked
// against len -- a crafted file could make them read past the buffer.
if (gps_sub_ifd_offset + 4 <= len) {
offs = gps_sub_ifd_offset;
int num_entries = parse16(buf + offs, alignIntel);
if (offs + 6 + 12 * num_entries > len)
return PARSE_EXIF_ERROR_CORRUPT;
offs += 2;
while (--num_entries >= 0) {
unsigned short tag = parse16(buf + offs, alignIntel);
unsigned short format = parse16(buf + offs + 2, alignIntel);
unsigned length = parse32(buf + offs + 4, alignIntel);
unsigned data = parse32(buf + offs + 8, alignIntel);
switch(tag) {
case 1:
// GPS north or south
this->GeoLocation.LatComponents.direction = *(buf + offs + 8);
// case 2 re-applies the sign itself, so this also covers the 2-then-1 tag order
if ('S' == this->GeoLocation.LatComponents.direction)
this->GeoLocation.Latitude = -this->GeoLocation.Latitude;
break;
case 2:
// GPS latitude
if (format == 5 && length == 3) {
this->GeoLocation.LatComponents.degrees =
parseEXIFRational(buf + data + tiff_header_start, alignIntel);
this->GeoLocation.LatComponents.minutes =
parseEXIFRational(buf + data + tiff_header_start + 8, alignIntel);
this->GeoLocation.LatComponents.seconds =
parseEXIFRational(buf + data + tiff_header_start + 16, alignIntel);
this->GeoLocation.Latitude =
this->GeoLocation.LatComponents.degrees +
this->GeoLocation.LatComponents.minutes / 60 +
this->GeoLocation.LatComponents.seconds / 3600;
if ('S' == this->GeoLocation.LatComponents.direction)
this->GeoLocation.Latitude = -this->GeoLocation.Latitude;
}
break;
case 3:
// GPS east or west
this->GeoLocation.LonComponents.direction = *(buf + offs + 8);
if ('W' == this->GeoLocation.LonComponents.direction)
this->GeoLocation.Longitude = -this->GeoLocation.Longitude;
break;
case 4:
// GPS longitude
if (format == 5 && length == 3) {
this->GeoLocation.LonComponents.degrees =
parseEXIFRational(buf + data + tiff_header_start, alignIntel);
this->GeoLocation.LonComponents.minutes =
parseEXIFRational(buf + data + tiff_header_start + 8, alignIntel);
this->GeoLocation.LonComponents.seconds =
parseEXIFRational(buf + data + tiff_header_start + 16, alignIntel);
this->GeoLocation.Longitude =
this->GeoLocation.LonComponents.degrees +
this->GeoLocation.LonComponents.minutes / 60 +
this->GeoLocation.LonComponents.seconds / 3600;
if ('W' == this->GeoLocation.LonComponents.direction)
this->GeoLocation.Longitude = -this->GeoLocation.Longitude;
}
break;
case 5:
// GPS altitude reference (below or above sea level)
this->GeoLocation.AltitudeRef = *(buf + offs + 8);
if (1 == this->GeoLocation.AltitudeRef)
this->GeoLocation.Altitude = -this->GeoLocation.Altitude;
break;
case 6:
// GPS altitude reference
if (format == 5) {
this->GeoLocation.Altitude =
parseEXIFRational(buf + data + tiff_header_start, alignIntel);
if (1 == this->GeoLocation.AltitudeRef)
this->GeoLocation.Altitude = -this->GeoLocation.Altitude;
}
break;
}
offs += 12;
}
}
return PARSE_EXIF_SUCCESS;
}
void EXIFInfo::clear() {
// Strings
ImageDescription = "";
Make = "";
Model = "";
Software = "";
DateTime = "";
DateTimeOriginal = "";
DateTimeDigitized = "";
SubSecTimeOriginal= "";
Copyright = "";
// Shorts / unsigned / double
ByteAlign = 0;
Orientation = 0;
BitsPerSample = 0;
ExposureTime = 0;
FNumber = 0;
ISOSpeedRatings = 0;
ShutterSpeedValue = 0;
ExposureBiasValue = 0;
SubjectDistance = 0;
FocalLength = 0;
FocalLengthIn35mm = 0;
Flash = 0;
MeteringMode = 0;
ImageWidth = 0;
ImageHeight = 0;
// Geolocation
GeoLocation.Latitude = 0;
GeoLocation.Longitude = 0;
GeoLocation.Altitude = 0;
GeoLocation.AltitudeRef = 0;
GeoLocation.LatComponents.degrees = 0;
GeoLocation.LatComponents.minutes = 0;
GeoLocation.LatComponents.seconds = 0;
GeoLocation.LatComponents.direction = 0;
GeoLocation.LonComponents.degrees = 0;
GeoLocation.LonComponents.minutes = 0;
GeoLocation.LonComponents.seconds = 0;
GeoLocation.LonComponents.direction = 0;
}
#endif
| {
"content_hash": "c0d4818810e71f9fbe2770d71c1a34e3",
"timestamp": "",
"source": "github",
"line_count": 635,
"max_line_length": 173,
"avg_line_length": 34.8251968503937,
"alnum_prop": 0.5826625666998282,
"repo_name": "cmgladding/aslam-project",
"id": "123753f4115696418a80edf672d211eeb5996918",
"size": "24464",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "include/aslam_project/exif_corrected.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "188025"
},
{
"name": "CMake",
"bytes": "3147"
},
{
"name": "HTML",
"bytes": "11984"
},
{
"name": "Shell",
"bytes": "7467"
}
],
"symlink_target": ""
} |
const express = require('express');
const app = express();
const server = require('http').createServer(app);
const io = require('socket.io')(server);
const bodyParser = require('body-parser');

// configure app to use bodyParser()
// this will let us get the data from a POST
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());

// serve the static web client from /public
app.use(express.static(__dirname + '/public'));

var port = process.env.PORT || 80;

// MODELS
var Beacon = require('./app/models/beacon');

// ROUTES FOR OUR API
// =============================================================================
var router = express.Router(); // get an instance of the express Router

// middleware to use for all requests
router.use(function(req, res, next) {
    // do logging
    console.log('Something is happening.');
    next(); // make sure we go to the next routes and don't stop here
});

// test route to make sure everything is working (accessed at GET http://localhost:8080/api)
router.get('/', function(req, res) {
    res.json({ message: 'hooray! welcome to our api!' });
});

// routes that end in /beacons
// ----------------------------------------------------
router.route('/beacons')

    // create a beacon (accessed at POST http://localhost:80/api/beacons)
    .post(function(req, res) {
        var beacon = new Beacon(); // create a new instance of the Beacon model
        beacon.id = req.body.id;   // set the beacon's id (comes from the request)

        // save the beacon and check for errors
        beacon.save(function(err) {
            if (err) {
                // return so we don't also send the success response below
                // (sending twice raises "Can't set headers after they are sent")
                return res.send(err);
            }
            res.json({ message: 'Beacon created!' });
        });
    })

    // get all the beacons (accessed at GET http://localhost:8080/api/beacons)
    .get(function(req, res) {
        Beacon.find(function(err, beacons) {
            if (err) {
                // same double-response guard as in the POST handler
                return res.send(err);
            }
            res.json(beacons);
        });
    });

// REGISTER OUR ROUTES -------------------------------
// all of our routes will be prefixed with /api
app.use('/api', router);

var mongoose = require('mongoose');
//mongoose.connect('mongodb://node:node@novus.modulusmongo.net:27017/Iganiq8o'); // connect to our database

// START THE SERVER
// =============================================================================
server.listen(port);
console.log("webserver started on port: "+port);

io.on('connection', function(socket){
    console.log("client "+socket.id+" connected");

    socket.on('subscribe', function(roomName){
        socket.join(roomName);
        console.log("client "+socket.id+" joined room "+roomName);
        if(roomName=='webclients'){
            // placeholder: no special handling for web clients yet
        }
    });

    socket.on('unsubscribe', function(roomName){
        socket.leave(roomName);
        console.log("client "+socket.id+" left room "+roomName);
    });

    socket.on('register', function(deviceInfo){
        // placeholder: device registration not implemented yet
    });

    socket.on('disconnect', function(){
        // log the socket id; the original logged the socket object itself,
        // which printed "[object Object]"
        console.log("client "+socket.id+" disconnected");
    });
});
"content_hash": "2cd6470a64bee11fc6506b20a6f546f8",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 107,
"avg_line_length": 25.913793103448278,
"alnum_prop": 0.58416500332668,
"repo_name": "strontium5/airogateway",
"id": "e636d78bdfbe1896e2ddd39caf62690a61c6c27a",
"size": "3006",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2637"
},
{
"name": "JavaScript",
"bytes": "3218"
}
],
"symlink_target": ""
} |
<?php
/**
* @file
* Contains \Drupal\Tests\Core\Routing\ContentTypeHeaderMatcherTest.
*/
namespace Drupal\Tests\Core\Routing;
use Drupal\Core\Routing\ContentTypeHeaderMatcher;
use Drupal\Tests\Core\Routing\RoutingFixtures;
use Drupal\Tests\UnitTestCase;
use Symfony\Component\HttpFoundation\Request;
/**
* Confirm that the content types partial matcher is functioning properly.
*
* @group Routing
*/
class ContentTypeHeaderMatcherTest extends UnitTestCase {

  /**
   * A collection of shared fixture data for tests.
   *
   * @var RoutingFixtures
   */
  protected $fixtures;

  /**
   * The matcher object that is going to be tested.
   *
   * @var \Drupal\Core\Routing\ContentTypeHeaderMatcher
   */
  protected $matcher;

  /**
   * {@inheritdoc}
   */
  protected function setUp() {
    parent::setUp();

    $this->fixtures = new RoutingFixtures();
    $this->matcher = new ContentTypeHeaderMatcher();
  }

  /**
   * Tests that routes are not filtered on GET requests.
   */
  public function testGetRequestFilter() {
    $collection = $this->fixtures->sampleRouteCollection();
    $collection->addCollection($this->fixtures->contentRouteCollection());

    $request = Request::create('path/two', 'GET');
    $routes = $this->matcher->filter($collection, $request);
    // PHPUnit assertions take the expected value first; the original calls
    // had the arguments reversed, which produced misleading failure output.
    $this->assertCount(7, $routes, 'The correct number of routes was found.');
  }

  /**
   * Tests that XML-restricted routes get filtered out on JSON requests.
   */
  public function testJsonRequest() {
    $collection = $this->fixtures->sampleRouteCollection();
    $collection->addCollection($this->fixtures->contentRouteCollection());

    $request = Request::create('path/two', 'POST');
    $request->headers->set('Content-type', 'application/json');
    $routes = $this->matcher->filter($collection, $request);
    $this->assertCount(6, $routes, 'The correct number of routes was found.');
    $this->assertNotNull($routes->get('route_f'), 'The json route was found.');
    $this->assertNull($routes->get('route_g'), 'The xml route was not found.');
    foreach ($routes as $name => $route) {
      $this->assertEquals('route_f', $name, 'The json route is the first one in the collection.');
      break;
    }
  }

  /**
   * Tests route filtering on POST form submission requests.
   */
  public function testPostForm() {
    $collection = $this->fixtures->sampleRouteCollection();
    $collection->addCollection($this->fixtures->contentRouteCollection());

    // Test that all XML and JSON restricted routes get filtered out on a POST
    // form submission.
    $request = Request::create('path/two', 'POST');
    $request->headers->set('Content-type', 'application/www-form-urlencoded');
    $routes = $this->matcher->filter($collection, $request);
    $this->assertCount(5, $routes, 'The correct number of routes was found.');
    $this->assertNull($routes->get('route_f'), 'The json route was found.');
    $this->assertNull($routes->get('route_g'), 'The xml route was not found.');
  }

  /**
   * Confirms that the matcher throws an exception for no-route.
   *
   * @expectedException \Symfony\Component\HttpKernel\Exception\UnsupportedMediaTypeHttpException
   * @expectedExceptionMessage No route found that matches the Content-Type header.
   */
  public function testNoRouteFound() {
    $matcher = new ContentTypeHeaderMatcher();

    $routes = $this->fixtures->contentRouteCollection();
    $request = Request::create('path/two', 'POST');
    $request->headers->set('Content-type', 'application/hal+json');
    $matcher->filter($routes, $request);
    $this->fail('No exception was thrown.');
  }

}
| {
"content_hash": "cb652a9891b3746d940fa776a1b5dc2a",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 98,
"avg_line_length": 33.03636363636364,
"alnum_prop": 0.6832691249312053,
"repo_name": "komejo/d8demo-dev",
"id": "02775850fda337ac58a781a484a419287c0f1c1a",
"size": "3634",
"binary": false,
"copies": "47",
"ref": "refs/heads/master",
"path": "web/core/tests/Drupal/Tests/Core/Routing/ContentTypeHeaderMatcherTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "9118"
},
{
"name": "C++",
"bytes": "13044"
},
{
"name": "CSS",
"bytes": "291673"
},
{
"name": "HTML",
"bytes": "322832"
},
{
"name": "JavaScript",
"bytes": "838628"
},
{
"name": "PHP",
"bytes": "27260329"
},
{
"name": "Shell",
"bytes": "45206"
}
],
"symlink_target": ""
} |
import { CUSTOM_ELEMENTS_SCHEMA, NgModule } from '@angular/core';
import { RouterModule } from '@angular/router';
import { ManagementPortalSharedModule } from '../shared';
import {
adminState,
AuditsComponent,
AuditsService,
JhiConfigurationComponent,
JhiConfigurationService,
JhiDocsComponent,
JhiHealthCheckComponent,
JhiHealthModalComponent,
JhiHealthService,
JhiMetricsMonitoringComponent,
JhiMetricsMonitoringModalComponent,
JhiMetricsService,
LogsComponent,
LogsService,
UserDeleteDialogComponent,
UserDialogComponent,
UserMgmtComponent,
UserMgmtDeleteDialogComponent,
UserMgmtDetailComponent,
UserMgmtDialogComponent,
UserModalService,
UserResolve,
UserResolvePagingParams,
UserSendActivationLinkComponent,
UserSendActivationLinkDialogComponent,
} from './';
import { RoleComponent } from './user-management/role.component';
// Feature module for the admin area of the Management Portal: user
// management, role/activation dialogs, audits, logs, configuration,
// health checks, metrics monitoring and API docs.
@NgModule({
    imports: [
        ManagementPortalSharedModule,
        // Admin routes are registered on the root router with hash-based URLs.
        RouterModule.forRoot(adminState, {useHash: true}),
    ],
    declarations: [
        AuditsComponent,
        UserMgmtComponent,
        UserDialogComponent,
        UserDeleteDialogComponent,
        UserMgmtDetailComponent,
        UserMgmtDialogComponent,
        UserMgmtDeleteDialogComponent,
        LogsComponent,
        JhiConfigurationComponent,
        JhiHealthCheckComponent,
        JhiHealthModalComponent,
        JhiDocsComponent,
        JhiMetricsMonitoringComponent,
        JhiMetricsMonitoringModalComponent,
        RoleComponent,
        UserSendActivationLinkComponent,
        UserSendActivationLinkDialogComponent,
    ],
    // Dialog/modal components created dynamically (not referenced in templates)
    // must be listed as entry components.
    entryComponents: [
        UserMgmtDialogComponent,
        UserMgmtDeleteDialogComponent,
        UserSendActivationLinkDialogComponent,
        JhiHealthModalComponent,
        JhiMetricsMonitoringModalComponent,
    ],
    providers: [
        AuditsService,
        JhiConfigurationService,
        JhiHealthService,
        JhiMetricsService,
        LogsService,
        UserResolvePagingParams,
        UserResolve,
        UserModalService,
    ],
    // Allow non-Angular custom elements in this module's templates.
    schemas: [CUSTOM_ELEMENTS_SCHEMA],
})
export class ManagementPortalAdminModule {
}
| {
"content_hash": "66c361bc929dc1c4d514c383808bcec2",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 65,
"avg_line_length": 27.911392405063292,
"alnum_prop": 0.7097505668934241,
"repo_name": "RADAR-CNS/ManagementPortal",
"id": "d31fbf9643fbfd8e6913f2402f053667af3863d4",
"size": "2205",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/webapp/app/admin/admin.module.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "15325"
},
{
"name": "Gherkin",
"bytes": "179"
},
{
"name": "HTML",
"bytes": "230378"
},
{
"name": "Java",
"bytes": "722213"
},
{
"name": "JavaScript",
"bytes": "15606"
},
{
"name": "Scala",
"bytes": "19464"
},
{
"name": "Shell",
"bytes": "2830"
},
{
"name": "TypeScript",
"bytes": "399308"
}
],
"symlink_target": ""
} |
// A UIButton subclass with no additional public API; any custom appearance
// or behavior lives in the implementation file. Presumably the "quick login"
// button shown on the friend-trends login prompt — confirm against usage.
@interface DZYQuickLoginButton : UIButton
@end
| {
"content_hash": "b2e8b3854646543402e31b6e29b0ccd9",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 41,
"avg_line_length": 12.25,
"alnum_prop": 0.7959183673469388,
"repo_name": "dongzhenyu/BS",
"id": "250d7ce00d2dabe3dd9c8df7ab9cc8e0fa339e05",
"size": "205",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "BS/BS/Classes/FriendTrends-关注/View/DZYQuickLoginButton.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Objective-C",
"bytes": "175874"
}
],
"symlink_target": ""
} |
package javax.mail.internet;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.StringTokenizer;
import javax.activation.DataHandler;
import javax.activation.DataSource;
import javax.mail.EncodingAware;
import javax.mail.MessagingException;
import org.apache.geronimo.mail.util.ASCIIUtil;
import org.apache.geronimo.mail.util.Base64;
import org.apache.geronimo.mail.util.Base64DecoderStream;
import org.apache.geronimo.mail.util.Base64Encoder;
import org.apache.geronimo.mail.util.Base64EncoderStream;
import org.apache.geronimo.mail.util.QuotedPrintableDecoderStream;
import org.apache.geronimo.mail.util.QuotedPrintableEncoder;
import org.apache.geronimo.mail.util.QuotedPrintableEncoderStream;
import org.apache.geronimo.mail.util.SessionUtil;
import org.apache.geronimo.mail.util.UUDecoderStream;
import org.apache.geronimo.mail.util.UUEncoderStream;
// encodings include "base64", "quoted-printable", "7bit", "8bit" and "binary".
// In addition, "uuencode" is also supported. The
/**
* @version $Rev$ $Date$
*/
public class MimeUtility {
    // Names of the session/system properties that tune MimeUtility behavior
    // (read via SessionUtil.getBooleanProperty()).
    private static final String MIME_FOLDENCODEDWORDS = "mail.mime.foldencodedwords";
    private static final String MIME_DECODE_TEXT_STRICT = "mail.mime.decodetext.strict";
    private static final String MIME_FOLDTEXT = "mail.mime.foldtext";
    // Column threshold used when folding header text.
    private static final int FOLD_THRESHOLD = 76;

    // This class is a collection of static utility methods and is never instantiated.
    private MimeUtility() {
    }

    public static final int ALL = -1;

    // Characters that require escaping; presumably used when quoting header
    // strings — the consumer is not visible in this chunk.
    private static String escapedChars = "\"\\\r\n";
    // RFC 822 linear whitespace characters.
    private static String linearWhiteSpace = " \t\r\n";

    // Characters that must be escaped in "Q" encoded-words: the larger set for
    // "word" context, the smaller one for general header text.
    private static String QP_WORD_SPECIALS = "=_?\"#$%&'(),.:;<>@[\\]^`{|}~";
    private static String QP_TEXT_SPECIALS = "=_?";

    // the javamail spec includes the ability to map java encoding names to MIME-specified names. Normally,
    // these values are loaded from a character mapping file.
    private static Map java2mime;
    private static Map mime2java;

    static {
        // we need to load the mapping tables used by javaCharset() and mimeCharset().
        loadCharacterSetMappings();
    }
public static InputStream decode(final InputStream in, String encoding) throws MessagingException {
encoding = encoding.toLowerCase();
// some encodies are just pass-throughs, with no real decoding.
if (encoding.equals("binary") || encoding.equals("7bit") || encoding.equals("8bit")) {
return in;
}
else if (encoding.equals("base64")) {
return new Base64DecoderStream(in);
}
// UUEncode is known by a couple historical extension names too.
else if (encoding.equals("uuencode") || encoding.equals("x-uuencode") || encoding.equals("x-uue")) {
return new UUDecoderStream(in);
}
else if (encoding.equals("quoted-printable")) {
return new QuotedPrintableDecoderStream(in);
}
else {
throw new MessagingException("Unknown encoding " + encoding);
}
}
    /**
     * Decode a string of text obtained from a mail header into
     * its proper form.  The text generally will consist of a
     * string of tokens, some of which may be RFC 2047 encoded-words
     * (base64 or quoted-printable encoded).
     *
     * @param text The text to decode.
     *
     * @return The decoded text string.
     * @exception UnsupportedEncodingException
     *                   when an encoded-word names an unsupported character set.
     */
    public static String decodeText(final String text) throws UnsupportedEncodingException {
        // if the text contains any encoded tokens, those tokens will be marked with "=?". If the
        // source string doesn't contain that sequence, no decoding is required.
        if (text.indexOf("=?") < 0) {
            return text;
        }

        // we have two sets of rules we can apply: strict RFC 2047 (encoded-words
        // delimited by linear whitespace, handled below) or the looser scan.
        if (!SessionUtil.getBooleanProperty(MIME_DECODE_TEXT_STRICT, true)) {
            return decodeTextNonStrict(text);
        }

        int offset = 0;
        final int endOffset = text.length();

        // span of the most recent pending run of whitespace; -1 means none pending.
        int startWhiteSpace = -1;
        int endWhiteSpace = -1;

        final StringBuffer decodedText = new StringBuffer(text.length());

        // whitespace between two adjacent encoded-words is not significant,
        // so we must remember whether the previous token was encoded.
        boolean previousTokenEncoded = false;

        while (offset < endOffset) {
            char ch = text.charAt(offset);

            // is this a whitespace character?
            if (linearWhiteSpace.indexOf(ch) != -1) {
                startWhiteSpace = offset;
                while (offset < endOffset) {
                    // step over the white space characters.
                    ch = text.charAt(offset);
                    if (linearWhiteSpace.indexOf(ch) != -1) {
                        offset++;
                    }
                    else {
                        // record the location of the first non lwsp and drop down to process the
                        // token characters.
                        endWhiteSpace = offset;
                        break;
                    }
                }
            }
            else {
                // we have a word token. We need to scan over the word and then try to parse it.
                final int wordStart = offset;

                while (offset < endOffset) {
                    // step over the non-whitespace characters.
                    ch = text.charAt(offset);
                    if (linearWhiteSpace.indexOf(ch) == -1) {
                        offset++;
                    }
                    else {
                        break;
                    }

                    //NB: Trailing whitespace on these header strings will just be discarded.
                }
                // pull out the word token.
                final String word = text.substring(wordStart, offset);
                // is the token encoded? decode the word
                if (word.startsWith("=?")) {
                    try {
                        // if this gives a parsing failure, treat it like a non-encoded word.
                        final String decodedWord = decodeWord(word);

                        // are any whitespace characters significant?  Append 'em if we've got 'em.
                        if (!previousTokenEncoded) {
                            if (startWhiteSpace != -1) {
                                decodedText.append(text.substring(startWhiteSpace, endWhiteSpace));
                                startWhiteSpace = -1;
                            }
                        }
                        // this is definitely a decoded token.
                        previousTokenEncoded = true;
                        // and add this to the text.
                        decodedText.append(decodedWord);
                        // we continue parsing from here...we allow parsing errors to fall through
                        // and get handled as normal text.
                        continue;

                    } catch (final ParseException e) {
                        // fall through: a malformed encoded-word is emitted as plain text below.
                    }
                }
                // this is a normal token, so it doesn't matter what the previous token was. Add the white space
                // if we have it.
                if (startWhiteSpace != -1) {
                    decodedText.append(text.substring(startWhiteSpace, endWhiteSpace));
                    startWhiteSpace = -1;
                }

                // this is not a decoded token.
                previousTokenEncoded = false;
                decodedText.append(word);
            }
        }

        return decodedText.toString();
    }
/**
* Decode a string of text obtained from a mail header into
* it's proper form. The text generally will consist of a
* string of tokens, some of which may be encoded using
* base64 encoding. This is for non-strict decoded for mailers that
* violate the RFC 2047 restriction that decoded tokens must be delimited
* by linear white space. This will scan tokens looking for inner tokens
* enclosed in "=?" -- "?=" pairs.
*
* @param text The text to decode.
*
* @return The decoded test string.
* @exception UnsupportedEncodingException
*/
private static String decodeTextNonStrict(final String text) throws UnsupportedEncodingException {
int offset = 0;
final int endOffset = text.length();
int startWhiteSpace = -1;
int endWhiteSpace = -1;
final StringBuffer decodedText = new StringBuffer(text.length());
boolean previousTokenEncoded = false;
while (offset < endOffset) {
char ch = text.charAt(offset);
// is this a whitespace character?
if (linearWhiteSpace.indexOf(ch) != -1) {
startWhiteSpace = offset;
while (offset < endOffset) {
// step over the white space characters.
ch = text.charAt(offset);
if (linearWhiteSpace.indexOf(ch) != -1) {
offset++;
}
else {
// record the location of the first non lwsp and drop down to process the
// token characters.
endWhiteSpace = offset;
break;
}
}
}
else {
// we're at the start of a word token. We potentially need to break this up into subtokens
final int wordStart = offset;
while (offset < endOffset) {
// step over the white space characters.
ch = text.charAt(offset);
if (linearWhiteSpace.indexOf(ch) == -1) {
offset++;
}
else {
break;
}
//NB: Trailing whitespace on these header strings will just be discarded.
}
// pull out the word token.
final String word = text.substring(wordStart, offset);
int decodeStart = 0;
// now scan and process each of the bits within here.
while (decodeStart < word.length()) {
final int tokenStart = word.indexOf("=?", decodeStart);
if (tokenStart == -1) {
// this is a normal token, so it doesn't matter what the previous token was. Add the white space
// if we have it.
if (startWhiteSpace != -1) {
decodedText.append(text.substring(startWhiteSpace, endWhiteSpace));
startWhiteSpace = -1;
}
// this is not a decoded token.
previousTokenEncoded = false;
decodedText.append(word.substring(decodeStart));
// we're finished.
break;
}
// we have something to process
else {
// we might have a normal token preceeding this.
if (tokenStart != decodeStart) {
// this is a normal token, so it doesn't matter what the previous token was. Add the white space
// if we have it.
if (startWhiteSpace != -1) {
decodedText.append(text.substring(startWhiteSpace, endWhiteSpace));
startWhiteSpace = -1;
}
// this is not a decoded token.
previousTokenEncoded = false;
decodedText.append(word.substring(decodeStart, tokenStart));
}
// now find the end marker.
final int tokenEnd = word.indexOf("?=", tokenStart);
// sigh, an invalid token. Treat this as plain text.
if (tokenEnd == -1) {
// this is a normal token, so it doesn't matter what the previous token was. Add the white space
// if we have it.
if (startWhiteSpace != -1) {
decodedText.append(text.substring(startWhiteSpace, endWhiteSpace));
startWhiteSpace = -1;
}
// this is not a decoded token.
previousTokenEncoded = false;
decodedText.append(word.substring(tokenStart));
// we're finished.
break;
}
else {
// update our ticker
decodeStart = tokenEnd + 2;
final String token = word.substring(tokenStart, tokenEnd);
try {
// if this gives a parsing failure, treat it like a non-encoded word.
final String decodedWord = decodeWord(token);
// are any whitespace characters significant? Append 'em if we've got 'em.
if (!previousTokenEncoded) {
if (startWhiteSpace != -1) {
decodedText.append(text.substring(startWhiteSpace, endWhiteSpace));
startWhiteSpace = -1;
}
}
// this is definitely a decoded token.
previousTokenEncoded = true;
// and add this to the text.
decodedText.append(decodedWord);
// we continue parsing from here...we allow parsing errors to fall through
// and get handled as normal text.
continue;
} catch (final ParseException e) {
}
// this is a normal token, so it doesn't matter what the previous token was. Add the white space
// if we have it.
if (startWhiteSpace != -1) {
decodedText.append(text.substring(startWhiteSpace, endWhiteSpace));
startWhiteSpace = -1;
}
// this is not a decoded token.
previousTokenEncoded = false;
decodedText.append(token);
}
}
}
}
}
return decodedText.toString();
}
/**
* Parse a string using the RFC 2047 rules for an "encoded-word"
* type. This encoding has the syntax:
*
* encoded-word = "=?" charset "?" encoding "?" encoded-text "?="
*
* @param word The possibly encoded word value.
*
* @return The decoded word.
* @exception ParseException
* @exception UnsupportedEncodingException
*/
public static String decodeWord(final String word) throws ParseException, UnsupportedEncodingException {
// encoded words start with the characters "=?". If this not an encoded word, we throw a
// ParseException for the caller.
if (!word.startsWith("=?")) {
throw new ParseException("Invalid RFC 2047 encoded-word: " + word);
}
final int charsetPos = word.indexOf('?', 2);
if (charsetPos == -1) {
throw new ParseException("Missing charset in RFC 2047 encoded-word: " + word);
}
// pull out the character set information (this is the MIME name at this point).
final String charset = word.substring(2, charsetPos).toLowerCase();
// now pull out the encoding token the same way.
final int encodingPos = word.indexOf('?', charsetPos + 1);
if (encodingPos == -1) {
throw new ParseException("Missing encoding in RFC 2047 encoded-word: " + word);
}
final String encoding = word.substring(charsetPos + 1, encodingPos);
// and finally the encoded text.
final int encodedTextPos = word.indexOf("?=", encodingPos + 1);
if (encodedTextPos == -1) {
throw new ParseException("Missing encoded text in RFC 2047 encoded-word: " + word);
}
final String encodedText = word.substring(encodingPos + 1, encodedTextPos);
// seems a bit silly to encode a null string, but easy to deal with.
if (encodedText.length() == 0) {
return "";
}
try {
// the decoder writes directly to an output stream.
final ByteArrayOutputStream out = new ByteArrayOutputStream(encodedText.length());
final byte[] encodedData = encodedText.getBytes("US-ASCII");
// Base64 encoded?
if (encoding.equals("B")) {
Base64.decode(encodedData, out);
}
// maybe quoted printable.
else if (encoding.equals("Q")) {
final QuotedPrintableEncoder dataEncoder = new QuotedPrintableEncoder();
dataEncoder.decodeWord(encodedData, out);
}
else {
throw new UnsupportedEncodingException("Unknown RFC 2047 encoding: " + encoding);
}
// get the decoded byte data and convert into a string.
final byte[] decodedData = out.toByteArray();
return new String(decodedData, javaCharset(charset));
} catch (final IOException e) {
throw new UnsupportedEncodingException("Invalid RFC 2047 encoding");
}
}
/**
* Wrap an encoder around a given output stream.
*
* @param out The output stream to wrap.
* @param encoding The name of the encoding.
*
* @return A instance of FilterOutputStream that manages on the fly
* encoding for the requested encoding type.
* @exception MessagingException
*/
public static OutputStream encode(final OutputStream out, String encoding) throws MessagingException {
// no encoding specified, so assume it goes out unchanged.
if (encoding == null) {
return out;
}
encoding = encoding.toLowerCase();
// some encodies are just pass-throughs, with no real decoding.
if (encoding.equals("binary") || encoding.equals("7bit") || encoding.equals("8bit")) {
return out;
}
else if (encoding.equals("base64")) {
return new Base64EncoderStream(out);
}
// UUEncode is known by a couple historical extension names too.
else if (encoding.equals("uuencode") || encoding.equals("x-uuencode") || encoding.equals("x-uue")) {
return new UUEncoderStream(out);
}
else if (encoding.equals("quoted-printable")) {
return new QuotedPrintableEncoderStream(out);
}
else {
throw new MessagingException("Unknown encoding " + encoding);
}
}
/**
* Wrap an encoder around a given output stream.
*
* @param out The output stream to wrap.
* @param encoding The name of the encoding.
* @param filename The filename of the data being sent (only used for UUEncode).
*
* @return A instance of FilterOutputStream that manages on the fly
* encoding for the requested encoding type.
* @exception MessagingException
*/
public static OutputStream encode(final OutputStream out, String encoding, final String filename) throws MessagingException {
encoding = encoding.toLowerCase();
// some encodies are just pass-throughs, with no real decoding.
if (encoding.equals("binary") || encoding.equals("7bit") || encoding.equals("8bit")) {
return out;
}
else if (encoding.equals("base64")) {
return new Base64EncoderStream(out);
}
// UUEncode is known by a couple historical extension names too.
else if (encoding.equals("uuencode") || encoding.equals("x-uuencode") || encoding.equals("x-uue")) {
return new UUEncoderStream(out, filename);
}
else if (encoding.equals("quoted-printable")) {
return new QuotedPrintableEncoderStream(out);
}
else {
throw new MessagingException("Unknown encoding " + encoding);
}
}
    /**
     * Encode header text for transmission, using the default MIME charset
     * and an automatically chosen transfer encoding.
     *
     * @param word The header text to encode.
     *
     * @return The encoded text (unchanged when it is all ASCII).
     * @exception UnsupportedEncodingException
     */
    public static String encodeText(final String word) throws UnsupportedEncodingException {
        return encodeText(word, null, null);
    }
    /**
     * Encode header text for transmission, using "text" rules (fewer
     * characters need escaping than for a "word").
     *
     * @param word     The header text to encode.
     * @param charset  The MIME charset name to use (null selects the default).
     * @param encoding "B" (base64) or "Q" (quoted-printable); null chooses
     *                 the best fit automatically.
     *
     * @return The encoded text (unchanged when it is all ASCII).
     * @exception UnsupportedEncodingException
     */
    public static String encodeText(final String word, final String charset, final String encoding) throws UnsupportedEncodingException {
        return encodeWord(word, charset, encoding, false);
    }
    /**
     * Encode a header "word" token as an RFC 2047 encoded-word, using the
     * default MIME charset and an automatically chosen transfer encoding.
     *
     * @param word The word to encode.
     *
     * @return The encoded word (unchanged when it is all ASCII).
     * @exception UnsupportedEncodingException
     */
    public static String encodeWord(final String word) throws UnsupportedEncodingException {
        return encodeWord(word, null, null);
    }
    /**
     * Encode a header "word" token as an RFC 2047 encoded-word, using
     * "word" rules (more characters need escaping than for general text).
     *
     * @param word     The word to encode.
     * @param charset  The MIME charset name to use (null selects the default).
     * @param encoding "B" (base64) or "Q" (quoted-printable); null chooses
     *                 the best fit automatically.
     *
     * @return The encoded word (unchanged when it is all ASCII).
     * @exception UnsupportedEncodingException
     */
    public static String encodeWord(final String word, final String charset, final String encoding) throws UnsupportedEncodingException {
        return encodeWord(word, charset, encoding, true);
    }
    /**
     * Common implementation behind encodeText()/encodeWord().  Encodes a
     * string as one or more RFC 2047 encoded-words, splitting into multiple
     * segments when the encoded form would exceed the 75 character limit.
     *
     * @param word         The string to encode.
     * @param charset      The MIME charset name; null selects the default MIME charset.
     * @param encoding     "B" or "Q" (case insensitive); null keeps the best-fit
     *                     encoding derived from the data itself.
     * @param encodingWord true when encoding a "word" (larger Q-encoding
     *                     specials set) rather than general header text.
     *
     * @return The encoded string, or the original string when it is all ASCII.
     * @exception UnsupportedEncodingException when the requested transfer
     *            encoding is unknown or the charset conversion fails.
     */
    private static String encodeWord(final String word, String charset, final String encoding, final boolean encodingWord) throws UnsupportedEncodingException {

        // figure out what we need to encode this.
        String encoder = ASCIIUtil.getTextTransferEncoding(word);
        // all ascii?  We can return this directly,
        if (encoder.equals("7bit")) {
            return word;
        }

        // if not given a charset, use the default.
        if (charset == null) {
            charset = getDefaultMIMECharset();
        }

        // sort out the encoder.  If not explicitly given, use the best guess we've already established.
        if (encoding != null) {
            if (encoding.equalsIgnoreCase("B")) {
                encoder = "base64";
            }
            else if (encoding.equalsIgnoreCase("Q")) {
                encoder = "quoted-printable";
            }
            else {
                throw new UnsupportedEncodingException("Unknown transfer encoding: " + encoding);
            }
        }

        try {
            // we'll format this directly into the string buffer
            final StringBuffer result = new StringBuffer();

            // this is the maximum size of a segment of encoded data, which is based off
            // of a 75 character size limit and all of the encoding overhead elements.
            final int sizeLimit = 75 - 7 - charset.length();

            // now do the appropriate encoding work
            if (encoder.equals("base64")) {
                final Base64Encoder dataEncoder = new Base64Encoder();
                // this may recurse on the encoding if the string is too long. The left-most will not
                // get a segment delimiter
                encodeBase64(word, result, sizeLimit, charset, dataEncoder, true, SessionUtil.getBooleanProperty(MIME_FOLDENCODEDWORDS, false));
            }
            else {
                final QuotedPrintableEncoder dataEncoder = new QuotedPrintableEncoder();
                encodeQuotedPrintable(word, result, sizeLimit, charset, dataEncoder, true,
                        SessionUtil.getBooleanProperty(MIME_FOLDENCODEDWORDS, false), encodingWord ? QP_WORD_SPECIALS : QP_TEXT_SPECIALS);
            }
            return result.toString();
        } catch (final IOException e) {
            throw new UnsupportedEncodingException("Invalid encoding");
        }
    }
    /**
     * Encode a string into base64 encoding, taking into
     * account the maximum segment length.
     *
     * @param data      The string data to encode.
     * @param out       The output buffer used for the result.
     * @param sizeLimit The maximum amount of encoded data we're allowed
     *                  to have in a single encoded segment.
     * @param charset   The character set marker that needs to be added to the
     *                  encoding header.
     * @param encoder   The encoder instance we're using.
     * @param firstSegment
     *                  If true, this is the first (left-most) segment in the
     *                  data.  Used to determine if segment delimiters need to
     *                  be added between sections.
     * @param foldSegments
     *                  Indicates the type of delimiter to use (blank or newline sequence).
     */
    static private void encodeBase64(final String data, final StringBuffer out, final int sizeLimit, final String charset, final Base64Encoder encoder, final boolean firstSegment, final boolean foldSegments) throws IOException
    {
        // this needs to be converted into the appropriate transfer encoding.
        final byte [] bytes = data.getBytes(javaCharset(charset));

        final int estimatedSize = encoder.estimateEncodedLength(bytes);

        // if the estimated encoding size is over our segment limit, split the string in half and
        // recurse.  Eventually we'll reach a point where things are small enough.
        if (estimatedSize > sizeLimit) {
            // the first segment indicator travels with the left half.
            encodeBase64(data.substring(0, data.length() / 2), out, sizeLimit, charset, encoder, firstSegment, foldSegments);
            // the second half can never be the first segment
            encodeBase64(data.substring(data.length() / 2), out, sizeLimit, charset, encoder, false, foldSegments);
        }
        else
        {
            // if this is not the first segment of the encoding, we need to add either a blank or
            // a newline sequence to the data
            if (!firstSegment) {
                if (foldSegments) {
                    out.append("\r\n");
                }
                else {
                    out.append(' ');
                }
            }
            // do the encoding of the segment.
            encoder.encodeWord(bytes, out, charset);
        }
    }
    /**
     * Encode a string into quoted printable encoding, taking into
     * account the maximum segment length.
     *
     * @param data      The string data to encode.
     * @param out       The output buffer used for the result.
     * @param sizeLimit The maximum amount of encoded data we're allowed
     *                  to have in a single encoded segment.
     * @param charset   The character set marker that needs to be added to the
     *                  encoding header.
     * @param encoder   The encoder instance we're using.
     * @param firstSegment
     *                  If true, this is the first (left-most) segment in the
     *                  data.  Used to determine if segment delimiters need to
     *                  be added between sections.
     * @param foldSegments
     *                  Indicates the type of delimiter to use (blank or newline sequence).
     * @param specials  The set of characters that must be escaped in this context.
     */
    static private void encodeQuotedPrintable(final String data, final StringBuffer out, final int sizeLimit, final String charset, final QuotedPrintableEncoder encoder,
            final boolean firstSegment, final boolean foldSegments, final String specials) throws IOException
    {
        // this needs to be converted into the appropriate transfer encoding.
        final byte [] bytes = data.getBytes(javaCharset(charset));

        final int estimatedSize = encoder.estimateEncodedLength(bytes, specials);

        // if the estimated encoding size is over our segment limit, split the string in half and
        // recurse.  Eventually we'll reach a point where things are small enough.
        if (estimatedSize > sizeLimit) {
            // the first segment indicator travels with the left half.
            encodeQuotedPrintable(data.substring(0, data.length() / 2), out, sizeLimit, charset, encoder, firstSegment, foldSegments, specials);
            // the second half can never be the first segment
            encodeQuotedPrintable(data.substring(data.length() / 2), out, sizeLimit, charset, encoder, false, foldSegments, specials);
        }
        else
        {
            // if this is not the first segment of the encoding, we need to add either a blank or
            // a newline sequence to the data
            if (!firstSegment) {
                if (foldSegments) {
                    out.append("\r\n");
                }
                else {
                    out.append(' ');
                }
            }
            // do the encoding of the segment.
            encoder.encodeWord(bytes, out, charset, specials);
        }
    }
/**
* Examine the content of a data source and decide what type
* of transfer encoding should be used. For text streams,
* we'll decided between 7bit, quoted-printable, and base64.
* For binary content types, we'll use either 7bit or base64.
*
* @param handler The DataHandler associated with the content.
*
* @return The string name of an encoding used to transfer the content.
*/
public static String getEncoding(final DataHandler handler) {
// if this handler has an associated data source, we can read directly from the
// data source to make this judgment. This is generally MUCH faster than asking the
// DataHandler to write out the data for us.
final DataSource ds = handler.getDataSource();
if (ds != null) {
return getEncoding(ds);
}
try {
// get a parser that allows us to make comparisons.
final ContentType content = new ContentType(handler.getContentType());
// The only access to the content bytes at this point is by asking the handler to write
// the information out to a stream. We're going to pipe this through a special stream
// that examines the bytes as they go by.
final ContentCheckingOutputStream checker = new ContentCheckingOutputStream();
handler.writeTo(checker);
// figure this out based on whether we believe this to be a text type or not.
if (content.match("text/*")) {
return checker.getTextTransferEncoding();
}
else {
return checker.getBinaryTransferEncoding();
}
} catch (final Exception e) {
// any unexpected I/O exceptions we'll force to a "safe" fallback position.
return "base64";
}
}
/**
* Determine the what transfer encoding should be used for
* data retrieved from a DataSource.
*
* @param source The DataSource for the transmitted data.
*
* @return The string name of the encoding form that should be used for
* the data.
*/
public static String getEncoding(final DataSource source) {
if(source instanceof EncodingAware) {
final String encoding = ((EncodingAware) source).getEncoding();
if(encoding != null) {
return encoding;
}
}
InputStream in = null;
try {
// get a parser that allows us to make comparisons.
final ContentType content = new ContentType(source.getContentType());
// we're probably going to have to scan the data.
in = source.getInputStream();
if (!content.match("text/*")) {
// Not purporting to be a text type? Examine the content to see we might be able to
// at least pretend it is an ascii type.
return ASCIIUtil.getBinaryTransferEncoding(in);
}
else {
return ASCIIUtil.getTextTransferEncoding(in);
}
} catch (final Exception e) {
// this was a problem...not sure what makes sense here, so we'll assume it's binary
// and we need to transfer this using Base64 encoding.
return "base64";
} finally {
// make sure we close the stream
try {
if (in != null) {
in.close();
}
} catch (final IOException e) {
}
}
}
/**
* Quote a "word" value. If the word contains any character from
* the specified "specials" list, this value is returned as a
* quoted strong. Otherwise, it is returned unchanged (an "atom").
*
* @param word The word requiring quoting.
* @param specials The set of special characters that can't appear in an unquoted
* string.
*
* @return The quoted value. This will be unchanged if the word doesn't contain
* any of the designated special characters.
*/
public static String quote(final String word, final String specials) {
final int wordLength = word.length();
// scan the string looking for problem characters
for (int i =0; i < wordLength; i++) {
final char ch = word.charAt(i);
// special escaped characters require escaping, which also implies quoting.
if (escapedChars.indexOf(ch) >= 0) {
return quoteAndEscapeString(word);
}
// now check for control characters or the designated special characters.
if (ch < 32 || ch >= 127 || specials.indexOf(ch) >= 0) {
// we know this requires quoting, but we still need to scan the entire string to
// see if contains chars that require escaping. Just go ahead and treat it as if it does.
return quoteAndEscapeString(word);
}
}
return word;
}
/**
* Take a string and return it as a formatted quoted string, with
* all characters requiring escaping handled properly.
*
* @param word The string to quote.
*
* @return The quoted string.
*/
private static String quoteAndEscapeString(final String word) {
final int wordLength = word.length();
// allocate at least enough for the string and two quotes plus a reasonable number of escaped chars.
final StringBuffer buffer = new StringBuffer(wordLength + 10);
// add the leading quote.
buffer.append('"');
for (int i = 0; i < wordLength; i++) {
final char ch = word.charAt(i);
// is this an escaped char?
if (escapedChars.indexOf(ch) >= 0) {
// add the escape marker before appending.
buffer.append('\\');
}
buffer.append(ch);
}
// now the closing quote
buffer.append('"');
return buffer.toString();
}
/**
* Translate a MIME standard character set name into the Java
* equivalent.
*
* @param charset The MIME standard name.
*
* @return The Java equivalent for this name.
*/
public static String javaCharset(final String charset) {
// nothing in, nothing out.
if (charset == null) {
return null;
}
final String mappedCharset = (String)mime2java.get(charset.toLowerCase());
// if there is no mapping, then the original name is used. Many of the MIME character set
// names map directly back into Java. The reverse isn't necessarily true.
return mappedCharset == null ? charset : mappedCharset;
}
/**
* Map a Java character set name into the MIME equivalent.
*
* @param charset The java character set name.
*
* @return The MIME standard equivalent for this character set name.
*/
public static String mimeCharset(final String charset) {
// nothing in, nothing out.
if (charset == null) {
return null;
}
final String mappedCharset = (String)java2mime.get(charset.toLowerCase());
// if there is no mapping, then the original name is used. Many of the MIME character set
// names map directly back into Java. The reverse isn't necessarily true.
return mappedCharset == null ? charset : mappedCharset;
}
/**
* Get the default character set to use, in Java name format.
* This either be the value set with the mail.mime.charset
* system property or obtained from the file.encoding system
* property. If neither of these is set, we fall back to
* 8859_1 (basically US-ASCII).
*
* @return The character string value of the default character set.
*/
public static String getDefaultJavaCharset() {
final String charset = SessionUtil.getProperty("mail.mime.charset");
if (charset != null) {
return javaCharset(charset);
}
return SessionUtil.getProperty("file.encoding", "8859_1");
}
/**
* Get the default character set to use, in MIME name format.
* This either be the value set with the mail.mime.charset
* system property or obtained from the file.encoding system
* property. If neither of these is set, we fall back to
* 8859_1 (basically US-ASCII).
*
* @return The character string value of the default character set.
*/
static String getDefaultMIMECharset() {
// if the property is specified, this can be used directly.
final String charset = SessionUtil.getProperty("mail.mime.charset");
if (charset != null) {
return charset;
}
// get the Java-defined default and map back to a MIME name.
return mimeCharset(SessionUtil.getProperty("file.encoding", "8859_1"));
}
    /**
     * Load the default mapping tables used by the javaCharset()
     * and mimeCharset() methods.  By default, these tables are
     * loaded from the /META-INF/javamail.charset.map file.  If
     * something goes wrong loading that file, we configure things
     * with a default mapping table (which just happens to mimic
     * what's in the default mapping file).
     */
    static private void loadCharacterSetMappings() {
        java2mime = new HashMap();
        mime2java = new HashMap();
        // normally, these come from a character map file contained in the jar file.
        // the file carries two sections:  java-to-MIME first, then MIME-to-java.
        try {
            final InputStream map = javax.mail.internet.MimeUtility.class.getResourceAsStream("/META-INF/javamail.charset.map");
            if (map != null) {
                // get a reader for this so we can load.
                // NOTE(review): neither the stream nor the reader is closed
                // here -- confirm whether a close is wanted for this one-shot
                // class-initialization read.
                final BufferedReader reader = new BufferedReader(new InputStreamReader(map));
                readMappings(reader, java2mime);
                readMappings(reader, mime2java);
            }
        } catch (final Exception e) {
            // resource loading problems are deliberately ignored; the
            // hardcoded defaults below keep the tables usable.
        }
        // if any sort of error occurred reading the preferred file version, we could end up with empty
        // mapping tables.  This could cause all sorts of difficulty, so ensure they are populated with at
        // least a reasonable set of defaults.
        // these mappings echo what's in the default file.
        if (java2mime.isEmpty()) {
            java2mime.put("8859_1", "ISO-8859-1");
            java2mime.put("iso8859_1", "ISO-8859-1");
            java2mime.put("iso8859-1", "ISO-8859-1");
            java2mime.put("8859_2", "ISO-8859-2");
            java2mime.put("iso8859_2", "ISO-8859-2");
            java2mime.put("iso8859-2", "ISO-8859-2");
            java2mime.put("8859_3", "ISO-8859-3");
            java2mime.put("iso8859_3", "ISO-8859-3");
            java2mime.put("iso8859-3", "ISO-8859-3");
            java2mime.put("8859_4", "ISO-8859-4");
            java2mime.put("iso8859_4", "ISO-8859-4");
            java2mime.put("iso8859-4", "ISO-8859-4");
            java2mime.put("8859_5", "ISO-8859-5");
            java2mime.put("iso8859_5", "ISO-8859-5");
            java2mime.put("iso8859-5", "ISO-8859-5");
            java2mime.put ("8859_6", "ISO-8859-6");
            java2mime.put("iso8859_6", "ISO-8859-6");
            java2mime.put("iso8859-6", "ISO-8859-6");
            java2mime.put("8859_7", "ISO-8859-7");
            java2mime.put("iso8859_7", "ISO-8859-7");
            java2mime.put("iso8859-7", "ISO-8859-7");
            java2mime.put("8859_8", "ISO-8859-8");
            java2mime.put("iso8859_8", "ISO-8859-8");
            java2mime.put("iso8859-8", "ISO-8859-8");
            java2mime.put("8859_9", "ISO-8859-9");
            java2mime.put("iso8859_9", "ISO-8859-9");
            java2mime.put("iso8859-9", "ISO-8859-9");
            java2mime.put("sjis", "Shift_JIS");
            java2mime.put ("jis", "ISO-2022-JP");
            java2mime.put("iso2022jp", "ISO-2022-JP");
            java2mime.put("euc_jp", "euc-jp");
            java2mime.put("koi8_r", "koi8-r");
            java2mime.put("euc_cn", "euc-cn");
            java2mime.put("euc_tw", "euc-tw");
            java2mime.put("euc_kr", "euc-kr");
        }
        if (mime2java.isEmpty ()) {
            mime2java.put("iso-2022-cn", "ISO2022CN");
            mime2java.put("iso-2022-kr", "ISO2022KR");
            mime2java.put("utf-8", "UTF8");
            mime2java.put("utf8", "UTF8");
            mime2java.put("ja_jp.iso2022-7", "ISO2022JP");
            mime2java.put("ja_jp.eucjp", "EUCJIS");
            mime2java.put ("euc-kr", "KSC5601");
            mime2java.put("euckr", "KSC5601");
            mime2java.put("us-ascii", "ISO-8859-1");
            mime2java.put("x-us-ascii", "ISO-8859-1");
        }
    }
/**
* Read a section of a character map table and populate the
* target mapping table with the information. The table end
* is marked by a line starting with "--" and also ending with
* "--". Blank lines and comment lines (beginning with '#') are
* ignored.
*
* @param reader The source of the file information.
* @param table The mapping table used to store the information.
*/
static private void readMappings(final BufferedReader reader, final Map table) throws IOException {
// process lines to the EOF or the end of table marker.
while (true) {
String line = reader.readLine();
// no line returned is an EOF
if (line == null) {
return;
}
// trim so we're not messed up by trailing blanks
line = line.trim();
if (line.length() == 0 || line.startsWith("#")) {
continue;
}
// stop processing if this is the end-of-table marker.
if (line.startsWith("--") && line.endsWith("--")) {
return;
}
// we allow either blanks or tabs as token delimiters.
final StringTokenizer tokenizer = new StringTokenizer(line, " \t");
try {
final String from = tokenizer.nextToken().toLowerCase();
final String to = tokenizer.nextToken();
table.put(from, to);
} catch (final NoSuchElementException e) {
// just ignore the line if invalid.
}
}
}
    /**
     * Perform RFC 2047 text folding on a string of text.
     *
     * @param used The amount of text already "used up" on this line.  This is
     *             typically the length of a message header that this text
     *             is getting added to.
     * @param s    The text to fold.
     *
     * @return The input text, with linebreaks inserted at appropriate fold points.
     */
    public static String fold(int used, String s) {
        // if folding is disabled, unfolding is also.  Return the string unchanged.
        if (!SessionUtil.getBooleanProperty(MIME_FOLDTEXT, true)) {
            return s;
        }
        int end;
        // now we need to strip off any trailing "whitespace", where whitespace is blanks, tabs,
        // and line break characters.
        // NOTE(review): only blanks and tabs are actually tested below, so
        // trailing line break characters are NOT stripped despite the comment
        // above -- confirm whether that is intended.
        for (end = s.length() - 1; end >= 0; end--) {
            final int ch = s.charAt(end);
            if (ch != ' ' && ch != '\t' ) {
                break;
            }
        }
        // did we actually find something to remove?  Shorten the String to the trimmed length
        if (end != s.length() - 1) {
            s = s.substring(0, end + 1);
        }
        // if the string plus the already-used prefix fits on one line, no folding is needed.
        if (s.length() + used <= FOLD_THRESHOLD) {
            return s;
        }
        // get a buffer for the length of the string, plus room for a few line breaks.
        // these are soft line breaks, so we generally need more than just the line breaks (an escape +
        // CR + LF + leading space on next line);
        final StringBuffer newString = new StringBuffer(s.length() + 8);
        // now keep chopping this down until we've accomplished what we need.
        while (used + s.length() > FOLD_THRESHOLD) {
            int breakPoint = -1;
            char breakChar = 0;
            // now scan for the next place where we can break.
            for (int i = 0; i < s.length(); i++) {
                // have we passed the fold limit?
                if (used + i > FOLD_THRESHOLD) {
                    // if we've already seen a blank, then stop now.  Otherwise
                    // we keep going until we hit a fold point.
                    if (breakPoint != -1) {
                        break;
                    }
                }
                char ch = s.charAt(i);
                // a white space character?
                if (ch == ' ' || ch == '\t') {
                    // this might be a run of white space, so skip over those now.
                    breakPoint = i;
                    // we need to maintain the same character type after the inserted linebreak.
                    breakChar = ch;
                    i++;
                    while (i < s.length()) {
                        ch = s.charAt(i);
                        if (ch != ' ' && ch != '\t') {
                            break;
                        }
                        i++;
                    }
                }
                // found an embedded new line.  Escape this so that the unfolding process preserves it.
                else if (ch == '\n') {
                    newString.append('\\');
                    newString.append('\n');
                }
                else if (ch == '\r') {
                    // NOTE(review): this escapes a bare CR as backslash + '\n'
                    // rather than backslash + '\r'; it looks like '\r' may have
                    // been intended -- confirm against unfold()'s escape handling.
                    newString.append('\\');
                    newString.append('\n');
                    i++;
                    // if this is a CRLF pair, add the second char also
                    if (i < s.length() && s.charAt(i) == '\n') {
                        newString.append('\r');
                    }
                }
            }
            // no fold point found, we punt, append the remainder and leave.
            if (breakPoint == -1) {
                newString.append(s);
                return newString.toString();
            }
            newString.append(s.substring(0, breakPoint));
            newString.append("\r\n");
            newString.append(breakChar);
            // chop the string
            s = s.substring(breakPoint + 1);
            // start again, and we've used the first char of the limit already with the whitespace char.
            used = 1;
        }
        // add on the remainder, and return
        newString.append(s);
        return newString.toString();
    }
    /**
     * Unfold a folded string.  The unfolding process will remove
     * any line breaks that are not escaped and which are also followed
     * by whitespace characters.
     *
     * @param s The folded string.
     *
     * @return A new string with unfolding rules applied.
     */
    public static String unfold(final String s) {
        // if folding is disabled, unfolding is also.  Return the string unchanged.
        if (!SessionUtil.getBooleanProperty(MIME_FOLDTEXT, true)) {
            return s;
        }
        // if there are no line break characters in the string, we can just return this.
        if (s.indexOf('\n') < 0 && s.indexOf('\r') < 0) {
            return s;
        }
        // we need to scan and fix things up.
        final int length = s.length();
        final StringBuffer newString = new StringBuffer(length);
        // scan the entire string
        for (int i = 0; i < length; i++) {
            final char ch = s.charAt(i);
            // we have a backslash.  In folded strings, escape characters are only processed as such if
            // they precede line breaks.  Otherwise, we leave it be.
            if (ch == '\\') {
                // escape at the very end?  Just add the character.
                if (i == length - 1) {
                    newString.append(ch);
                }
                else {
                    final int nextChar = s.charAt(i + 1);
                    // naked newline?  Add the new line to the buffer, and skip the escape char.
                    if (nextChar == '\n') {
                        newString.append('\n');
                        i++;
                    }
                    else if (nextChar == '\r') {
                        // just the CR left?  Add it, removing the escape.
                        // NOTE(review): the test below looks for a second '\r'
                        // at i + 2, yet the else branch treats the sequence as
                        // a CRLF pair; '\n' seems the intended comparison --
                        // confirm against fold()'s escape output.
                        if (i == length - 2 || s.charAt(i + 2) != '\r') {
                            newString.append('\r');
                            i++;
                        }
                        else {
                            // toss the escape, add both parts of the CRLF, and skip over two chars.
                            newString.append('\r');
                            newString.append('\n');
                            i += 2;
                        }
                    }
                    else {
                        // an escape for another purpose, just copy it over.
                        newString.append(ch);
                    }
                }
            }
            // we have an unescaped line break
            else if (ch == '\n' || ch == '\r') {
                // flag for whether this break turned out to be a full CRLF sequence.
                boolean CRLF = false;
                if (ch == '\r') {
                    // check to see if we need to step over this.
                    if (i < length - 1 && s.charAt(i + 1) == '\n') {
                        i++;
                        // flag the type so we know what we might need to preserve.
                        CRLF = true;
                    }
                }
                // get a temp position scanner.
                final int scan = i + 1;
                // does a blank follow this new line?  we need to scrap the new line and reduce the leading blanks
                // down to a single blank.
                if (scan < length && s.charAt(scan) == ' ') {
                    // add the character
                    newString.append(' ');
                    // scan over the rest of the blanks
                    i = scan + 1;
                    while (i < length && s.charAt(i) == ' ') {
                        i++;
                    }
                    // we'll increment down below, so back up to the last blank as the current char.
                    i--;
                }
                else {
                    // we must keep this line break.  Append the appropriate style.
                    if (CRLF) {
                        newString.append("\r\n");
                    }
                    else {
                        newString.append(ch);
                    }
                }
            }
            else {
                // just a normal, ordinary character
                newString.append(ch);
            }
        }
        return newString.toString();
    }
}
/**
 * Utility output stream for examining content written out by a
 * DataHandler object.  As the bytes pass through, it gathers the
 * statistics needed to choose a transfer encoding.
 */
class ContentCheckingOutputStream extends OutputStream {
    // counts of ascii vs. non-ascii bytes seen so far.
    private int asciiCount = 0;
    private int nonAsciiCount = 0;
    // true once any line longer than the 998-character limit has been seen.
    private boolean longLineSeen = false;
    // true once a '\n' not preceded by '\r' has been seen.
    private boolean badLineBreakSeen = false;
    // the previous byte written, used to validate CRLF pairing.
    private int lastChar = 0;
    // length of the line currently being accumulated.
    private int lineLength = 0;
    ContentCheckingOutputStream() {
    }
    @Override
    public void write(final byte[] data) throws IOException {
        write(data, 0, data.length);
    }
    @Override
    public void write(final byte[] data, final int offset, final int length) throws IOException {
        final int end = offset + length;
        // funnel everything through the single-byte scanner.
        for (int i = offset; i < end; i++) {
            write(data[i]);
        }
    }
    @Override
    public void write(final int ch) {
        if (ch == '\n' || ch == '\r') {
            // a '\n' is only well formed when the preceding byte was '\r'.
            if (ch == '\n' && lastChar != '\r') {
                badLineBreakSeen = true;
            }
            // either line break character ends the current line.
            lineLength = 0;
        }
        else {
            lineLength++;
            // overly long lines can't be transferred as unencoded text.
            if (lineLength > 998) {
                longLineSeen = true;
            }
            // tally ascii vs. non-ascii content.
            if (ASCIIUtil.isAscii(ch)) {
                asciiCount++;
            }
            else {
                nonAsciiCount++;
            }
        }
        lastChar = ch;
    }
    public String getBinaryTransferEncoding() {
        // any non-ascii byte, long line, or malformed line break forces base64.
        if (nonAsciiCount != 0 || longLineSeen || badLineBreakSeen) {
            return "base64";
        }
        return "7bit";
    }
    public String getTextTransferEncoding() {
        if (nonAsciiCount == 0) {
            // pure ascii only needs encoding when lines are too long, and
            // quoted-printable handles the folding with minimal overhead.
            return longLineSeen ? "quoted-printable" : "7bit";
        }
        // mostly characters requiring encoding?  base64 is more compact;
        // otherwise quoted-printable uses fewer bytes.
        return nonAsciiCount > asciiCount ? "base64" : "quoted-printable";
    }
}
| {
"content_hash": "d8aae2a3fe84eb5606b9a25fb9cef97f",
"timestamp": "",
"source": "github",
"line_count": 1376,
"max_line_length": 226,
"avg_line_length": 40.40406976744186,
"alnum_prop": 0.5406324195985323,
"repo_name": "apache/geronimo-specs",
"id": "32d2b6363f879a74240f7c57303e7492c69543e3",
"size": "56402",
"binary": false,
"copies": "4",
"ref": "refs/heads/trunk",
"path": "geronimo-javamail_1.5_spec/src/main/java/javax/mail/internet/MimeUtility.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "9144"
},
{
"name": "Java",
"bytes": "9975580"
},
{
"name": "Shell",
"bytes": "13639"
}
],
"symlink_target": ""
} |
module MiniAppStudy
  # One answer option belonging to a study question.  Carries the answer
  # text, its display index, and a result flag, and records which users
  # selected it through the answer_users join table.
  class AnswerModel < ActiveRecord::Base
    attr_accessible :answer, :question_id, :result, :index
    # The owning question; the *Model class naming requires the explicit
    # class_name/foreign_key options.
    belongs_to :question, :class_name => "QuestionModel", :foreign_key => "question_id"
    # Join records linking users to this answer.
    has_many :answer_users, :foreign_key => "answer_id"
    # Users who chose this answer, resolved through the join table.
    has_many :users, :through => :answer_users, :source => :user
  end
end
| {
"content_hash": "844d73d97b7722623bbd74a0dc24d321",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 87,
"avg_line_length": 33.9,
"alnum_prop": 0.6843657817109144,
"repo_name": "jimrok/mini_app_study",
"id": "9731b453fc917169d4675f96227b97184e25f080",
"size": "339",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/models/mini_app_study/answer_model.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "955"
},
{
"name": "JavaScript",
"bytes": "5399"
},
{
"name": "Ruby",
"bytes": "11387"
}
],
"symlink_target": ""
} |
ZTRESTConnection
================
A lightweight block-based REST library for Objective-C | {
"content_hash": "f811a820d141d1fce212ae756c8f2cac",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 48,
"avg_line_length": 20.75,
"alnum_prop": 0.6746987951807228,
"repo_name": "Nub/ZTRESTConnection",
"id": "23d042fe498db467d7a60e7e91f224d520c141f4",
"size": "83",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "30730"
},
{
"name": "Ruby",
"bytes": "467"
}
],
"symlink_target": ""
} |
How to install Simple Chat?
1. Copy Web dir to your "document root" directory.
2. Set up the database params in the app/Configurations/database.json
3. Set up the default language, and enable URL rewriting (for "beautiful routes") if mod_rewrite is installed.
4. Import the structure of the tables in the database.
| {
"content_hash": "2275490ca356f106ae053c6a204cde5e",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 99,
"avg_line_length": 50.833333333333336,
"alnum_prop": 0.7868852459016393,
"repo_name": "HiNeX/SimpleChat",
"id": "79a04f7c9a8028cbe34694eb60c6d55083c2c7be",
"size": "320",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "18436"
},
{
"name": "JavaScript",
"bytes": "88"
},
{
"name": "PHP",
"bytes": "32651"
}
],
"symlink_target": ""
} |
'use strict';
Meteor.methods({
  // Create or update a customer payment transaction.
  //   doc - the payment document on insert, or a mongo modifier
  //         (with $set) on update
  //   id  - optional _id of an existing transaction to update
  payment: function (doc, id) {
    check(doc.$set || doc, app.paymentSchema);
    check(id, Match.Optional(String));
    if(!Meteor.userId()) {
      throw new Meteor.Error('not-authorized');
    }
    // stamp the document with the calling user.
    _.extend(doc.$set || doc, { user: Meteor.userId() });
    if(id) {
      db.transactions.update(id, doc);
      // unwrap the modifier so the balance update below sees plain fields.
      // NOTE(review): assumes updates always arrive as a {$set: ...}
      // modifier; a plain doc plus an id would leave `doc` undefined
      // here -- confirm with callers.
      doc = doc.$set;
    }
    else {
      // new payments start out undeposited, parked in Undeposited Funds.
      _.extend(doc, {
        type: 'Payment',
        deposited: false,
        account: db.accounts.findOne({ name: 'Undeposited Funds' }, { fields: { _id: 1 } })._id,
      });
      db.transactions.insert(doc);
    }
    if(Meteor.isServer) {
      // recompute the customer's running balance (server only).
      db.customers.update(doc.customer, {
        $set: { balance: app.calculateCustomerBalance(doc.customer) },
      });
    }
  },
});
| {
"content_hash": "2ed2c8a063cc32935fca9a6ba5e7dff5",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 92,
"avg_line_length": 21.363636363636363,
"alnum_prop": 0.5971631205673759,
"repo_name": "ja-mes/experiments",
"id": "8589b206f7951d4efceb3361b2da56325a8f225b",
"size": "705",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Old/Random/old-rent-meteor/transactions/payments/lib/payment.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "22389"
},
{
"name": "CoffeeScript",
"bytes": "3456"
},
{
"name": "HTML",
"bytes": "180454"
},
{
"name": "JavaScript",
"bytes": "161865"
},
{
"name": "Objective-C",
"bytes": "80018"
},
{
"name": "Ruby",
"bytes": "468722"
},
{
"name": "Shell",
"bytes": "34283"
},
{
"name": "Swift",
"bytes": "774937"
}
],
"symlink_target": ""
} |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.api.ads.admanager.jaxws.v202202;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
 * <p>Java class for CreativePreviewError.Reason.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 * <p>
 * <pre>
 * &lt;simpleType name="CreativePreviewError.Reason">
 *   &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string">
 *     &lt;enumeration value="CANNOT_GENERATE_PREVIEW_URL"/>
 *     &lt;enumeration value="CANNOT_GENERATE_PREVIEW_URL_FOR_NATIVE_CREATIVES"/>
 *     &lt;enumeration value="HTML_SNIPPET_REQUIRED_FOR_THIRD_PARTY_CREATIVE"/>
 *     &lt;enumeration value="UNKNOWN"/>
 *   &lt;/restriction>
 * &lt;/simpleType>
 * </pre>
 *
 */
@XmlType(name = "CreativePreviewError.Reason")
@XmlEnum
public enum CreativePreviewErrorReason {
    /**
     *
     * The creative cannot be previewed on this page.
     *
     *
     */
    CANNOT_GENERATE_PREVIEW_URL,
    /**
     *
     * Preview URLs for native creatives must be retrieved with {@link
     * LineItemCreativeAssociationService#getPreviewUrlsForNativeStyles}.
     *
     *
     */
    CANNOT_GENERATE_PREVIEW_URL_FOR_NATIVE_CREATIVES,
    /**
     *
     * Third party creatives must have an html snippet set in order to obtain a preview URL.
     *
     *
     */
    HTML_SNIPPET_REQUIRED_FOR_THIRD_PARTY_CREATIVE,
    /**
     *
     * The value returned if the actual value is not exposed by the requested API version.
     *
     *
     */
    UNKNOWN;
    /**
     * Returns the XML string value for this constant, which is
     * identical to the enum constant name.
     */
    public String value() {
        return name();
    }
    /**
     * Looks up the enum constant matching the given XML string value.
     *
     * @param v the XML value (the constant name)
     * @return the matching constant
     * @throws IllegalArgumentException if no constant matches
     */
    public static CreativePreviewErrorReason fromValue(String v) {
        return valueOf(v);
    }
}
| {
"content_hash": "9120e476bc078da17ee335309cecd902",
"timestamp": "",
"source": "github",
"line_count": 85,
"max_line_length": 108,
"avg_line_length": 28.68235294117647,
"alnum_prop": 0.6369975389663659,
"repo_name": "googleads/googleads-java-lib",
"id": "93d509a5e95efe5198a1e6c325db78ceaf61bba6",
"size": "2438",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202202/CreativePreviewErrorReason.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "81068791"
}
],
"symlink_target": ""
} |
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_HEAP_HEAP_H_
#define V8_HEAP_HEAP_H_
#include <cmath>
#include <map>
// Clients of this interface shouldn't depend on lots of heap internals.
// Do not include anything from src/heap here!
#include "include/v8.h"
#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/atomic-utils.h"
#include "src/globals.h"
#include "src/heap-symbols.h"
// TODO(mstarzinger): Two more includes to kill!
#include "src/heap/spaces.h"
#include "src/heap/store-buffer.h"
#include "src/list.h"
namespace v8 {
namespace internal {
using v8::MemoryPressureLevel;
// Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V) \
V(Map, byte_array_map, ByteArrayMap) \
V(Map, free_space_map, FreeSpaceMap) \
V(Map, one_pointer_filler_map, OnePointerFillerMap) \
V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
/* Cluster the most popular ones in a few cache lines here at the top. */ \
V(Oddball, uninitialized_value, UninitializedValue) \
V(Oddball, undefined_value, UndefinedValue) \
V(Map, cell_map, CellMap) \
V(Oddball, null_value, NullValue) \
V(Oddball, true_value, TrueValue) \
V(Oddball, false_value, FalseValue) \
V(String, empty_string, empty_string) \
V(Oddball, the_hole_value, TheHoleValue) \
V(Map, global_property_cell_map, GlobalPropertyCellMap) \
V(Map, shared_function_info_map, SharedFunctionInfoMap) \
V(Map, meta_map, MetaMap) \
V(Map, heap_number_map, HeapNumberMap) \
V(Map, mutable_heap_number_map, MutableHeapNumberMap) \
V(Map, float32x4_map, Float32x4Map) \
V(Map, int32x4_map, Int32x4Map) \
V(Map, uint32x4_map, Uint32x4Map) \
V(Map, bool32x4_map, Bool32x4Map) \
V(Map, int16x8_map, Int16x8Map) \
V(Map, uint16x8_map, Uint16x8Map) \
V(Map, bool16x8_map, Bool16x8Map) \
V(Map, int8x16_map, Int8x16Map) \
V(Map, uint8x16_map, Uint8x16Map) \
V(Map, bool8x16_map, Bool8x16Map) \
V(Map, native_context_map, NativeContextMap) \
V(Map, fixed_array_map, FixedArrayMap) \
V(Map, code_map, CodeMap) \
V(Map, scope_info_map, ScopeInfoMap) \
V(Map, fixed_cow_array_map, FixedCOWArrayMap) \
V(Map, fixed_double_array_map, FixedDoubleArrayMap) \
V(Map, weak_cell_map, WeakCellMap) \
V(Map, transition_array_map, TransitionArrayMap) \
V(Map, one_byte_string_map, OneByteStringMap) \
V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap) \
V(Map, function_context_map, FunctionContextMap) \
V(FixedArray, empty_fixed_array, EmptyFixedArray) \
V(ByteArray, empty_byte_array, EmptyByteArray) \
V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
/* The roots above this line should be boring from a GC point of view. */ \
/* This means they are never in new space and never on a page that is */ \
/* being compacted. */ \
V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
V(Oddball, arguments_marker, ArgumentsMarker) \
V(Oddball, exception, Exception) \
V(Oddball, termination_exception, TerminationException) \
V(Oddball, optimized_out, OptimizedOut) \
V(FixedArray, number_string_cache, NumberStringCache) \
V(Object, instanceof_cache_function, InstanceofCacheFunction) \
V(Object, instanceof_cache_map, InstanceofCacheMap) \
V(Object, instanceof_cache_answer, InstanceofCacheAnswer) \
V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
V(FixedArray, string_split_cache, StringSplitCache) \
V(FixedArray, regexp_multiple_cache, RegExpMultipleCache) \
V(Smi, hash_seed, HashSeed) \
V(Map, hash_table_map, HashTableMap) \
V(Map, ordered_hash_table_map, OrderedHashTableMap) \
V(Map, symbol_map, SymbolMap) \
V(Map, string_map, StringMap) \
V(Map, cons_one_byte_string_map, ConsOneByteStringMap) \
V(Map, cons_string_map, ConsStringMap) \
V(Map, sliced_string_map, SlicedStringMap) \
V(Map, sliced_one_byte_string_map, SlicedOneByteStringMap) \
V(Map, external_string_map, ExternalStringMap) \
V(Map, external_string_with_one_byte_data_map, \
ExternalStringWithOneByteDataMap) \
V(Map, external_one_byte_string_map, ExternalOneByteStringMap) \
V(Map, native_source_string_map, NativeSourceStringMap) \
V(Map, short_external_string_map, ShortExternalStringMap) \
V(Map, short_external_string_with_one_byte_data_map, \
ShortExternalStringWithOneByteDataMap) \
V(Map, internalized_string_map, InternalizedStringMap) \
V(Map, external_internalized_string_map, ExternalInternalizedStringMap) \
V(Map, external_internalized_string_with_one_byte_data_map, \
ExternalInternalizedStringWithOneByteDataMap) \
V(Map, external_one_byte_internalized_string_map, \
ExternalOneByteInternalizedStringMap) \
V(Map, short_external_internalized_string_map, \
ShortExternalInternalizedStringMap) \
V(Map, short_external_internalized_string_with_one_byte_data_map, \
ShortExternalInternalizedStringWithOneByteDataMap) \
V(Map, short_external_one_byte_internalized_string_map, \
ShortExternalOneByteInternalizedStringMap) \
V(Map, short_external_one_byte_string_map, ShortExternalOneByteStringMap) \
V(Map, fixed_uint8_array_map, FixedUint8ArrayMap) \
V(Map, fixed_int8_array_map, FixedInt8ArrayMap) \
V(Map, fixed_uint16_array_map, FixedUint16ArrayMap) \
V(Map, fixed_int16_array_map, FixedInt16ArrayMap) \
V(Map, fixed_uint32_array_map, FixedUint32ArrayMap) \
V(Map, fixed_int32_array_map, FixedInt32ArrayMap) \
V(Map, fixed_float32_array_map, FixedFloat32ArrayMap) \
V(Map, fixed_float64_array_map, FixedFloat64ArrayMap) \
V(Map, fixed_uint8_clamped_array_map, FixedUint8ClampedArrayMap) \
V(FixedTypedArrayBase, empty_fixed_uint8_array, EmptyFixedUint8Array) \
V(FixedTypedArrayBase, empty_fixed_int8_array, EmptyFixedInt8Array) \
V(FixedTypedArrayBase, empty_fixed_uint16_array, EmptyFixedUint16Array) \
V(FixedTypedArrayBase, empty_fixed_int16_array, EmptyFixedInt16Array) \
V(FixedTypedArrayBase, empty_fixed_uint32_array, EmptyFixedUint32Array) \
V(FixedTypedArrayBase, empty_fixed_int32_array, EmptyFixedInt32Array) \
V(FixedTypedArrayBase, empty_fixed_float32_array, EmptyFixedFloat32Array) \
V(FixedTypedArrayBase, empty_fixed_float64_array, EmptyFixedFloat64Array) \
V(FixedTypedArrayBase, empty_fixed_uint8_clamped_array, \
EmptyFixedUint8ClampedArray) \
V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap) \
V(Map, catch_context_map, CatchContextMap) \
V(Map, with_context_map, WithContextMap) \
V(Map, debug_evaluate_context_map, DebugEvaluateContextMap) \
V(Map, block_context_map, BlockContextMap) \
V(Map, module_context_map, ModuleContextMap) \
V(Map, script_context_map, ScriptContextMap) \
V(Map, script_context_table_map, ScriptContextTableMap) \
V(Map, undefined_map, UndefinedMap) \
V(Map, the_hole_map, TheHoleMap) \
V(Map, null_map, NullMap) \
V(Map, boolean_map, BooleanMap) \
V(Map, uninitialized_map, UninitializedMap) \
V(Map, arguments_marker_map, ArgumentsMarkerMap) \
V(Map, no_interceptor_result_sentinel_map, NoInterceptorResultSentinelMap) \
V(Map, exception_map, ExceptionMap) \
V(Map, termination_exception_map, TerminationExceptionMap) \
V(Map, optimized_out_map, OptimizedOutMap) \
V(Map, message_object_map, JSMessageObjectMap) \
V(Map, foreign_map, ForeignMap) \
V(Map, neander_map, NeanderMap) \
V(Map, external_map, ExternalMap) \
V(HeapNumber, nan_value, NanValue) \
V(HeapNumber, infinity_value, InfinityValue) \
V(HeapNumber, minus_zero_value, MinusZeroValue) \
V(HeapNumber, minus_infinity_value, MinusInfinityValue) \
V(JSObject, message_listeners, MessageListeners) \
V(UnseededNumberDictionary, code_stubs, CodeStubs) \
V(UnseededNumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \
V(PolymorphicCodeCache, polymorphic_code_cache, PolymorphicCodeCache) \
V(Code, js_entry_code, JsEntryCode) \
V(Code, js_construct_entry_code, JsConstructEntryCode) \
V(FixedArray, natives_source_cache, NativesSourceCache) \
V(FixedArray, experimental_natives_source_cache, \
ExperimentalNativesSourceCache) \
V(FixedArray, extra_natives_source_cache, ExtraNativesSourceCache) \
V(FixedArray, experimental_extra_natives_source_cache, \
ExperimentalExtraNativesSourceCache) \
V(Script, empty_script, EmptyScript) \
V(NameDictionary, intrinsic_function_names, IntrinsicFunctionNames) \
V(NameDictionary, empty_properties_dictionary, EmptyPropertiesDictionary) \
V(Cell, undefined_cell, UndefinedCell) \
V(JSObject, observation_state, ObservationState) \
V(Object, symbol_registry, SymbolRegistry) \
V(Object, script_list, ScriptList) \
V(SeededNumberDictionary, empty_slow_element_dictionary, \
EmptySlowElementDictionary) \
V(FixedArray, materialized_objects, MaterializedObjects) \
V(FixedArray, microtask_queue, MicrotaskQueue) \
V(TypeFeedbackVector, dummy_vector, DummyVector) \
V(FixedArray, cleared_optimized_code_map, ClearedOptimizedCodeMap) \
V(FixedArray, detached_contexts, DetachedContexts) \
V(ArrayList, retained_maps, RetainedMaps) \
V(WeakHashTable, weak_object_to_code_table, WeakObjectToCodeTable) \
V(PropertyCell, array_protector, ArrayProtector) \
V(PropertyCell, empty_property_cell, EmptyPropertyCell) \
V(Object, weak_stack_trace_list, WeakStackTraceList) \
V(Object, noscript_shared_function_infos, NoScriptSharedFunctionInfos) \
V(Map, bytecode_array_map, BytecodeArrayMap) \
V(WeakCell, empty_weak_cell, EmptyWeakCell) \
V(PropertyCell, species_protector, SpeciesProtector)
// Entries in this list are limited to Smis and are not visited during GC.
#define SMI_ROOT_LIST(V) \
V(Smi, stack_limit, StackLimit) \
V(Smi, real_stack_limit, RealStackLimit) \
V(Smi, last_script_id, LastScriptId) \
V(Smi, arguments_adaptor_deopt_pc_offset, ArgumentsAdaptorDeoptPCOffset) \
V(Smi, construct_stub_deopt_pc_offset, ConstructStubDeoptPCOffset) \
V(Smi, getter_stub_deopt_pc_offset, GetterStubDeoptPCOffset) \
V(Smi, setter_stub_deopt_pc_offset, SetterStubDeoptPCOffset)
#define ROOT_LIST(V) \
STRONG_ROOT_LIST(V) \
SMI_ROOT_LIST(V) \
V(StringTable, string_table, StringTable)
// Heap roots that are known to be immortal immovable, for which we can safely
// skip write barriers. This list is not complete and has omissions.
#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
V(ByteArrayMap) \
V(BytecodeArrayMap) \
V(FreeSpaceMap) \
V(OnePointerFillerMap) \
V(TwoPointerFillerMap) \
V(UndefinedValue) \
V(TheHoleValue) \
V(NullValue) \
V(TrueValue) \
V(FalseValue) \
V(UninitializedValue) \
V(CellMap) \
V(GlobalPropertyCellMap) \
V(SharedFunctionInfoMap) \
V(MetaMap) \
V(HeapNumberMap) \
V(MutableHeapNumberMap) \
V(Float32x4Map) \
V(Int32x4Map) \
V(Uint32x4Map) \
V(Bool32x4Map) \
V(Int16x8Map) \
V(Uint16x8Map) \
V(Bool16x8Map) \
V(Int8x16Map) \
V(Uint8x16Map) \
V(Bool8x16Map) \
V(NativeContextMap) \
V(FixedArrayMap) \
V(CodeMap) \
V(ScopeInfoMap) \
V(FixedCOWArrayMap) \
V(FixedDoubleArrayMap) \
V(WeakCellMap) \
V(TransitionArrayMap) \
V(NoInterceptorResultSentinel) \
V(HashTableMap) \
V(OrderedHashTableMap) \
V(EmptyFixedArray) \
V(EmptyByteArray) \
V(EmptyDescriptorArray) \
V(ArgumentsMarker) \
V(SymbolMap) \
V(SloppyArgumentsElementsMap) \
V(FunctionContextMap) \
V(CatchContextMap) \
V(WithContextMap) \
V(BlockContextMap) \
V(ModuleContextMap) \
V(ScriptContextMap) \
V(UndefinedMap) \
V(TheHoleMap) \
V(NullMap) \
V(BooleanMap) \
V(UninitializedMap) \
V(ArgumentsMarkerMap) \
V(JSMessageObjectMap) \
V(ForeignMap) \
V(NeanderMap) \
V(NanValue) \
V(InfinityValue) \
V(MinusZeroValue) \
V(MinusInfinityValue) \
V(EmptyWeakCell) \
V(empty_string) \
PRIVATE_SYMBOL_LIST(V)
// Forward declarations.
class AllocationObserver;
class ArrayBufferTracker;
class GCIdleTimeAction;
class GCIdleTimeHandler;
class GCIdleTimeHeapState;
class GCTracer;
class HeapObjectsFilter;
class HeapStats;
class HistogramTimer;
class Isolate;
class MemoryReducer;
class ObjectStats;
class Scavenger;
class ScavengeJob;
class WeakObjectRetainer;
typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);
// A queue of objects promoted during scavenge. Each object is accompanied
// by its size to avoid dereferencing a map pointer for scanning.
// The last page in to-space is used for the promotion queue. On conflict
// during scavenge, the promotion queue is allocated externally and all
// entries are copied to the external queue.
class PromotionQueue {
 public:
  // The queue starts out empty; Initialize() must be called before use.
  explicit PromotionQueue(Heap* heap)
      : front_(NULL),
        rear_(NULL),
        limit_(NULL),
        emergency_stack_(0),
        heap_(heap) {}
  // Places the (empty) queue in to-space; defined out of line.
  void Initialize();
  // Releases the emergency stack. The queue must already be empty.
  void Destroy() {
    DCHECK(is_empty());
    delete emergency_stack_;
    emergency_stack_ = NULL;
  }
  // Returns the to-space page currently holding the queue head. rear_
  // doubles as that page's allocation top (see Page::FromAllocationTop).
  Page* GetHeadPage() {
    return Page::FromAllocationTop(reinterpret_cast<Address>(rear_));
  }
  // Informs the queue of a new to-space allocation limit. If allocating up
  // to that limit could overwrite queue entries on the head page, the head
  // is relocated (onto the emergency stack) to make room.
  void SetNewLimit(Address limit) {
    // If we are already using an emergency stack, we can ignore it.
    if (emergency_stack_) return;
    // If the limit is not on the same page, we can ignore it.
    if (Page::FromAllocationTop(limit) != GetHeadPage()) return;
    limit_ = reinterpret_cast<struct Entry*>(limit);
    // Entries occupy addresses at or above rear_ (see the valid-range
    // assertion in remove()), so a limit at or below rear_ cannot reach
    // them and nothing needs to move.
    if (limit_ <= rear_) {
      return;
    }
    RelocateQueueHead();
  }
  // Returns true if allocation up to to_space_top cannot interfere with
  // any queue entries.
  bool IsBelowPromotionQueue(Address to_space_top) {
    // If an emergency stack is used, the to-space address cannot interfere
    // with the promotion queue.
    if (emergency_stack_) return true;
    // If the given to-space top pointer and the head of the promotion queue
    // are not on the same page, then the to-space objects are below the
    // promotion queue.
    if (GetHeadPage() != Page::FromAddress(to_space_top)) {
      return true;
    }
    // If the to space top pointer is smaller or equal than the promotion
    // queue head, then the to-space objects are below the promotion queue.
    return reinterpret_cast<struct Entry*>(to_space_top) <= rear_;
  }
  // True when both the in-page queue and the emergency stack are empty.
  bool is_empty() {
    return (front_ == rear_) &&
           (emergency_stack_ == NULL || emergency_stack_->length() == 0);
  }
  // Pushes an entry; defined out of line (inline in the heap internals).
  inline void insert(HeapObject* target, int32_t size, bool was_marked_black);
  // Pops one entry into the out parameters. Once the in-page queue is
  // drained (front_ == rear_), entries come from the emergency stack.
  void remove(HeapObject** target, int32_t* size, bool* was_marked_black) {
    DCHECK(!is_empty());
    if (front_ == rear_) {
      // In-page queue exhausted; fall back to the emergency stack.
      Entry e = emergency_stack_->RemoveLast();
      *target = e.obj_;
      *size = e.size_;
      *was_marked_black = e.was_marked_black_;
      return;
    }
    // Pre-decrement: front_ points one past the entry to be read.
    struct Entry* entry = reinterpret_cast<struct Entry*>(--front_);
    *target = entry->obj_;
    *size = entry->size_;
    *was_marked_black = entry->was_marked_black_;
    // Assert no underflow.
    SemiSpace::AssertValidRange(reinterpret_cast<Address>(rear_),
                                reinterpret_cast<Address>(front_));
  }
 private:
  // One queue record: the promoted object, its size (31 bits), and whether
  // the incremental marker had already marked it black (1 bit); the
  // bit-fields pack both into a single 32-bit word next to the pointer.
  struct Entry {
    Entry(HeapObject* obj, int32_t size, bool was_marked_black)
        : obj_(obj), size_(size), was_marked_black_(was_marked_black) {}
    HeapObject* obj_;
    int32_t size_ : 31;
    bool was_marked_black_ : 1;
  };
  // Moves the head-page entries onto the emergency stack; called by
  // SetNewLimit when to-space allocation would otherwise overwrite them.
  void RelocateQueueHead();
  // The front of the queue is higher in the memory page chain than the rear.
  struct Entry* front_;
  struct Entry* rear_;
  struct Entry* limit_;
  // Overflow storage; NULL until RelocateQueueHead first needs it.
  List<Entry>* emergency_stack_;
  Heap* heap_;
  DISALLOW_COPY_AND_ASSIGN(PromotionQueue);
};
// Policy for the elements backing store of a newly allocated array:
// either leave the storage uninitialized or pre-fill it with the hole value.
enum ArrayStorageAllocationMode {
  DONT_INITIALIZE_ARRAY_ELEMENTS,
  INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
};
enum class ClearRecordedSlots { kYes, kNo };
class Heap {
public:
// Declare all the root indices. This defines the root list order.
enum RootListIndex {
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
#define STRING_INDEX_DECLARATION(name, str) k##name##RootIndex,
INTERNALIZED_STRING_LIST(STRING_INDEX_DECLARATION)
#undef STRING_DECLARATION
#define SYMBOL_INDEX_DECLARATION(name) k##name##RootIndex,
PRIVATE_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION
#define SYMBOL_INDEX_DECLARATION(name, description) k##name##RootIndex,
PUBLIC_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
WELL_KNOWN_SYMBOL_LIST(SYMBOL_INDEX_DECLARATION)
#undef SYMBOL_INDEX_DECLARATION
// Utility type maps
#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex,
STRUCT_LIST(DECLARE_STRUCT_MAP)
#undef DECLARE_STRUCT_MAP
kStringTableRootIndex,
#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex,
SMI_ROOT_LIST(ROOT_INDEX_DECLARATION)
#undef ROOT_INDEX_DECLARATION
kRootListLength,
kStrongRootListLength = kStringTableRootIndex,
kSmiRootsStart = kStringTableRootIndex + 1
};
enum FindMementoMode { kForRuntime, kForGC };
enum HeapState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };
// Indicates whether live bytes adjustment is triggered
// - from within the GC code before sweeping started (SEQUENTIAL_TO_SWEEPER),
// - or from within GC (CONCURRENT_TO_SWEEPER),
// - or mutator code (CONCURRENT_TO_SWEEPER).
enum InvocationMode { SEQUENTIAL_TO_SWEEPER, CONCURRENT_TO_SWEEPER };
enum UpdateAllocationSiteMode { kGlobal, kCached };
// Taking this lock prevents the GC from entering a phase that relocates
// object references.
class RelocationLock {
public:
explicit RelocationLock(Heap* heap) : heap_(heap) {
heap_->relocation_mutex_.Lock();
}
~RelocationLock() { heap_->relocation_mutex_.Unlock(); }
private:
Heap* heap_;
};
// Support for partial snapshots. After calling this we have a linear
// space to write objects in each space.
struct Chunk {
uint32_t size;
Address start;
Address end;
};
typedef List<Chunk> Reservation;
static const intptr_t kMinimumOldGenerationAllocationLimit =
8 * (Page::kPageSize > MB ? Page::kPageSize : MB);
static const int kInitalOldGenerationLimitFactor = 2;
#if V8_OS_ANDROID
// Don't apply pointer multiplier on Android since it has no swap space and
  // should instead adapt its heap size based on available physical memory.
static const int kPointerMultiplier = 1;
#else
static const int kPointerMultiplier = i::kPointerSize / 4;
#endif
// The new space size has to be a power of 2. Sizes are in MB.
static const int kMaxSemiSpaceSizeLowMemoryDevice = 1 * kPointerMultiplier;
static const int kMaxSemiSpaceSizeMediumMemoryDevice = 4 * kPointerMultiplier;
static const int kMaxSemiSpaceSizeHighMemoryDevice = 8 * kPointerMultiplier;
static const int kMaxSemiSpaceSizeHugeMemoryDevice = 8 * kPointerMultiplier;
// The old space size has to be a multiple of Page::kPageSize.
// Sizes are in MB.
static const int kMaxOldSpaceSizeLowMemoryDevice = 128 * kPointerMultiplier;
static const int kMaxOldSpaceSizeMediumMemoryDevice =
256 * kPointerMultiplier;
static const int kMaxOldSpaceSizeHighMemoryDevice = 512 * kPointerMultiplier;
static const int kMaxOldSpaceSizeHugeMemoryDevice = 700 * kPointerMultiplier;
// The executable size has to be a multiple of Page::kPageSize.
// Sizes are in MB.
static const int kMaxExecutableSizeLowMemoryDevice = 96 * kPointerMultiplier;
static const int kMaxExecutableSizeMediumMemoryDevice =
192 * kPointerMultiplier;
static const int kMaxExecutableSizeHighMemoryDevice =
256 * kPointerMultiplier;
static const int kMaxExecutableSizeHugeMemoryDevice =
256 * kPointerMultiplier;
static const int kTraceRingBufferSize = 512;
static const int kStacktraceBufferSize = 512;
static const double kMinHeapGrowingFactor;
static const double kMaxHeapGrowingFactor;
static const double kMaxHeapGrowingFactorMemoryConstrained;
static const double kMaxHeapGrowingFactorIdle;
static const double kTargetMutatorUtilization;
static const int kNoGCFlags = 0;
static const int kReduceMemoryFootprintMask = 1;
static const int kAbortIncrementalMarkingMask = 2;
static const int kFinalizeIncrementalMarkingMask = 4;
// Making the heap iterable requires us to abort incremental marking.
static const int kMakeHeapIterableMask = kAbortIncrementalMarkingMask;
// The roots that have an index less than this are always in old space.
static const int kOldSpaceRoots = 0x20;
// The minimum size of a HeapObject on the heap.
static const int kMinObjectSizeInWords = 2;
STATIC_ASSERT(kUndefinedValueRootIndex ==
Internals::kUndefinedValueRootIndex);
STATIC_ASSERT(kTheHoleValueRootIndex == Internals::kTheHoleValueRootIndex);
STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
STATIC_ASSERT(kTrueValueRootIndex == Internals::kTrueValueRootIndex);
STATIC_ASSERT(kFalseValueRootIndex == Internals::kFalseValueRootIndex);
STATIC_ASSERT(kempty_stringRootIndex == Internals::kEmptyStringRootIndex);
// Calculates the maximum amount of filler that could be required by the
// given alignment.
static int GetMaximumFillToAlign(AllocationAlignment alignment);
// Calculates the actual amount of filler required for a given address at the
// given alignment.
static int GetFillToAlign(Address address, AllocationAlignment alignment);
template <typename T>
static inline bool IsOneByte(T t, int chars);
static void FatalProcessOutOfMemory(const char* location,
bool is_heap_oom = false);
static bool RootIsImmortalImmovable(int root_index);
// Checks whether the space is valid.
static bool IsValidAllocationSpace(AllocationSpace space);
// Generated code can embed direct references to non-writable roots if
// they are in new space.
static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
// Zapping is needed for verify heap, and always done in debug builds.
static inline bool ShouldZapGarbage() {
#ifdef DEBUG
return true;
#else
#ifdef VERIFY_HEAP
return FLAG_verify_heap;
#else
return false;
#endif
#endif
}
static double HeapGrowingFactor(double gc_speed, double mutator_speed);
// Copy block of memory from src to dst. Size of block should be aligned
// by pointer size.
static inline void CopyBlock(Address dst, Address src, int byte_size);
// Determines a static visitor id based on the given {map} that can then be
// stored on the map to facilitate fast dispatch for {StaticVisitorBase}.
static int GetStaticVisitorIdForMap(Map* map);
// Notifies the heap that is ok to start marking or other activities that
// should not happen during deserialization.
void NotifyDeserializationComplete();
intptr_t old_generation_allocation_limit() const {
return old_generation_allocation_limit_;
}
bool always_allocate() { return always_allocate_scope_count_.Value() != 0; }
Address* NewSpaceAllocationTopAddress() {
return new_space_.allocation_top_address();
}
Address* NewSpaceAllocationLimitAddress() {
return new_space_.allocation_limit_address();
}
Address* OldSpaceAllocationTopAddress() {
return old_space_->allocation_top_address();
}
Address* OldSpaceAllocationLimitAddress() {
return old_space_->allocation_limit_address();
}
  // TODO(hpayer): There is still a mismatch between capacity and actual
// committed memory size.
bool CanExpandOldGeneration(int size = 0) {
if (force_oom_) return false;
return (CommittedOldGenerationMemory() + size) < MaxOldGenerationSize();
}
// Clear the Instanceof cache (used when a prototype changes).
inline void ClearInstanceofCache();
// FreeSpace objects have a null map after deserialization. Update the map.
void RepairFreeListsAfterDeserialization();
// Move len elements within a given array from src_index index to dst_index
// index.
void MoveElements(FixedArray* array, int dst_index, int src_index, int len);
// Initialize a filler object to keep the ability to iterate over the heap
// when introducing gaps within pages. If slots could have been recorded in
// the freed area, then pass ClearRecordedSlots::kYes as the mode. Otherwise,
// pass ClearRecordedSlots::kNo.
void CreateFillerObjectAt(Address addr, int size, ClearRecordedSlots mode);
bool CanMoveObjectStart(HeapObject* object);
// Maintain consistency of live bytes during incremental marking.
void AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode);
// Trim the given array from the left. Note that this relocates the object
// start and hence is only valid if there is only a single reference to it.
FixedArrayBase* LeftTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
// Trim the given array from the right.
template<Heap::InvocationMode mode>
void RightTrimFixedArray(FixedArrayBase* obj, int elements_to_trim);
// Converts the given boolean condition to JavaScript boolean value.
inline Oddball* ToBoolean(bool condition);
// Check whether the heap is currently iterable.
bool IsHeapIterable();
// Notify the heap that a context has been disposed.
int NotifyContextDisposed(bool dependant_context);
void set_native_contexts_list(Object* object) {
native_contexts_list_ = object;
}
Object* native_contexts_list() const { return native_contexts_list_; }
void set_allocation_sites_list(Object* object) {
allocation_sites_list_ = object;
}
Object* allocation_sites_list() { return allocation_sites_list_; }
// Used in CreateAllocationSiteStub and the (de)serializer.
Object** allocation_sites_list_address() { return &allocation_sites_list_; }
void set_encountered_weak_collections(Object* weak_collection) {
encountered_weak_collections_ = weak_collection;
}
Object* encountered_weak_collections() const {
return encountered_weak_collections_;
}
void set_encountered_weak_cells(Object* weak_cell) {
encountered_weak_cells_ = weak_cell;
}
Object* encountered_weak_cells() const { return encountered_weak_cells_; }
void set_encountered_transition_arrays(Object* transition_array) {
encountered_transition_arrays_ = transition_array;
}
Object* encountered_transition_arrays() const {
return encountered_transition_arrays_;
}
// Number of mark-sweeps.
int ms_count() const { return ms_count_; }
  // Checks whether the given object is allowed to be migrated from its
// current space into the given destination space. Used for debugging.
inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
void CheckHandleCount();
// Number of "runtime allocations" done so far.
uint32_t allocations_count() { return allocations_count_; }
// Print short heap statistics.
void PrintShortHeapStatistics();
inline HeapState gc_state() { return gc_state_; }
inline bool IsInGCPostProcessing() { return gc_post_processing_depth_ > 0; }
// If an object has an AllocationMemento trailing it, return it, otherwise
// return NULL;
template <FindMementoMode mode>
inline AllocationMemento* FindAllocationMemento(HeapObject* object);
// Returns false if not able to reserve.
bool ReserveSpace(Reservation* reservations);
//
// Support for the API.
//
void CreateApiObjects();
// Implements the corresponding V8 API function.
bool IdleNotification(double deadline_in_seconds);
bool IdleNotification(int idle_time_in_ms);
void MemoryPressureNotification(MemoryPressureLevel level,
bool is_isolate_locked);
void CheckMemoryPressure();
double MonotonicallyIncreasingTimeInMs();
void RecordStats(HeapStats* stats, bool take_snapshot = false);
// Check new space expansion criteria and expand semispaces if it was hit.
void CheckNewSpaceExpansionCriteria();
inline bool HeapIsFullEnoughToStartIncrementalMarking(intptr_t limit) {
if (FLAG_stress_compaction && (gc_count_ & 1) != 0) return true;
intptr_t adjusted_allocation_limit = limit - new_space_.Capacity();
if (PromotedTotalSize() >= adjusted_allocation_limit) return true;
if (HighMemoryPressure()) return true;
return false;
}
void VisitExternalResources(v8::ExternalResourceVisitor* visitor);
// An object should be promoted if the object has survived a
// scavenge operation.
inline bool ShouldBePromoted(Address old_address, int object_size);
void ClearNormalizedMapCaches();
void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
inline bool OldGenerationAllocationLimitReached();
void QueueMemoryChunkForFree(MemoryChunk* chunk);
void FreeQueuedChunks(MemoryChunk* list_head);
void FreeQueuedChunks();
void WaitUntilUnmappingOfFreeChunksCompleted();
// Completely clear the Instanceof cache (to stop it keeping objects alive
// around a GC).
inline void CompletelyClearInstanceofCache();
inline uint32_t HashSeed();
inline int NextScriptId();
inline void SetArgumentsAdaptorDeoptPCOffset(int pc_offset);
inline void SetConstructStubDeoptPCOffset(int pc_offset);
inline void SetGetterStubDeoptPCOffset(int pc_offset);
inline void SetSetterStubDeoptPCOffset(int pc_offset);
// For post mortem debugging.
void RememberUnmappedPage(Address page, bool compacted);
// Global inline caching age: it is incremented on some GCs after context
// disposal. We use it to flush inline caches.
int global_ic_age() { return global_ic_age_; }
void AgeInlineCaches() {
global_ic_age_ = (global_ic_age_ + 1) & SharedFunctionInfo::ICAgeBits::kMax;
}
int64_t amount_of_external_allocated_memory() {
return amount_of_external_allocated_memory_;
}
void update_amount_of_external_allocated_memory(int64_t delta) {
amount_of_external_allocated_memory_ += delta;
}
void DeoptMarkedAllocationSites();
bool DeoptMaybeTenuredAllocationSites() {
return new_space_.IsAtMaximumCapacity() && maximum_size_scavenges_ == 0;
}
void AddWeakObjectToCodeDependency(Handle<HeapObject> obj,
Handle<DependentCode> dep);
DependentCode* LookupWeakObjectToCodeDependency(Handle<HeapObject> obj);
void AddRetainedMap(Handle<Map> map);
// This event is triggered after successful allocation of a new object made
// by runtime. Allocations of target space for object evacuation do not
// trigger the event. In order to track ALL allocations one must turn off
// FLAG_inline_new and FLAG_use_allocation_folding.
inline void OnAllocationEvent(HeapObject* object, int size_in_bytes);
// This event is triggered after object is moved to a new place.
inline void OnMoveEvent(HeapObject* target, HeapObject* source,
int size_in_bytes);
bool deserialization_complete() const { return deserialization_complete_; }
bool HasLowAllocationRate();
bool HasHighFragmentation();
bool HasHighFragmentation(intptr_t used, intptr_t committed);
void SetOptimizeForLatency() { optimize_for_memory_usage_ = false; }
void SetOptimizeForMemoryUsage();
bool ShouldOptimizeForMemoryUsage() {
return optimize_for_memory_usage_ || HighMemoryPressure();
}
bool HighMemoryPressure() {
return memory_pressure_level_.Value() != MemoryPressureLevel::kNone;
}
// ===========================================================================
// Initialization. ===========================================================
// ===========================================================================
// Configure heap size in MB before setup. Return false if the heap has been
// set up already.
bool ConfigureHeap(int max_semi_space_size, int max_old_space_size,
int max_executable_size, size_t code_range_size);
bool ConfigureHeapDefault();
// Prepares the heap, setting up memory areas that are needed in the isolate
// without actually creating any objects.
bool SetUp();
// Bootstraps the object heap with the core set of objects required to run.
// Returns whether it succeeded.
bool CreateHeapObjects();
// Destroys all memory allocated by the heap.
void TearDown();
// Returns whether SetUp has been called.
bool HasBeenSetUp();
// ===========================================================================
// Getters for spaces. =======================================================
// ===========================================================================
Address NewSpaceTop() { return new_space_.top(); }
NewSpace* new_space() { return &new_space_; }
OldSpace* old_space() { return old_space_; }
OldSpace* code_space() { return code_space_; }
MapSpace* map_space() { return map_space_; }
LargeObjectSpace* lo_space() { return lo_space_; }
PagedSpace* paged_space(int idx) {
switch (idx) {
case OLD_SPACE:
return old_space();
case MAP_SPACE:
return map_space();
case CODE_SPACE:
return code_space();
case NEW_SPACE:
case LO_SPACE:
UNREACHABLE();
}
return NULL;
}
Space* space(int idx) {
switch (idx) {
case NEW_SPACE:
return new_space();
case LO_SPACE:
return lo_space();
default:
return paged_space(idx);
}
}
// Returns name of the space.
const char* GetSpaceName(int idx);
// ===========================================================================
// API. ======================================================================
// ===========================================================================
void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);
void RegisterExternallyReferencedObject(Object** object);
// ===========================================================================
// Getters to other components. ==============================================
// ===========================================================================
GCTracer* tracer() { return tracer_; }
EmbedderHeapTracer* embedder_heap_tracer() { return embedder_heap_tracer_; }
PromotionQueue* promotion_queue() { return &promotion_queue_; }
inline Isolate* isolate();
MarkCompactCollector* mark_compact_collector() {
return mark_compact_collector_;
}
// ===========================================================================
// Root set access. ==========================================================
// ===========================================================================
// Heap root getters.
#define ROOT_ACCESSOR(type, name, camel_name) inline type* name();
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
// Utility type maps.
#define STRUCT_MAP_ACCESSOR(NAME, Name, name) inline Map* name##_map();
STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR
// Getters for internalized strings.
#define STRING_ACCESSOR(name, str) inline String* name();
INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR
// Getters for private and public symbols.
#define SYMBOL_ACCESSOR(name) inline Symbol* name();
PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR
#define SYMBOL_ACCESSOR(name, description) inline Symbol* name();
PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR
// Direct access to a root object by its index in the roots_ array.
Object* root(RootListIndex index) { return roots_[index]; }
// Handle pointing directly at the roots_ array slot for the given index.
Handle<Object> root_handle(RootListIndex index) {
return Handle<Object>(&roots_[index]);
}
// Generated code can embed this address to get access to the roots.
Object** roots_array_start() { return roots_; }
// Sets the stub_cache_ (only used when expanding the dictionary).
void SetRootCodeStubs(UnseededNumberDictionary* value) {
roots_[kCodeStubsRootIndex] = value;
}
// Sets the non_monomorphic_cache_ (only used when expanding the dictionary).
void SetRootNonMonomorphicCache(UnseededNumberDictionary* value) {
roots_[kNonMonomorphicCacheRootIndex] = value;
}
void SetRootMaterializedObjects(FixedArray* objects) {
roots_[kMaterializedObjectsRootIndex] = objects;
}
void SetRootScriptList(Object* value) {
roots_[kScriptListRootIndex] = value;
}
void SetRootStringTable(StringTable* value) {
roots_[kStringTableRootIndex] = value;
}
void SetRootNoScriptSharedFunctionInfos(Object* value) {
roots_[kNoScriptSharedFunctionInfosRootIndex] = value;
}
// Set the stack limit in the roots_ array. Some architectures generate
// code that looks here, because it is faster than loading from the static
// jslimit_/real_jslimit_ variable in the StackGuard.
void SetStackLimits();
// The stack limit is thread-dependent. To be able to reproduce the same
// snapshot blob, we need to reset it before serializing.
void ClearStackLimits();
// Generated code can treat direct references to this root as constant.
bool RootCanBeTreatedAsConstant(RootListIndex root_index);
// Map/root-index lookups for fixed typed arrays of a given element type.
Map* MapForFixedTypedArray(ExternalArrayType array_type);
RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);
RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);
// Registers/unregisters an external range of strong roots to be iterated.
void RegisterStrongRoots(Object** start, Object** end);
void UnregisterStrongRoots(Object** start);
// ===========================================================================
// Inline allocation. ========================================================
// ===========================================================================
// Indicates whether inline bump-pointer allocation has been disabled.
bool inline_allocation_disabled() { return inline_allocation_disabled_; }
// Switch whether inline bump-pointer allocation should be used.
void EnableInlineAllocation();
void DisableInlineAllocation();
// ===========================================================================
// Methods triggering GCs. ===================================================
// ===========================================================================
// Performs garbage collection operation.
// Returns whether there is a chance that another major GC could
// collect more garbage.
inline bool CollectGarbage(
AllocationSpace space, const char* gc_reason = NULL,
const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
// Performs a full garbage collection. If (flags & kMakeHeapIterableMask) is
// non-zero, then the slower precise sweeper is used, which leaves the heap
// in a state where we can iterate over the heap visiting all objects.
void CollectAllGarbage(
int flags = kFinalizeIncrementalMarkingMask, const char* gc_reason = NULL,
const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
// Last hope GC, should try to squeeze as much as possible.
void CollectAllAvailableGarbage(const char* gc_reason = NULL);
// Reports an external memory pressure event; either performs a major GC or
// completes incremental marking in order to free external resources.
void ReportExternalMemoryPressure(const char* gc_reason = NULL);
// Invoked when GC was requested via the stack guard.
void HandleGCRequest();
// ===========================================================================
// Iterators. ================================================================
// ===========================================================================
// Iterates over all roots in the heap.
void IterateRoots(ObjectVisitor* v, VisitMode mode);
// Iterates over all strong roots in the heap.
void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
// Iterates over entries in the smi roots list. Only interesting to the
// serializer/deserializer, since GC does not care about smis.
void IterateSmiRoots(ObjectVisitor* v);
// Iterates over all the other roots in the heap.
void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
// Iterate pointers of promoted objects.
void IteratePromotedObject(HeapObject* target, int size,
bool was_marked_black,
ObjectSlotCallback callback);
void IteratePromotedObjectPointers(HeapObject* object, Address start,
Address end, bool record_slots,
ObjectSlotCallback callback);
// ===========================================================================
// Store buffer API. =========================================================
// ===========================================================================
// Write barrier support for object[offset] = o;
inline void RecordWrite(Object* object, int offset, Object* o);
Address* store_buffer_top_address() { return store_buffer()->top_address(); }
// Removes previously recorded old-to-new slots, e.g. after trimming.
void ClearRecordedSlot(HeapObject* object, Object** slot);
void ClearRecordedSlotRange(Address start, Address end);
// ===========================================================================
// Incremental marking API. ==================================================
// ===========================================================================
// Start incremental marking and ensure that idle time handler can perform
// incremental steps.
void StartIdleIncrementalMarking();
// Starts incremental marking assuming incremental marking is currently
// stopped.
void StartIncrementalMarking(int gc_flags = kNoGCFlags,
const GCCallbackFlags gc_callback_flags =
GCCallbackFlags::kNoGCCallbackFlags,
const char* reason = nullptr);
void FinalizeIncrementalMarkingIfComplete(const char* comment);
bool TryFinalizeIdleIncrementalMarking(double idle_time_in_ms);
void RegisterReservationsForBlackAllocation(Reservation* reservations);
IncrementalMarking* incremental_marking() { return incremental_marking_; }
// ===========================================================================
// External string table API. ================================================
// ===========================================================================
// Registers an external string.
inline void RegisterExternalString(String* string);
// Finalizes an external string by deleting the associated external
// data and clearing the resource pointer.
inline void FinalizeExternalString(String* string);
// ===========================================================================
// Methods checking/returning the space of a given object/address. ===========
// ===========================================================================
// Returns whether the object resides in new space.
inline bool InNewSpace(Object* object);
inline bool InFromSpace(Object* object);
inline bool InToSpace(Object* object);
// Returns whether the object resides in old space.
inline bool InOldSpace(Object* object);
// Checks whether an address/object in the heap (including auxiliary
// area and unused area).
bool Contains(HeapObject* value);
// Checks whether an address/object in a space.
// Currently used by tests, serialization and heap verification only.
bool InSpace(HeapObject* value, AllocationSpace space);
// Slow methods that can be used for verification as they can also be used
// with off-heap Addresses.
bool ContainsSlow(Address addr);
bool InSpaceSlow(Address addr, AllocationSpace space);
inline bool InNewSpaceSlow(Address address);
inline bool InOldSpaceSlow(Address address);
// ===========================================================================
// Object statistics tracking. ===============================================
// ===========================================================================
// Returns the number of buckets used by object statistics tracking during a
// major GC. Note that the following methods fail gracefully when the bounds
// are exceeded though.
size_t NumberOfTrackedHeapObjectTypes();
// Returns object statistics about count and size at the last major GC.
// Objects are being grouped into buckets that roughly resemble existing
// instance types.
size_t ObjectCountAtLastGC(size_t index);
size_t ObjectSizeAtLastGC(size_t index);
// Retrieves names of buckets used by object statistics tracking.
bool GetObjectTypeName(size_t index, const char** object_type,
const char** object_sub_type);
// ===========================================================================
// GC statistics. ============================================================
// ===========================================================================
// Returns the maximum amount of memory reserved for the heap.
// Two semi-spaces plus the old generation give the upper bound.
intptr_t MaxReserved() {
return 2 * max_semi_space_size_ + max_old_generation_size_;
}
int MaxSemiSpaceSize() { return max_semi_space_size_; }
int InitialSemiSpaceSize() { return initial_semispace_size_; }
intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
intptr_t MaxExecutableSize() { return max_executable_size_; }
// Returns the capacity of the heap in bytes w/o growing. Heap grows when
// more spaces are needed until it reaches the limit.
intptr_t Capacity();
// Returns the amount of memory currently committed for the heap.
intptr_t CommittedMemory();
// Returns the amount of memory currently committed for the old space.
intptr_t CommittedOldGenerationMemory();
// Returns the amount of executable memory currently committed for the heap.
intptr_t CommittedMemoryExecutable();
// Returns the amount of physical memory currently committed for the heap.
size_t CommittedPhysicalMemory();
// Returns the maximum amount of memory ever committed for the heap.
intptr_t MaximumCommittedMemory() { return maximum_committed_; }
// Updates the maximum committed memory for the heap. Should be called
// whenever a space grows.
void UpdateMaximumCommitted();
// Returns the available bytes in space w/o growing.
// Heap doesn't guarantee that it can allocate an object that requires
// all available bytes. Check MaxHeapObjectSize() instead.
intptr_t Available();
// Returns of size of all objects residing in the heap.
intptr_t SizeOfObjects();
void UpdateSurvivalStatistics(int start_new_space_size);
// Adds to the running total of bytes promoted to old space this cycle.
inline void IncrementPromotedObjectsSize(intptr_t object_size) {
DCHECK_GE(object_size, 0);
promoted_objects_size_ += object_size;
}
inline intptr_t promoted_objects_size() { return promoted_objects_size_; }
// Adds to the running total of bytes copied within the semi-spaces.
inline void IncrementSemiSpaceCopiedObjectSize(intptr_t object_size) {
DCHECK_GE(object_size, 0);
semi_space_copied_object_size_ += object_size;
}
inline intptr_t semi_space_copied_object_size() {
return semi_space_copied_object_size_;
}
// Bytes that survived the last scavenge: promoted plus semi-space copied.
inline intptr_t SurvivedNewSpaceObjectSize() {
return promoted_objects_size_ + semi_space_copied_object_size_;
}
// Global-handle node counters updated during scavenges.
inline void IncrementNodesDiedInNewSpace() { nodes_died_in_new_space_++; }
inline void IncrementNodesCopiedInNewSpace() { nodes_copied_in_new_space_++; }
inline void IncrementNodesPromoted() { nodes_promoted_++; }
// Records survivors of the last scavenge; the cumulative counter feeds
// the new-space expansion heuristic.
inline void IncrementYoungSurvivorsCounter(intptr_t survived) {
DCHECK_GE(survived, 0);
survived_last_scavenge_ = survived;
survived_since_last_expansion_ += survived;
}
// Total promoted size: objects in old generation plus external memory,
// clamped into the representable non-negative intptr_t range.
inline intptr_t PromotedTotalSize() {
  const int64_t sum =
      PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
  if (sum > std::numeric_limits<intptr_t>::max()) {
    // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations.
    return std::numeric_limits<intptr_t>::max();
  }
  return sum < 0 ? 0 : static_cast<intptr_t>(sum);
}
// Snapshots the cumulative new-space allocation counter at GC time.
void UpdateNewSpaceAllocationCounter() {
new_space_allocation_counter_ = NewSpaceAllocationCounter();
}
// Total bytes ever allocated in new space, including allocations since
// the last GC.
size_t NewSpaceAllocationCounter() {
return new_space_allocation_counter_ + new_space()->AllocatedSinceLastGC();
}
// This should be used only for testing.
void set_new_space_allocation_counter(size_t new_value) {
new_space_allocation_counter_ = new_value;
}
// Snapshots the cumulative old-generation allocation counter at GC time.
void UpdateOldGenerationAllocationCounter() {
old_generation_allocation_counter_ = OldGenerationAllocationCounter();
}
size_t OldGenerationAllocationCounter() {
return old_generation_allocation_counter_ + PromotedSinceLastGC();
}
// This should be used only for testing.
void set_old_generation_allocation_counter(size_t new_value) {
old_generation_allocation_counter_ = new_value;
}
// Bytes promoted since the last GC, derived from the old-generation size
// delta.
size_t PromotedSinceLastGC() {
return PromotedSpaceSizeOfObjects() - old_generation_size_at_last_gc_;
}
int gc_count() const { return gc_count_; }
// Returns the size of objects residing in non new spaces.
intptr_t PromotedSpaceSizeOfObjects();
double total_regexp_code_generated() { return total_regexp_code_generated_; }
void IncreaseTotalRegexpCodeGenerated(int size) {
total_regexp_code_generated_ += size;
}
// Attributes generated code bytes to either the Crankshaft or the
// full-codegen counter.
void IncrementCodeGeneratedBytes(bool is_crankshafted, int size) {
if (is_crankshafted) {
crankshaft_codegen_bytes_generated_ += size;
} else {
full_codegen_bytes_generated_ += size;
}
}
// ===========================================================================
// Prologue/epilogue callback methods.========================================
// ===========================================================================
// Registers/unregisters embedder callbacks invoked around each GC of a
// matching type.
void AddGCPrologueCallback(v8::Isolate::GCCallback callback,
GCType gc_type_filter, bool pass_isolate = true);
void RemoveGCPrologueCallback(v8::Isolate::GCCallback callback);
void AddGCEpilogueCallback(v8::Isolate::GCCallback callback,
GCType gc_type_filter, bool pass_isolate = true);
void RemoveGCEpilogueCallback(v8::Isolate::GCCallback callback);
void CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags);
void CallGCEpilogueCallbacks(GCType gc_type, GCCallbackFlags flags);
// ===========================================================================
// Allocation methods. =======================================================
// ===========================================================================
// Creates a filler object and returns a heap object immediately after it.
MUST_USE_RESULT HeapObject* PrecedeWithFiller(HeapObject* object,
int filler_size);
// Creates a filler object if needed for alignment and returns a heap object
// immediately after it. If any space is left after the returned object,
// another filler object is created so the over allocated memory is iterable.
MUST_USE_RESULT HeapObject* AlignWithFiller(HeapObject* object,
int object_size,
int allocation_size,
AllocationAlignment alignment);
// ===========================================================================
// ArrayBuffer tracking. =====================================================
// ===========================================================================
void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
void UnregisterArrayBuffer(JSArrayBuffer* buffer);
inline ArrayBufferTracker* array_buffer_tracker() {
return array_buffer_tracker_;
}
// ===========================================================================
// Allocation site tracking. =================================================
// ===========================================================================
// Updates the AllocationSite of a given {object}. If the global pretenuring
// storage is passed as {pretenuring_feedback} the memento found count on
// the corresponding allocation site is immediately updated and an entry
// in the hash map is created. Otherwise the entry (including the count
// value) is cached on the local pretenuring feedback.
template <UpdateAllocationSiteMode mode>
inline void UpdateAllocationSite(HeapObject* object,
HashMap* pretenuring_feedback);
// Removes an entry from the global pretenuring storage.
inline void RemoveAllocationSitePretenuringFeedback(AllocationSite* site);
// Merges local pretenuring feedback into the global one. Note that this
// method needs to be called after evacuation, as allocation sites may be
// evacuated and this method resolves forward pointers accordingly.
void MergeAllocationSitePretenuringFeedback(
const HashMap& local_pretenuring_feedback);
// =============================================================================
#ifdef VERIFY_HEAP
// Verify the heap is in its normal state before or after a GC.
void Verify();
#endif
#ifdef DEBUG
void set_allocation_timeout(int timeout) { allocation_timeout_ = timeout; }
// Debug-only tracing/printing helpers for heap inspection.
void TracePathToObjectFrom(Object* target, Object* root);
void TracePathToObject(Object* target);
void TracePathToGlobal();
void Print();
void PrintHandles();
// Report heap statistics.
void ReportHeapStatistics(const char* title);
void ReportCodeStatistics(const char* title);
#endif
#ifdef ENABLE_SLOW_DCHECKS
int CountHandlesForObject(Object* object);
#endif
private:
class PretenuringScope;
class UnmapFreeMemoryTask;
// External strings table is a place where all external strings are
// registered. We need to keep track of such strings to properly
// finalize them.
class ExternalStringTable {
public:
// Registers an external string.
inline void AddString(String* string);
// Visits all registered strings (new-space first, then old-space).
inline void Iterate(ObjectVisitor* v);
// Restores internal invariant and gets rid of collected strings.
// Must be called after each Iterate() that modified the strings.
void CleanUp();
// Destroys all allocated memory.
void TearDown();
private:
// Only Heap may construct an instance (see friend declaration below).
explicit ExternalStringTable(Heap* heap) : heap_(heap) {}
inline void Verify();
// Moves a string into the old-space list.
inline void AddOldString(String* string);
// Notifies the table that only a prefix of the new list is valid.
inline void ShrinkNewStrings(int position);
// To speed up scavenge collections new space string are kept
// separate from old space strings.
List<Object*> new_space_strings_;
List<Object*> old_space_strings_;
Heap* heap_;
friend class Heap;
DISALLOW_COPY_AND_ASSIGN(ExternalStringTable);
};
struct StrongRootsList;
// Table entry describing a string type root: instance type, instance size,
// and its slot in the roots_ array.
struct StringTypeTable {
InstanceType type;
int size;
RootListIndex index;
};
// Table entry mapping a constant string's contents to its root slot.
struct ConstantStringTable {
const char* contents;
RootListIndex index;
};
// Table entry describing a struct map root.
struct StructTable {
InstanceType type;
int size;
RootListIndex index;
};
// Couples a registered GC callback with the GC types it is interested in
// and whether the isolate is passed when invoking it. Two pairs compare
// equal when they wrap the same callback function, regardless of filter.
struct GCCallbackPair {
  GCCallbackPair(v8::Isolate::GCCallback callback, GCType gc_type,
                 bool pass_isolate)
      : callback(callback), gc_type(gc_type), pass_isolate(pass_isolate) {}

  bool operator==(const GCCallbackPair& other) const {
    return callback == other.callback;
  }

  v8::Isolate::GCCallback callback;
  GCType gc_type;
  bool pass_isolate;
};
// Callback type used to update external string table entries during GC.
typedef String* (*ExternalStringTableUpdaterCallback)(Heap* heap,
Object** pointer);
// Initial sizes of internal caches and tables.
static const int kInitialStringTableSize = 2048;
static const int kInitialEvalCacheSize = 64;
static const int kInitialNumberStringCacheSize = 256;
static const int kRememberedUnmappedPages = 128;
// Static tables describing the roots created at bootstrap.
static const StringTypeTable string_type_table[];
static const ConstantStringTable constant_string_table[];
static const StructTable struct_table[];
// Thresholds (in percent / counts) for survival-rate based heuristics.
static const int kYoungSurvivalRateHighThreshold = 90;
static const int kYoungSurvivalRateAllowedDeviation = 15;
static const int kOldSurvivalRateLowThreshold = 10;
static const int kMaxMarkCompactsInIdleRound = 7;
static const int kIdleScavengeThreshold = 5;
static const int kInitialFeedbackCapacity = 256;
// Private constructor; heaps are created only through their Isolate.
Heap();
// ExternalStringTableUpdaterCallback used while scavenging.
static String* UpdateNewSpaceReferenceInExternalStringTableEntry(
Heap* heap, Object** pointer);
static AllocationSpace SelectSpace(PretenureFlag pretenure) {
return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
}
// Root setters, kept private so the roots_ array stays consistent.
#define ROOT_ACCESSOR(type, name, camel_name) \
inline void set_##name(type* value);
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR
StoreBuffer* store_buffer() { return &store_buffer_; }
// Installs the flags for the current GC; finalize and abort of incremental
// marking are mutually exclusive by construction.
void set_current_gc_flags(int flags) {
current_gc_flags_ = flags;
DCHECK(!ShouldFinalizeIncrementalMarking() ||
!ShouldAbortIncrementalMarking());
}
inline bool ShouldReduceMemory() const {
return current_gc_flags_ & kReduceMemoryFootprintMask;
}
inline bool ShouldAbortIncrementalMarking() const {
return current_gc_flags_ & kAbortIncrementalMarkingMask;
}
inline bool ShouldFinalizeIncrementalMarking() const {
return current_gc_flags_ & kFinalizeIncrementalMarkingMask;
}
void PreprocessStackTraces();
// Checks whether a global GC is necessary
GarbageCollector SelectGarbageCollector(AllocationSpace space,
const char** reason);
// Make sure there is a filler value behind the top of the new space
// so that the GC does not confuse some uninitialized/stale memory
// with the allocation memento of the object at the top
void EnsureFillerObjectAtTop();
// Ensure that we have swept all spaces in such a way that we can iterate
// over all objects. May cause a GC.
void MakeHeapIterable();
// Performs garbage collection operation.
// Returns whether there is a chance that another major GC could
// collect more garbage.
bool CollectGarbage(
GarbageCollector collector, const char* gc_reason,
const char* collector_reason,
const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
// Performs garbage collection
// Returns whether there is a chance another major GC could
// collect more garbage.
bool PerformGarbageCollection(
GarbageCollector collector,
const GCCallbackFlags gc_callback_flags = kNoGCCallbackFlags);
inline void UpdateOldSpaceLimits();
// Initializes a JSObject based on its map.
void InitializeJSObjectFromMap(JSObject* obj, FixedArray* properties,
Map* map);
// Initializes JSObject body starting at given offset.
void InitializeJSObjectBody(JSObject* obj, Map* map, int start_offset);
void InitializeAllocationMemento(AllocationMemento* memento,
AllocationSite* allocation_site);
// Bootstrap helpers that build the initial maps and objects.
bool CreateInitialMaps();
void CreateInitialObjects();
// These five Create*EntryStub functions are here and forced to not be inlined
// because of a gcc-4.4 bug that assigns wrong vtable entries.
NO_INLINE(void CreateJSEntryStub());
NO_INLINE(void CreateJSConstructEntryStub());
void CreateFixedStubs();
HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);
// Commits from space if it is uncommitted.
void EnsureFromSpaceIsCommitted();
// Uncommit unused semi space.
bool UncommitFromSpace() { return new_space_.UncommitFromSpace(); }
// Fill in bogus values in from space
void ZapFromSpace();
// Deopts all code that contains allocation instruction which are tenured or
// not tenured. Moreover it clears the pretenuring allocation site statistics.
void ResetAllAllocationSitesDependentCode(PretenureFlag flag);
// Evaluates local pretenuring for the old space and calls
// ResetAllTenuredAllocationSitesDependentCode if too many objects died in
// the old space.
void EvaluateOldSpaceLocalPretenuring(uint64_t size_of_objects_before_gc);
// Record statistics before and after garbage collection.
void ReportStatisticsBeforeGC();
void ReportStatisticsAfterGC();
// Creates and installs the full-sized number string cache.
int FullSizeNumberStringCacheLength();
// Flush the number to string cache.
void FlushNumberStringCache();
// TODO(hpayer): Allocation site pretenuring may make this method obsolete.
// Re-visit incremental marking heuristics.
bool IsHighSurvivalRate() { return high_survival_rate_period_length_ > 0; }
void ConfigureInitialOldGenerationSize();
// Allocation-rate and mutator-utilization heuristics used to size spaces.
bool HasLowYoungGenerationAllocationRate();
bool HasLowOldGenerationAllocationRate();
double YoungGenerationMutatorUtilization();
double OldGenerationMutatorUtilization();
void ReduceNewSpaceSize();
bool TryFinalizeIdleIncrementalMarking(
double idle_time_in_ms, size_t size_of_objects,
size_t mark_compact_speed_in_bytes_per_ms);
GCIdleTimeHeapState ComputeHeapState();
bool PerformIdleTimeAction(GCIdleTimeAction action,
GCIdleTimeHeapState heap_state,
double deadline_in_ms);
void IdleNotificationEpilogue(GCIdleTimeAction action,
GCIdleTimeHeapState heap_state, double start_ms,
double deadline_in_ms);
inline void UpdateAllocationsHash(HeapObject* object);
inline void UpdateAllocationsHash(uint32_t value);
// NOTE(review): identifier is misspelled ("Alloctions") but renaming it
// would change the class interface; left as-is.
void PrintAlloctionsHash();
void AddToRingBuffer(const char* string);
void GetFromRingBuffer(char* buffer);
void CompactRetainedMaps(ArrayList* retained_maps);
void CollectGarbageOnMemoryPressure(const char* source);
// Attempt to over-approximate the weak closure by marking object groups and
// implicit references from global handles, but don't atomically complete
// marking. If we continue to mark incrementally, we might have marked
// objects that die later.
void FinalizeIncrementalMarking(const char* gc_reason);
// Returns the timer used for a given GC type.
// - GCScavenger: young generation GC
// - GCCompactor: full GC
// - GCFinalizeMC: finalization of incremental full GC
// - GCFinalizeMCReduceMemory: finalization of incremental full GC with
// memory reduction
HistogramTimer* GCTypeTimer(GarbageCollector collector);
// ===========================================================================
// Pretenuring. ==============================================================
// ===========================================================================
// Pretenuring decisions are made based on feedback collected during new space
// evacuation. Note that between feedback collection and calling this method
// object in old space must not move.
void ProcessPretenuringFeedback();
// ===========================================================================
// Actual GC. ================================================================
// ===========================================================================
// Code that should be run before and after each GC. Includes some
// reporting/verification activities when compiled with DEBUG set.
void GarbageCollectionPrologue();
void GarbageCollectionEpilogue();
// Performs a major collection in the whole heap.
void MarkCompact();
// Code to be run before and after mark-compact.
void MarkCompactPrologue();
void MarkCompactEpilogue();
// Performs a minor collection in new generation.
void Scavenge();
Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
// Rewrites external string table entries via the given updater callback.
void UpdateNewSpaceReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func);
void UpdateReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func);
// Weak-list processing after GC, driven by a WeakObjectRetainer.
void ProcessAllWeakReferences(WeakObjectRetainer* retainer);
void ProcessYoungWeakReferences(WeakObjectRetainer* retainer);
void ProcessNativeContexts(WeakObjectRetainer* retainer);
void ProcessAllocationSites(WeakObjectRetainer* retainer);
void ProcessWeakListRoots(WeakObjectRetainer* retainer);
// ===========================================================================
// GC statistics. ============================================================
// ===========================================================================
// Headroom left in the old generation before the allocation limit triggers
// a full GC; may go negative once the limit is exceeded.
inline intptr_t OldGenerationSpaceAvailable() {
return old_generation_allocation_limit_ - PromotedTotalSize();
}
// Returns maximum GC pause.
double get_max_gc_pause() { return max_gc_pause_; }
// Returns maximum size of objects alive after GC.
intptr_t get_max_alive_after_gc() { return max_alive_after_gc_; }
// Returns minimal interval between two subsequent collections.
double get_min_in_mutator() { return min_in_mutator_; }
// Update GC statistics that are tracked on the Heap.
void UpdateCumulativeGCStatistics(double duration, double spent_in_mutator,
double marking_time);
// True while at least one maximum-size scavenge is in flight.
bool MaximumSizeScavenge() { return maximum_size_scavenges_ > 0; }
// ===========================================================================
// Growing strategy. =========================================================
// ===========================================================================
// Decrease the allocation limit if the new limit based on the given
// parameters is lower than the current limit.
void DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
double gc_speed,
double mutator_speed);
// Calculates the allocation limit based on a given growing factor and a
// given old generation size.
intptr_t CalculateOldGenerationAllocationLimit(double factor,
intptr_t old_gen_size);
// Sets the allocation limit to trigger the next full garbage collection.
void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
double mutator_speed);
// ===========================================================================
// Idle notification. ========================================================
// ===========================================================================
bool RecentIdleNotificationHappened();
void ScheduleIdleScavengeIfNeeded(int bytes_allocated);
// ===========================================================================
// HeapIterator helpers. =====================================================
// ===========================================================================
// Tracks nesting of live HeapIterators so the heap knows iteration is
// in progress.
void heap_iterator_start() { heap_iterator_depth_++; }
void heap_iterator_end() { heap_iterator_depth_--; }
bool in_heap_iterator() { return heap_iterator_depth_ > 0; }
// ===========================================================================
// Allocation methods. =======================================================
// ===========================================================================
// Returns a deep copy of the JavaScript object.
// Properties and elements are copied too.
// Optionally takes an AllocationSite to be appended in an AllocationMemento.
MUST_USE_RESULT AllocationResult CopyJSObject(JSObject* source,
AllocationSite* site = NULL);
// Allocates a JS Map in the heap.
MUST_USE_RESULT AllocationResult
AllocateMap(InstanceType instance_type, int instance_size,
ElementsKind elements_kind = TERMINAL_FAST_ELEMENTS_KIND);
// Allocates and initializes a new JavaScript object based on a
// constructor.
// If allocation_site is non-null, then a memento is emitted after the object
// that points to the site.
MUST_USE_RESULT AllocationResult AllocateJSObject(
JSFunction* constructor, PretenureFlag pretenure = NOT_TENURED,
AllocationSite* allocation_site = NULL);
// Allocates and initializes a new JavaScript object based on a map.
// Passing an allocation site means that a memento will be created that
// points to the site.
MUST_USE_RESULT AllocationResult
AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure = NOT_TENURED,
AllocationSite* allocation_site = NULL);
// Allocates a HeapNumber from value.
MUST_USE_RESULT AllocationResult
AllocateHeapNumber(double value, MutableMode mode = IMMUTABLE,
PretenureFlag pretenure = NOT_TENURED);
// Allocates SIMD values from the given lane values.
#define SIMD_ALLOCATE_DECLARATION(TYPE, Type, type, lane_count, lane_type) \
AllocationResult Allocate##Type(lane_type lanes[lane_count], \
PretenureFlag pretenure = NOT_TENURED);
SIMD128_TYPES(SIMD_ALLOCATE_DECLARATION)
#undef SIMD_ALLOCATE_DECLARATION
// Allocates a byte array of the specified length
MUST_USE_RESULT AllocationResult
AllocateByteArray(int length, PretenureFlag pretenure = NOT_TENURED);
// Allocates a bytecode array with given contents.
MUST_USE_RESULT AllocationResult
AllocateBytecodeArray(int length, const byte* raw_bytecodes, int frame_size,
int parameter_count, FixedArray* constant_pool);
// Copy the code and scope info part of the code object, but insert
// the provided data as the relocation information.
MUST_USE_RESULT AllocationResult CopyCode(Code* code,
Vector<byte> reloc_info);
MUST_USE_RESULT AllocationResult CopyCode(Code* code);
MUST_USE_RESULT AllocationResult
CopyBytecodeArray(BytecodeArray* bytecode_array);
// Allocates a fixed array initialized with undefined values
MUST_USE_RESULT AllocationResult
AllocateFixedArray(int length, PretenureFlag pretenure = NOT_TENURED);
// Allocate an uninitialized object. The memory is non-executable if the
// hardware and OS allow. This is the single choke-point for allocations
// performed by the runtime and should not be bypassed (to extend this to
// inlined allocations, use the Heap::DisableInlineAllocation() support).
MUST_USE_RESULT inline AllocationResult AllocateRaw(
    int size_in_bytes, AllocationSpace space,
    AllocationAlignment alignment = kWordAligned);
// Allocates a heap object based on the map.
MUST_USE_RESULT AllocationResult
Allocate(Map* map, AllocationSpace space,
AllocationSite* allocation_site = NULL);
// Allocates a partial map for bootstrapping.
MUST_USE_RESULT AllocationResult
AllocatePartialMap(InstanceType instance_type, int instance_size);
// Allocate a block of memory in the given space (filled with a filler).
// Used as a fall-back for generated code when the space is full.
MUST_USE_RESULT AllocationResult
AllocateFillerObject(int size, bool double_align, AllocationSpace space);
// Allocate an uninitialized fixed array.
MUST_USE_RESULT AllocationResult
AllocateRawFixedArray(int length, PretenureFlag pretenure);
// Allocate an uninitialized fixed double array.
MUST_USE_RESULT AllocationResult
AllocateRawFixedDoubleArray(int length, PretenureFlag pretenure);
// Allocate an initialized fixed array with the given filler value.
MUST_USE_RESULT AllocationResult
AllocateFixedArrayWithFiller(int length, PretenureFlag pretenure,
Object* filler);
// Allocate and partially initializes a String. There are two String
// encodings: one-byte and two-byte. These functions allocate a string of
// the given length and set its map and length fields. The characters of
// the string are uninitialized.
MUST_USE_RESULT AllocationResult
AllocateRawOneByteString(int length, PretenureFlag pretenure);
MUST_USE_RESULT AllocationResult
AllocateRawTwoByteString(int length, PretenureFlag pretenure);
// Allocates an internalized string in old space based on the character
// stream.
MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringFromUtf8(
Vector<const char> str, int chars, uint32_t hash_field);
MUST_USE_RESULT inline AllocationResult AllocateOneByteInternalizedString(
Vector<const uint8_t> str, uint32_t hash_field);
MUST_USE_RESULT inline AllocationResult AllocateTwoByteInternalizedString(
Vector<const uc16> str, uint32_t hash_field);
template <bool is_one_byte, typename T>
MUST_USE_RESULT AllocationResult
AllocateInternalizedStringImpl(T t, int chars, uint32_t hash_field);
template <typename T>
MUST_USE_RESULT inline AllocationResult AllocateInternalizedStringImpl(
T t, int chars, uint32_t hash_field);
// Allocates an uninitialized fixed array. It must be filled by the caller.
MUST_USE_RESULT AllocationResult AllocateUninitializedFixedArray(int length);
// Make a copy of src and return it.
MUST_USE_RESULT inline AllocationResult CopyFixedArray(FixedArray* src);
// Make a copy of src, also grow the copy, and return the copy.
MUST_USE_RESULT AllocationResult
CopyFixedArrayAndGrow(FixedArray* src, int grow_by, PretenureFlag pretenure);
// Make a copy of src, also grow the copy, and return the copy.
MUST_USE_RESULT AllocationResult CopyFixedArrayUpTo(FixedArray* src,
int new_len,
PretenureFlag pretenure);
// Make a copy of src, set the map, and return the copy.
MUST_USE_RESULT AllocationResult
CopyFixedArrayWithMap(FixedArray* src, Map* map);
// Make a copy of src and return it.
MUST_USE_RESULT inline AllocationResult CopyFixedDoubleArray(
FixedDoubleArray* src);
// Computes a single character string where the character has code.
// A cache is used for one-byte (Latin1) codes.
MUST_USE_RESULT AllocationResult
LookupSingleCharacterStringFromCode(uint16_t code);
// Allocate a symbol in old space.
MUST_USE_RESULT AllocationResult AllocateSymbol();
// Allocates an external array of the specified length and type.
MUST_USE_RESULT AllocationResult AllocateFixedTypedArrayWithExternalPointer(
int length, ExternalArrayType array_type, void* external_pointer,
PretenureFlag pretenure);
// Allocates a fixed typed array of the specified length and type.
MUST_USE_RESULT AllocationResult
AllocateFixedTypedArray(int length, ExternalArrayType array_type,
bool initialize, PretenureFlag pretenure);
// Make a copy of src and return it.
MUST_USE_RESULT AllocationResult CopyAndTenureFixedCOWArray(FixedArray* src);
// Make a copy of src, set the map, and return the copy.
MUST_USE_RESULT AllocationResult
CopyFixedDoubleArrayWithMap(FixedDoubleArray* src, Map* map);
// Allocates a fixed double array with uninitialized values. Returns
MUST_USE_RESULT AllocationResult AllocateUninitializedFixedDoubleArray(
int length, PretenureFlag pretenure = NOT_TENURED);
// Allocate empty fixed array.
MUST_USE_RESULT AllocationResult AllocateEmptyFixedArray();
// Allocate empty fixed typed array of given type.
MUST_USE_RESULT AllocationResult
AllocateEmptyFixedTypedArray(ExternalArrayType array_type);
// Allocate a tenured simple cell.
MUST_USE_RESULT AllocationResult AllocateCell(Object* value);
// Allocate a tenured JS global property cell initialized with the hole.
MUST_USE_RESULT AllocationResult AllocatePropertyCell();
MUST_USE_RESULT AllocationResult AllocateWeakCell(HeapObject* value);
MUST_USE_RESULT AllocationResult AllocateTransitionArray(int capacity);
// Allocates a new utility object in the old generation.
MUST_USE_RESULT AllocationResult AllocateStruct(InstanceType type);
// Allocates a new foreign object.
MUST_USE_RESULT AllocationResult
AllocateForeign(Address address, PretenureFlag pretenure = NOT_TENURED);
MUST_USE_RESULT AllocationResult
AllocateCode(int object_size, bool immovable);
MUST_USE_RESULT AllocationResult InternalizeStringWithKey(HashTableKey* key);
MUST_USE_RESULT AllocationResult InternalizeString(String* str);
// ===========================================================================
// Test-only hook: when set, subsequent allocations behave as if the heap
// were out of memory (see force_oom_ below).
void set_force_oom(bool value) { force_oom_ = value; }
// The amount of external memory registered through the API kept alive
// by global handles
int64_t amount_of_external_allocated_memory_;
// Caches the amount of external memory registered at the last global gc.
int64_t amount_of_external_allocated_memory_at_last_global_gc_;
// This can be calculated directly from a pointer to the heap; however, it is
// more expedient to get at the isolate directly from within Heap methods.
Isolate* isolate_;
// Strong root pointers of the heap, one slot per root-list entry.
Object* roots_[kRootListLength];
// Heap configuration: sizes and limits for the various spaces.
size_t code_range_size_;
int max_semi_space_size_;
int initial_semispace_size_;
intptr_t max_old_generation_size_;
intptr_t initial_old_generation_size_;
bool old_generation_size_configured_;
intptr_t max_executable_size_;
intptr_t maximum_committed_;
// For keeping track of how much data has survived
// scavenge since last new space expansion.
intptr_t survived_since_last_expansion_;
// ... and since the last scavenge.
intptr_t survived_last_scavenge_;
// This is not the depth of nested AlwaysAllocateScope's but rather a single
// count, as scopes can be acquired from multiple tasks (read: threads).
AtomicNumber<size_t> always_allocate_scope_count_;
// Stores the memory pressure level that set by MemoryPressureNotification
// and reset by a mark-compact garbage collection.
AtomicValue<MemoryPressureLevel> memory_pressure_level_;
// For keeping track of context disposals.
int contexts_disposed_;
// The length of the retained_maps array at the time of context disposal.
// This separates maps in the retained_maps array that were created before
// and after context disposal.
int number_of_disposed_maps_;
int global_ic_age_;
// The spaces that make up the heap.
NewSpace new_space_;
OldSpace* old_space_;
OldSpace* code_space_;
MapSpace* map_space_;
LargeObjectSpace* lo_space_;
HeapState gc_state_;
int gc_post_processing_depth_;
Address new_space_top_after_last_gc_;
// Returns the amount of external memory registered since last global gc.
int64_t PromotedExternalMemorySize();
// How many "runtime allocations" happened.
uint32_t allocations_count_;
// Running hash over allocations performed.
uint32_t raw_allocations_hash_;
// How many mark-sweep collections happened.
unsigned int ms_count_;
// How many gc happened.
unsigned int gc_count_;
// For post mortem debugging.
int remembered_unmapped_pages_index_;
Address remembered_unmapped_pages_[kRememberedUnmappedPages];
#ifdef DEBUG
// If the --gc-interval flag is set to a positive value, this
// variable holds the value indicating the number of allocations
// remain until the next failure and garbage collection.
int allocation_timeout_;
#endif // DEBUG
// Limit that triggers a global GC on the next (normally caused) GC. This
// is checked when we have already decided to do a GC to help determine
// which collector to invoke, before expanding a paged space in the old
// generation and on every allocation in large object space.
intptr_t old_generation_allocation_limit_;
// Indicates that an allocation has failed in the old generation since the
// last GC.
bool old_gen_exhausted_;
// Indicates that memory usage is more important than latency.
// TODO(ulan): Merge it with memory reducer once chromium:490559 is fixed.
bool optimize_for_memory_usage_;
// Indicates that inline bump-pointer allocation has been globally disabled
// for all spaces. This is used to disable allocations in generated code.
bool inline_allocation_disabled_;
// Weak list heads, threaded through the objects.
// List heads are initialized lazily and contain the undefined_value at start.
Object* native_contexts_list_;
Object* allocation_sites_list_;
// List of encountered weak collections (JSWeakMap and JSWeakSet) during
// marking. It is initialized during marking, destroyed after marking and
// contains Smi(0) while marking is not active.
Object* encountered_weak_collections_;
Object* encountered_weak_cells_;
Object* encountered_transition_arrays_;
// Embedder-registered callbacks run around garbage collections.
List<GCCallbackPair> gc_epilogue_callbacks_;
List<GCCallbackPair> gc_prologue_callbacks_;
// Total RegExp code ever generated
double total_regexp_code_generated_;
int deferred_counters_[v8::Isolate::kUseCounterFeatureCount];
GCTracer* tracer_;
EmbedderHeapTracer* embedder_heap_tracer_;
// Survival/promotion statistics gathered across scavenges.
int high_survival_rate_period_length_;
intptr_t promoted_objects_size_;
double promotion_ratio_;
double promotion_rate_;
intptr_t semi_space_copied_object_size_;
intptr_t previous_semi_space_copied_object_size_;
double semi_space_copied_rate_;
int nodes_died_in_new_space_;
int nodes_copied_in_new_space_;
int nodes_promoted_;
// This is the pretenuring trigger for allocation sites that are in maybe
// tenure state. When we switched to the maximum new space size we deoptimize
// the code that belongs to the allocation site and derive the lifetime
// of the allocation site.
unsigned int maximum_size_scavenges_;
// Maximum GC pause.
double max_gc_pause_;
// Total time spent in GC.
double total_gc_time_ms_;
// Maximum size of objects alive after GC.
intptr_t max_alive_after_gc_;
// Minimal interval between two subsequent collections.
double min_in_mutator_;
// Cumulative GC time spent in marking.
double marking_time_;
// Cumulative GC time spent in sweeping.
double sweeping_time_;
// Last time an idle notification happened.
double last_idle_notification_time_;
// Last time a garbage collection happened.
double last_gc_time_;
// GC machinery owned by the heap.
Scavenger* scavenge_collector_;
MarkCompactCollector* mark_compact_collector_;
StoreBuffer store_buffer_;
IncrementalMarking* incremental_marking_;
GCIdleTimeHandler* gc_idle_time_handler_;
MemoryReducer* memory_reducer_;
ObjectStats* object_stats_;
ScavengeJob* scavenge_job_;
AllocationObserver* idle_scavenge_observer_;
// These two counters are monotonically increasing and never reset.
size_t full_codegen_bytes_generated_;
size_t crankshaft_codegen_bytes_generated_;
// This counter is increased before each GC and never reset.
// To account for the bytes allocated since the last GC, use the
// NewSpaceAllocationCounter() function.
size_t new_space_allocation_counter_;
// This counter is increased before each GC and never reset. To
// account for the bytes allocated since the last GC, use the
// OldGenerationAllocationCounter() function.
size_t old_generation_allocation_counter_;
// The size of objects in old generation after the last MarkCompact GC.
size_t old_generation_size_at_last_gc_;
// If the --deopt_every_n_garbage_collections flag is set to a positive value,
// this variable holds the number of garbage collections since the last
// deoptimization triggered by garbage collection.
int gcs_since_last_deopt_;
// The feedback storage is used to store allocation sites (keys) and how often
// they have been visited (values) by finding a memento behind an object. The
// storage is only alive temporary during a GC. The invariant is that all
// pointers in this map are already fixed, i.e., they do not point to
// forwarding pointers.
HashMap* global_pretenuring_feedback_;
char trace_ring_buffer_[kTraceRingBufferSize];
// If it's not full then the data is from 0 to ring_buffer_end_. If it's
// full then the data is from ring_buffer_end_ to the end of the buffer and
// from 0 to ring_buffer_end_.
bool ring_buffer_full_;
size_t ring_buffer_end_;
// Shared state read by the scavenge collector and set by ScavengeObject.
PromotionQueue promotion_queue_;
// Flag is set when the heap has been configured. The heap can be repeatedly
// configured through the API until it is set up.
bool configured_;
// Currently set GC flags that are respected by all GC components.
int current_gc_flags_;
// Currently set GC callback flags that are used to pass information between
// the embedder and V8's GC.
GCCallbackFlags current_gc_callback_flags_;
ExternalStringTable external_string_table_;
MemoryChunk* chunks_queued_for_free_;
size_t concurrent_unmapping_tasks_active_;
base::Semaphore pending_unmapping_tasks_semaphore_;
base::Mutex relocation_mutex_;
int gc_callbacks_depth_;
bool deserialization_complete_;
StrongRootsList* strong_roots_list_;
ArrayBufferTracker* array_buffer_tracker_;
// The depth of HeapIterator nestings.
int heap_iterator_depth_;
// Used for testing purposes.
bool force_oom_;
// Classes in "heap" can be friends.
friend class AlwaysAllocateScope;
friend class GCCallbacksScope;
friend class GCTracer;
friend class HeapIterator;
friend class IdleScavengeObserver;
friend class IncrementalMarking;
friend class IteratePromotedObjectsVisitor;
friend class MarkCompactCollector;
friend class MarkCompactMarkingVisitor;
friend class NewSpace;
friend class ObjectStatsVisitor;
friend class Page;
friend class Scavenger;
friend class StoreBuffer;
// The allocator interface.
friend class Factory;
// The Isolate constructs us.
friend class Isolate;
// Used in cctest.
friend class HeapTester;
DISALLOW_COPY_AND_ASSIGN(Heap);
};
// Record of heap statistics exposed through out-pointers, one slot per
// field; the trailing "// N" comments give each slot's index. kStartMarker
// and kEndMarker bracket the record so a consumer can check its integrity.
// NOTE: the slot order is part of the layout -- do not reorder fields.
class HeapStats {
public:
static const int kStartMarker = 0xDECADE00;
static const int kEndMarker = 0xDECADE01;
int* start_marker; // 0
int* new_space_size; // 1
int* new_space_capacity; // 2
intptr_t* old_space_size; // 3
intptr_t* old_space_capacity; // 4
intptr_t* code_space_size; // 5
intptr_t* code_space_capacity; // 6
intptr_t* map_space_size; // 7
intptr_t* map_space_capacity; // 8
intptr_t* lo_space_size; // 9
int* global_handle_count; // 10
int* weak_global_handle_count; // 11
int* pending_global_handle_count; // 12
int* near_death_global_handle_count; // 13
int* free_global_handle_count; // 14
intptr_t* memory_allocator_size; // 15
intptr_t* memory_allocator_capacity; // 16
int* objects_per_type; // 17
int* size_per_type; // 18
int* os_error; // 19
char* last_few_messages; // 20
char* js_stacktrace; // 21
int* end_marker; // 22
};
// Scoped helper tied to a heap -- NOTE(review): presumably bumps/decrements
// Heap::always_allocate_scope_count_ (see the friend declaration in Heap);
// the inline constructor/destructor are defined elsewhere -- confirm there.
class AlwaysAllocateScope {
public:
explicit inline AlwaysAllocateScope(Isolate* isolate);
inline ~AlwaysAllocateScope();
private:
// Heap the scope is registered with.
Heap* heap_;
};
// Visitor class to verify interior pointers in spaces that do not contain
// or care about intergenerational references. All heap object pointers have to
// point into the heap to a location that has a map pointer at its first word.
// Caveat: Heap::Contains is an approximation because it can return true for
// objects in a heap space but above the allocation pointer.
class VerifyPointersVisitor : public ObjectVisitor {
public:
// Verifies every slot in [start, end); see the class comment above.
inline void VisitPointers(Object** start, Object** end) override;
};
// Verify that all objects are Smis.
class VerifySmisVisitor : public ObjectVisitor {
public:
// Verifies every slot in [start, end) holds a Smi.
inline void VisitPointers(Object** start, Object** end) override;
};
// Space iterator for iterating over all spaces of the heap. Returns each space
// in turn, and null when it is done.
class AllSpaces BASE_EMBEDDED {
public:
// Starts the iteration at the first space of the heap.
explicit AllSpaces(Heap* heap) : heap_(heap), counter_(FIRST_SPACE) {}
// Returns the next space, or null when all spaces have been returned.
Space* next();
private:
Heap* heap_;
// Current position in the AllocationSpace enumeration.
int counter_;
};
// Space iterator for iterating over all old spaces of the heap: Old space
// and code space. Returns each space in turn, and null when it is done.
class OldSpaces BASE_EMBEDDED {
public:
// Starts the iteration at OLD_SPACE.
explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
// Returns the next old space, or null when done.
OldSpace* next();
private:
Heap* heap_;
// Current position in the AllocationSpace enumeration.
int counter_;
};
// Space iterator for iterating over all the paged spaces of the heap: Map
// space, old space, code space and cell space. Returns
// each space in turn, and null when it is done.
class PagedSpaces BASE_EMBEDDED {
public:
// Starts the iteration at OLD_SPACE.
explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
// Returns the next paged space, or null when done.
PagedSpace* next();
private:
Heap* heap_;
// Current position in the AllocationSpace enumeration.
int counter_;
};
// Space iterator for iterating over all spaces of the heap.
// For each space an object iterator is provided. The deallocation of the
// returned object iterators is handled by the space iterator.
class SpaceIterator : public Malloced {
public:
explicit SpaceIterator(Heap* heap);
virtual ~SpaceIterator();
// Returns whether another space remains to be iterated.
bool has_next();
// Returns an object iterator for the next space (owned by this iterator).
ObjectIterator* next();
private:
// Creates the object iterator for the current space.
ObjectIterator* CreateIterator();
Heap* heap_;
int current_space_; // from enum AllocationSpace.
ObjectIterator* iterator_; // object iterator for the current space.
};
// A HeapIterator provides iteration over the whole heap. It
// aggregates the specific iterators for the different spaces as
// these can only iterate over one space only.
//
// HeapIterator ensures there is no allocation during its lifetime
// (using an embedded DisallowHeapAllocation instance).
//
// HeapIterator can skip free list nodes (that is, de-allocated heap
// objects that still remain in the heap). As implementation of free
// nodes filtering uses GC marks, it can't be used during MS/MC GC
// phases. Also, it is forbidden to interrupt iteration in this mode,
// as this will leave heap objects marked (and thus, unusable).
class HeapIterator BASE_EMBEDDED {
public:
enum HeapObjectsFiltering { kNoFiltering, kFilterUnreachable };
explicit HeapIterator(Heap* heap,
HeapObjectsFiltering filtering = kNoFiltering);
~HeapIterator();
// Returns the next heap object, or null when iteration is done.
HeapObject* next();
private:
// Runs Heap::MakeHeapIterable() in its constructor; exists only so the
// member-initialization ordering below is enforced by the language.
struct MakeHeapIterableHelper {
explicit MakeHeapIterableHelper(Heap* heap) { heap->MakeHeapIterable(); }
};
// Advances the underlying space/object iterators -- presumably without
// applying filtering_, which next() layers on top; confirm in definition.
HeapObject* NextObject();
// The following two fields need to be declared in this order. Initialization
// order guarantees that we first make the heap iterable (which may involve
// allocations) and only then lock it down by not allowing further
// allocations.
MakeHeapIterableHelper make_heap_iterable_helper_;
DisallowHeapAllocation no_heap_allocation_;
Heap* heap_;
HeapObjectsFiltering filtering_;
HeapObjectsFilter* filter_;
// Space iterator for iterating all the spaces.
SpaceIterator* space_iterator_;
// Object iterator for the space currently being iterated.
ObjectIterator* object_iterator_;
};
// Cache for mapping (map, property name) into field offset.
// Cleared at startup and prior to mark sweep collection.
class KeyedLookupCache {
public:
// Lookup field offset for (map, name). If absent, -1 is returned.
int Lookup(Handle<Map> map, Handle<Name> name);
// Update an element in the cache.
void Update(Handle<Map> map, Handle<Name> name, int field_offset);
// Clear the cache.
void Clear();
static const int kLength = 256;
static const int kCapacityMask = kLength - 1;
static const int kMapHashShift = 5;
static const int kHashMask = -4; // Zero the last two bits.
static const int kEntriesPerBucket = 4;
static const int kEntryLength = 2;
static const int kMapIndex = 0;
static const int kKeyIndex = 1;
static const int kNotFound = -1;
// kEntriesPerBucket should be a power of 2.
STATIC_ASSERT((kEntriesPerBucket & (kEntriesPerBucket - 1)) == 0);
STATIC_ASSERT(kEntriesPerBucket == -kHashMask);
private:
// Starts with an empty cache: every key cleared and every offset kNotFound.
KeyedLookupCache() {
for (int i = 0; i < kLength; ++i) {
keys_[i].map = NULL;
keys_[i].name = NULL;
field_offsets_[i] = kNotFound;
}
}
// Computes the cache index for (map, name) -- presumably the base slot of a
// kEntriesPerBucket-wide bucket (see kHashMask); confirm in the definition.
static inline int Hash(Handle<Map> map, Handle<Name> name);
// Get the address of the keys and field_offsets arrays. Used in
// generated code to perform cache lookups.
Address keys_address() { return reinterpret_cast<Address>(&keys_); }
Address field_offsets_address() {
return reinterpret_cast<Address>(&field_offsets_);
}
struct Key {
Map* map;
Name* name;
};
Key keys_[kLength];
int field_offsets_[kLength];
friend class ExternalReference;
friend class Isolate;
DISALLOW_COPY_AND_ASSIGN(KeyedLookupCache);
};
// Cache for mapping (map, property name) into descriptor index.
// The cache contains both positive and negative results.
// Descriptor index equals kNotFound means the property is absent.
// Cleared at startup and prior to any gc.
class DescriptorLookupCache {
public:
// Lookup descriptor index for (map, name).
// If absent, kAbsent is returned.
inline int Lookup(Map* source, Name* name);
// Update an element in the cache.
inline void Update(Map* source, Name* name, int result);
// Clear the cache.
void Clear();
static const int kAbsent = -2;
private:
// Starts with an empty cache: every key cleared and every result kAbsent.
DescriptorLookupCache() {
for (int i = 0; i < kLength; ++i) {
keys_[i].source = NULL;
keys_[i].name = NULL;
results_[i] = kAbsent;
}
}
// Computes the cache index for (source, name).
static inline int Hash(Object* source, Name* name);
static const int kLength = 64;
struct Key {
Map* source;
Name* name;
};
Key keys_[kLength];
int results_[kLength];
friend class Isolate;
DISALLOW_COPY_AND_ASSIGN(DescriptorLookupCache);
};
// Abstract base class for checking whether a weak object should be retained.
// Pure interface; see the comment above for the overall contract.
class WeakObjectRetainer {
public:
virtual ~WeakObjectRetainer() {}
// Return whether this object should be retained. If NULL is returned the
// object has no references. Otherwise the address of the retained object
// should be returned as in some GC situations the object has been moved.
virtual Object* RetainAs(Object* object) = 0;
};
#ifdef DEBUG
// Helper class for tracing paths to a search target Object from all roots.
// The TracePathFrom() method can be used to trace paths from a specific
// object to the search target object.
class PathTracer : public ObjectVisitor {
public:
enum WhatToFind {
FIND_ALL, // Will find all matches.
FIND_FIRST // Will stop the search after first match.
};
// Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
static const int kMarkTag = 2;
// For the WhatToFind arg, if FIND_FIRST is specified, tracing will stop
// after the first match. If FIND_ALL is specified, then tracing will be
// done for all matches.
PathTracer(Object* search_target, WhatToFind what_to_find,
VisitMode visit_mode)
: search_target_(search_target),
found_target_(false),
found_target_in_trace_(false),
what_to_find_(what_to_find),
visit_mode_(visit_mode),
object_stack_(20),
no_allocation() {}
// Visits a pointer range while tracing; see the class comment above.
void VisitPointers(Object** start, Object** end) override;
// Clears accumulated results so the tracer can be reused.
void Reset();
// Traces paths from the given root to the search target.
void TracePathFrom(Object** root);
// Whether the search target has been found so far.
bool found() const { return found_target_; }
static Object* const kAnyGlobalObject;
protected:
class MarkVisitor;
class UnmarkVisitor;
// Mark/unmark passes over the object graph (see kMarkTag above).
void MarkRecursively(Object** p, MarkVisitor* mark_visitor);
void UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor);
virtual void ProcessResults();
Object* search_target_;
bool found_target_;
bool found_target_in_trace_;
WhatToFind what_to_find_;
VisitMode visit_mode_;
List<Object*> object_stack_;
DisallowHeapAllocation no_allocation; // i.e. no gc allowed.
private:
DISALLOW_IMPLICIT_CONSTRUCTORS(PathTracer);
};
#endif // DEBUG
// -----------------------------------------------------------------------------
// Allows observation of allocations.
class AllocationObserver {
public:
// step_size is the minimum number of allocated bytes between Step() calls.
explicit AllocationObserver(intptr_t step_size)
: step_size_(step_size), bytes_to_next_step_(step_size) {
DCHECK(step_size >= kPointerSize);
}
virtual ~AllocationObserver() {}
// Called each time the observed space does an allocation step. This may be
// more frequently than the step_size we are monitoring (e.g. when there are
// multiple observers, or when page or space boundary is encountered.)
void AllocationStep(int bytes_allocated, Address soon_object, size_t size) {
bytes_to_next_step_ -= bytes_allocated;
if (bytes_to_next_step_ <= 0) {
// bytes_to_next_step_ is <= 0 here, so the first argument is the total
// number of bytes allocated since the previous Step() call.
Step(static_cast<int>(step_size_ - bytes_to_next_step_), soon_object,
size);
// The step size may change between steps (see GetNextStepSize below).
step_size_ = GetNextStepSize();
bytes_to_next_step_ = step_size_;
}
}
protected:
intptr_t step_size() const { return step_size_; }
intptr_t bytes_to_next_step() const { return bytes_to_next_step_; }
// Pure virtual method provided by the subclasses that gets called when at
// least step_size bytes have been allocated. soon_object is the address just
// allocated (but not yet initialized.) size is the size of the object as
// requested (i.e. w/o the alignment fillers). Some complexities to be aware
// of:
// 1) soon_object will be nullptr in cases where we end up observing an
// allocation that happens to be a filler space (e.g. page boundaries.)
// 2) size is the requested size at the time of allocation. Right-trimming
// may change the object size dynamically.
// 3) soon_object may actually be the first object in an allocation-folding
// group. In such a case size is the size of the group rather than the
// first object.
virtual void Step(int bytes_allocated, Address soon_object, size_t size) = 0;
// Subclasses can override this method to make step size dynamic.
virtual intptr_t GetNextStepSize() { return step_size_; }
intptr_t step_size_;
intptr_t bytes_to_next_step_;
private:
friend class LargeObjectSpace;
friend class NewSpace;
friend class PagedSpace;
DISALLOW_COPY_AND_ASSIGN(AllocationObserver);
};
} // namespace internal
} // namespace v8
#endif // V8_HEAP_HEAP_H_
| {
"content_hash": "1fab6d338a6ae4c2a43bff271f9b2c41",
"timestamp": "",
"source": "github",
"line_count": 2666,
"max_line_length": 80,
"avg_line_length": 39.0648912228057,
"alnum_prop": 0.641497114655247,
"repo_name": "zero-rp/miniblink49",
"id": "bbe1f05e4f265954190e2f3cb4e7dcd5821cadb0",
"size": "104147",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "v8_5_1/src/heap/heap.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "11324414"
},
{
"name": "Batchfile",
"bytes": "52488"
},
{
"name": "C",
"bytes": "31014938"
},
{
"name": "C++",
"bytes": "281193388"
},
{
"name": "CMake",
"bytes": "88548"
},
{
"name": "CSS",
"bytes": "20839"
},
{
"name": "DIGITAL Command Language",
"bytes": "226954"
},
{
"name": "HTML",
"bytes": "202637"
},
{
"name": "JavaScript",
"bytes": "32544926"
},
{
"name": "Lua",
"bytes": "32432"
},
{
"name": "M4",
"bytes": "125191"
},
{
"name": "Makefile",
"bytes": "1517330"
},
{
"name": "Objective-C",
"bytes": "87691"
},
{
"name": "Objective-C++",
"bytes": "35037"
},
{
"name": "PHP",
"bytes": "307541"
},
{
"name": "Perl",
"bytes": "3283676"
},
{
"name": "Prolog",
"bytes": "29177"
},
{
"name": "Python",
"bytes": "4308928"
},
{
"name": "R",
"bytes": "10248"
},
{
"name": "Scheme",
"bytes": "25457"
},
{
"name": "Shell",
"bytes": "264021"
},
{
"name": "TypeScript",
"bytes": "162421"
},
{
"name": "Vim script",
"bytes": "11362"
},
{
"name": "XS",
"bytes": "4319"
},
{
"name": "eC",
"bytes": "4383"
}
],
"symlink_target": ""
} |
#ifndef __DUMMYSTMGR_H_
#define __DUMMYSTMGR_H_
#include <string>
#include <vector>
#include "network/network_error.h"
#include "proto/messages.h"
#include "basics/basics.h"
#include "errors/errors.h"
#include "threads/threads.h"
#include "network/network.h"
namespace heron {
namespace testing {
// Test double for a stream manager client: registers itself with a TMaster,
// sends periodic heartbeats, and records the control messages it receives
// (physical plan, restore, start-processing) so tests can assert on them.
class DummyStMgr : public Client {
public:
DummyStMgr(EventLoop* eventLoop, const NetworkOptions& options, const sp_string& stmgr_id,
const sp_string& myhost, sp_int32 myport,
const std::vector<proto::system::Instance*>& instances);
~DummyStMgr();
// Returns the last physical plan received, if any.
proto::system::PhysicalPlan* GetPhysicalPlan();
// Whether a RestoreTopologyStateRequest has arrived since the last reset.
bool GotRestoreMessage() const { return got_restore_message_; }
void ResetGotRestoreMessage() { got_restore_message_ = false; }
// Whether a StartStmgrStatefulProcessing message has arrived since reset.
bool GotStartProcessingMessage() const { return got_start_message_; }
void ResetGotStartProcessingMessage() { got_start_message_ = false; }
const std::string& stmgrid() const { return my_id_; }
protected:
// Client overrides: connection established / torn down.
virtual void HandleConnect(NetworkErrorCode status);
virtual void HandleClose(NetworkErrorCode status);
private:
// Responses to our own register/heartbeat requests.
void HandleRegisterResponse(void*, proto::tmaster::StMgrRegisterResponse* response,
NetworkErrorCode);
void HandleHeartbeatResponse(void*, proto::tmaster::StMgrHeartbeatResponse* response,
NetworkErrorCode);
// Control messages pushed to us by the TMaster.
void HandleNewAssignmentMessage(proto::stmgr::NewPhysicalPlanMessage* message);
void HandleNewPhysicalPlan(const proto::system::PhysicalPlan& pplan);
void HandleRestoreTopologyStateRequest(proto::ckptmgr::RestoreTopologyStateRequest* message);
void HandleStartProcessingMessage(proto::ckptmgr::StartStmgrStatefulProcessing* message);
// Timers driving reconnect attempts and periodic heartbeats.
void OnReConnectTimer();
void OnHeartbeatTimer();
void SendRegisterRequest();
void SendHeartbeatRequest();
std::string my_id_;
std::string my_host_;
sp_int32 my_port_;
std::vector<proto::system::Instance*> instances_;
// Last physical plan received; may be null before any plan arrives.
proto::system::PhysicalPlan* pplan_;
bool got_restore_message_;
bool got_start_message_;
};
} // namespace testing
} // namespace heron
#endif
| {
"content_hash": "9df63cc4be12eba40ee61b583a7c8838",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 95,
"avg_line_length": 33.854838709677416,
"alnum_prop": 0.7298713673177704,
"repo_name": "srkukarni/heron",
"id": "0c6a25c290abedc5ad37976261ece1074fdd5813",
"size": "2693",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "heron/tmaster/tests/cpp/server/dummystmgr.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "4710"
},
{
"name": "C++",
"bytes": "1315767"
},
{
"name": "CSS",
"bytes": "109095"
},
{
"name": "HTML",
"bytes": "154389"
},
{
"name": "Java",
"bytes": "3525682"
},
{
"name": "JavaScript",
"bytes": "167652"
},
{
"name": "M4",
"bytes": "17941"
},
{
"name": "Makefile",
"bytes": "517"
},
{
"name": "Objective-C",
"bytes": "1929"
},
{
"name": "Protocol Buffer",
"bytes": "31644"
},
{
"name": "Python",
"bytes": "1382264"
},
{
"name": "Ruby",
"bytes": "1930"
},
{
"name": "Scala",
"bytes": "4640"
},
{
"name": "Shell",
"bytes": "157691"
},
{
"name": "Thrift",
"bytes": "915"
}
],
"symlink_target": ""
} |
import { expect } from 'chai';
import { iif, of } from 'rxjs';
import { expectObservable } from '../helpers/marble-testing';
describe('iif', () => {
  it('should subscribe to thenSource when the conditional returns true', () => {
    // A truthy condition selects the first (then) source.
    const source = iif(() => true, of('a'), of());
    expectObservable(source).toBe('(a|)');
  });

  it('should subscribe to elseSource when the conditional returns false', () => {
    // A falsy condition selects the second (else) source.
    const source = iif(() => false, of('a'), of('b'));
    expectObservable(source).toBe('(b|)');
  });

  it('should complete without an elseSource when the conditional returns false', () => {
    // With an empty else source, the result simply completes.
    const source = iif(() => false, of('a'), of());
    expectObservable(source).toBe('|');
  });

  it('should raise error when conditional throws', () => {
    // An exception in the condition surfaces as an error notification.
    const source = iif(((): boolean => {
      throw 'error';
    }), of('a'), of());
    expectObservable(source).toBe('#');
  });

  it('should accept resolved promise as thenSource', (done) => {
    const expected = 42;
    const source = iif(() => true, new Promise((resolve: any) => { resolve(expected); }), of());
    source.subscribe(
      value => { expect(value).to.equal(expected); },
      () => { done(new Error('should not be called')); },
      () => { done(); }
    );
  });

  it('should accept resolved promise as elseSource', (done) => {
    const expected = 42;
    const source = iif(
      () => false,
      of('a'),
      new Promise((resolve: any) => { resolve(expected); })
    );
    source.subscribe(
      value => { expect(value).to.equal(expected); },
      () => { done(new Error('should not be called')); },
      () => { done(); }
    );
  });

  it('should accept rejected promise as elseSource', (done) => {
    const expected = 42;
    const source = iif(
      () => false,
      of('a'),
      new Promise((resolve: any, reject: any) => { reject(expected); })
    );
    source.subscribe(
      () => { done(new Error('should not be called')); },
      err => {
        expect(err).to.equal(expected);
        done();
      },
      () => { done(new Error('should not be called')); }
    );
  });

  it('should accept rejected promise as thenSource', (done) => {
    const expected = 42;
    const source = iif(() => true, new Promise((resolve: any, reject: any) => { reject(expected); }), of());
    source.subscribe(
      () => { done(new Error('should not be called')); },
      err => {
        expect(err).to.equal(expected);
        done();
      },
      () => { done(new Error('should not be called')); }
    );
  });
});
| {
"content_hash": "adedbcdef85f1baa61f9553d62ef72fc",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 104,
"avg_line_length": 27.159574468085108,
"alnum_prop": 0.5393654524089306,
"repo_name": "ReactiveX/RxJS",
"id": "85a2b6eeda227d6a38e64fe94225024be35146e9",
"size": "2553",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spec/observables/if-spec.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "6029"
},
{
"name": "JavaScript",
"bytes": "174594"
},
{
"name": "Shell",
"bytes": "381"
},
{
"name": "TypeScript",
"bytes": "1621730"
}
],
"symlink_target": ""
} |
(function(){
// $A.Toggle: accessible toggle-button constructor for the AccDC "$A" library.
// `trigger` is an element or an element-id string; `config` supports:
//   noToggle         - skip state tracking/reflection entirely
//   noARIA           - announce state via visually hidden text instead of ARIA attributes
//   roleText / stateText - strings used in the hidden-text announcement
//   state            - initial pressed/checked state
//   callback(state)  - invoked before a state change is applied; a falsy return cancels it
$A.Toggle = function(trigger, config){
var config = config || {}, t = typeof trigger === 'string' ? $A.getEl(trigger) : trigger, that = this,
isCheckbox = $A.getAttr(t, 'role') == 'checkbox' ? true : false, sraText = $A.createEl('span', null, $A.sraCSS);
// noARIA mode: append a visually hidden span so screen readers still hear
// role/state changes, with sensible default wording.
if (!config.noToggle && config.noARIA){
if (!config.roleText)
config.roleText = 'Toggle';
if (!config.stateText)
config.stateText = 'Pressed';
t.appendChild(sraText);
}
// Apply a new state: run the optional callback (which may veto the change),
// then reflect it in hidden text or in aria-pressed / aria-checked.
var toggle = function(state){
var cr = true;
if (config.callback && typeof config.callback === 'function')
cr = config.callback.apply(t, [state]);
if (cr){
if (!config.noToggle && config.noARIA)
sraText.innerHTML = state
? (' ' + config.roleText + ' ' + config.stateText) : ' ' + config.roleText;
else if (!config.noToggle)
$A.setAttr(t, isCheckbox ? 'aria-checked' : 'aria-pressed', state ? 'true' : 'false');
that.state = state;
}
};
// Natively focusable controls (buttons, image inputs, real links) keep their
// default tab order; anything else gets tabindex=0 for keyboard access.
var nn = t.nodeName.toLowerCase();
if (!((nn == 'input' && (t.getAttribute('type') == 'button' || t.getAttribute('type') == 'image'))
|| (nn == 'a' && t.hasAttribute('href')) || (nn == 'button')))
$A.setAttr(t, 'tabindex', '0');
$A.bind(t,
{
// Enter (13) and Space (32) activate the control by synthesizing a click;
// image inputs ignore Space to match native button behavior.
keydown: function(ev){
var k = ev.which || ev.keyCode;
if (k == 13 || k == 32){
ev.preventDefault();
if (!(t.nodeName.toLowerCase() == 'input' && t.type == 'image' && k == 32))
$A.trigger(t, 'click');
}
},
// A click flips the tracked state.
click: function(ev){
toggle.apply(t, [that.state ? false : true]) ? true : false;
ev.preventDefault();
}
});
// Programmatic setter for external callers.
that.set = function(state){
toggle.apply(t, [state]);
};
// Initialize to the configured state unless toggling is disabled.
if (!config.noToggle)
toggle.apply(t, [config.state ? true : false]);
};
})(); | {
"content_hash": "937188b7056f34ef67f4c6b63ec9ec48",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 115,
"avg_line_length": 27.96875,
"alnum_prop": 0.5541899441340782,
"repo_name": "ngashutosh/tsg",
"id": "bcfe26434b6cefbc087f3b63736319cddd2e15c3",
"size": "2425",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "AccDC Technical Style Guide/2 Accessible Component Modules/aria_toggler.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "298141"
},
{
"name": "HTML",
"bytes": "1241563"
},
{
"name": "JavaScript",
"bytes": "1011321"
}
],
"symlink_target": ""
} |
package com.yaochen.boss;
import java.net.ServerSocket;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
 *
 * Declaratively started tasks: bootstraps the Spring context that wires up
 * this application's scheduled jobs.
 *
 * @author hh
 * @version 1.0, Jun 16, 2011
 */
public class Bootstrap {
/**
 * Application entry point. Binds a guard socket so only one instance runs,
 * then loads the Spring application context (which starts the jobs).
 */
public static void main(String[] args) throws Exception {
// Certificate-related settings (SSL debugging / key stores), kept for reference:
//System.setProperty("javax.net.debug", "ssl,handshake");
/* Change these to your local paths before enabling:
System.setProperty("javax.net.ssl.keyStore", "D:/ycsoft/ssl/server.p12");
System.setProperty("javax.net.ssl.keyStorePassword", "a1234567");
System.setProperty("javax.net.ssl.keyStoreType", "PKCS12");
// Change to your local path
System.setProperty("javax.net.ssl.trustStore", "D:/ycsoft/ssl/cfocn.ca.jks");
System.setProperty("javax.net.ssl.trustStorePassword", "a1234567");
System.setProperty("javax.net.ssl.trustStoreType", "JKS");
**/
// Keep only a single running application instance: hold port 15678 for the
// process lifetime; if the bind fails (another instance owns it), exit.
new Thread(new Runnable(){
@Override
public void run() {
try{
ServerSocket aa =new ServerSocket(15678);
aa.accept();
}catch(Exception e){
e.printStackTrace();
System.exit(0);
}
}
}).start();
final String[] xmlFiles = new String[] { "spring-beans.xml",
"spring-jobs.xml", "spring-client.xml" };
// Instantiating the context starts all configured beans and jobs.
new ClassPathXmlApplicationContext(xmlFiles);
}
} | {
"content_hash": "752b85db07cc21b935755dab7e1c4556",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 81,
"avg_line_length": 26.03846153846154,
"alnum_prop": 0.6336779911373708,
"repo_name": "leopardoooo/cambodia",
"id": "8a8d7137813ee009d962a27abaf46160e0fda39f",
"size": "1602",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "boss-job/src/main/java/com/yaochen/boss/Bootstrap.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "826"
},
{
"name": "CSS",
"bytes": "1028439"
},
{
"name": "HTML",
"bytes": "17547"
},
{
"name": "Java",
"bytes": "14519872"
},
{
"name": "JavaScript",
"bytes": "4711774"
},
{
"name": "Shell",
"bytes": "2315"
}
],
"symlink_target": ""
} |
(experimental...)
Jres
====
A Java ElasticSearch client library which does not have a code dependency on the entire ElasticSearch library. All
communication is JSON over HTTP.
<img src="https://travis-ci.org/blacklocus/jres.svg?branch=master"/>
### Dependency ###
gradle
repositories {
// ...
mavenCentral()
}
dependencies {
// ...
compile 'com.blacklocus.jres:jres-core:0.2.0'
}
other dependency syntax on [mvnrepository.com](http://mvnrepository.com/artifact/com.blacklocus.jres/jres-core)
## Usage ##
```java
public class Main {
public static void main(String[] args) {
// Setup an index with a mapping for documents of type MyText.
String index = "readme_test_index";
String type = "definition";
String mapping = "{" +
" \"definition\": {" +
" \"properties\": {" +
" \"body\": {" +
" \"type\": \"string\"" +
" }" +
" }" +
" }" +
"}";
Jres jres = new Jres(host);
jres.quest(new JresCreateIndex(index));
jres.quest(new JresPutMapping(index, type, mapping));
// Index two documents (definitions via Google)
Definition cat = new Definition("cat",
"a small domesticated carnivorous mammal " +
"with soft fur, a short snout, and retractile claws. It is " +
"widely kept as a pet or for catching mice, and many breeds " +
"have been developed.");
Definition dog = new Definition("dog",
"a domesticated carnivorous mammal that " +
"typically has a long snout, an acute sense of smell, and a " +
"barking, howling, or whining voice. It is widely kept as a pet " +
"or for work or field sports.");
jres.quest(new JresIndexDocument(index, type, cat));
jres.quest(new JresIndexDocument(index, type, dog));
// Do some querying
JresMatchQuery retractileQuery = new JresMatchQuery("body", "retractile");
JresMatchQuery mammalQuery = new JresMatchQuery("body", "mammal");
JresMatchQuery alligatorQuery = new JresMatchQuery("body", "alligator");
JresBoolQuery retractileAndMammalQuery = new JresBoolQuery()
.must(retractileQuery, mammalQuery);
// We need to flush the queue in ElasticSearch otherwise our recently
// submitted documents may not yet be indexed.
jres.quest(new JresFlush(index));
JresSearchReply reply;
System.out.println("Searching 'retractile'");
reply = jres.quest(new JresSearch(index, type, new JresSearchBody()
.query(retractileQuery)));
for (Hit hit : reply.getHits().getHits()) {
Definition definition = hit.getSourceAsType(Definition.class);
System.out.println(" Found " + definition.term);
}
System.out.println("Searching 'mammal'");
reply = jres.quest(new JresSearch(index, type, new JresSearchBody()
.query(mammalQuery)));
for (Hit hit : reply.getHits().getHits()) {
Definition definition = hit.getSourceAsType(Definition.class);
System.out.println(" Found " + definition.term);
}
System.out.println("Searching 'alligator'");
reply = jres.quest(new JresSearch(index, type, new JresSearchBody()
.query(alligatorQuery)));
if (reply.getHits().getTotal() == 0) {
System.out.println(" Nothing found about alligators");
}
System.out.println("Searching 'retractile' and 'mammal'");
reply = jres.quest(new JresSearch(index, type, new JresSearchBody()
.query(retractileAndMammalQuery)));
for (Hit hit : reply.getHits().getHits()) {
Definition definition = hit.getSourceAsType(Definition.class);
System.out.println(" Found " + definition.term);
}
}
// This must be (de)serializable by Jackson. I have chosen to go with
// public fields, and provided a non-private default constructor.
// This is just one of many ways to enable Jackson (de)serialization.
static class Definition {
public String term;
public String body;
Definition() {
}
public Definition(String term, String body) {
this.term = term;
this.body = body;
}
@Override
public String toString() {
return term + ": " + body;
}
}
}
```
This program produces this output.
```
Searching 'retractile'
Found cat
Searching 'mammal'
Found cat
Found dog
Searching 'alligator'
Nothing found about alligators
Searching 'retractile' and 'mammal'
Found cat
```
### More ###
Select ElasticSearch APIs are wrapped up in corresponding request objects that implement
[`JresRequest`](https://github.com/blacklocus/jres/tree/master/jres/src/main/java/com/blacklocus/jres/request/JresRequest.java).
All such request objects are located in the `com.blacklocus.jres.request` package tree.
Notice that the request object captures what its
[`JresReply`](https://github.com/blacklocus/jres/tree/master/jres/src/main/java/com/blacklocus/jres/response/JresReply.java)
should be as a type parameter. Because the total ElasticSearch API is rather large, this transforms API bindings into
the form of representative classes (implementors of JresRequest<JresReply>), instead of requiring a large number
of methods on the Jres object to represent each unique ElasticSearch API call.
### jres.quest ###
This will likely be the primary invocation on a Jres object. It covers most kinds of requests and those that would be
used most frequently such as indexing documents. It accepts JresRequests whose correlated response extends `JresJsonReply`.
These are ElasticSearch API calls that return JSON (not all ElasticSearch APIs return JSON), and so those responses
can be deserialized into representative objects. If the HTTP response status code is not ok, then a
`JresErrorReplyException` will be thrown. An example follows for indexing a document.
[ElasticSearch API Index Document](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/docs-index_.html)
has been captured as
[`JresIndexDocument`](https://github.com/blacklocus/jres/tree/master/jres/src/main/java/com/blacklocus/jres/request/JresRequest.java)
This example is obscenely verbose for illustration.
Jres jres = new Jres(Suppliers.ofInstance("http://elasticsearchhost:9200/"));
UserCommentPojo document = new UserCommentPojo();
document.name = "test doc";
document.uploaded = System.currentTimeMillis();
document.content = "Hi, Mom.";
String elasticSearchIndex = "primarySite";
String elasticSearchDocumentType = "userComment";
Usually I find that IDE tooling helps immensely here. Lay out your request without variable capture.
jres.quest(new JresIndexDocument(elasticSearchIndex, elasticSearchDocumentType, document))
Then use your tooling to figure out what the reply object is, rather than inspecting the request class yourself.
For IntelliJ, use [Extract Variable Refactor](http://www.jetbrains.com/idea/webhelp/extract-variable.html).
JresIndexDocumentReply reply = jres.quest(new JresIndexDocument(elasticSearchIndex, elasticSearchDocumentType, document))
// At this time, these objects should satisfy the default configuration of the Jackson `ObjectMapper`, i.e.
// public fields or standard bean property getters/setters.
class UserCommentPojo {
public String name;
public Long uploaded;
public String content;
}
### jres.tolerate ###
is identical to `jres.quest` but tolerates exception responses. If there was an exception response and the HTTP response
status code matches the given tolerated status code, that is captured in the return. This is useful where an error
response might be expected without involving expensive and laborious exception handling.
### jres.bool ###
represents ElasticSearch APIs that return a boolean response in the form of HTTP status codes (200 or 404). Negative
responses should translate to false return values rather than JresErrorReplyExceptions.
----
Plenty more examples can be observed in the request object unit tests:
[jres-test/src/test/java/com.blacklocus.jres.request](https://github.com/blacklocus/jres/tree/master/jres-test/src/test/java/com/blacklocus/jres/request)
## License ##
Copyright 2014 BlackLocus under [the Apache 2.0 license](LICENSE)
| {
"content_hash": "13be2ad563185819f7397f1572ca2152",
"timestamp": "",
"source": "github",
"line_count": 223,
"max_line_length": 153,
"avg_line_length": 39.18834080717489,
"alnum_prop": 0.6718159972536903,
"repo_name": "blacklocus/jres",
"id": "f7d65b0b9ffec263a4eaf5a1c1d8492dd8aa74d1",
"size": "8739",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "283481"
}
],
"symlink_target": ""
} |
# ChatLite2
A GUI Edition of "MicroChat"
Intuitive message transfer.
Distributed under MIT Licence.
## Message Encryption
Encryption available now!
Note that it's not a guarantee of information security.
## Copyright Information
Copyright (C) 2015 Yuxuan Chen Released under MIT Licence.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
| {
"content_hash": "eb5492229f9aadfeef79c54f04f993d7",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 460,
"avg_line_length": 83.625,
"alnum_prop": 0.7907324364723468,
"repo_name": "yuxuanchen1997/ChatLite2",
"id": "840136d619523425da7d2ccf6e5b458f1bb55dbb",
"size": "1344",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "18174"
}
],
"symlink_target": ""
} |
'use strict';
/**
* @ngdoc function
* @name simpleSurveyUi.controller:HomeController
* @description
* # HomeController
* Controller of the simpleSurveyUi
*/
angular
.module('simpleSurveyUi')
.controller('HomeController', [
'$scope',
'$location',
'AuthenticationService',
'Page',
function ($scope, $location, AuthenticationService, Page) {
Page.setTitle('Simple Survey | Welcome');
Page.setLH(false);
$scope.login = function() {
$scope.loading = true;
AuthenticationService.Login($scope.email, $scope.password, function (result) {
if(result.success === true) {
if(result.isAdmin) {
$location.path('/admin');
} else {
$location.path('/survey');
}
} else {
$scope.error = result.message;
$scope.loading = false;
}
});
};
}]); | {
"content_hash": "12e9ef3613deab7ddcc132de62a3176b",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 80,
"avg_line_length": 21.75,
"alnum_prop": 0.6538952745849298,
"repo_name": "bveiga/simple-survey",
"id": "e5e8152c3e9034189b9da21e3aa41e200372ed44",
"size": "783",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dev/scripts/controllers/home.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5485"
},
{
"name": "HTML",
"bytes": "6226"
},
{
"name": "JavaScript",
"bytes": "1713268"
}
],
"symlink_target": ""
} |
package org.springframework.showcase.carplant.dao;
import java.util.List;
/**
 * A DAO capable of retrieving parts for building a car.
 * Implemented using {@link JdbcCarPartsInventoryImpl JDBC},
 * using {@link PlainHibernateInventoryImpl Hibernate using the plain Session API}
 * and {@link TemplateHibernateInventoryImpl Hibernate using the template API},
 * showing various Spring 2 and Java5 techniques.
 *
 * Sample originally described in a bigger sample application available from
 * http://blog.interface21.com/main/2007/03/12/carplant-not-accepting-null-carmodels/
 *
 * @author Alef Arendsen
 * @since 2.0.4
 */
public interface CarPartsInventory {
/**
 * Retrieves a list of Parts specific for a CarModel.
 * @param defaultCarModel the model whose compatible parts are requested
 * @return the parts registered for that model
 */
public List<Part> getPartsForModel(CarModel defaultCarModel);
/**
 * Updates stock for a specific part.
 * @param partNo the part number identifying the part
 * @param i the stock value to apply (delta vs. absolute is
 *          implementation-defined — verify against the implementations)
 */
public void updatePartStock(String partNo, int i);
/**
 * Adds a new part to the inventory.
 * @param model the car model the part belongs to
 * @param number the part number
 * @param name the human-readable part name
 */
public void addPart(String model, String number, String name);
}
| {
"content_hash": "58b61945a7714b0cae78f7603311a06f",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 85,
"avg_line_length": 34.310344827586206,
"alnum_prop": 0.7587939698492462,
"repo_name": "mattxia/spring-2.5-analysis",
"id": "ff8e92c634e9a7f47f38e2dd2733c85ae623adca",
"size": "995",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "samples/showcases/java5-dao/src/org/springframework/showcase/carplant/dao/CarPartsInventory.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AspectJ",
"bytes": "18516"
},
{
"name": "CSS",
"bytes": "20368"
},
{
"name": "Groovy",
"bytes": "2361"
},
{
"name": "Java",
"bytes": "16366293"
},
{
"name": "Ruby",
"bytes": "623"
},
{
"name": "Shell",
"bytes": "684"
},
{
"name": "XSLT",
"bytes": "2674"
}
],
"symlink_target": ""
} |
namespace Google.Cloud.SecurityCenter.V1.Snippets
{
// [START securitycenter_v1_generated_SecurityCenter_CreateBigQueryExport_sync]
using Google.Api.Gax.ResourceNames;
using Google.Cloud.SecurityCenter.V1;
public sealed partial class GeneratedSecurityCenterClientSnippets
{
/// <summary>Snippet for CreateBigQueryExport</summary>
/// <remarks>
/// This snippet has been automatically generated for illustrative purposes only.
/// It may require modifications to work in your environment.
/// </remarks>
public void CreateBigQueryExportRequestObject()
{
// Create client
SecurityCenterClient securityCenterClient = SecurityCenterClient.Create();
// Initialize request argument(s)
// NOTE(review): the placeholder organization name "[ORGANIZATION]" and the
// empty BigQueryExportId must be replaced with real values before running.
CreateBigQueryExportRequest request = new CreateBigQueryExportRequest
{
ParentAsOrganizationName = OrganizationName.FromOrganization("[ORGANIZATION]"),
BigQueryExport = new BigQueryExport(),
BigQueryExportId = "",
};
// Make the request
BigQueryExport response = securityCenterClient.CreateBigQueryExport(request);
}
}
// [END securitycenter_v1_generated_SecurityCenter_CreateBigQueryExport_sync]
}
| {
"content_hash": "2c260868e10aa9b4eab9790bb4683576",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 95,
"avg_line_length": 43.833333333333336,
"alnum_prop": 0.6714828897338403,
"repo_name": "jskeet/gcloud-dotnet",
"id": "943abb7088f42f377ea8c2f7a0de99ab33c89363",
"size": "1937",
"binary": false,
"copies": "1",
"ref": "refs/heads/bq-migration",
"path": "apis/Google.Cloud.SecurityCenter.V1/Google.Cloud.SecurityCenter.V1.GeneratedSnippets/SecurityCenterClient.CreateBigQueryExportRequestObjectSnippet.g.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1725"
},
{
"name": "C#",
"bytes": "1829733"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html>
<head>
<script src="https://use.fontawesome.com/14b1f7f7de.js"></script>
<meta charset="utf-8">
<link rel= "stylesheet" href="../style.css">
<link rel="icon" type="tab icon" href="../icons/paw.jpeg"/>
<link href="https://fonts.googleapis.com/css?family=Lobster+Two|Quicksand" rel="stylesheet">
<title>Barks 'N Rec</title>
</head>
<body>
<div class = "fix">
<div class = "social box">
<a href = "https://www.facebook.com/"><i class="fa fa-facebook icon fb" aria-hidden="true"></i></a>
<a href="https://twitter.com/BarksNRec206"><i class="fa fa-twitter-square icon twit" aria-hidden="true"></i></a>
<a href = "https://www.instagram.com/?hl=en"><i class="fa fa-instagram icon insta" aria-hidden="true"></i></a>
<a href = "https://www.pinterest.com/"><i class="fa fa-pinterest icon pint" aria-hidden="true"></i></a>
<a href = "https://www.youtube.com/watch?v=mRf3-JkwqfU"><i class="fa fa-youtube-play icon yt" aria-hidden="true"></i></a>
</div>
<div class = "logo box">
<i class="fa fa-paw" aria-hidden="true"></i>
<i class="fa fa-paw" aria-hidden="true"></i>
<h1>Barks 'N Rec</h1>
<i class="fa fa-paw" aria-hidden="true"></i>
<i class="fa fa-paw" aria-hidden="true"></i>
</div>
<div class = "nav box">
<a href = "../index.html" class = "button">HOME</a>
<a href = "about.html" class = "button">ABOUT US</a>
<a href = "contacts.html" class = "button">CONTACT US</a>
<a href = "testimonials.html" class = "button">TESTIMONIALS</a>
</div>
</div>
<div class="aboutSection">
<h2>About Our Product</h2>
<p>Barks 'N Rec was built to connect dog owners whose dogs have similiar interests. Rather than taking your dog to a random park, it matches you with your perfect "walk buddy" — another dynamic dog-owner duo who likes to swim, hike, or just go for a walk in the greater Seattle area. No matter what interests your dog has, we can find a match!</p>
<h2>About Our Team</h2>
<div class = "groupMember">
<img src="../miscImages/maxFinal.jpg" class = "team">
<p>Max is excited to once again be a student after previously working in a deli for the past few years. When he is not spending all of his time programming, he enjoys watching sports, playing video games, and listening to all kinds of music. His goal is to complete the Code Fellows program and become a full time developer, though he has not yet decided on what 401 class he would like to take.</p>
</div>
<br>
<div class = "groupMember">
<img src="../miscImages/chelsea.jpg" class = "team">
<p>Chelsea is a part-time jazz DJ, and a full-time dog lover. She entered Code Fellows with a background in economics and politics, which she is excited to combine with software development. When she's not in hot pursuit of dogs to pet, she enjoys reading, drinking coffee, making mix playlists for friends, kayaking, and swing dancing. </p>
</div>
<br>
<div class = "groupMember">
<img src="../miscImages/david.jpeg" class = "team">
<p>David comes from Edmond, Oklahoma and previously was employed as a commercial diver in Washington and Alaska. He is currently studying to become a Front End web developer. In his spare time he swing dances, scuba dives, flies helicopters, and hikes. Usually in that order, starting at [0]. </p>
</div>
<br>
<div class = "groupMember">
<img src="../miscImages/abdul.jpg" class = "team">
<p>Abdullah Soulaiman, who is new to the web development world, has collaborated with this amazing team to establish "Bark 'N Rec." He loves cooking with his pressure cooker (those things are time savers), outdoor activities, hiking, and real football (AKA, "soccer"). His goal is to continue his learning journey into back-end development with both Python and Java, and to learn Ember.js framework for web applications.</p>
</div>
</div>
</body>
</html>
| {
"content_hash": "238459ee518a92e8546f5a8c95804a5b",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 432,
"avg_line_length": 64.87096774193549,
"alnum_prop": 0.662357036300348,
"repo_name": "chelseadole/barks-and-rec",
"id": "2c4bc059a3891e037b9ae9effd96270cee0d3e67",
"size": "4024",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pages/about.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11393"
},
{
"name": "HTML",
"bytes": "20073"
},
{
"name": "JavaScript",
"bytes": "22060"
}
],
"symlink_target": ""
} |
using DataCloner.Universal.Menu;
using DataCloner.Universal.Menu.Left;
using DataCloner.Universal.Menu.Top;
using System.Collections.Generic;
namespace DataCloner.Universal.ViewModels.Design
{
    /// <summary>
    /// The design-time ViewModel for the AppShell.
    /// Populates the menu collections with representative items so the XAML
    /// designer can render the shell without runtime services (hence the
    /// null dependencies passed to each menu item).
    /// </summary>
    public class AppShellDesignViewModel
    {
        public AppShellDesignViewModel()
        {
            NavigationBarMenuItemsTopLeft = new List<IMenuItem>()
            {
                new FileMenuItem(null),
                new ToolsMenuItem(null),
                new HelpMenuItem(null)
            };
            NavigationBarMenuItemsTopMiddle = new List<IMenuItem>()
            {
                new ClonerMenuItem(null)
            };
            // Fix: this property was previously never assigned, so designer
            // bindings that enumerate it saw null; expose an empty list instead.
            NavigationBarMenuItemsTopRight = new List<IMenuItem>();
            NavigationBarMenuItemsLeft = new List<IMenuItem>()
            {
                new SqlServersMenuItem(null),
                new ClonerMenuItem(null),
                new ExtractionModelsMenuItem(null)
            };
        }

        /// <summary>Items shown on the left of the top navigation bar.</summary>
        public List<IMenuItem> NavigationBarMenuItemsTopLeft { get; private set; }

        /// <summary>Items shown in the middle of the top navigation bar.</summary>
        public List<IMenuItem> NavigationBarMenuItemsTopMiddle { get; private set; }

        /// <summary>Items shown on the right of the top navigation bar (empty at design time).</summary>
        public List<IMenuItem> NavigationBarMenuItemsTopRight { get; private set; }

        /// <summary>Items shown in the left navigation pane.</summary>
        public List<IMenuItem> NavigationBarMenuItemsLeft { get; private set; }
    }
}
| {
"content_hash": "c38d592d484092f072d8303a201125b1",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 84,
"avg_line_length": 32.19512195121951,
"alnum_prop": 0.6143939393939394,
"repo_name": "naster01/DataCloner",
"id": "53d36a97896e7ede9c4befa65f6417465c15c52b",
"size": "1322",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Universal/DataCloner.Universal/ViewModels/Design/AppShellDesignViewModel.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3635"
},
{
"name": "C#",
"bytes": "1317908"
},
{
"name": "PLpgSQL",
"bytes": "3569371"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_75) on Tue May 19 17:13:23 PDT 2015 -->
<meta http-equiv="Content-Type" content="text/html" charset="UTF-8">
<title>Uses of Package org.apache.hadoop.fs.http.server (Apache Hadoop HttpFS 2.6.0-cdh5.4.2 API)</title>
<meta name="date" content="2015-05-19">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Package org.apache.hadoop.fs.http.server (Apache Hadoop HttpFS 2.6.0-cdh5.4.2 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Use</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/apache/hadoop/fs/http/server/package-use.html" target="_top">Frames</a></li>
<li><a href="package-use.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h1 title="Uses of Package org.apache.hadoop.fs.http.server" class="title">Uses of Package<br>org.apache.hadoop.fs.http.server</h1>
</div>
<div class="contentContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../org/apache/hadoop/fs/http/server/package-summary.html">org.apache.hadoop.fs.http.server</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.apache.hadoop.fs.http.server">org.apache.hadoop.fs.http.server</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList"><a name="org.apache.hadoop.fs.http.server">
<!-- -->
</a>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../../../org/apache/hadoop/fs/http/server/package-summary.html">org.apache.hadoop.fs.http.server</a> used by <a href="../../../../../../org/apache/hadoop/fs/http/server/package-summary.html">org.apache.hadoop.fs.http.server</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Class and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colOne"><a href="../../../../../../org/apache/hadoop/fs/http/server/class-use/HttpFSParametersProvider.OperationParam.html#org.apache.hadoop.fs.http.server">HttpFSParametersProvider.OperationParam</a>
<div class="block">Class for operation parameter.</div>
</td>
</tr>
<tr class="rowColor">
<td class="colOne"><a href="../../../../../../org/apache/hadoop/fs/http/server/class-use/HttpFSServerWebApp.html#org.apache.hadoop.fs.http.server">HttpFSServerWebApp</a>
<div class="block">Bootstrap class that manages the initialization and destruction of the
HttpFSServer server, it is a <code>javax.servlet.ServletContextListener
</code> implementation that is wired in HttpFSServer's WAR
<code>WEB-INF/web.xml</code>.</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="package-summary.html">Package</a></li>
<li>Class</li>
<li class="navBarCell1Rev">Use</li>
<li><a href="package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/apache/hadoop/fs/http/server/package-use.html" target="_top">Frames</a></li>
<li><a href="package-use.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2015 <a href="http://www.apache.org">Apache Software Foundation</a>. All Rights Reserved.</small></p>
</body>
</html>
| {
"content_hash": "95388e41ec9fe0d6825bff49d65f46fa",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 322,
"avg_line_length": 38.81875,
"alnum_prop": 0.6362904524231203,
"repo_name": "ZhangXFeng/hadoop",
"id": "8f10bb16258236126f781aa249f9609a3809d3c0",
"size": "6211",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "share/doc/hadoop-hdfs-httpfs/apidocs/org/apache/hadoop/fs/http/server/package-use.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AspectJ",
"bytes": "189381"
},
{
"name": "Batchfile",
"bytes": "215694"
},
{
"name": "C",
"bytes": "3575939"
},
{
"name": "C++",
"bytes": "2163041"
},
{
"name": "CMake",
"bytes": "100256"
},
{
"name": "CSS",
"bytes": "621096"
},
{
"name": "HTML",
"bytes": "96504707"
},
{
"name": "Java",
"bytes": "111573402"
},
{
"name": "JavaScript",
"bytes": "228374"
},
{
"name": "Makefile",
"bytes": "7278"
},
{
"name": "Objective-C",
"bytes": "118273"
},
{
"name": "PHP",
"bytes": "152555"
},
{
"name": "Perl",
"bytes": "187872"
},
{
"name": "Protocol Buffer",
"bytes": "561225"
},
{
"name": "Python",
"bytes": "1166492"
},
{
"name": "Ruby",
"bytes": "28485"
},
{
"name": "Shell",
"bytes": "912677"
},
{
"name": "Smalltalk",
"bytes": "56562"
},
{
"name": "TeX",
"bytes": "45082"
},
{
"name": "Thrift",
"bytes": "3965"
},
{
"name": "XSLT",
"bytes": "183042"
}
],
"symlink_target": ""
} |
<?xml version='1.0' encoding='UTF-8'?>
<Document>
<Header>
<SchemaName>Location</SchemaName>
<Identifier>location_geonames_6957661</Identifier>
<DocumentState>public</DocumentState>
<TimeStamp>1390753462594</TimeStamp>
<SummaryFields>
<Title>NL > North Brabant > Stichting Volksabdij O.L. Vrouw ter Duinen</Title>
</SummaryFields>
</Header>
<Body>
<Location>
<GeopoliticalHierarchy>NL,North Brabant,Stichting Volksabdij O.L. Vrouw ter Duinen</GeopoliticalHierarchy>
<LocationName>
<Appelation>Stichting Volksabdij O.L. Vrouw ter Duinen</Appelation>
<LocationNameType>monastery</LocationNameType>
<Comments></Comments>
</LocationName>
<Coordinates>
<Longitude>4.34121</Longitude>
<Latitude>51.409</Latitude>
</Coordinates>
<GeonamesURI>http://www.geonames.org/6957661</GeonamesURI>
<WikipediaLink/>
<DbpediaLink/>
</Location>
</Body>
</Document>
| {
"content_hash": "85e41503f23c326d48db3d7f2e101aab",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 112,
"avg_line_length": 33.96551724137931,
"alnum_prop": 0.6741116751269035,
"repo_name": "delving/oscr-data",
"id": "961957560afb63bb17aff62732be4bdbf9ddd05b",
"size": "985",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shared/Location/location_geonames_6957661.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
//
// AppDelegate.m
// FJUtils
//
// Created by Jianjun Wu on 2/17/12.
// Copyright (c) 2012 Fourj. All rights reserved.
//
#import "AppDelegate.h"
#import "ViewController.h"
@implementation AppDelegate

@synthesize window = _window;
@synthesize viewController = _viewController;

// Manual reference counting (MRC): release retained ivars before handing
// off to the superclass. Order matters — [super dealloc] must come last.
- (void)dealloc
{
    [_window release];
    [_viewController release];
    [super dealloc];
}

// Builds the UI at launch: a full-screen window whose root view controller
// is a UINavigationController wrapping the app's single ViewController.
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
    self.window = [[[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]] autorelease];
    // Override point for customization after application launch.
    self.viewController = [[[ViewController alloc] initWithNibName:@"ViewController" bundle:nil] autorelease];
    UINavigationController *rootNav = [[UINavigationController alloc] initWithRootViewController:self.viewController];
    self.window.rootViewController = rootNav;
    // The rootViewController property retains rootNav; this release balances
    // the alloc above so ownership transfers cleanly to the window.
    [rootNav release];
    [self.window makeKeyAndVisible];
    return YES;
}

// The remaining lifecycle callbacks are intentionally empty Xcode-template
// stubs; the template comments below describe when each one fires.

- (void)applicationWillResignActive:(UIApplication *)application
{
    /*
     Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
     Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
     */
}

- (void)applicationDidEnterBackground:(UIApplication *)application
{
    /*
     Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
     If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
     */
}

- (void)applicationWillEnterForeground:(UIApplication *)application
{
    /*
     Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
     */
}

- (void)applicationDidBecomeActive:(UIApplication *)application
{
    /*
     Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
     */
}

- (void)applicationWillTerminate:(UIApplication *)application
{
    /*
     Called when the application is about to terminate.
     Save data if appropriate.
     See also applicationDidEnterBackground:.
     */
}

@end
| {
"content_hash": "358aeec743799d9781c0fba0ae4959e1",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 277,
"avg_line_length": 33.8974358974359,
"alnum_prop": 0.7632375189107413,
"repo_name": "tonycn/FJIOSUtil",
"id": "a9488c674165789ae0b6e7c80ece5e7cc38f860d",
"size": "2644",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "FJUtils/AppDelegate.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "33788"
}
],
"symlink_target": ""
} |
package org.javarosa.core.model.data;
import org.javarosa.core.util.externalizable.Externalizable;
/**
 * An IAnswerData object represents an answer to a question
 * posed to a user.
 *
 * IAnswerData objects should never in any circumstances contain
 * a null data value. In cases of empty or non-existent responses,
 * the IAnswerData reference should itself be null.
 *
 * @author Drew Roos
 *
 */
public interface IAnswerData extends Externalizable {
    /**
     * Sets the value held by this answer.
     *
     * @param o the value of this answerdata object. Cannot be null.
     *    Null Data will not overwrite existing values.
     * @throws NullPointerException if o is null
     */
    void setValue (Object o); //can't be null

    /**
     * @return The value of this answer, will never
     * be null
     */
    Object getValue (); //will never be null

    /**
     * @return Gets a string representation of this
     * answer
     */
    String getDisplayText ();

    /**
     * @return A deep copy of this answer; modifying the copy
     * must not affect the original.
     */
    IAnswerData clone ();

    /**
     * Data types can be uncast if they are expected to be used
     * in different contexts. This allows, for instance, select
     * values to be generated by casting other types or vice versa.
     *
     * @return An uncast representation of this answer which can
     * be used in a different context.
     */
    UncastData uncast();

    /**
     * Casts the provided data into this data type.
     *
     * @param data An uncast data value which is compatible
     * with this data type
     * @return An instance of the instance's data type
     * which contains that value
     * @throws IllegalArgumentException If the uncast data is
     * not in a compatible format
     */
    IAnswerData cast(UncastData data) throws IllegalArgumentException;
}
| {
"content_hash": "20b27a4d94ce8ebe3bdd2332a11a5687",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 68,
"avg_line_length": 27.76271186440678,
"alnum_prop": 0.7087912087912088,
"repo_name": "Jonathan727/javarosa",
"id": "6ea0f10eb9c4a46752f9f6f553f04c961bf690c5",
"size": "2229",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "core/src/main/java/org/javarosa/core/model/data/IAnswerData.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2814"
},
{
"name": "Java",
"bytes": "1574800"
},
{
"name": "Lex",
"bytes": "4186"
},
{
"name": "Python",
"bytes": "34752"
}
],
"symlink_target": ""
} |
package org.apache.flink.runtime.executiongraph;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;
import org.apache.flink.runtime.JobException;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.blob.BlobWriter;
import org.apache.flink.runtime.blob.VoidBlobWriter;
import org.apache.flink.runtime.checkpoint.CheckpointRecoveryFactory;
import org.apache.flink.runtime.checkpoint.StandaloneCheckpointRecoveryFactory;
import org.apache.flink.runtime.client.JobExecutionException;
import org.apache.flink.runtime.executiongraph.failover.FailoverStrategy;
import org.apache.flink.runtime.executiongraph.failover.RestartAllStrategy;
import org.apache.flink.runtime.executiongraph.restart.NoRestartStrategy;
import org.apache.flink.runtime.executiongraph.restart.RestartStrategy;
import org.apache.flink.runtime.io.network.partition.JobMasterPartitionTracker;
import org.apache.flink.runtime.io.network.partition.NoOpJobMasterPartitionTracker;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobmaster.TestingLogicalSlotBuilder;
import org.apache.flink.runtime.jobmaster.slotpool.SlotProvider;
import org.apache.flink.runtime.shuffle.NettyShuffleMaster;
import org.apache.flink.runtime.shuffle.ShuffleMaster;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
/**
 * Builder of {@link ExecutionGraph} used in testing.
 *
 * <p>Every collaborator starts out as a no-op / testing stand-in (no restarts,
 * void blob writer, no-op partition tracker, standalone checkpoint recovery,
 * a slot provider that always fulfills requests), so a test only needs to
 * override the pieces it actually exercises via the fluent setters below.
 */
public class TestingExecutionGraphBuilder {

	private static final Logger LOG = LoggerFactory.getLogger(TestingExecutionGraphBuilder.class);

	/** Entry point: returns a builder pre-populated with testing defaults. */
	public static TestingExecutionGraphBuilder newBuilder() {
		return new TestingExecutionGraphBuilder();
	}

	// Testing defaults; each field can be overridden by the matching setter.
	private ScheduledExecutorService futureExecutor = TestingUtils.defaultExecutor();

	private Executor ioExecutor = TestingUtils.defaultExecutor();

	private Time rpcTimeout = AkkaUtils.getDefaultTimeout();

	private RestartStrategy restartStrategy = new NoRestartStrategy();

	private FailoverStrategy.Factory failoverStrategyFactory = new RestartAllStrategy.Factory();

	// Always fulfills slot requests immediately with a fresh testing slot.
	private SlotProvider slotProvider = new TestingSlotProvider(slotRequestId -> CompletableFuture.completedFuture(new TestingLogicalSlotBuilder().createTestingLogicalSlot()));

	private ClassLoader userClassLoader = ExecutionGraph.class.getClassLoader();

	private BlobWriter blobWriter = VoidBlobWriter.getInstance();

	private Time allocationTimeout = AkkaUtils.getDefaultTimeout();

	private ShuffleMaster<?> shuffleMaster = NettyShuffleMaster.INSTANCE;

	private JobMasterPartitionTracker partitionTracker = NoOpJobMasterPartitionTracker.INSTANCE;

	private Configuration jobMasterConfig = new Configuration();

	private JobGraph jobGraph = new JobGraph();

	private MetricGroup metricGroup = new UnregisteredMetricsGroup();

	private CheckpointRecoveryFactory checkpointRecoveryFactory = new StandaloneCheckpointRecoveryFactory();

	private ExecutionDeploymentListener executionDeploymentListener = NoOpExecutionDeploymentListener.get();

	private ExecutionStateUpdateListener executionStateUpdateListener = (execution, newState) -> {};

	// Use newBuilder(); instances are only created through the factory method.
	private TestingExecutionGraphBuilder() {
	}

	// ------------------------------------------------------------------------
	// Fluent setters: each one overrides a single collaborator and returns
	// this builder for chaining.
	// ------------------------------------------------------------------------

	public TestingExecutionGraphBuilder setJobMasterConfig(Configuration jobMasterConfig) {
		this.jobMasterConfig = jobMasterConfig;
		return this;
	}

	public TestingExecutionGraphBuilder setJobGraph(JobGraph jobGraph) {
		this.jobGraph = jobGraph;
		return this;
	}

	public TestingExecutionGraphBuilder setFutureExecutor(ScheduledExecutorService futureExecutor) {
		this.futureExecutor = futureExecutor;
		return this;
	}

	public TestingExecutionGraphBuilder setIoExecutor(Executor ioExecutor) {
		this.ioExecutor = ioExecutor;
		return this;
	}

	public TestingExecutionGraphBuilder setRpcTimeout(Time rpcTimeout) {
		this.rpcTimeout = rpcTimeout;
		return this;
	}

	public TestingExecutionGraphBuilder setRestartStrategy(RestartStrategy restartStrategy) {
		this.restartStrategy = restartStrategy;
		return this;
	}

	public TestingExecutionGraphBuilder setFailoverStrategyFactory(FailoverStrategy.Factory failoverStrategyFactory) {
		this.failoverStrategyFactory = failoverStrategyFactory;
		return this;
	}

	public TestingExecutionGraphBuilder setSlotProvider(SlotProvider slotProvider) {
		this.slotProvider = slotProvider;
		return this;
	}

	public TestingExecutionGraphBuilder setUserClassLoader(ClassLoader userClassLoader) {
		this.userClassLoader = userClassLoader;
		return this;
	}

	public TestingExecutionGraphBuilder setBlobWriter(BlobWriter blobWriter) {
		this.blobWriter = blobWriter;
		return this;
	}

	public TestingExecutionGraphBuilder setAllocationTimeout(Time allocationTimeout) {
		this.allocationTimeout = allocationTimeout;
		return this;
	}

	public TestingExecutionGraphBuilder setShuffleMaster(ShuffleMaster<?> shuffleMaster) {
		this.shuffleMaster = shuffleMaster;
		return this;
	}

	public TestingExecutionGraphBuilder setPartitionTracker(JobMasterPartitionTracker partitionTracker) {
		this.partitionTracker = partitionTracker;
		return this;
	}

	public TestingExecutionGraphBuilder setMetricGroup(MetricGroup metricGroup) {
		this.metricGroup = metricGroup;
		return this;
	}

	public TestingExecutionGraphBuilder setCheckpointRecoveryFactory(CheckpointRecoveryFactory checkpointRecoveryFactory) {
		this.checkpointRecoveryFactory = checkpointRecoveryFactory;
		return this;
	}

	public TestingExecutionGraphBuilder setExecutionDeploymentListener(ExecutionDeploymentListener executionDeploymentListener) {
		this.executionDeploymentListener = executionDeploymentListener;
		return this;
	}

	public TestingExecutionGraphBuilder setExecutionStateUpdateListener(ExecutionStateUpdateListener executionStateUpdateListener) {
		this.executionStateUpdateListener = executionStateUpdateListener;
		return this;
	}

	/**
	 * Assembles the {@link ExecutionGraph} from the configured collaborators.
	 *
	 * <p>NOTE: the arguments below are purely positional — their order must
	 * stay in sync with {@code ExecutionGraphBuilder.buildGraph}.
	 *
	 * @return the newly built execution graph
	 * @throws JobException if the graph cannot be attached/created
	 * @throws JobExecutionException if graph construction fails
	 */
	public ExecutionGraph build() throws JobException, JobExecutionException {
		return ExecutionGraphBuilder.buildGraph(
			null,
			jobGraph,
			jobMasterConfig,
			futureExecutor,
			ioExecutor,
			slotProvider,
			userClassLoader,
			checkpointRecoveryFactory,
			rpcTimeout,
			restartStrategy,
			metricGroup,
			blobWriter,
			allocationTimeout,
			LOG,
			shuffleMaster,
			partitionTracker,
			failoverStrategyFactory,
			executionDeploymentListener,
			executionStateUpdateListener,
			System.currentTimeMillis());
	}
}
| {
"content_hash": "f3ca2d0d7f5fe0ad6369045593e3eb72",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 173,
"avg_line_length": 37.19101123595506,
"alnum_prop": 0.8350453172205438,
"repo_name": "greghogan/flink",
"id": "3d9667ba011b07a57cfd6d189a841f900ed69d2a",
"size": "7421",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flink-runtime/src/test/java/org/apache/flink/runtime/executiongraph/TestingExecutionGraphBuilder.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4588"
},
{
"name": "CSS",
"bytes": "58146"
},
{
"name": "Clojure",
"bytes": "93329"
},
{
"name": "Dockerfile",
"bytes": "12142"
},
{
"name": "FreeMarker",
"bytes": "25294"
},
{
"name": "HTML",
"bytes": "108358"
},
{
"name": "Java",
"bytes": "52179549"
},
{
"name": "JavaScript",
"bytes": "1829"
},
{
"name": "Makefile",
"bytes": "5134"
},
{
"name": "Python",
"bytes": "1015013"
},
{
"name": "Scala",
"bytes": "13763923"
},
{
"name": "Shell",
"bytes": "513745"
},
{
"name": "TSQL",
"bytes": "123113"
},
{
"name": "TypeScript",
"bytes": "246974"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.