text
stringlengths
2
1.04M
meta
dict
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8">
    <title>SimpleUrlShortener with custom alias</title>
    <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css">
    <link rel="stylesheet" href="css/styles.css">
</head>
<body>
<br>
<h1>Hello, SimpleUrlShortener!</h1>
<br><hr>
<br><br>
<!-- Shortening form: long URL plus optional custom alias.
     Submission is handled client-side by javascripts/make.js (button is type="button"). -->
<div class="container">
    <form class="form-inline">
        <input id="url-field" type="text" class="form-control" placeholder="looooooooong url:(" >
        <div class="input-group">
            <div class="input-group-addon">@</div>
            <input id="alias-field" type="text" class="form-control" placeholder="alias">
        </div>
        <button type="button" class="btn btn-primary btn-make" id="btn-make">Submit</button>
    </form>
    <!-- Result of the shortening request is rendered into this container by the scripts below -->
    <div id="result" class="row"></div>
</div>
<div class="footer">
    <hr>
    <p>SimpleUrlShortener - 1st Personal Project</p>
    <p>Made by <a href="https://pyxisdev.github.io">WindSekirun</a></p>
    <p>Source deployed in <a href="https://github.com/WindSekirun/SimpleUrlShortener">Github</a></p>
</div>
<!-- Third-party libraries first (jQuery, Bootstrap), then application scripts -->
<script src="https://code.jquery.com/jquery-2.1.4.min.js"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/js/bootstrap.min.js"></script>
<script src="javascripts/bootstrapAlert.min.js"></script>
<script src="javascripts/make.js"></script>
</body>
</html>
{ "content_hash": "f511d9482c23021a1a66f0a7e469bca4", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 108, "avg_line_length": 40.525, "alnum_prop": 0.5582973473164713, "repo_name": "WindSekirun/SimpleUrlShortener", "id": "5696f8e6adfd1759009d9c28f17b54b08e51e7b3", "size": "1621", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "views/index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "590" }, { "name": "HTML", "bytes": "1621" }, { "name": "JavaScript", "bytes": "6266" } ], "symlink_target": "" }
using namespace llvm; using namespace llvm::object; using namespace lld::coff; namespace lld { static_assert(sizeof(SymbolUnion) <= 48, "symbols should be optimized for memory usage"); // Returns a symbol name for an error message. static std::string maybeDemangleSymbol(StringRef symName) { if (config->demangle) { std::string prefix; StringRef prefixless = symName; if (prefixless.consume_front("__imp_")) prefix = "__declspec(dllimport) "; StringRef demangleInput = prefixless; if (config->machine == I386) demangleInput.consume_front("_"); std::string demangled = demangle(demangleInput); if (demangled != demangleInput) return prefix + demangle(demangleInput); return (prefix + prefixless).str(); } return symName; } std::string toString(coff::Symbol &b) { return maybeDemangleSymbol(b.getName()); } std::string toCOFFString(const Archive::Symbol &b) { return maybeDemangleSymbol(b.getName()); } namespace coff { StringRef Symbol::getName() { // COFF symbol names are read lazily for a performance reason. // Non-external symbol names are never used by the linker except for logging // or debugging. Their internal references are resolved not by name but by // symbol index. And because they are not external, no one can refer them by // name. Object files contain lots of non-external symbols, and creating // StringRefs for them (which involves lots of strlen() on the string table) // is a waste of time. 
if (nameData == nullptr) { auto *d = cast<DefinedCOFF>(this); StringRef nameStr; cast<ObjFile>(d->file)->getCOFFObj()->getSymbolName(d->sym, nameStr); nameData = nameStr.data(); nameSize = nameStr.size(); assert(nameSize == nameStr.size() && "name length truncated"); } return StringRef(nameData, nameSize); } InputFile *Symbol::getFile() { if (auto *sym = dyn_cast<DefinedCOFF>(this)) return sym->file; if (auto *sym = dyn_cast<LazyArchive>(this)) return sym->file; if (auto *sym = dyn_cast<LazyObject>(this)) return sym->file; return nullptr; } bool Symbol::isLive() const { if (auto *r = dyn_cast<DefinedRegular>(this)) return r->getChunk()->live; if (auto *imp = dyn_cast<DefinedImportData>(this)) return imp->file->live; if (auto *imp = dyn_cast<DefinedImportThunk>(this)) return imp->wrappedSym->file->thunkLive; // Assume any other kind of symbol is live. return true; } // MinGW specific. void Symbol::replaceKeepingName(Symbol *other, size_t size) { StringRef origName = getName(); memcpy(this, other, size); nameData = origName.data(); nameSize = origName.size(); } COFFSymbolRef DefinedCOFF::getCOFFSymbol() { size_t symSize = cast<ObjFile>(file)->getCOFFObj()->getSymbolTableEntrySize(); if (symSize == sizeof(coff_symbol16)) return COFFSymbolRef(reinterpret_cast<const coff_symbol16 *>(sym)); assert(symSize == sizeof(coff_symbol32)); return COFFSymbolRef(reinterpret_cast<const coff_symbol32 *>(sym)); } uint16_t DefinedAbsolute::numOutputSections; static Chunk *makeImportThunk(DefinedImportData *s, uint16_t machine) { if (machine == AMD64) return make<ImportThunkChunkX64>(s); if (machine == I386) return make<ImportThunkChunkX86>(s); if (machine == ARM64) return make<ImportThunkChunkARM64>(s); assert(machine == ARMNT); return make<ImportThunkChunkARM>(s); } DefinedImportThunk::DefinedImportThunk(StringRef name, DefinedImportData *s, uint16_t machine) : Defined(DefinedImportThunkKind, name), wrappedSym(s), data(makeImportThunk(s, machine)) {} Defined 
*Undefined::getWeakAlias() { // A weak alias may be a weak alias to another symbol, so check recursively. for (Symbol *a = weakAlias; a; a = cast<Undefined>(a)->weakAlias) if (auto *d = dyn_cast<Defined>(a)) return d; return nullptr; } MemoryBufferRef LazyArchive::getMemberBuffer() { Archive::Child c = CHECK(sym.getMember(), "could not get the member for symbol " + toCOFFString(sym)); return CHECK(c.getMemoryBufferRef(), "could not get the buffer for the member defining symbol " + toCOFFString(sym)); } } // namespace coff } // namespace lld
{ "content_hash": "bc8ef85226c417617a4fc88570457cad", "timestamp": "", "source": "github", "line_count": 128, "max_line_length": 80, "avg_line_length": 33.171875, "alnum_prop": 0.6912388130004711, "repo_name": "llvm-mirror/lld", "id": "938c9c527ffa8c868dc0f7215a21085f09cec088", "size": "4900", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "COFF/Symbols.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "2204226" }, { "name": "C++", "bytes": "2893828" }, { "name": "CMake", "bytes": "20776" }, { "name": "LLVM", "bytes": "416895" }, { "name": "Python", "bytes": "9667" } ], "symlink_target": "" }
package types import ( "github.com/plandem/xlsx/internal/ml/primitives" ) //CellType is alias of original primitives.CellType type to: // 1) make it public // 2) forbid usage of integers directly type CellType = primitives.CellType //List of all possible values for CellType const ( CellTypeGeneral CellType = iota CellTypeBool CellTypeDate CellTypeNumber CellTypeError CellTypeSharedString CellTypeFormula CellTypeInlineString ) func init() { primitives.FromCellType = map[CellType]string{ CellTypeBool: "b", CellTypeDate: "d", CellTypeNumber: "n", CellTypeError: "e", CellTypeSharedString: "s", CellTypeFormula: "str", CellTypeInlineString: "inlineStr", } primitives.ToCellType = make(map[string]CellType, len(primitives.FromCellType)) for k, v := range primitives.FromCellType { primitives.ToCellType[v] = k } }
{ "content_hash": "d0937a554eceebc23dff40be5a82ee62", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 80, "avg_line_length": 22.666666666666668, "alnum_prop": 0.7262443438914027, "repo_name": "plandem/xlsx", "id": "1e34e2a1317ce067cca74dc708eee6e81570c4f1", "size": "1050", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "types/cell_type.go", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "502" }, { "name": "Go", "bytes": "466498" }, { "name": "Makefile", "bytes": "521" } ], "symlink_target": "" }
/**
 * cwt.Map — provider-agnostic map facade. Most methods are empty hooks to be
 * implemented by concrete subclasses; this base class supplies the event
 * listener registry and event-chain control.
 */
cwt.Map = cwt.Class({
    eventTypes: null,
    listeners: null,
    initialize: function () {
        // Fixed: this array was previously assigned to an implicit global
        // (`eventTypes = [...]`), which throws in strict mode and leaks state
        // across instances; store it on the instance as the declared
        // `eventTypes` property so destroy()/unregisterAllEvents() see it.
        this.eventTypes = [cwt.Map.MOUSE_OVER, cwt.Map.MOUSE_OUT, cwt.Map.MOUSE_MOVE,
            cwt.Map.MOUSE_DOWN, cwt.Map.MOUSE_UP, cwt.Map.CLICK, cwt.Map.DBLCLICK,
            cwt.Map.RESIZE, cwt.Map.MOVE_START, cwt.Map.MOVE, cwt.Map.MOVE_END,
            cwt.Map.ZOOM_END, cwt.Map.DRAG_START, cwt.Map.DRAG, cwt.Map.DRAG_END,
            cwt.Map.POINT, cwt.Map.RECT, cwt.Map.ROUND, cwt.Map.POLYLINE,
            cwt.Map.POLYGON];
        this.listeners = {};
        for (var i = 0; i < this.eventTypes.length; i++) {
            this.listeners[this.eventTypes[i]] = [];
        }
    },
    destroy: function () {
        this.unlistenAllEvents();
        this.unregisterAllEvents();
    },
    // --- Abstract hooks: concrete map providers override these. ---
    getMapType: function () { },
    getMapTypeDesc: function () { },
    setActionType: function (actionType) {
        this.actionType = actionType;
    },
    getActionType: function () {
        // Fixed: previously returned the undeclared global `actionType`,
        // not the value stored by setActionType().
        return this.actionType;
    },
    getDistance: function (pointA, pointB) { },
    addOverviewMap: function () { },
    removeOverviewMap: function () { },
    addScaleBar: function () { },
    removeScaleBar: function () { },
    addPanZoomBar: function (left, top) { },
    removePanZoomBar: function () { },
    getScale: function () { },
    setScale: function (scale) { },
    getViewSize: function () { },
    getMapBounds: function () { },
    setMapBounds: function (bounds) { },
    setCenter: function (lonlat) { },
    getCenter: function () { },
    saveCenter: function () { },
    restoreCenter: function () { },
    refresh: function () { },
    getLonLatFromPixel: function (pixel) { },
    getPixelFromLonLat: function (lonlat) { },
    addPOI: function (poi) { },
    addPOIWithCallback: function (poi, cback) { },
    removePOI: function (id) { },
    clearPOIs: function () { },
    addPolyline: function (polyline) { },
    removePolyline: function (id) { },
    clearPolylines: function () { },
    addRect: function (rectArea) { },
    removeRect: function (id) { },
    clearRects: function () { },
    addCircle: function (circleArea) { },
    removeCircle: function (id) { },
    clearCircles: function () { },
    addPolygon: function (polygonArea) { },
    removePolygon: function (id) { },
    clearPolygons: function () { },
    addPanoramaLayer: function () { },
    removePanoramaLayer: function () { },
    addPanoramaCtrl: function (offset, position) { },
    removePanoramaCtrl: function () { },
    showPanorama: function (panorama) { },
    hidePanorama: function () { },
    getPanoramaData: function (lonlat, callback) { },
    createMarkerClusterer: function (pois, options) { },
    addClusterMarker: function (poi) { },
    addClusterMarkers: function (pois) { },
    clearClusterMarkers: function () { },
    getClustersCount: function () { },
    removeClusterMarker: function (id) { },
    removeClusterMarkers: function (ids) { },
    enableWheelZoom: function () { },
    disableWheelZoom: function () { },
    getWheelZoomStatus: function () { },
    /**
     * Registers (obj, func) as a listener for `type`. Defaults obj to the map
     * itself; silently ignores duplicates and unknown event types.
     */
    registerEvent: function (type, obj, func) {
        if (func != null) {
            if (obj == null) {
                obj = this;
            }
            var listeners = this.listeners[type];
            if (listeners != null) {
                var exited = false;
                for (var i = 0; i < listeners.length; i++) {
                    if (listeners[i].obj == obj && listeners[i].func == func) {
                        exited = true;
                    }
                }
                if (!exited) {
                    listeners.push({ obj: obj, func: func });
                }
            }
        }
    },
    getRegisterEvents: function (type) {
        return this.listeners[type];
    },
    // Removes every registration matching (type, obj, func); obj defaults to
    // the map itself, mirroring registerEvent.
    unregisterEvent: function (type, obj, func) {
        if (obj == null) {
            obj = this;
        }
        var listeners = this.listeners[type];
        if (listeners != null) {
            for (var i = 0; i < listeners.length; i++) {
                if (listeners[i].obj == obj && listeners[i].func == func) {
                    listeners.splice(i, 1);
                }
            }
        }
    },
    // With a type: clears that type's listeners; without: clears all types.
    unregisterAllEvents: function (type) {
        if (type) {
            var listeners = this.listeners[type];
            if (listeners != null) {
                this.listeners[type] = [];
            }
        } else {
            // Fixed: iterate the instance's event list (was the implicit
            // global `eventTypes`).
            for (var i = 0; i < this.eventTypes.length; i++) {
                this.listeners[this.eventTypes[i]] = [];
            }
        }
    },
    listenAllEvent: function () { },
    unlistenAllEvents: function () { },
    // Cross-browser event suppression: cancels the default action unless
    // allowDefault is set, and always stops propagation/bubbling.
    stopEventChain: function (event, allowDefault) {
        if (!allowDefault) {
            if (event.preventDefault) {
                event.preventDefault();
            } else {
                event.returnValue = false;
            }
        }
        if (event.stopPropagation) {
            event.stopPropagation();
        } else {
            event.cancelBubble = true;
        }
    },
    VERSION: "1.1.5.0"
});

// No interaction tool selected (see setActionType).
cwt.Map.ACTION_TYPE_NONE = 0;
// Interaction tools accepted by cwt.Map#setActionType.
cwt.Map.ACTION_TYPE_ZOOMIN = 1;
cwt.Map.ACTION_TYPE_ZOOMOUT = 2;
cwt.Map.ACTION_TYPE_PAN = 3;
cwt.Map.ACTION_TYPE_POINT = 4;
cwt.Map.ACTION_TYPE_RECT = 5;
cwt.Map.ACTION_TYPE_ROUND = 6;
cwt.Map.ACTION_TYPE_POLYLINE = 7;
cwt.Map.ACTION_TYPE_POLYGON = 8;
cwt.Map.ACTION_TYPE_MEASUREDIS = 9;
cwt.Map.ACTION_TYPE_MEASURE_AREA = 10;
cwt.Map.ACTION_TYPE_STOP_MEASURE = 11;
// Event type names used with registerEvent()/unregisterEvent(); these also
// key the per-type listener lists built in initialize().
cwt.Map.MOUSE_OVER = "mouseover";
cwt.Map.MOUSE_OUT = "mouseout";
cwt.Map.MOUSE_MOVE = "mousemove";
cwt.Map.MOUSE_DOWN = "mousedown";
cwt.Map.MOUSE_UP = "mouseup";
cwt.Map.CLICK = "click";
cwt.Map.DBLCLICK = "dblclick";
cwt.Map.RESIZE = "resize";
cwt.Map.MOVE_START = "movestart";
cwt.Map.MOVE = "move";
cwt.Map.MOVE_END = "moveend";
cwt.Map.ZOOM_END = "zoomend";
cwt.Map.DRAG_START = "dragstart";
cwt.Map.DRAG = "drag";
cwt.Map.DRAG_END = "dragend";
cwt.Map.POINT = "point";
cwt.Map.RECT = "rect";
cwt.Map.ROUND = "round";
cwt.Map.POLYLINE = "polyline";
cwt.Map.POLYGON = "polygon";
{ "content_hash": "ea6c98a100ccac80f32a66e5be4fe445", "timestamp": "", "source": "github", "line_count": 174, "max_line_length": 365, "avg_line_length": 32.54597701149425, "alnum_prop": 0.595797280593325, "repo_name": "baby-plan/common-website", "id": "a835f3f8d5466e0713a7f4920d6f09367fd47804", "size": "5663", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "public/system/cwt/assets/scripts/map/cwt.map.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2404677" }, { "name": "HTML", "bytes": "741468" }, { "name": "Java", "bytes": "4091" }, { "name": "JavaScript", "bytes": "4804335" }, { "name": "Roff", "bytes": "519722" } ], "symlink_target": "" }
package org.puma.analyzer

import scala.io.Source
import scala.xml.pull._
import scala.xml.pull.EvElemStart
import scala.xml.pull.EvText
import scala.collection.mutable
import org.puma.analyzer.filter.ExtractorFilter
import com.typesafe.scalalogging.slf4j.LazyLogging
import org.puma.configuration.ConfigurationUtil

/**
 * Project: puma
 * Package: org.puma.analyzer
 *
 * Author: Sergio Álvarez
 * Date: 09/2013
 */
// Streams an XML file with a pull parser, applies an ExtractorFilter to the
// text of every element named by the filter, and accumulates term frequencies.
// When the in-memory map grows past MaximumExtractedTerms, the least frequent
// entries are evicted (see reduceMapLoad).
class Extractor extends LazyLogging{
  private[this] var _path: String = null
  private[this] var _filter: ExtractorFilter = null
  // term -> frequency; bounded by the memory check in checkMemoryStatus()
  private[this] var results = mutable.Map.empty[List[String], Int]
  // Frequency assigned to newly seen terms; raised after each eviction pass so
  // new terms are not immediately evicted again.
  private[this] var minimumFreq = 1
  private[this] val MaximumExtractedTerms = ConfigurationUtil.getMaximumExtractedTerms
  private[this] val FactorToRemove = ConfigurationUtil.getFactorToRemove

  // Builder-style setter for the input file path.
  def path(value: String): Extractor = {
    _path = value
    this
  }

  // Builder-style setter for the term-extraction filter.
  def filter(value: ExtractorFilter): Extractor = {
    _filter = value
    this
  }

  /**
   * Runs the extraction over the configured file and returns an immutable
   * snapshot of term frequencies.
   *
   * @throws IllegalArgumentException if path or filter was not configured
   */
  def extract: Map[List[String], Int] = {
    if(_filter == null || _path == null){
      throw new IllegalArgumentException("You must provide a filter and valid path for making the extraction")
    }
    logger.debug("Extracting: " + _path + " with filter: " + _filter.getClass.getSimpleName)
    val reader = new XMLEventReader(Source.fromFile(_path))
    // `in` tracks whether the cursor is inside an element the filter cares
    // about; only text seen while in == true is filtered.
    var in = false
    reader.foreach({
      case e: EvElemStart if e.label == _filter.field => in = true
      case EvText(text) if in => applyFilter(text)
      case e: EvElemEnd if e.label == _filter.field => in = false
      case _ => ;
    })
    results.toMap
  }

  // Counts each term produced by the filter; new terms start at minimumFreq.
  private[this] def applyFilter(tweet: String) = {
    checkMemoryStatus()
    _filter.extract(tweet).foreach(term => {
      if(results.contains(term)){
        results(term) += 1
      }else{
        results(term) = minimumFreq
      }
    })
  }

  // Triggers an eviction pass once the map reaches the configured cap.
  private[this] def checkMemoryStatus() = {
    if(results.keys.size >= MaximumExtractedTerms) {
      logger.debug("Memory overload. Maximum limit for extracted terms have been reached. Reducing map...")
      reduceMapLoad()
    }
  }

  // Drops the FactorToRemove fraction of lowest-frequency terms and raises
  // minimumFreq to the frequency at the cut point.
  private[this] def reduceMapLoad() = {
    val itemsToRemove = (results.keys.size * FactorToRemove).toInt
    logger.debug("They are going to be removed " + itemsToRemove + " items")
    // Ascending by frequency, so the head of the list is cheapest to evict.
    val orderedList = results.toList.sortBy({_._2})
    minimumFreq = orderedList(itemsToRemove - 1)._2
    logger.debug("New minimum frequency is " + minimumFreq)
    val reduced = orderedList.slice(itemsToRemove - 1, orderedList.size)
    results = collection.mutable.Map(reduced.toMap[List[String], Int].toSeq: _*) // converting to mutable map
    logger.debug("Reduced map contains " + results.keys.size + " terms")
  }
}
{ "content_hash": "2729c3b11a78fcfe14e36885a3fd8325", "timestamp": "", "source": "github", "line_count": 86, "max_line_length": 110, "avg_line_length": 31.848837209302324, "alnum_prop": 0.6783497626871121, "repo_name": "sergioalvz/puma", "id": "d876629a8ffc21fda537e634a8378e19c3a18719", "size": "2740", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/main/scala/org/puma/analyzer/Extractor.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Scala", "bytes": "28508" } ], "symlink_target": "" }
// Lazily-loaded Angular feature module: bundles Lazyload2Component with its
// own routing module so the router can load this chunk on demand.
import { NgModule } from '@angular/core'
import { SharedModule } from '../shared/shared.module'
import { Lazyload2Component } from './lazyload2.component'
import { LazyLoad2RoutingModule }from './lazyload2-routing.module'

@NgModule({
    declarations:[
        Lazyload2Component
    ],
    imports:[
        LazyLoad2RoutingModule,
        SharedModule
    ]
})
export class LazyLoad2Module{
    constructor(){}
}
{ "content_hash": "20445cf3232341947beb63f36ab59e38", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 66, "avg_line_length": 23.11111111111111, "alnum_prop": 0.6850961538461539, "repo_name": "wuzhouyang/angular2-webpack-aot-lazyload-starter", "id": "101d0d6e862070fd89acbeb94f324e17b0f4d65b", "size": "416", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/app/lazyload2/lazyload2.module.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "328" }, { "name": "HTML", "bytes": "475" }, { "name": "JavaScript", "bytes": "6530" }, { "name": "TypeScript", "bytes": "2239" } ], "symlink_target": "" }
package org.openxava.actions; import java.io.*; import java.sql.*; import java.text.*; import java.util.*; import java.util.Date; import javax.servlet.*; import net.sf.jasperreports.engine.*; import org.openxava.jpa.*; import org.openxava.util.*; /** * To generate your custom Jasper Report. <p> * * You only need to overwrite the abstract methods.<br> * * @author Javier Paniza * @author Daniel García Salas */ abstract public class JasperReportBaseAction extends ViewBaseAction implements IForwardAction, IModelAction { public static String PDF = "pdf"; public static String EXCEL = "excel"; public static String RTF = "rtf"; public static String ODT = "odt"; private String modelName; private String fileName; private String format = PDF; /** * Data to print. <p> * * If return null then a JDBC connection is sent to JasperReport, * this is for the case of a SQL inside JasperReport design. */ abstract protected JRDataSource getDataSource() throws Exception; /** * The name of the XML with the JasperReports design. <p> * * If it is a relative path (as <code>reports/myreport.jrxml</code> has * to be in classpath. If it is a absolute path (as * <code>/home/java/reports/myreport.xml</code> or * <code>C:\\JAVA\\REPORTS\MYREPORT.JRXML</code> then it look at the * file system. */ abstract protected String getJRXML() throws Exception; /** * Parameters to send to report. */ abstract protected Map getParameters() throws Exception; /** * Output report format, it can be 'pdf' or 'excel'. <p> */ public String getFormat() throws Exception { return format; } /** * Output report format, it can be 'pdf', 'excel' or 'rtf'. 
<p> */ public void setFormat(String format) throws Exception { if (!EXCEL.equalsIgnoreCase(format) && !PDF.equalsIgnoreCase(format) && !RTF.equalsIgnoreCase(format) && !ODT.equalsIgnoreCase(format)) { throw new XavaException("invalid_report_format", "'excel', 'pdf', 'rtf','odt'"); } this.format = format; } public void execute() throws Exception { ServletContext application = getRequest().getSession().getServletContext(); System.setProperty("jasper.reports.compile.class.path", application.getRealPath("/WEB-INF/lib/jasperreports.jar") + System.getProperty("path.separator") + application.getRealPath("/WEB-INF/classes/") ); InputStream xmlDesign = null; String jrxml = getJRXML(); if (isAbsolutePath(jrxml)) { xmlDesign = new FileInputStream(jrxml); } else { xmlDesign = JasperReportBaseAction.class.getResourceAsStream("/" + jrxml); } if (xmlDesign == null) throw new XavaException("design_not_found"); JasperReport report = JasperCompileManager.compileReport(xmlDesign); Map parameters = getParameters(); // getParameters() before getDatasource() JRDataSource ds = getDataSource(); JasperPrint jprint = null; if (ds == null) { Connection con = null; try { con = DataSourceConnectionProvider.getByComponent(modelName).getConnection(); // If the schema is changed through URL or XPersistence.setDefaultSchema, the connection // contains the original catalog (schema) instead of the new one, thus rendering the // wrong data on the report. This is a fix for such behavior. 
if (!Is.emptyString(XPersistence.getDefaultSchema())) { con.setCatalog(XPersistence.getDefaultSchema()); } jprint = JasperFillManager.fillReport(report, parameters, con); } finally { con.close(); } } else { jprint = JasperFillManager.fillReport(report, parameters, ds); } getRequest().getSession().setAttribute("xava.report.jprint", jprint); getRequest().getSession().setAttribute("xava.report.format", getFormat()); getRequest().getSession().setAttribute("xava.report.filename", getFileName()); } private boolean isAbsolutePath(String design) { return design.startsWith("/") || ( design.length() > 2 && design.charAt(1) == ':' && Character.isLetter(design.charAt(0)) ); } public String getForwardURI() { return "/xava/report.pdf?time="+System.currentTimeMillis(); } public boolean inNewWindow() { return true; } public void setModel(String modelName) { // to obtain a JDCB connection, if required this.modelName = modelName; } public String getFileName() { if (fileName==null) { String now = new SimpleDateFormat("yyyyMMdd_HHmm").format(new Date()); return getModelName() + "-report_" + now; } else return fileName; } public void setFileName(String fileName) { this.fileName = fileName; } }
{ "content_hash": "8f65880d029b92d4267d166956c44896", "timestamp": "", "source": "github", "line_count": 158, "max_line_length": 110, "avg_line_length": 29.227848101265824, "alnum_prop": 0.6931572109138155, "repo_name": "jecuendet/maven4openxava", "id": "c3f905e436ddf4e58512538a34844520b52cfbec", "size": "4619", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dist/openxava/workspace/OpenXava/src/org/openxava/actions/JasperReportBaseAction.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "1695359" }, { "name": "Groovy", "bytes": "105862" }, { "name": "Java", "bytes": "4269286" }, { "name": "JavaScript", "bytes": "4897092" }, { "name": "Shell", "bytes": "65963" }, { "name": "XSLT", "bytes": "3987" } ], "symlink_target": "" }
@charset "utf-8"; /* CSS Document */ .font{font-family:'Microsoft Yahei',"\5FAE\8F6F\96C5\9ED1";} .scrollbar{height:35px;line-height:35px;background:#f6f6f6;z-index:1000; border-bottom:1px solid #dcdadc;} .List_box_Z{ width:1000px; margin:0 auto; zoom:1;} .List_box_L{ float:left;} .List_box_L1{ float:left; margin-right:-5px;} .List_box_L1 li img{ margin:-4px -4px 0 0;} .List_box_L2{ float:left;} .List_box_L1 li{float:left;padding-right:6px;} .List_box_L1 li a{color:#585858; font-size:14px;padding:0 12px 0 3px;} .List_box_R{float:right; position: relative;} .List_box_L2 li{padding: 0 4px 0 7px;} .List_box_L2 a{font-size:14px;} .List_box_L3{ float:left; display:inline; margin-left:10px;} .List_box_L3 a{color:#585858; font-size:14px;} .List_box_li01{margin-right:28px;} .wd_ks{width:1000px;overflow:hidden;zoom:1;margin:39px auto 0} .container {width:1000px;zoom:1;margin:0px auto;} .List_box_tul li{float:left; height:15px;line-height:15px;border-right:1px solid #ccc; } .List_box_tul li a{color:#7a7a7a; font-size:14px; padding:0 7.3px;} .List_box_t{width:1002px;margin:0 auto;} .List_box_bann{display:block;} .List_box_tul ul{ margin:15px 0 8px 0;overflow:hidden;} .wd_ly_tw{overflow:hidden; margin-top:20px;} .sKey{width:64px;border: 1px solid #41AB25;border-top:none;padding:5px 0;display:none;background:#fff;font-size: 14px;left:-1px;line-height:20px;position: absolute;text-align:center;top: 31px;position: absolute;left: -1px;top:30px;z-index: 9999;} .sKey li{color:#7C7C7C;line-height: 26px; cursor: pointer;} .zTw span{color:#fff;font-size:16px; margin-left:44px; } .zTw a:hover{text-decoration:none;} .wd_ly_img1{float:left;} .wd_ly_img1 img{ float:left;display:block; } .wd_mian{overflow:hidden;} .wd_mian a{color:#333;margin:0 4px;} .sckol{height:60px; line-height:60px; } .sckol b{margin: 13px 11px 0 4px;background:url(../images/hotbg.png) no-repeat 0 2px;width:40px;font-family:"microsoft YaHei"; height:32px; text-align:center;line-height:32px; color:#fff; 
font-size:22px;display:block; float:left;} .List_box_L2 li.mainleve{float:left;/*IE6 only*/padding-right:18px;position:relative;} .List_box_L2 li.mainleve:hover{background:#dcdad9;} .List_box_L2 li.mainleve .n_whi.current{background:url(../images/list-jt.png) no-repeat 0 -10px;width:9px; height:6px;display:block;right:0;top:0;} .List_box_L2 li.mainleve .n_whi{padding-right:10px;position:absolute;width:9px;height:6px;background:url(../images/list-jt.png) no-repeat 0 0;right:0;top:0; margin-top:15px;} .mainleve a {color:#333; text-decoration:none; line-height:32px; display:block;} .mainleve a:hover{color:#464646; text-decoration:none;} .mainleve ul {display:none; position:absolute;left:0px;} .mainleve li {border-top:1px solid #fff; background:#fff;/*IE6 only*/} .mainleve span{ color:#fff; font-size:14px; padding:0 5px; display:inline-block;} .mainleve span a{color:#585858;line-height:35px;display:block;} .sub_02{border:1px solid #dcdadc; border-top:none; background:#fff;width:152px; height:76px;padding:10px; margin-top:-1px;} .sub_02 li{ float:left;} .sub_02 li b{ display:block; font-size:14px;} .sub_03{width:430px;overflow:hidden;border:1px solid #dcdadc;background:#fff;padding:10px; border-top:none;margin-top:-1px;} .wd_more_li{overflow:hidden;} .wd_more em{float:left;color:#000; font-weight:bold; margin-right:20px; float:left;} .wd_more{font-size:14px;display:block;} .wd_more a{ margin-right:20px; float:left;} .wd_more_a{float:left; width:360px; border-bottom:1px solid #dbe6e2;} .wd_more_a1{float:left; width:250px;} .sub_03 ul li{padding:0 4px 0 5px;} .wd_ly_img1 span{height:24px;width:54px;letter-spacing:5px;display:inline-block;line-height:22px;font-size:22px;color: #fff; margin-left:4px;} .d-list_login{width:280px;float:left;margin:22px 10px 0 0;height:24px;border:1px solid #c1c4cb; position:relative;zoom:1;z-index:11; border-radius:2px;} .d-list_login 
.sptxt{width:49px;height:30px;_height:32px;text-align:center;cursor:pointer;line-height:32px;_line-height:31px;color:#989898;float:left;background:url(../../../images_list/srIcon3.png) no-repeat 50px center #f2f2f2;display:block;border:none;border-right:1px solid #e7e7e7;padding-right:15px;text-align:center;font-size:14px;} .d-ztRbtn{width:76px;height:28px; line-height:28px;display:block;border:0;cursor:pointer;background:#1ebfc7;position:absolute;top:-1px;background:url(../images/d-sec.gif) no-repeat; right: -85px; color:#fff;font-size:16px; font-family:"microsoft YaHei";} .d-ztLt{width:282px;height:26px;line-height:26px;background:url(../images/wd_search.png) no-repeat 8px center; padding-left: 4px;border:0;font-size:14px;color:#333;} .my_logo{float:left;} .zTw { width:130px; height:30px; background:#ff7a67 url(../images/wd_qu.png) no-repeat 20px center;line-height: 30px;_height: 30px; _line-height: 30px;cursor: pointer; border: none; float:left; margin-top:3px;} .zTw span{color:#fff;font-size:16px;} .my_sec{ width:516px; overflow:hidden; float:right} .wd_more_a a:link,.wd_more_a a:visited{color:#333; text-decoration:none;} .wd_more_a a:hover{color:#333; text-decoration:underline;} .henav{ position:relative; background:#eee; } .art_lo{ width:160px; margin-right:10px;} .art_s{ float:left; color:#999; margin-top:18px;font-family:"宋体"; padding-left:15px;} .art_s a{ color:#333; margin:0 4px;} .art_fo{ width:312px; margin-top:5px;} .art_ser{ width:240px; float:left;} .a_inq{ width:60px; background:#ff7a67; text-align:center;} .a_inq span{ margin-left:0;} .a_inq a:hover{ text-decoration:none;} /*首页底部*/ .footer{ text-align:center;margin:22px 0 15px 0;} .footer p{ margin-top:12px;color:#666;} .footer p a{ margin:0 6px;color:#666;}/*footer*/ /*注册*/ .rLg{float:right;} .dlQ{overflow: hidden;} .dlQ span{margin-right:13px;float: left;} .dlQ a{color:#585858; font-size:14px; cursor:pointer;} .dlQ a:hover,.tDtxt a:hover{color:#ff6600;} .dlQ span 
i{width:17px;height:20px;display:block;float:left;margin:9px 3px 0 0;} .rIcon1 i{background: url(../images/wd_user.png) no-repeat right 0;} .dlQ .rIcon2 i{background: url(../images/wd_rester.png) no-repeat;margin-top:10px;} .rIcon2 a{padding-top:2px;} .dlH{overflow:hidden;color:#fff;font-size:14px;} .dlH span{margin-right:10px;} .dlH a{color:#fff;} .dlH a:hover{color:#ff6600;} .dlH .gren a{color:#fff;} .dlH .gren a:hover{color:#ff6600;} .psIo{position:relative;z-index:100} .login{width:242px;height:251px;z-index:100;font-size:12px;color:#666;padding:14px 0 0 24px;background:#fff;zoom:1;position:absolute;right:78px;top:43px;} .sJo{width:20px;height:11px;background:url(../images/sjIcon.png) no-repeat;display:block;position:absolute;left:45px;top:-11px;} .sClose{width:18px;height:18px; cursor:pointer;display:block;position:absolute;top:10px;right:24px;background: url(../images/close.png);} .ubD{border: 1px solid #61affd;} .uInput{width:217px;height:35px;border: 1px solid #61affd;margin-top:12px;} .uInput span{width:40px;height:35px;float:left;} .uInput input{width:177px;height:35px;float:left;color:#666;line-height:35px;padding:0;margin:0;border:0;} .yh{background: url(../images/yhIcon.png) no-repeat center center;} .mm{background: url(../images/mIcon.png) no-repeat center center;} .reBd{border:1px solid #c7c6c6;} .foget{line-height:26px;margin-top: 6px;} .foget a{color: #0066cc;overflow: hidden;} .foRa{float: right;margin-right: 24px;} .qqP{line-height: 26px;margin-top:10px;} .qqP a{float:right;margin-right: 24px;} .qqP img{margin-left: 10px;} .dvBtn{width: 215px;height: 37px;line-height: 37px;text-align: center;margin-top:5px } .dvBtn input{width: 215px;height: 37px;color:#fff;font-weight: bold;background: url(../images/lDv.png) no-repeat;cursor:pointer;border: none;} .zLdv{position:relative;zoom:1;} /*公共*/ .md0{margin:0px auto 0px} /*分享*/ .secod{border:1px solid #ccc; display:none; padding:3px 7px; width:190px; position:absolute; right:140px; top:50px; z-index:500; 
background:#fff;} .secod img{margin:7px 0;} .secod h4 a{ float:right;} .sympt_02{ padding-left:110px; width:890px;} .sympt_03{ padding-left:210px; width:790px;} .sympt_05{ padding-left:438px; width: 562px;} .sympt_07{ padding-left:418px;width:582px;} /*新添样式*/ .novel a:hover{color:#ff6600; text-decoration:underline;} .novel .kmo{color:#ff6600;} /*内容页面翻页*/ .pagination .p_curr{ text-decoration: underline;} .mTop20{margin-top:20px;} .wd_pd{ border:1px solid #e4e4e4; margin-top:10px; } .wd_pd a{font-size:14px; color:#585858;} .wd_pd_z {overflow:hidden;} .wd_pd_z a{color:#666; font-size:12px;} .wd_pd_z1 a{ margin-right:22px;} .wd_pd h2{ color:#333; font-size:16px;height:30px;} .sh{height:32px;line-height:32px;} .bdisea{height:39px;} .bdisea a {font-size:16px;width:105px; height:39px;line-height:43px; border-width:0 1px;float:left; text-align:center; position:relative;font-family:"microsoft YaHei";} .bdisea a.indexahover{ cursor:pointer; color:#0eb9c1; text-decoration:none; font-weight:bold;} .bdisea a.indexahover:after{content:"";position:absolute;width:65px; border-bottom:3px solid #12a6af;top:38px; left:20px; font-weight:bold;} .wd_qh{border-top:1px solid #e4e4e4;padding:14px 9px 7px 23px;} .sh a:hover{color:#00c7d3 !important;text-decoration:underline;} .wd_qh a{margin-right:14px;} .wd_qh b a{ background:url(../images/hsbg.jpg) no-repeat; width:75px; height:22px;line-height:22px; display:inline-block; font-size:12px;text-align: center; font-weight:normal; color:#1aafb6;} .curro{ display:none;}
{ "content_hash": "c5636785b8336e2d38c43560d590ad34", "timestamp": "", "source": "github", "line_count": 152, "max_line_length": 339, "avg_line_length": 61.625, "alnum_prop": 0.7401515960286111, "repo_name": "VampireMe/admin-9939-com", "id": "57eb2fa7f2acc449e8780a4851f6954a929905ad", "size": "9411", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "api/web/css/main.css", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ApacheConf", "bytes": "692" }, { "name": "Batchfile", "bytes": "1541" }, { "name": "CSS", "bytes": "551432" }, { "name": "HTML", "bytes": "87733" }, { "name": "JavaScript", "bytes": "3037467" }, { "name": "PHP", "bytes": "3239788" } ], "symlink_target": "" }
package com.scalacraft.domain.v2.binary

import com.scalacraft.domain.v2.internal.Information
import com.scalacraft.domain.v2.binary.unconstrained.{OctetPair => Unconstrained}

/**
 * An `OctetPair` represents two [[Octet]]s.
 *
 * The following constraints hold for instances of this class,
 *
 *  - `hi` is not null
 *  - `lo` is not null
 *
 * An instance can be created using a suitable overload of the `opt` method.
 *
 * {{{
 *   val op1: Option[OctetPair] = OctetPair.opt(34540) // OctetPair(134, 236) = 34540
 *   val op2: Option[OctetPair] = OctetPair.opt(0x86ec) // OctetPair(134, 236) = 34540
 *   val op3: Option[OctetPair] = OctetPair.opt("d") // OctetPair(0, 13)
 *   val op4: Option[OctetPair] = OctetPair.opt("fe") // OctetPair(0, 254)
 *   val op5: Option[OctetPair] = OctetPair.opt("012") // Decimal 18
 *   val op6: Option[OctetPair] = OctetPair.opt("f11d") // Decimal 61725
 * }}}
 *
 * Note that the string variant takes between one and four hex characters.
 *
 * When any class constraint is violated the result is `None`.
 * {{{
 *   val op1: Option[OctetPair] = OctetPair.opt("NaN") // None
 *   val op2: Option[OctetPair] = OctetPair.opt(0xf1234) // None
 * }}}
 *
 * === Pattern Matching ===
 *
 * Pattern matching is supported as the following examples demonstrate,
 * {{{
 *   0x3490 match {
 *     case OctetPair(hi, lo) => (hi, lo) // (Octet(0x34), Octet(0x90))
 *     case _ => None
 *   }
 * }}}
 *
 * The match target can be a string,
 * {{{
 *   val s: String = "4020"
 *
 *   s match {
 *     case OctetPair(hi, lo) => (hi, lo) // (Octet(0x40), Octet(0x20))
 *     case _ => None
 *   }
 * }}}
 *
 * Invalid octet pairs are not matched,
 * {{{
 *   -129 match {
 *     case OctetPair(hi, lo) => (hi, lo)
 *     case _ => None // None
 *   }
 * }}}
 *
 * === Implicit Conversions ===
 *
 * Implicit conversions are supplied which allow an instance of `OctetPair` to be used when an `Int` or `String` is
 * required.
 *
 * {{{
 *   val Some(pair) = OctetPair.opt(0x40cc)
 *   val w: Int = 1 + pair // 0x40cd
 * }}}
 *
 * {{{
 *   val Some(pair) = OctetPair.opt("f")
 *   val s: String = pair: String // 000f
 * }}}
 *
 * A conversion to the unconstrained version of this class is also available.
 *
 * @param hi A valid octet representing the high byte
 * @param lo A valid octet representing the low byte
 */
case class OctetPair private(hi: Octet, lo: Octet) {

  /**
   * Convert to the unconstrained version of octet pair.
   * @return An unconstrained instance of octet pair
   */
  def unconstrained: Unconstrained = Unconstrained(hi.unconstrained, lo.unconstrained)
}

object OctetPair {

  // Relies on the implicit Octet-to-Int conversion for both operands.
  @deprecated(since = "2.1.0")
  implicit def `to-Int`(octetPair: OctetPair): Int = octetPair.hi * 256 + octetPair.lo

  /**
   * @example Given OctetPair(Octet(47), Octet(128)) this will return `2f80`
   * @param octetPair The instance to extract a value from
   * @return A string representation of the octet pair as four hex character without any prefix
   */
  @deprecated(since = "2.1.0")
  implicit def `to-String`(octetPair: OctetPair): String =
    Octet.`to-String`(octetPair.hi) + Octet.`to-String`(octetPair.lo)

  /**
   * Implicit conversion to the unconstrained version of octet pair.
   * @param octetPair The instance to convert
   * @return An unconstrained instance of octet pair
   */
  @deprecated(since = "2.1.0")
  implicit def `to-OctetPair`(octetPair: OctetPair): Unconstrained = octetPair.unconstrained

  /**
   * @param x An integer in the range 0 to 0xffff
   * @return An octet pair covering the low 16 bits of `x`, or `None` when out of range
   */
  def opt(x: Int): Option[OctetPair] = unapply(x) map {
    case (hi, lo) => OctetPair(hi, lo)
  }

  /**
   * @param x One to four hex characters in a format acceptable to [[Octet]]
   * @return An octet pair, or `None` when `x` cannot be parsed
   */
  def opt(x: String): Option[OctetPair] = unapply(x) map {
    case (hi, lo) => OctetPair(hi, lo)
  }

  /**
   * @param a The high octet
   * @param b The low octet
   * @return An octet pair, or `None` when either argument is null
   */
  def opt(a: Octet, b: Octet): Option[OctetPair] = Information.whenNotNull(a, b)(OctetPair.apply)

  /**
   * Extractor for strings: one parsed octet is treated as the low byte
   * with a zero high byte; two parsed octets are (hi, lo).
   */
  def unapply(x: String): Option[(Octet, Octet)] = {
    val octets = extractOctets(x, Nil)
    octets match {
      case Some(lo :: Nil) => Some(Octet.Zero, lo)
      case Some(hi :: lo :: Nil) => Some(hi, lo)
      case _ => None
    }
  }

  // TODO: How to extract without knowing the range of the octet?
  /** Extractor for integers: splits `x` into high and low bytes via division by 256. */
  def unapply(x: Int): Option[(Octet, Octet)] =
    for {
      hi <- Octet.opt(x / 256)
      lo <- Octet.opt(x % 256)
    } yield (hi, lo)

  /**
   * No assumptions are made about the format acceptable to [[Octet]]
   */
  // Repeatedly peels the longest parseable octet off the right-hand end of `x`,
  // prepending each to `accum` until the input is exhausted or unparseable.
  private def extractOctets(x: String, accum: List[Octet]): Option[List[Octet]] =
    Information.whenSome(x, accum) {
      longestRightMatch(_) match {
        case Some((unused, octet)) => extractOctets(unused, octet :: accum)
        case None => None
      }
    }

  // Tries every split point from the left, so the first successful parse is the
  // one with the longest right-hand (octet) part.
  private def longestRightMatch(x: String): Option[(String, Octet)] = {
    /* For example: (,250d), (2,50d), (25,0d), (250,d) */
    val spans: Seq[(String, String)] = (0 until x.length).toList map x.splitAt
    val candidates: Seq[(String, Option[Octet])] = spans.map {
      case (unused, target) => (unused, Octet.opt(target))
    }
    candidates.collectFirst {
      case (unused, Some(octet)) => (unused, octet)
    }
  }
}
{ "content_hash": "05d2a225fae51d3d43cc68f0852a29f1", "timestamp": "", "source": "github", "line_count": 156, "max_line_length": 115, "avg_line_length": 31.826923076923077, "alnum_prop": 0.6352467270896274, "repo_name": "janekdb/scalacraft-domain", "id": "3bc74fe6a765f5bf34be25899d6b47bb397e4315", "size": "5561", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/scala/com/scalacraft/domain/v2/binary/OctetPair.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Scala", "bytes": "206795" } ], "symlink_target": "" }
/*************************************************************************/ /* translation.cpp */ /*************************************************************************/ /* This file is part of: */ /* GODOT ENGINE */ /* https://godotengine.org */ /*************************************************************************/ /* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */ /* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */ /* */ /* Permission is hereby granted, free of charge, to any person obtaining */ /* a copy of this software and associated documentation files (the */ /* "Software"), to deal in the Software without restriction, including */ /* without limitation the rights to use, copy, modify, merge, publish, */ /* distribute, sublicense, and/or sell copies of the Software, and to */ /* permit persons to whom the Software is furnished to do so, subject to */ /* the following conditions: */ /* */ /* The above copyright notice and this permission notice shall be */ /* included in all copies or substantial portions of the Software. */ /* */ /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */ /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */ /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/ /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */ /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */ /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */ /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ /*************************************************************************/ #include "translation.h" #include "io/resource_loader.h" #include "os/os.h" #include "project_settings.h" static const char *locale_list[] = { "aa", // Afar "aa_DJ", // Afar (Djibouti) "aa_ER", // Afar (Eritrea) "aa_ET", // Afar (Ethiopia) "af", // Afrikaans "af_ZA", // Afrikaans (South Africa) "agr_PE", // Aguaruna (Peru) "ak_GH", // Akan (Ghana) "am_ET", // Amharic (Ethiopia) "an_ES", // Aragonese (Spain) "anp_IN", // Angika (India) "ar", // Arabic "ar_AE", // Arabic (United Arab Emirates) "ar_BH", // Arabic (Bahrain) "ar_DZ", // Arabic (Algeria) "ar_EG", // Arabic (Egypt) "ar_IN", // Arabic (India) "ar_IQ", // Arabic (Iraq) "ar_JO", // Arabic (Jordan) "ar_KW", // Arabic (Kuwait) "ar_LB", // Arabic (Lebanon) "ar_LY", // Arabic (Libya) "ar_MA", // Arabic (Morocco) "ar_OM", // Arabic (Oman) "ar_QA", // Arabic (Qatar) "ar_SA", // Arabic (Saudi Arabia) "ar_SD", // Arabic (Sudan) "ar_SS", // Arabic (South Soudan) "ar_SY", // Arabic (Syria) "ar_TN", // Arabic (Tunisia) "ar_YE", // Arabic (Yemen) "as_IN", // Assamese (India) "ast_ES", // Asturian (Spain) "ayc_PE", // Southern Aymara (Peru) "ay_PE", // Aymara (Peru) "az_AZ", // Azerbaijani (Azerbaijan) "be", // Belarusian "be_BY", // Belarusian (Belarus) "bem_ZM", // Bemba (Zambia) "ber_DZ", // Berber languages (Algeria) "ber_MA", // Berber languages (Morocco) "bg", // Bulgarian "bg_BG", // Bulgarian (Bulgaria) "bhb_IN", // Bhili (India) "bho_IN", // Bhojpuri (India) "bi_TV", // Bislama (Tuvalu) "bn", // Bengali "bn_BD", // Bengali (Bangladesh) "bn_IN", // Bengali (India) "bo", // Tibetan "bo_CN", // Tibetan (China) "bo_IN", // Tibetan (India) "br_FR", // Breton (France) "brx_IN", // Bodo (India) "bs_BA", // Bosnian (Bosnia and Herzegovina) "byn_ER", // Bilin (Eritrea) "ca", // Catalan "ca_AD", // Catalan (Andorra) "ca_ES", // Catalan (Spain) "ca_FR", // Catalan (France) "ca_IT", // Catalan (Italy) "ce_RU", // Chechen (Russia) "chr_US", // 
Cherokee (United States) "cmn_TW", // Mandarin Chinese (Taiwan) "crh_UA", // Crimean Tatar (Ukraine) "csb_PL", // Kashubian (Poland) "cs", // Czech "cs_CZ", // Czech (Czech Republic) "cv_RU", // Chuvash (Russia) "cy_GB", // Welsh (United Kingdom) "da", // Danish "da_DK", // Danish (Denmark) "de", // German "de_AT", // German (Austria) "de_BE", // German (Belgium) "de_CH", // German (Switzerland) "de_DE", // German (Germany) "de_IT", // German (Italy) "de_LU", // German (Luxembourg) "doi_IN", // Dogri (India) "dv_MV", // Dhivehi (Maldives) "dz_BT", // Dzongkha (Bhutan) "el", // Greek "el_CY", // Greek (Cyprus) "el_GR", // Greek (Greece) "en", // English "en_AG", // English (Antigua and Barbuda) "en_AU", // English (Australia) "en_BW", // English (Botswana) "en_CA", // English (Canada) "en_DK", // English (Denmark) "en_GB", // English (United Kingdom) "en_HK", // English (Hong Kong) "en_IE", // English (Ireland) "en_IL", // English (Israel) "en_IN", // English (India) "en_NG", // English (Nigeria) "en_NZ", // English (New Zealand) "en_PH", // English (Philippines) "en_SG", // English (Singapore) "en_US", // English (United States) "en_ZA", // English (South Africa) "en_ZM", // English (Zambia) "en_ZW", // English (Zimbabwe) "eo", // Esperanto "es", // Spanish "es_AR", // Spanish (Argentina) "es_BO", // Spanish (Bolivia) "es_CL", // Spanish (Chile) "es_CO", // Spanish (Colombia) "es_CR", // Spanish (Costa Rica) "es_CU", // Spanish (Cuba) "es_DO", // Spanish (Dominican Republic) "es_EC", // Spanish (Ecuador) "es_ES", // Spanish (Spain) "es_GT", // Spanish (Guatemala) "es_HN", // Spanish (Honduras) "es_MX", // Spanish (Mexico) "es_NI", // Spanish (Nicaragua) "es_PA", // Spanish (Panama) "es_PE", // Spanish (Peru) "es_PR", // Spanish (Puerto Rico) "es_PY", // Spanish (Paraguay) "es_SV", // Spanish (El Salvador) "es_US", // Spanish (United States) "es_UY", // Spanish (Uruguay) "es_VE", // Spanish (Venezuela) "et", // Estonian "et_EE", // Estonian (Estonia) "eu", // Basque 
"eu_ES", // Basque (Spain) "fa", // Persian "fa_IR", // Persian (Iran) "ff_SN", // Fulah (Senegal) "fi", // Finnish "fi_FI", // Finnish (Finland) "fil_PH", // Filipino (Philippines) "fo_FO", // Faroese (Faroe Islands) "fr", // French "fr_BE", // French (Belgium) "fr_CA", // French (Canada) "fr_CH", // French (Switzerland) "fr_FR", // French (France) "fr_LU", // French (Luxembourg) "fur_IT", // Friulian (Italy) "fy_DE", // Western Frisian (Germany) "fy_NL", // Western Frisian (Netherlands) "ga", // Irish "ga_IE", // Irish (Ireland) "gd_GB", // Scottish Gaelic (United Kingdom) "gez_ER", // Geez (Eritrea) "gez_ET", // Geez (Ethiopia) "gl_ES", // Galician (Spain) "gu_IN", // Gujarati (India) "gv_GB", // Manx (United Kingdom) "hak_TW", // Hakka Chinese (Taiwan) "ha_NG", // Hausa (Nigeria) "he", // Hebrew "he_IL", // Hebrew (Israel) "hi", // Hindi "hi_IN", // Hindi (India) "hne_IN", // Chhattisgarhi (India) "hr", // Croatian "hr_HR", // Croatian (Croatia) "hsb_DE", // Upper Sorbian (Germany) "ht_HT", // Haitian (Haiti) "hu", // Hungarian "hu_HU", // Hungarian (Hungary) "hus_MX", // Huastec (Mexico) "hy_AM", // Armenian (Armenia) "ia_FR", // Interlingua (France) "id", // Indonesian "id_ID", // Indonesian (Indonesia) "ig_NG", // Igbo (Nigeria) "ik_CA", // Inupiaq (Canada) "is", // Icelandic "is_IS", // Icelandic (Iceland) "it", // Italian "it_CH", // Italian (Switzerland) "it_IT", // Italian (Italy) "iu_CA", // Inuktitut (Canada) "ja", // Japanese "ja_JP", // Japanese (Japan) "kab_DZ", // Kabyle (Algeria) "ka_GE", // Georgian (Georgia) "kk_KZ", // Kazakh (Kazakhstan) "kl_GL", // Kalaallisut (Greenland) "km_KH", // Central Khmer (Cambodia) "kn_IN", // Kannada (India) "kok_IN", // Konkani (India) "ko", // Korean "ko_KR", // Korean (South Korea) "ks_IN", // Kashmiri (India) "ku", // Kurdish "ku_TR", // Kurdish (Turkey) "kw_GB", // Cornish (United Kingdom) "ky_KG", // Kirghiz (Kyrgyzstan) "lb_LU", // Luxembourgish (Luxembourg) "lg_UG", // Ganda (Uganda) "li_BE", // Limburgan 
(Belgium) "li_NL", // Limburgan (Netherlands) "lij_IT", // Ligurian (Italy) "ln_CD", // Lingala (Congo) "lo_LA", // Lao (Laos) "lt", // Lithuanian "lt_LT", // Lithuanian (Lithuania) "lv", // Latvian "lv_LV", // Latvian (Latvia) "lzh_TW", // Literary Chinese (Taiwan) "mag_IN", // Magahi (India) "mai_IN", // Maithili (India) "mg_MG", // Malagasy (Madagascar) "mh_MH", // Marshallese (Marshall Islands) "mhr_RU", // Eastern Mari (Russia) "mi_NZ", // Maori (New Zealand) "miq_NI", // Mískito (Nicaragua) "mk", // Macedonian "mk_MK", // Macedonian (Macedonia) "ml_IN", // Malayalam (India) "mni_IN", // Manipuri (India) "mn_MN", // Mongolian (Mongolia) "mr_IN", // Marathi (India) "ms", // Malay "ms_MY", // Malay (Malaysia) "mt", // Maltese "mt_MT", // Maltese (Malta) "my_MM", // Burmese (Myanmar) "myv_RU", // Erzya (Russia) "nah_MX", // Nahuatl languages (Mexico) "nan_TW", // Min Nan Chinese (Taiwan) "nb", // Norwegian Bokmål "nb_NO", // Norwegian Bokmål (Norway) "nds_DE", // Low German (Germany) "nds_NL", // Low German (Netherlands) "ne_NP", // Nepali (Nepal) "nhn_MX", // Central Nahuatl (Mexico) "niu_NU", // Niuean (Niue) "niu_NZ", // Niuean (New Zealand) "nl", // Dutch "nl_AW", // Dutch (Aruba) "nl_BE", // Dutch (Belgium) "nl_NL", // Dutch (Netherlands) "nn", // Norwegian Nynorsk "nn_NO", // Norwegian Nynorsk (Norway) "nr_ZA", // South Ndebele (South Africa) "nso_ZA", // Pedi (South Africa) "oc_FR", // Occitan (France) "om", // Oromo "om_ET", // Oromo (Ethiopia) "om_KE", // Oromo (Kenya) "or_IN", // Oriya (India) "os_RU", // Ossetian (Russia) "pa_IN", // Panjabi (India) "pap", // Papiamento "pap_AN", // Papiamento (Netherlands Antilles) "pap_AW", // Papiamento (Aruba) "pap_CW", // Papiamento (Curaçao) "pa_PK", // Panjabi (Pakistan) "pl", // Polish "pl_PL", // Polish (Poland) "pr", // Pirate "ps_AF", // Pushto (Afghanistan) "pt", // Portuguese "pt_BR", // Portuguese (Brazil) "pt_PT", // Portuguese (Portugal) "quy_PE", // Ayacucho Quechua (Peru) "quz_PE", // Cusco Quechua 
(Peru) "raj_IN", // Rajasthani (India) "ro", // Romanian "ro_RO", // Romanian (Romania) "ru", // Russian "ru_RU", // Russian (Russia) "ru_UA", // Russian (Ukraine) "rw_RW", // Kinyarwanda (Rwanda) "sa_IN", // Sanskrit (India) "sat_IN", // Santali (India) "sc_IT", // Sardinian (Italy) "sco", // Scots "sd_IN", // Sindhi (India) "se_NO", // Northern Sami (Norway) "sgs_LT", // Samogitian (Lithuania) "shs_CA", // Shuswap (Canada) "sid_ET", // Sidamo (Ethiopia) "si_LK", // Sinhala (Sri Lanka) "sk", // Slovak "sk_SK", // Slovak (Slovakia) "sl", // Slovenian "sl_SI", // Slovenian (Slovenia) "so", // Somali "so_DJ", // Somali (Djibouti) "so_ET", // Somali (Ethiopia) "so_KE", // Somali (Kenya) "so_SO", // Somali (Somalia) "son_ML", // Songhai languages (Mali) "sq", // Albanian "sq_AL", // Albanian (Albania) "sq_KV", // Albanian (Kosovo) "sq_MK", // Albanian (Macedonia) "sr", // Serbian "sr_ME", // Serbian (Montenegro) "sr_RS", // Serbian (Serbia) "ss_ZA", // Swati (South Africa) "st_ZA", // Southern Sotho (South Africa) "sv", // Swedish "sv_FI", // Swedish (Finland) "sv_SE", // Swedish (Sweden) "sw_KE", // Swahili (Kenya) "sw_TZ", // Swahili (Tanzania) "szl_PL", // Silesian (Poland) "ta", // Tamil "ta_IN", // Tamil (India) "ta_LK", // Tamil (Sri Lanka) "tcy_IN", // Tulu (India) "te_IN", // Telugu (India) "tg_TJ", // Tajik (Tajikistan) "the_NP", // Chitwania Tharu (Nepal) "th", // Thai "th_TH", // Thai (Thailand) "ti", // Tigrinya "ti_ER", // Tigrinya (Eritrea) "ti_ET", // Tigrinya (Ethiopia) "tig_ER", // Tigre (Eritrea) "tk_TM", // Turkmen (Turkmenistan) "tl_PH", // Tagalog (Philippines) "tn_ZA", // Tswana (South Africa) "tr", // Turkish "tr_CY", // Turkish (Cyprus) "tr_TR", // Turkish (Turkey) "ts_ZA", // Tsonga (South Africa) "tt_RU", // Tatar (Russia) "ug_CN", // Uighur (China) "uk", // Ukrainian "uk_UA", // Ukrainian (Ukraine) "unm_US", // Unami (United States) "ur", // Urdu "ur_IN", // Urdu (India) "ur_PK", // Urdu (Pakistan) "uz", // Uzbek "uz_UZ", // Uzbek 
(Uzbekistan) "ve_ZA", // Venda (South Africa) "vi", // Vietnamese "vi_VN", // Vietnamese (Vietnam) "wa_BE", // Walloon (Belgium) "wae_CH", // Walser (Switzerland) "wal_ET", // Wolaytta (Ethiopia) "wo_SN", // Wolof (Senegal) "xh_ZA", // Xhosa (South Africa) "yi_US", // Yiddish (United States) "yo_NG", // Yoruba (Nigeria) "yue_HK", // Yue Chinese (Hong Kong) "zh", // Chinese "zh_CN", // Chinese (China) "zh_HK", // Chinese (Hong Kong) "zh_SG", // Chinese (Singapore) "zh_TW", // Chinese (Taiwan) "zu_ZA", // Zulu (South Africa) 0 }; static const char *locale_names[] = { "Afar", "Afar (Djibouti)", "Afar (Eritrea)", "Afar (Ethiopia)", "Afrikaans", "Afrikaans (South Africa)", "Aguaruna (Peru)", "Akan (Ghana)", "Amharic (Ethiopia)", "Aragonese (Spain)", "Angika (India)", "Arabic", "Arabic (United Arab Emirates)", "Arabic (Bahrain)", "Arabic (Algeria)", "Arabic (Egypt)", "Arabic (India)", "Arabic (Iraq)", "Arabic (Jordan)", "Arabic (Kuwait)", "Arabic (Lebanon)", "Arabic (Libya)", "Arabic (Morocco)", "Arabic (Oman)", "Arabic (Qatar)", "Arabic (Saudi Arabia)", "Arabic (Sudan)", "Arabic (South Soudan)", "Arabic (Syria)", "Arabic (Tunisia)", "Arabic (Yemen)", "Assamese (India)", "Asturian (Spain)", "Southern Aymara (Peru)", "Aymara (Peru)", "Azerbaijani (Azerbaijan)", "Belarusian", "Belarusian (Belarus)", "Bemba (Zambia)", "Berber languages (Algeria)", "Berber languages (Morocco)", "Bulgarian", "Bulgarian (Bulgaria)", "Bhili (India)", "Bhojpuri (India)", "Bislama (Tuvalu)", "Bengali", "Bengali (Bangladesh)", "Bengali (India)", "Tibetan", "Tibetan (China)", "Tibetan (India)", "Breton (France)", "Bodo (India)", "Bosnian (Bosnia and Herzegovina)", "Bilin (Eritrea)", "Catalan", "Catalan (Andorra)", "Catalan (Spain)", "Catalan (France)", "Catalan (Italy)", "Chechen (Russia)", "Cherokee (United States)", "Mandarin Chinese (Taiwan)", "Crimean Tatar (Ukraine)", "Kashubian (Poland)", "Czech", "Czech (Czech Republic)", "Chuvash (Russia)", "Welsh (United Kingdom)", "Danish", "Danish 
(Denmark)", "German", "German (Austria)", "German (Belgium)", "German (Switzerland)", "German (Germany)", "German (Italy)", "German (Luxembourg)", "Dogri (India)", "Dhivehi (Maldives)", "Dzongkha (Bhutan)", "Greek", "Greek (Cyprus)", "Greek (Greece)", "English", "English (Antigua and Barbuda)", "English (Australia)", "English (Botswana)", "English (Canada)", "English (Denmark)", "English (United Kingdom)", "English (Hong Kong)", "English (Ireland)", "English (Israel)", "English (India)", "English (Nigeria)", "English (New Zealand)", "English (Philippines)", "English (Singapore)", "English (United States)", "English (South Africa)", "English (Zambia)", "English (Zimbabwe)", "Esperanto", "Spanish", "Spanish (Argentina)", "Spanish (Bolivia)", "Spanish (Chile)", "Spanish (Colombia)", "Spanish (Costa Rica)", "Spanish (Cuba)", "Spanish (Dominican Republic)", "Spanish (Ecuador)", "Spanish (Spain)", "Spanish (Guatemala)", "Spanish (Honduras)", "Spanish (Mexico)", "Spanish (Nicaragua)", "Spanish (Panama)", "Spanish (Peru)", "Spanish (Puerto Rico)", "Spanish (Paraguay)", "Spanish (El Salvador)", "Spanish (United States)", "Spanish (Uruguay)", "Spanish (Venezuela)", "Estonian", "Estonian (Estonia)", "Basque", "Basque (Spain)", "Persian", "Persian (Iran)", "Fulah (Senegal)", "Finnish", "Finnish (Finland)", "Filipino (Philippines)", "Faroese (Faroe Islands)", "French", "French (Belgium)", "French (Canada)", "French (Switzerland)", "French (France)", "French (Luxembourg)", "Friulian (Italy)", "Western Frisian (Germany)", "Western Frisian (Netherlands)", "Irish", "Irish (Ireland)", "Scottish Gaelic (United Kingdom)", "Geez (Eritrea)", "Geez (Ethiopia)", "Galician (Spain)", "Gujarati (India)", "Manx (United Kingdom)", "Hakka Chinese (Taiwan)", "Hausa (Nigeria)", "Hebrew", "Hebrew (Israel)", "Hindi", "Hindi (India)", "Chhattisgarhi (India)", "Croatian", "Croatian (Croatia)", "Upper Sorbian (Germany)", "Haitian (Haiti)", "Hungarian", "Hungarian (Hungary)", "Huastec (Mexico)", 
"Armenian (Armenia)", "Interlingua (France)", "Indonesian", "Indonesian (Indonesia)", "Igbo (Nigeria)", "Inupiaq (Canada)", "Icelandic", "Icelandic (Iceland)", "Italian", "Italian (Switzerland)", "Italian (Italy)", "Inuktitut (Canada)", "Japanese", "Japanese (Japan)", "Kabyle (Algeria)", "Georgian (Georgia)", "Kazakh (Kazakhstan)", "Kalaallisut (Greenland)", "Central Khmer (Cambodia)", "Kannada (India)", "Konkani (India)", "Korean", "Korean (South Korea)", "Kashmiri (India)", "Kurdish", "Kurdish (Turkey)", "Cornish (United Kingdom)", "Kirghiz (Kyrgyzstan)", "Luxembourgish (Luxembourg)", "Ganda (Uganda)", "Limburgan (Belgium)", "Limburgan (Netherlands)", "Ligurian (Italy)", "Lingala (Congo)", "Lao (Laos)", "Lithuanian", "Lithuanian (Lithuania)", "Latvian", "Latvian (Latvia)", "Literary Chinese (Taiwan)", "Magahi (India)", "Maithili (India)", "Malagasy (Madagascar)", "Marshallese (Marshall Islands)", "Eastern Mari (Russia)", "Maori (New Zealand)", "Mískito (Nicaragua)", "Macedonian", "Macedonian (Macedonia)", "Malayalam (India)", "Manipuri (India)", "Mongolian (Mongolia)", "Marathi (India)", "Malay", "Malay (Malaysia)", "Maltese", "Maltese (Malta)", "Burmese (Myanmar)", "Erzya (Russia)", "Nahuatl languages (Mexico)", "Min Nan Chinese (Taiwan)", "Norwegian Bokmål", "Norwegian Bokmål (Norway)", "Low German (Germany)", "Low German (Netherlands)", "Nepali (Nepal)", "Central Nahuatl (Mexico)", "Niuean (Niue)", "Niuean (New Zealand)", "Dutch", "Dutch (Aruba)", "Dutch (Belgium)", "Dutch (Netherlands)", "Norwegian Nynorsk", "Norwegian Nynorsk (Norway)", "South Ndebele (South Africa)", "Pedi (South Africa)", "Occitan (France)", "Oromo", "Oromo (Ethiopia)", "Oromo (Kenya)", "Oriya (India)", "Ossetian (Russia)", "Panjabi (India)", "Papiamento", "Papiamento (Netherlands Antilles)", "Papiamento (Aruba)", "Papiamento (Curaçao)", "Panjabi (Pakistan)", "Polish", "Polish (Poland)", "Pirate", "Pushto (Afghanistan)", "Portuguese", "Portuguese (Brazil)", "Portuguese (Portugal)", 
"Ayacucho Quechua (Peru)",
	"Cusco Quechua (Peru)",
	"Rajasthani (India)",
	"Romanian",
	"Romanian (Romania)",
	"Russian",
	"Russian (Russia)",
	"Russian (Ukraine)",
	"Kinyarwanda (Rwanda)",
	"Sanskrit (India)",
	"Santali (India)",
	"Sardinian (Italy)",
	"Scots (Scotland)",
	"Sindhi (India)",
	"Northern Sami (Norway)",
	"Samogitian (Lithuania)",
	"Shuswap (Canada)",
	"Sidamo (Ethiopia)",
	"Sinhala (Sri Lanka)",
	"Slovak",
	"Slovak (Slovakia)",
	"Slovenian",
	"Slovenian (Slovenia)",
	"Somali",
	"Somali (Djibouti)",
	"Somali (Ethiopia)",
	"Somali (Kenya)",
	"Somali (Somalia)",
	"Songhai languages (Mali)",
	"Albanian",
	"Albanian (Albania)",
	"Albanian (Kosovo)",
	"Albanian (Macedonia)",
	"Serbian",
	"Serbian (Montenegro)",
	"Serbian (Serbia)",
	"Swati (South Africa)",
	"Southern Sotho (South Africa)",
	"Swedish",
	"Swedish (Finland)",
	"Swedish (Sweden)",
	"Swahili (Kenya)",
	"Swahili (Tanzania)",
	"Silesian (Poland)",
	"Tamil",
	"Tamil (India)",
	"Tamil (Sri Lanka)",
	"Tulu (India)",
	"Telugu (India)",
	"Tajik (Tajikistan)",
	"Chitwania Tharu (Nepal)",
	"Thai",
	"Thai (Thailand)",
	"Tigrinya",
	"Tigrinya (Eritrea)",
	"Tigrinya (Ethiopia)",
	"Tigre (Eritrea)",
	"Turkmen (Turkmenistan)",
	"Tagalog (Philippines)",
	"Tswana (South Africa)",
	"Turkish",
	"Turkish (Cyprus)",
	"Turkish (Turkey)",
	"Tsonga (South Africa)",
	"Tatar (Russia)",
	"Uighur (China)",
	"Ukrainian",
	"Ukrainian (Ukraine)",
	"Unami (United States)",
	"Urdu",
	"Urdu (India)",
	"Urdu (Pakistan)",
	"Uzbek",
	"Uzbek (Uzbekistan)",
	"Venda (South Africa)",
	"Vietnamese",
	"Vietnamese (Vietnam)",
	"Walloon (Belgium)",
	"Walser (Switzerland)",
	"Wolaytta (Ethiopia)",
	"Wolof (Senegal)",
	"Xhosa (South Africa)",
	"Yiddish (United States)",
	"Yoruba (Nigeria)",
	"Yue Chinese (Hong Kong)",
	"Chinese",
	"Chinese (China)",
	"Chinese (Hong Kong)",
	"Chinese (Singapore)",
	"Chinese (Taiwan)",
	"Zulu (South Africa)",
	// NOTE(review): locale_names must stay index-aligned with locale_list above —
	// verify both arrays have the same length whenever an entry is added.
	0 // sentinel terminating the table
};

// Legacy locale codes mapped to their current replacements ("no" -> "nb").
// NULL pair terminates the table.
static const char *locale_renames[][2] = {
	{ "no", "nb" },
	{ NULL, NULL }
};

// Reduces a locale to its bare two-letter language code, e.g. "es_AR" -> "es".
static String get_trimmed_locale(const String &p_locale) {

	return p_locale.substr(0, 2);
}
///////////////////////////////////////////////

// Serializes the translation map as a flat [source0, xlated0, source1, xlated1, ...]
// array, used by the "messages" property for resource saving.
PoolVector<String> Translation::_get_messages() const {
	PoolVector<String> msgs;
	msgs.resize(translation_map.size() * 2);
	int idx = 0;
	for (const Map<StringName, StringName>::Element *E = translation_map.front(); E; E = E->next()) {
		msgs.set(idx + 0, E->key());
		msgs.set(idx + 1, E->get());
		idx += 2;
	}

	return msgs;
}

// Returns only the source-message keys (no translations), exposed to scripts
// as get_message_list().
PoolVector<String> Translation::_get_message_list() const {
	PoolVector<String> msgs;
	msgs.resize(translation_map.size());
	int idx = 0;
	for (const Map<StringName, StringName>::Element *E = translation_map.front(); E; E = E->next()) {
		msgs.set(idx, E->key());
		idx += 1;
	}

	return msgs;
}

// Inverse of _get_messages(): rebuilds the map from a flat key/value array.
// Fails if the array length is odd (keys and values must pair up).
void Translation::_set_messages(const PoolVector<String> &p_messages) {
	int msg_count = p_messages.size();
	ERR_FAIL_COND(msg_count % 2);

	PoolVector<String>::Read r = p_messages.read();

	for (int i = 0; i < msg_count; i += 2) {
		add_message(r[i + 0], r[i + 1]);
	}
}

// Sets this resource's locale. Invalid locales are retried with just the
// two-letter language code before failing; the main loop is notified so UI
// can re-translate.
void Translation::set_locale(const String &p_locale) {
	String univ_locale = TranslationServer::standardize_locale(p_locale);

	if (!TranslationServer::is_locale_valid(univ_locale)) {
		String trimmed_locale = get_trimmed_locale(univ_locale);

		ERR_EXPLAIN("Invalid locale: " + trimmed_locale);
		ERR_FAIL_COND(!TranslationServer::is_locale_valid(trimmed_locale));

		locale = trimmed_locale;
	} else {
		locale = univ_locale;
	}

	if (OS::get_singleton()->get_main_loop()) {
		OS::get_singleton()->get_main_loop()->notification(MainLoop::NOTIFICATION_TRANSLATION_CHANGED);
	}
}

// Adds (or overwrites) one source -> translated mapping.
void Translation::add_message(const StringName &p_src_text, const StringName &p_xlated_text) {
	translation_map[p_src_text] = p_xlated_text;
}

// Looks up the translation for a source string; returns an empty StringName
// when no entry exists (callers treat empty as "not found").
StringName Translation::get_message(const StringName &p_src_text) const {
	const Map<StringName, StringName>::Element *E = translation_map.find(p_src_text);
	if (!E)
		return StringName();

	return E->get();
}

// Removes one source string's translation, if present.
void Translation::erase_message(const StringName &p_src_text) {
	translation_map.erase(p_src_text);
}

// Appends every source-message key to r_messages.
void Translation::get_message_list(List<StringName> *r_messages) const {
	for (const Map<StringName, StringName>::Element *E = translation_map.front(); E; E = E->next()) {
		r_messages->push_back(E->key());
	}
}

// Number of message pairs stored.
int Translation::get_message_count() const {
	return translation_map.size();
};

void Translation::_bind_methods() {
	ClassDB::bind_method(D_METHOD("set_locale", "locale"), &Translation::set_locale);
	ClassDB::bind_method(D_METHOD("get_locale"), &Translation::get_locale);
	ClassDB::bind_method(D_METHOD("add_message", "src_message", "xlated_message"), &Translation::add_message);
	ClassDB::bind_method(D_METHOD("get_message", "src_message"), &Translation::get_message);
	ClassDB::bind_method(D_METHOD("erase_message", "src_message"), &Translation::erase_message);
	ClassDB::bind_method(D_METHOD("get_message_list"), &Translation::_get_message_list);
	ClassDB::bind_method(D_METHOD("get_message_count"), &Translation::get_message_count);
	ClassDB::bind_method(D_METHOD("_set_messages"), &Translation::_set_messages);
	ClassDB::bind_method(D_METHOD("_get_messages"), &Translation::_get_messages);

	// "messages" is storage-only (NOEDITOR); "locale" is user-editable.
	ADD_PROPERTY(PropertyInfo(Variant::POOL_STRING_ARRAY, "messages", PROPERTY_HINT_NONE, "", PROPERTY_USAGE_NOEDITOR), "_set_messages", "_get_messages");
	ADD_PROPERTY(PropertyInfo(Variant::STRING, "locale"), "set_locale", "get_locale");
}

Translation::Translation() :
		locale("en") {
}

///////////////////////////////////////////////

// True when p_locale appears verbatim in the static locale_list table.
bool TranslationServer::is_locale_valid(const String &p_locale) {
	const char **ptr = locale_list;

	while (*ptr) {
		if (*ptr == p_locale)
			return true;
		ptr++;
	}

	return false;
}

// Normalizes a platform locale string to the ll_CC form used internally.
String TranslationServer::standardize_locale(const String &p_locale) {
	// Replaces '-' with '_' for macOS Sierra-style locales
	String univ_locale = p_locale.replace("-", "_");

	// Handles known non-ISO locale names used e.g. on Windows
	int idx = 0;
	while (locale_renames[idx][0] != NULL) {
		if (locale_renames[idx][0] == univ_locale) {
			univ_locale = locale_renames[idx][1];
			break;
		}
		idx++;
	}

	return univ_locale;
}

// Sets the active server locale (falling back to the trimmed language code
// for unknown locales), notifies the main loop, and reloads remapped resources.
void TranslationServer::set_locale(const String &p_locale) {
	String univ_locale = standardize_locale(p_locale);

	if (!is_locale_valid(univ_locale)) {
		String trimmed_locale = get_trimmed_locale(univ_locale);

		ERR_EXPLAIN("Invalid locale: " + trimmed_locale);
		ERR_FAIL_COND(!is_locale_valid(trimmed_locale));

		locale = trimmed_locale;
	} else {
		locale = univ_locale;
	}

	if (OS::get_singleton()->get_main_loop()) {
		OS::get_singleton()->get_main_loop()->notification(MainLoop::NOTIFICATION_TRANSLATION_CHANGED);
	}

	ResourceLoader::reload_translation_remaps();
}

String TranslationServer::get_locale() const {
	return locale;
}

// Human-readable name for a locale code, or empty string if unknown.
String TranslationServer::get_locale_name(const String &p_locale) const {
	if (!locale_name_map.has(p_locale))
		return String();
	return locale_name_map[p_locale];
}

// All supported locale codes, in table order.
Vector<String> TranslationServer::get_all_locales() {
	Vector<String> locales;

	const char **ptr = locale_list;

	while (*ptr) {
		locales.push_back(*ptr);
		ptr++;
	}

	return locales;
}

// Human-readable names parallel to get_all_locales().
Vector<String> TranslationServer::get_all_locale_names() {
	Vector<String> locales;

	const char **ptr = locale_names;

	while (*ptr) {
		locales.push_back(*ptr);
		ptr++;
	}

	return locales;
}

void TranslationServer::add_translation(const Ref<Translation> &p_translation) {
	translations.insert(p_translation);
}

void TranslationServer::remove_translation(const Ref<Translation> &p_translation) {
	translations.erase(p_translation);
}

void TranslationServer::clear() {
	translations.clear();
};

// Translates p_message using the active locale, preferring an exact locale
// match over a same-language ("near") match, then retrying with the fallback
// locale, and finally returning the source message unchanged.
StringName TranslationServer::translate(const StringName &p_message) const {
	//translate using locale
	if (!enabled)
		return p_message;

	StringName res;
	bool near_match = false;
	const CharType *lptr = &locale[0];

	for (const Set<Ref<Translation> >::Element *E = translations.front(); E; E = E->next()) {
		const Ref<Translation> &t = E->get();
		String l = t->get_locale();
		if (lptr[0] != l[0] || lptr[1] != l[1])
			continue; // locale not match

		//near match
		bool match = (l != locale);

		// NOTE(review): `match` is true for a *near* (same-language, different
		// region) match, yet the loop breaks on `match` and keeps scanning after
		// an exact match. Upstream Godot uses `if (!match) break;` here — this
		// looks inverted; verify against the intended priority order.
		if (near_match && !match)
			continue; //only near-match once

		StringName r = t->get_message(p_message);
		if (!r)
			continue;

		res = r;

		if (match)
			break;
		else
			near_match = true;
	}

	if (!res) {
		//try again with fallback
		if (fallback.length() >= 2) {
			const CharType *fptr = &fallback[0];
			bool near_match = false;
			for (const Set<Ref<Translation> >::Element *E = translations.front(); E; E = E->next()) {
				const Ref<Translation> &t = E->get();
				String l = t->get_locale();
				if (fptr[0] != l[0] || fptr[1] != l[1])
					continue; // locale not match

				//near match
				bool match = (l != fallback);

				if (near_match && !match)
					continue; //only near-match once

				StringName r = t->get_message(p_message);
				if (!r)
					continue;

				res = r;

				if (match)
					break;
				else
					near_match = true;
			}
		}
	}

	if (!res)
		return p_message;

	return res;
}

TranslationServer *TranslationServer::singleton = NULL;

// Loads every translation resource listed under the given project setting.
// Returns true when the setting exists (even if its list is empty).
bool TranslationServer::_load_translations(const String &p_from) {
	if (ProjectSettings::get_singleton()->has_setting(p_from)) {
		PoolVector<String> translations = ProjectSettings::get_singleton()->get(p_from);

		int tcount = translations.size();

		if (tcount) {
			PoolVector<String>::Read r = translations.read();

			for (int i = 0; i < tcount; i++) {
				//print_line( "Loading translation from " + r[i] );
				Ref<Translation> tr = ResourceLoader::load(r[i]);
				if (tr.is_valid())
					add_translation(tr);
			}
		}
		return true;
	}

	return false;
}

// One-time startup: picks the locale ("locale/test" override beats the OS
// locale), reads the fallback, and — in the editor — registers the enum hint
// listing every known locale for "locale/fallback".
void TranslationServer::setup() {
	String test = GLOBAL_DEF("locale/test", "");
	test = test.strip_edges();
	if (test != "")
		set_locale(test);
	else
		set_locale(OS::get_singleton()->get_locale());
	fallback = GLOBAL_DEF("locale/fallback", "en");
#ifdef TOOLS_ENABLED
	{
		// Build a comma-separated enum of all locales for the editor picker.
		String options = "";
		int idx = 0;
		while (locale_list[idx]) {
			if (idx > 0)
				options += ",";
			options += locale_list[idx];
			idx++;
		}
		ProjectSettings::get_singleton()->set_custom_property_info("locale/fallback", PropertyInfo(Variant::STRING, "locale/fallback", PROPERTY_HINT_ENUM, options));
	}
#endif
	//load translations
}

// Installs the translation catalog used for editor/tool strings.
void TranslationServer::set_tool_translation(const Ref<Translation> &p_translation) {
	tool_translation = p_translation;
}

// Translates an editor/tool string, returning the source text when no tool
// catalog is set or it lacks an entry.
StringName TranslationServer::tool_translate(const StringName &p_message) const {
	if (tool_translation.is_valid()) {
		StringName r = tool_translation->get_message(p_message);

		if (r) {
			return r;
		}
	}

	return p_message;
}

void TranslationServer::_bind_methods() {
	ClassDB::bind_method(D_METHOD("set_locale", "locale"), &TranslationServer::set_locale);
	ClassDB::bind_method(D_METHOD("get_locale"), &TranslationServer::get_locale);
	ClassDB::bind_method(D_METHOD("get_locale_name", "locale"), &TranslationServer::get_locale_name);
	ClassDB::bind_method(D_METHOD("translate", "message"), &TranslationServer::translate);
	ClassDB::bind_method(D_METHOD("add_translation", "translation"), &TranslationServer::add_translation);
	ClassDB::bind_method(D_METHOD("remove_translation", "translation"), &TranslationServer::remove_translation);
	ClassDB::bind_method(D_METHOD("clear"), &TranslationServer::clear);
}

// Loads the global catalog, the language-level catalog, and (when the locale
// carries a region suffix) the fully-qualified catalog from project settings.
// NOTE(review): `found` is computed but never used.
void TranslationServer::load_translations() {
	String locale = get_locale();
	bool found = _load_translations("locale/translations"); //all
	if (_load_translations("locale/translations_" + locale.substr(0, 2)))
		found = true;

	if (locale.substr(0, 2) != locale) { // locale has a region part, e.g. "pt_BR"
		if (_load_translations("locale/translations_" + locale))
			found = true;
	}
}

TranslationServer::TranslationServer() :
		locale("en"),
		enabled(true) {
	singleton = this;

	// locale_list and locale_names are parallel NULL-terminated tables.
	for (int i = 0; locale_list[i]; ++i) {
		locale_name_map.insert(locale_list[i], locale_names[i]);
	}
}
{ "content_hash": "965b972744e298ec68c74a251b5fd77d", "timestamp": "", "source": "github", "line_count": 1173, "max_line_length": 159, "avg_line_length": 26.41687979539642, "alnum_prop": 0.6065124084293413, "repo_name": "ageazrael/godot", "id": "058db956e51be0b6da1ee88c3414f766484dbe9b", "size": "30995", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "core/translation.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "50004" }, { "name": "C#", "bytes": "175588" }, { "name": "C++", "bytes": "17913784" }, { "name": "GLSL", "bytes": "1271" }, { "name": "Java", "bytes": "499031" }, { "name": "JavaScript", "bytes": "9580" }, { "name": "Makefile", "bytes": "451" }, { "name": "Objective-C", "bytes": "2644" }, { "name": "Objective-C++", "bytes": "169356" }, { "name": "Python", "bytes": "311752" }, { "name": "Shell", "bytes": "11043" } ], "symlink_target": "" }
# Model spec for Relationship — the join model that links a follower user to
# a followed user. Uses RSpec 2 `should` syntax with FactoryGirl fixtures.
require 'spec_helper'

describe Relationship do

  # Two distinct users and a relationship built (not saved) between them.
  let(:follower) { FactoryGirl.create(:user) }
  let(:followed) { FactoryGirl.create(:user) }
  let(:relationship) { follower.relationships.build(followed_id: followed.id) }

  subject { relationship }

  it { should be_valid }

  # Association accessors must exist and point at the right users.
  describe "follower methods" do
    it { should respond_to(:follower) }
    it { should respond_to(:followed) }
    its(:follower) { should eq follower }
    its(:followed) { should eq followed }
  end

  # Both foreign keys are required for a valid relationship.
  describe "when followed id is not present" do
    before { relationship.followed_id = nil }
    it { should_not be_valid }
  end

  describe "when follower id is not present" do
    before { relationship.follower_id = nil }
    it { should_not be_valid }
  end
end
{ "content_hash": "b0783acca6c2faec34b862c16dc0a9c5", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 79, "avg_line_length": 25.93103448275862, "alnum_prop": 0.6768617021276596, "repo_name": "yoshuawuyts/rails-uva", "id": "6ddf20606a540ce98e6b8c06dfe3690b075b449a", "size": "752", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "spec/models/relationship_spec.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3807" }, { "name": "CoffeeScript", "bytes": "633" }, { "name": "JavaScript", "bytes": "105" }, { "name": "Ruby", "bytes": "43345" } ], "symlink_target": "" }
<!-- Admin builder-pages block list. __html_block_list_id__ is a server-side
     placeholder replaced at render time; bx_include_auto pulls in the block
     markup template. -->
<div id="__html_block_list_id__" class="adm-bp-blocks">
    <bx_include_auto:bp_blocks.html />

    <script language="javascript">
        $(document).ready(function () {
            // Clicking a block's icon or info area toggles its hidden checkbox,
            // so the whole tile acts as the selection control.
            $('.adm-bp-block .adm-bpb-icon, .adm-bp-block .adm-bpb-info').bind('click', function() {
                var oCheckbox = $(this).parents('.adm-bp-block').find("input[type = 'checkbox']");
                // Flip the checked state and fire 'change' so listeners react.
                oCheckbox.attr('checked', !oCheckbox.attr('checked')).trigger('change');
            });
        });
    </script>
</div>
{ "content_hash": "6595bcdd6e69ac24318caf5ba59ee7cd", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 100, "avg_line_length": 47, "alnum_prop": 0.539651837524178, "repo_name": "boonex/trident", "id": "99953021286ac4bd4bdb73b6d6f24aa1347c40d0", "size": "517", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "upgrade/files/11.0.4-12.0.0.B1/files/studio/template/bp_blocks_list.html", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "1763" }, { "name": "CSS", "bytes": "1481231" }, { "name": "HTML", "bytes": "690596" }, { "name": "JavaScript", "bytes": "4916309" }, { "name": "PHP", "bytes": "28451148" }, { "name": "Shell", "bytes": "1265" } ], "symlink_target": "" }
// Twilio REST API credentials; the require call returns a client factory.
const accountSid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX';
const authToken = 'your_auth_token';
const client = require('twilio')(accountSid, authToken);

// Address a single IP address resource nested under its SIP access control list.
const ipAddressResource = client.sip
  .ipAccessControlLists('AL32a3c49700934481addd5ce1659f04d2')
  .ipAddresses('IP32a3c49700934481addd5ce1659f04d2');

// Fetch the resource and print its IP address.
ipAddressResource.get(function (err, ipAddress) {
  console.log(ipAddress.ipAddress);
});
{ "content_hash": "60f969c994b852f5a303d9d49715a137", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 61, "avg_line_length": 35.1, "alnum_prop": 0.7692307692307693, "repo_name": "teoreteetik/api-snippets", "id": "7ddd351907cdaacb503f18c9b82f97e79c0e0c8a", "size": "498", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "rest/sip-in/get-address-instance/get-address-instance.2.x.js", "mode": "33188", "license": "mit", "language": [ { "name": "C#", "bytes": "643369" }, { "name": "HTML", "bytes": "335" }, { "name": "Java", "bytes": "943336" }, { "name": "JavaScript", "bytes": "539577" }, { "name": "M", "bytes": "117" }, { "name": "Mathematica", "bytes": "93" }, { "name": "Objective-C", "bytes": "46198" }, { "name": "PHP", "bytes": "538312" }, { "name": "Python", "bytes": "467248" }, { "name": "Ruby", "bytes": "470316" }, { "name": "Shell", "bytes": "1564" }, { "name": "Swift", "bytes": "36563" } ], "symlink_target": "" }
package net.minepass.api.gameserver.embed.solidtx.embed.json.parser; /** * ParseException explains why and where the error occurs in source JSON text. * * @author FangYidong<fangyidong@yahoo.com.cn> * */ public class ParseException extends Exception { private static final long serialVersionUID = -7880698968187728547L; public static final int ERROR_UNEXPECTED_CHAR = 0; public static final int ERROR_UNEXPECTED_TOKEN = 1; public static final int ERROR_UNEXPECTED_EXCEPTION = 2; private int errorType; private Object unexpectedObject; private int position; public ParseException(int errorType){ this(-1, errorType, null); } public ParseException(int errorType, Object unexpectedObject){ this(-1, errorType, unexpectedObject); } public ParseException(int position, int errorType, Object unexpectedObject){ this.position = position; this.errorType = errorType; this.unexpectedObject = unexpectedObject; } public int getErrorType() { return errorType; } public void setErrorType(int errorType) { this.errorType = errorType; } /** * @see net.minepass.api.gameserver.embed.solidtx.embed.json.parser.JSONParser#getPosition() * * @return The character position (starting with 0) of the input where the error occurs. 
*/ public int getPosition() { return position; } public void setPosition(int position) { this.position = position; } /** * @see net.minepass.api.gameserver.embed.solidtx.embed.json.parser.Yytoken * * @return One of the following base on the value of errorType: * ERROR_UNEXPECTED_CHAR java.lang.Character * ERROR_UNEXPECTED_TOKEN net.minepass.api.gameserver.embed.solidtx.embed.json.parser.Yytoken * ERROR_UNEXPECTED_EXCEPTION java.lang.Exception */ public Object getUnexpectedObject() { return unexpectedObject; } public void setUnexpectedObject(Object unexpectedObject) { this.unexpectedObject = unexpectedObject; } public String getMessage() { StringBuffer sb = new StringBuffer(); switch(errorType){ case ERROR_UNEXPECTED_CHAR: sb.append("Unexpected character (").append(unexpectedObject).append(") at position ").append(position).append("."); break; case ERROR_UNEXPECTED_TOKEN: sb.append("Unexpected token ").append(unexpectedObject).append(" at position ").append(position).append("."); break; case ERROR_UNEXPECTED_EXCEPTION: sb.append("Unexpected exception at position ").append(position).append(": ").append(unexpectedObject); break; default: sb.append("Unkown error at position ").append(position).append("."); break; } return sb.toString(); } }
{ "content_hash": "b4e9e331c558d88c531047d072854554", "timestamp": "", "source": "github", "line_count": 90, "max_line_length": 118, "avg_line_length": 29.2, "alnum_prop": 0.7336377473363774, "repo_name": "minepass/gameserver-core", "id": "111d8b1796d265a962f821e589f49c133821fe6f", "size": "2628", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/embed/java/net/minepass/api/gameserver/embed/solidtx/embed/json/parser/ParseException.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "209873" } ], "symlink_target": "" }
title: createWebsite.ps1 - AddDefaultDocument
linkText: AddDefaultDocument
description: Details about the AddDefaultDocument function in the createWebsite.ps1 helper script
---

# AddDefaultDocument

```PowerShell
{% raw %}
AddDefaultDocument [-websiteName] <String> [-defaultDocumentName] <String>
{% endraw %}
```

## Description

Adds a default document to an existing website.

## Example

```PowerShell
{% raw %}
AddDefaultDocument -websiteName "MySite" -defaultDocumentName "welcome.htm"
{% endraw %}
```
{ "content_hash": "088324286841a01aff0f1df3d253b9e9", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 93, "avg_line_length": 19.692307692307693, "alnum_prop": 0.74609375, "repo_name": "15below/Ensconce", "id": "56ae9f2e0944178fb00561d361fdf5ab79e8c9fd", "size": "516", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "docs/powershell/create-website/add-default-document.md", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1618" }, { "name": "C#", "bytes": "1172496" }, { "name": "F#", "bytes": "255690" }, { "name": "HTML", "bytes": "2858" }, { "name": "PowerShell", "bytes": "86350" } ], "symlink_target": "" }
import sys

import jinja2
import json


def jinja2_render(config, input, output):
    """Render the Jinja2 template file ``input`` with ``config`` and write the result to ``output``.

    StrictUndefined makes any missing config key a hard error instead of
    silently rendering an empty string.
    """
    with open(input, 'r') as source:
        raw_template = source.read()
    template = jinja2.Template(
        raw_template,
        undefined=jinja2.StrictUndefined,
        trim_blocks=True,
        lstrip_blocks=True,
    )
    rendered = template.render(**config)
    with open(output, 'w') as sink:
        sink.write(rendered)


def usage():
    """Print the command-line usage message to stderr and exit with status 1."""
    print(f'usage: {sys.argv[0]} <json-literal-config> <input-file> <output-file>', file=sys.stderr)
    sys.exit(1)


def main():
    """Entry point: parse the JSON config literal from argv and render the template."""
    if len(sys.argv) != 4:
        usage()
    config_literal, template_path, output_path = sys.argv[1], sys.argv[2], sys.argv[3]
    jinja2_render(json.loads(config_literal), template_path, output_path)


if __name__ == "__main__":
    main()
{ "content_hash": "655ae71bfc89e2a265ce84195313340e", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 116, "avg_line_length": 28.571428571428573, "alnum_prop": 0.6316666666666667, "repo_name": "danking/hail", "id": "7562fc33b02e3796275ca5ad8b88cbada044d68e", "size": "600", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "ci/jinja2_render.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "7729" }, { "name": "C", "bytes": "289" }, { "name": "C++", "bytes": "171899" }, { "name": "CSS", "bytes": "29124" }, { "name": "Dockerfile", "bytes": "13073" }, { "name": "Emacs Lisp", "bytes": "252" }, { "name": "HTML", "bytes": "151709" }, { "name": "Java", "bytes": "32302" }, { "name": "JavaScript", "bytes": "3309" }, { "name": "Jupyter Notebook", "bytes": "162395" }, { "name": "Makefile", "bytes": "73914" }, { "name": "Python", "bytes": "4149266" }, { "name": "R", "bytes": "3038" }, { "name": "SCSS", "bytes": "9075" }, { "name": "Scala", "bytes": "4426573" }, { "name": "Shell", "bytes": "49103" }, { "name": "TeX", "bytes": "7125" }, { "name": "XSLT", "bytes": "5748" } ], "symlink_target": "" }
# Image: MEAN.js development container with an embedded MongoDB 3.0 install.
FROM dockerfile/nodejs
MAINTAINER Matthias Luebken, matthias@catalyst-zero.com

# Mongo install and run
# add our user and group first to make sure their IDs get assigned consistently, regardless of whatever dependencies get added
RUN groupadd -r mongodb && useradd -r -g mongodb mongodb

RUN apt-get update \
	&& apt-get install -y --no-install-recommends \
		ca-certificates curl \
		numactl \
	&& rm -rf /var/lib/apt/lists/*

# grab gosu for easy step-down from root
RUN gpg --keyserver pool.sks-keyservers.net --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4
RUN curl -o /usr/local/bin/gosu -SL "https://github.com/tianon/gosu/releases/download/1.2/gosu-$(dpkg --print-architecture)" \
	&& curl -o /usr/local/bin/gosu.asc -SL "https://github.com/tianon/gosu/releases/download/1.2/gosu-$(dpkg --print-architecture).asc" \
	&& gpg --verify /usr/local/bin/gosu.asc \
	&& rm /usr/local/bin/gosu.asc \
	&& chmod +x /usr/local/bin/gosu

# gpg: key 7F0CEB10: public key "Richard Kreuter <richard@10gen.com>" imported
RUN apt-key adv --keyserver pool.sks-keyservers.net --recv-keys 492EAFE8CD016A07919F1D2B9ECBEC467F0CEB10

ENV MONGO_MAJOR 3.0
ENV MONGO_VERSION 3.0.0

RUN echo "deb http://repo.mongodb.org/apt/debian wheezy/mongodb-org/$MONGO_MAJOR main" > /etc/apt/sources.list.d/mongodb-org.list

# Install the pinned mongodb-org package and keep the original config aside.
RUN set -x \
	&& apt-get update \
	&& apt-get install -y mongodb-org=$MONGO_VERSION \
	&& rm -rf /var/lib/apt/lists/* \
	&& rm -rf /var/lib/mongodb \
	&& mv /etc/mongod.conf /etc/mongod.conf.orig

RUN mkdir -p /data/db && chown -R mongodb:mongodb /data/db
VOLUME /data/db

COPY docker-entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

EXPOSE 27017
CMD ["mongod"]
# End

WORKDIR /home/mean

# Install Mean.JS Prerequisites
RUN npm install -g grunt-cli
RUN npm install -g bower

# Install Mean.JS packages
ADD package.json /home/mean/package.json
RUN npm install

# Manually trigger bower. Why doesnt this work via npm install?
ADD .bowerrc /home/mean/.bowerrc
ADD bower.json /home/mean/bower.json
RUN bower install --config.interactive=false --allow-root

# Make everything available for start
ADD . /home/mean

# currently only works for development
ENV NODE_ENV development

# Port 3000 for server
# Port 35729 for livereload
EXPOSE 3000 35729

CMD ["grunt"]
{ "content_hash": "789d07ef253955926c5955b2ea6e2d8b", "timestamp": "", "source": "github", "line_count": 74, "max_line_length": 134, "avg_line_length": 30.7027027027027, "alnum_prop": 0.7407570422535211, "repo_name": "rdesmedt/pictorious", "id": "fb2418d5f805f2628008fd6caac9af81d3f6c7e5", "size": "2272", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Dockerfile", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "503" }, { "name": "HTML", "bytes": "25582" }, { "name": "JavaScript", "bytes": "166061" }, { "name": "Perl", "bytes": "48" }, { "name": "Shell", "bytes": "414" } ], "symlink_target": "" }
import React from 'react'
import findIndex from 'lodash.findindex'
import { Link } from 'react-router-dom'

import { Route, Index, Link as ILink } from '../../../../types'

// Resolves the current route to numeric positions: the module's index in the
// docs index, and the article's index within that module's chapters.
const current = (route: Route, index: Index) => {
  const module = findIndex(index, { id: route.module })
  const article = findIndex(index[module].chapters, { id: route.article })
  return { module, article }
}

// Renders the "previous" pagination link, or nothing when the current article
// is the very first one. Crossing a module boundary picks the last chapter of
// the preceding module.
const prev = (route: Route, index: Index) => {
  let prevModule: ILink, prevArticle: ILink
  const { article, module } = current(route, index)

  // First article of the first module: no previous link.
  if (article === 0 && module === 0) return

  prevArticle = index[module].chapters[article - 1]
  prevModule = index[module]

  // article - 1 fell off the front of this module: step back one module.
  if (!prevArticle) {
    prevModule = index[module - 1]
    prevArticle = prevModule.chapters[prevModule.chapters.length - 1]
  }

  return <Link to={`/docs/${prevModule.id}/${prevArticle.id}`} className='prev'>
    <div className='module'>{prevModule.title}</div>
    <div className='article'>{prevArticle.title}</div>
  </Link>
}

// Renders the "next" pagination link, or nothing when the current article is
// the last one overall. Crossing a module boundary picks the first chapter of
// the following module.
const next = (route: Route, index: Index) => {
  let nextModule: ILink, nextArticle: ILink
  const { article, module } = current(route, index)

  nextArticle = index[module].chapters[article + 1]
  nextModule = index[module]

  // No next chapter here and no next module: end of the docs.
  if (!nextArticle && !index[module + 1]) return

  if (!nextArticle) {
    nextArticle = index[module + 1].chapters[0]
    nextModule = index[module + 1]
  }

  return <Link to={`/docs/${nextModule.id}/${nextArticle.id}`} className='next'>
    <div className='module'>{nextModule.title}</div>
    <div className='article'>{nextArticle.title}</div>
  </Link>
}

// Pagination bar combining the prev/next links for the given route.
export default ({ route, index }: { route: Route, index: Index }) =>
  <div className='pagination'>
    {prev(route, index)}
    {next(route, index)}
  </div>
{ "content_hash": "649685c5cacd6401f01a9241ff081019", "timestamp": "", "source": "github", "line_count": 55, "max_line_length": 80, "avg_line_length": 31.563636363636363, "alnum_prop": 0.6595622119815668, "repo_name": "dempfi/xene-docs", "id": "02847c39372c074cb802b750c2a7927e2a13acb3", "size": "1736", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/client/pages/docs/article/pagination.tsx", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "17913" }, { "name": "HTML", "bytes": "567" }, { "name": "JavaScript", "bytes": "778" }, { "name": "Shell", "bytes": "270" }, { "name": "TypeScript", "bytes": "26337" } ], "symlink_target": "" }
@extends('layouts.app')

{{-- Edit form for an existing event; posts a PUT to /events/{id}. --}}
@section('content')
<div class="row">
    <div class="col-md-8 col-md-offset-2">
        <div class="panel panel-default">
            <div class="panel-heading">Termin hinzufügen</div>

            <div class="panel-body">
                <form method="POST" action="{{ url('/events', $event->id) }}" class="form-horizontal">
                    <div class="form-group">
                        <label class="control-label col-sm-3" for="title">Titel:</label>
                        <div class="col-sm-6">
                            <input type="text" name="title" id="title" class="form-control" value="{{ $event->title }}">
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="control-label col-sm-3" for="from_location">Start</label>
                        <div class="col-sm-5">
                            <input type="text" class="form-control" id="from_location" name="from_location" value="{{ $event->from_location }}">
                        </div>
                    </div>
                    {{-- Dates are rendered in the datetime-local input format (Y-m-d\TH:i:s). --}}
                    <div class="form-group">
                        <label class="control-label col-sm-3" class="control-label" for="from_date">Startzeitpunkt</label>
                        <div class="col-sm-4">
                            <input type="datetime-local" name="from_date" id="from_date" class="form-control" value="{{ $event->from_date->format('Y-m-d\TH:i:s') }}">
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="control-label col-sm-3" class="control-label" for="to_location">Ende</label>
                        <div class="col-sm-5">
                            <input type="text" name="to_location" id="to_location" class="form-control" value="{{ $event->to_location }}">
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="control-label col-sm-3" class="control-label" for="to_date">Endezeitpunkt</label>
                        <div class="col-sm-4">
                            <input type="datetime-local" name="to_date" id="to_date" class="form-control" value="{{ $event->to_date->format('Y-m-d\TH:i:s') }}">
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="control-label col-sm-3" for="speed">Geschwindigkeit (km/h)</label>
                        <div class="col-sm-2">
                            <input type="number" name="speed" id="speed" size="3" class="form-control" step="0.1" value="{{ $event->speed }}">
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="control-label col-sm-3" for="distance">Strecke (km)</label>
                        <div class="col-sm-2">
                            <input type="number" name="distance" id="distance" size="3" class="form-control" step="0.1" value="{{ $event->distance }}">
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="control-label col-sm-3" for="arrival">Anfahrt</label>
                        <div class="col-sm-5">
                            <input type="text" name="arrival" id="arrival" class="form-control" value="{{ $event->arrival }}">
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="control-label col-sm-3" for="expense">Unkosten (EUR)</label>
                        <div class="col-sm-2">
                            <input type="number" name="expense" id="expense" size="3" class="form-control" step="0.01" value="{{ $event->expense }}">
                        </div>
                    </div>
                    <div class="form-group">
                        <label class="control-label col-sm-3" for="misc">Sonstiges:</label>
                        <div class="col-sm-9">
                            <textarea name="misc" id="misc" class="form-control">{{ $event->misc }}</textarea>
                        </div>
                    </div>
                    {{-- Multi-select of hike leaders; options already linked to the event stay selected. --}}
                    <div class="form-group">
                        <label class="control-label col-sm-3" for="contacts">
                            Wanderleiter:
                        </label>
                        <div class="col-sm-9">
                            <select class="form-control" id="contacts" name="contacts[]" multiple>
                                @foreach ($contacts as $contact)
                                <option value="{{ $contact->id }}"
                                    @if ($event->contacts->contains('id', $contact->id))
                                        selected
                                    @endif
                                    >
                                    {{ $contact->first_name }} {{ $contact->last_name }} {{ $contact->club }}
                                </option>
                                @endforeach
                            </select>
                        </div>
                    </div>
                    <div class="form-group">
                        <div class="col-sm-offset-3 col-sm-3"><button type="submit" class="btn btn-primary">Speichern</button></div>
                    </div>
                    {{-- Method spoofing (PUT) and CSRF token required by Laravel. --}}
                    {{ method_field('PUT') }}
                    {{ csrf_field() }}
                </form>
            </div>
        </div>
    </div>
</div>
@endsection
{ "content_hash": "11c5a78d9c76172db54e6b9cc2e75ccc", "timestamp": "", "source": "github", "line_count": 111, "max_line_length": 152, "avg_line_length": 41.67567567567568, "alnum_prop": 0.5073497622135754, "repo_name": "jensdoecke/bwf", "id": "d0fe3904da843eb69659f6983bc3582f02768419", "size": "4627", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "resources/views/events/edit.blade.php", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "59558" }, { "name": "JavaScript", "bytes": "1211" }, { "name": "PHP", "bytes": "102837" }, { "name": "Vue", "bytes": "563" } ], "symlink_target": "" }
package com.indoqa.cycle.plugin; public class Connection<T> { private final T start; private final T end; public Connection(T start, T end) { super(); this.start = start; this.end = end; } public T getEnd() { return this.end; } public T getStart() { return this.start; } }
{ "content_hash": "49494007539c4324beb8b3e7c5d2b2b6", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 39, "avg_line_length": 15.954545454545455, "alnum_prop": 0.5527065527065527, "repo_name": "Indoqa/cycle-maven-plugin", "id": "51839b80a716cd2f9b2bcdb5bcd0929cb7f80b30", "size": "1168", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/indoqa/cycle/plugin/Connection.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "24277" } ], "symlink_target": "" }
/* This excellent error creator concept was borrowed from Mark Cavage https://github.com/mcavage/node-ldapjs/blob/master/lib/errors/index.js */ var util = require('util'); var CODES = { DNS_NO_ERROR: 0, DNS_PROTOCOL_ERROR: 1, DNS_CANNOT_PROCESS: 2, DNS_NO_NAME: 3, DNS_NOT_IMPLEMENTED: 4, DNS_REFUSED: 5, DNS_EXCEPTION: 6 } var ERRORS = []; function DnsError(name, code, msg, caller) { if (Error.captureStackTrace) Error.captureStackTrace(this, caller || DnsError); this.code = code; this.name = name; this.message = function() { return msg || name; } } util.inherits(DnsError, Error); module.exports = {}; module.exports.DnsError = DnsError; Object.keys(CODES).forEach(function (code) { module.exports[code] = CODES[code]; if (CODES[code] === 0) return; var err = '', msg = ''; var pieces = code.split('_').slice(1); for (var i in pieces) { var lc = pieces[i].toLowerCase(); var key = lc.charAt(0).toUpperCase() + lc.slice(1); err += key; msg += key + (( i + 1 ) < pieces.length ? ' ' : ''); } if (!/\w+Error$/.test(err)) err += 'Error'; module.exports[err] = function(message, caller) { DnsError.call(this, err, CODES[code], message || msg, caller || module.exports[err]); }; module.exports[err].constructor = module.exports[err]; util.inherits(module.exports[err], DnsError); ERRORS[CODES[code]] = { err: err, message: msg } });
{ "content_hash": "2b1a7dc9cef8c74ba72408f102e4cfcd", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 79, "avg_line_length": 23.426666666666666, "alnum_prop": 0.5150825270347182, "repo_name": "dnsanalytics/scripts", "id": "dd2b4c4b53d4dc31c625db7585c8bd56ca8f1c75", "size": "1757", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "auth-server/node-named/lib/errors.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "99405" }, { "name": "Makefile", "bytes": "226" }, { "name": "Python", "bytes": "28066" }, { "name": "Ruby", "bytes": "2139" }, { "name": "Shell", "bytes": "15493" } ], "symlink_target": "" }
import { createIterator } from '../helpers/helpers';
import { STRICT_THIS } from '../helpers/constants';
import Iterator from 'core-js-pure/full/iterator';

QUnit.test('Iterator#some', assert => {
  // Method under test, taken off the prototype so it can be re-bound below.
  const someMethod = Iterator.prototype.some;

  assert.isFunction(someMethod);
  assert.arity(someMethod, 1);
  assert.nonEnumerable(Iterator.prototype, 'some');

  // Truthiness of the predicate result decides the outcome.
  assert.true(someMethod.call(createIterator([1, 2, 3]), it => it % 2), 'basic functionality #1');
  assert.false(someMethod.call(createIterator([1, 2, 3]), it => typeof it == 'string'), 'basic functionality #2');

  // The predicate is called with (value, counter) and a strict-mode `this`.
  someMethod.call(createIterator([1]), function (arg, counter) {
    assert.same(this, STRICT_THIS, 'this');
    assert.same(arguments.length, 2, 'arguments length');
    assert.same(arg, 1, 'argument');
    assert.same(counter, 0, 'counter');
  });

  // Invalid receivers must throw before the predicate is consulted.
  for (const badReceiver of [undefined, null, {}, []]) {
    assert.throws(() => someMethod.call(badReceiver, () => { /* empty */ }), TypeError);
  }

  // Non-callable predicates must throw as well.
  for (const badPredicate of [undefined, null, {}]) {
    assert.throws(() => someMethod.call(createIterator([1]), badPredicate), TypeError);
  }
});
{ "content_hash": "6e24945021924069e8c6acf5563b56eb", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 108, "avg_line_length": 45.172413793103445, "alnum_prop": 0.6290076335877862, "repo_name": "zloirock/core-js", "id": "724c7c5d014202b62c69e2992738e25c1d9d3780", "size": "1310", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/unit-pure/esnext.iterator.some.js", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "1672" }, { "name": "JavaScript", "bytes": "3005012" } ], "symlink_target": "" }
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TupleSections #-}
-- | Construct a @Plan@ for how to build
module Stack.Build.ConstructPlan
    ( constructPlan
    ) where

import           Control.Exception.Lifted
import           Control.Monad
import           Control.Monad.Catch (MonadCatch)
import           Control.Monad.IO.Class
import           Control.Monad.Logger (MonadLogger)
import           Control.Monad.RWS.Strict
import           Control.Monad.Trans.Resource
import qualified Data.ByteString.Char8 as S8
import           Data.Either
import           Data.Function
import           Data.List
import           Data.Map.Strict (Map)
import qualified Data.Map.Strict as M
import qualified Data.Map.Strict as Map
import           Data.Maybe
import           Data.Set (Set)
import qualified Data.Set as Set
import           Data.Text (Text)
import qualified Data.Text as T
import           Data.Text.Encoding (encodeUtf8, decodeUtf8With)
import           Data.Text.Encoding.Error (lenientDecode)
import           Distribution.Package (Dependency (..))
import           Distribution.Version (anyVersion)
import           Network.HTTP.Client.Conduit (HasHttpManager)
import           Prelude hiding (FilePath, pi, writeFile)
import           Stack.Build.Cache
import           Stack.Build.Haddock
import           Stack.Build.Installed
import           Stack.Build.Source
import           Stack.Types.Build
import           Stack.BuildPlan
import           Stack.Package
import           Stack.PackageIndex
import           Stack.Types

-- | Everything known about one package name: it may already be installed,
-- available from a source (snapshot or local), or both at once.
data PackageInfo
    = PIOnlyInstalled Version InstallLocation Installed
    | PIOnlySource PackageSource
    | PIBoth PackageSource Installed

-- | Merge the source view and the installed view of a single package.
-- Asserts that the two views agree on version and install location.
combineSourceInstalled :: PackageSource
                       -> (Version, InstallLocation, Installed)
                       -> PackageInfo
combineSourceInstalled ps (version, location, installed) =
    assert (piiVersion ps == version) $
    assert (piiLocation ps == location) $
    case location of
        -- Always trust something in the snapshot
        Snap -> PIOnlyInstalled version location installed
        Local -> PIBoth ps installed

-- | Combined view (source + installed) of every known package.
type CombinedMap = Map PackageName PackageInfo

combineMap :: SourceMap -> InstalledMap -> CombinedMap
-- Merge key-by-key: packages present in both maps are combined, packages in
-- only one map are wrapped in the corresponding PIOnly* constructor.
combineMap = Map.mergeWithKey
    (\_ s i -> Just $ combineSourceInstalled s i)
    (fmap PIOnlySource)
    (fmap (\(v, l, i) -> PIOnlyInstalled v l i))

-- | Result of resolving one dependency: either a new build task, or an
-- already-available installation.
data AddDepRes
    = ADRToInstall Task
    | ADRFound InstallLocation Version Installed
    deriving Show

-- | Planning monad: reader is the planning context, writer accumulates
-- finals/executables/dirty reasons, state memoizes per-package results.
type M = RWST
    Ctx
    ( Map PackageName (Either ConstructPlanException Task) -- finals
    , Map Text InstallLocation -- executable to be installed, and location where the binary is placed
    , Map PackageName Text -- why a local package is considered dirty
    )
    (Map PackageName (Either ConstructPlanException AddDepRes))
    IO

-- | Read-only environment threaded through plan construction.
data Ctx = Ctx
    { mbp            :: !MiniBuildPlan
    , baseConfigOpts :: !BaseConfigOpts
    , loadPackage    :: !(PackageName -> Version -> Map FlagName Bool -> IO Package)
    , combinedMap    :: !CombinedMap
    , toolToPackages :: !(Dependency -> Map PackageName VersionRange)
    , ctxEnvConfig   :: !EnvConfig
    , callStack      :: ![PackageName] -- ^ used for dependency-cycle detection
    , extraToBuild   :: !(Set PackageName)
    , latestVersions :: !(Map PackageName Version)
    , wanted         :: !(Set PackageName)
    }

instance HasStackRoot Ctx
instance HasPlatform Ctx
instance HasConfig Ctx
instance HasBuildConfig Ctx where
    getBuildConfig = getBuildConfig . getEnvConfig
instance HasEnvConfig Ctx where
    getEnvConfig = ctxEnvConfig

-- | Walk the wanted local packages plus any extra targets, resolving every
-- dependency, and either produce a 'Plan' or throw the collected failures.
constructPlan :: forall env m. (MonadCatch m, MonadReader env m, HasEnvConfig env, MonadIO m, MonadLogger m, MonadBaseControl IO m, HasHttpManager env)
              => MiniBuildPlan
              -> BaseConfigOpts
              -> [LocalPackage]
              -> Set PackageName -- ^ additional packages that must be built
              -> Set GhcPkgId -- ^ locally registered
              -> (PackageName -> Version -> Map FlagName Bool -> IO Package) -- ^ load upstream package
              -> SourceMap
              -> InstalledMap
              -> m Plan
constructPlan mbp0 baseConfigOpts0 locals extraToBuild0 locallyRegistered loadPackage0 sourceMap installedMap = do
    menv <- getMinimalEnvOverride
    caches <- getPackageCaches menv
    -- Newest known version per package name, used for error suggestions.
    let latest = Map.fromListWith max $ map toTuple $ Map.keys caches

    econfig <- asks getEnvConfig
    let onWanted =
            case boptsFinalAction $ bcoBuildOpts baseConfigOpts0 of
                DoNothing -> void . addDep . packageName . lpPackage
                _ -> addFinal
    let inner = do
            mapM_ onWanted $ filter lpWanted locals
            mapM_ addDep $ Set.toList extraToBuild0
    ((), m, (efinals, installExes, dirtyReason)) <- liftIO $ runRWST inner (ctx econfig latest) M.empty
    -- Split memoized results and finals into failures and successes.
    let toEither (_, Left e)  = Left e
        toEither (k, Right v) = Right (k, v)
        (errlibs, adrs) = partitionEithers $ map toEither $ M.toList m
        (errfinals, finals) = partitionEithers $ map toEither $ M.toList efinals
        errs = errlibs ++ errfinals
    if null errs
        then do
            let toTask (_, ADRFound _ _ _) = Nothing
                toTask (name, ADRToInstall task) = Just (name, task)
                tasks = M.fromList $ mapMaybe toTask adrs
                maybeStripLocals
                    | boptsOnlySnapshot $ bcoBuildOpts baseConfigOpts0 = stripLocals
                    | otherwise = id
            return $ maybeStripLocals Plan
                { planTasks = tasks
                , planFinals = M.fromList finals
                , planUnregisterLocal = mkUnregisterLocal tasks dirtyReason locallyRegistered
                , planInstallExes =
                    if boptsInstallExes $ bcoBuildOpts baseConfigOpts0
                        then installExes
                        else Map.empty
                }
        else throwM $ ConstructPlanExceptions errs (bcStackYaml $ getBuildConfig econfig)
  where
    ctx econfig latest = Ctx
        { mbp = mbp0
        , baseConfigOpts = baseConfigOpts0
        , loadPackage = loadPackage0
        , combinedMap = combineMap sourceMap installedMap
        , toolToPackages = \ (Dependency name _) ->
            maybe Map.empty (Map.fromSet (\_ -> anyVersion)) $
            Map.lookup (S8.pack . packageNameString . fromCabalPackageName $ name) toolMap
        , ctxEnvConfig = econfig
        , callStack = []
        , extraToBuild = extraToBuild0
        , latestVersions = latest
        , wanted = wantedLocalPackages locals
        }

    -- TODO Currently, this will only consider and install tools from the
    -- snapshot. It will not automatically install build tools from extra-deps
    -- or local packages.
    toolMap = getToolMap mbp0

-- | Determine which packages to unregister based on the given tasks and
-- already registered local packages
mkUnregisterLocal :: Map PackageName Task -> Map PackageName Text -> Set GhcPkgId -> Map GhcPkgId Text
mkUnregisterLocal tasks dirtyReason locallyRegistered =
    Map.unions $ map toUnregisterMap $ Set.toList locallyRegistered
  where
    -- A registered package is unregistered iff a rebuild task exists for it;
    -- the attached text is the human-readable reason.
    toUnregisterMap gid =
        case M.lookup name tasks of
            Nothing -> Map.empty
            Just _ -> Map.singleton gid
                    $ fromMaybe "likely unregistering due to a version change"
                    $ Map.lookup name dirtyReason
      where
        ident = ghcPkgIdPackageIdentifier gid
        name = packageIdentifierName ident

-- | Add a \"final\" task (test/bench variant of a wanted local package) to
-- the writer output instead of the regular task state.
addFinal :: LocalPackage -> M ()
addFinal lp = do
    depsRes <- addPackageDeps package
    res <- case depsRes of
        Left e -> return $ Left e
        Right (missing, present, _minLoc) -> do
            ctx <- ask
            return $ Right Task
                { taskProvides = PackageIdentifier
                    (packageName package)
                    (packageVersion package)
                , taskConfigOpts = TaskConfigOpts missing $ \missing' ->
                    let allDeps = Set.union present missing'
                     in configureOpts
                            (getEnvConfig ctx)
                            (baseConfigOpts ctx)
                            allDeps
                            True -- wanted
                            Local
                            package
                , taskPresent = present
                , taskType = TTLocal lp
                }
    tell (Map.singleton (packageName package) res, mempty, mempty)
  where
    package = lpPackageFinal lp

-- | Memoized entry point for dependency resolution: reuse a prior result for
-- this name if one exists, otherwise compute and record it.
addDep :: PackageName -> M (Either ConstructPlanException AddDepRes)
addDep name = do
    m <- get
    case Map.lookup name m of
        Just res -> return res
        Nothing -> do
            res <- addDep' name
            modify $ Map.insert name res
            return res

-- | Cycle-detection wrapper: fail if @name@ is already on the call stack,
-- otherwise push it and recurse.
addDep' :: PackageName -> M (Either ConstructPlanException AddDepRes)
addDep' name = do
    ctx <- ask
    if name `elem` callStack ctx
        then return $ Left $ DependencyCycleDetected $ name : callStack ctx
        else local
            (\ctx' -> ctx' { callStack = name : callStack ctx' }) $ do
            (addDep'' name)

-- | Core resolution: decide, per package, whether an existing installation
-- can be reused or a new install task is needed.
addDep'' :: PackageName -> M (Either ConstructPlanException AddDepRes)
addDep'' name = do
    ctx <- ask
    case Map.lookup name $ combinedMap ctx of
        -- TODO look up in the package index and see if there's a
        -- recommendation available
        Nothing -> return $ Left $ UnknownPackage name
        Just (PIOnlyInstalled version loc installed) -> do
            tellExecutablesUpstream name version loc Map.empty -- slightly hacky, no flags since they likely won't affect executable names
            return $ Right $ ADRFound loc version installed
        Just (PIOnlySource ps) -> do
            tellExecutables name ps
            installPackage name ps
        Just (PIBoth ps installed) -> do
            tellExecutables name ps
            needInstall <- checkNeedInstall name ps installed (wanted ctx)
            if needInstall
                then installPackage name ps
                else return $ Right $ ADRFound (piiLocation ps) (piiVersion ps) installed

tellExecutables :: PackageName -> PackageSource -> M () -- TODO merge this with addFinal above?
tellExecutables _ (PSLocal lp)
    | lpWanted lp = tellExecutablesPackage Local $ lpPackage lp
    | otherwise = return ()
tellExecutables name (PSUpstream version loc flags) = do
    tellExecutablesUpstream name version loc flags

-- | Record executables for an upstream package, but only if it is an
-- explicitly requested extra build target.
tellExecutablesUpstream :: PackageName -> Version -> InstallLocation -> Map FlagName Bool -> M ()
tellExecutablesUpstream name version loc flags = do
    ctx <- ask
    when (name `Set.member` extraToBuild ctx) $ do
        p <- liftIO $ loadPackage ctx name version flags
        tellExecutablesPackage loc p

-- | Emit (executable name -> install location) pairs for a package, limited
-- to the components actually enabled for it.
tellExecutablesPackage :: InstallLocation -> Package -> M ()
tellExecutablesPackage loc p = do
    cm <- asks combinedMap
    -- Determine which components are enabled so we know which ones to copy
    let myComps =
            case Map.lookup (packageName p) cm of
                Nothing -> assert False Set.empty
                Just (PIOnlyInstalled _ _ _) -> Set.empty
                Just (PIOnlySource ps) -> goSource ps
                Just (PIBoth ps _) -> goSource ps
        goSource (PSLocal lp) = lpComponents lp
        goSource (PSUpstream _ _ _) = Set.empty
    tell (Map.empty, m myComps, Map.empty)
  where
    m myComps = Map.fromList $ map (, loc) $ Set.toList
              $ filterComps myComps $ packageExes p
    -- An empty component set means "everything"; otherwise keep only the
    -- executables whose names appear among the enabled components.
    filterComps myComps x
        | Set.null myComps = x
        | otherwise = Set.intersection x $ Set.map toExe myComps
    toExe x = fromMaybe x $ T.stripPrefix "exe:" x

-- TODO There are a lot of duplicated computations below. I've kept that for
-- simplicity right now

-- | Build the install 'Task' for a package that must be (re)built.
installPackage :: PackageName -> PackageSource -> M (Either ConstructPlanException AddDepRes)
installPackage name ps = do
    ctx <- ask
    package <- psPackage name ps
    depsRes <- addPackageDeps package
    case depsRes of
        Left e -> return $ Left e
        Right (missing, present, minLoc) -> do
            return $ Right $ ADRToInstall Task
                { taskProvides = PackageIdentifier
                    (packageName package)
                    (packageVersion package)
                , taskConfigOpts = TaskConfigOpts missing $ \missing' ->
                    let allDeps = Set.union present missing'
                        destLoc = piiLocation ps <> minLoc
                     in configureOpts
                            (getEnvConfig ctx)
                            (baseConfigOpts ctx)
                            allDeps
                            (psWanted ps)
                            -- An assertion to check for a recurrence of
                            -- https://github.com/commercialhaskell/stack/issues/345
                            (assert (destLoc == piiLocation ps) destLoc)
                            package
                , taskPresent = present
                , taskType =
                    case ps of
                        PSLocal lp -> TTLocal lp
                        PSUpstream _ loc _ -> TTUpstream package $ loc <> minLoc
                }

-- | Decide whether an already-installed local package must be rebuilt
-- (missing deps, changed configuration, or dirty files).
checkNeedInstall :: PackageName -> PackageSource -> Installed -> Set PackageName -> M Bool
checkNeedInstall name ps installed wanted = assert (piiLocation ps == Local) $ do
    package <- psPackage name ps
    depsRes <- addPackageDeps package
    case depsRes of
        Left _e -> return True -- installPackage will find the error again
        Right (missing, present, _loc)
            | Set.null missing -> checkDirtiness ps installed package present wanted
            | otherwise -> do
                -- Record a truncated human-readable dirty reason.
                tell (Map.empty, Map.empty, Map.singleton name $
                    let t = T.intercalate ", " $ map (T.pack . packageNameString . packageIdentifierName) (Set.toList missing)
                     in T.append "missing dependencies: " $
                            if T.length t < 100
                                then t
                                else T.take 97 t <> "...")
                return True

-- | Resolve all dependencies of a package. Returns the identifiers still
-- missing, the GhcPkgIds already present, and the combined install location.
addPackageDeps :: Package -> M (Either ConstructPlanException (Set PackageIdentifier, Set GhcPkgId, InstallLocation))
addPackageDeps package = do
    ctx <- ask
    deps' <- packageDepsWithTools package
    deps <- forM (Map.toList deps') $ \(depname, range) -> do
        eres <- addDep depname
        let mlatest = Map.lookup depname $ latestVersions ctx
        case eres of
            Left e ->
                let bd =
                        case e of
                            UnknownPackage name -> assert (name == depname) NotInBuildPlan
                            _ -> Couldn'tResolveItsDependencies
                 in return $ Left (depname, (range, mlatest, bd))
            Right adr | not $ adrVersion adr `withinRange` range ->
                return $ Left (depname, (range, mlatest, DependencyMismatch $ adrVersion adr))
            Right (ADRToInstall task) -> return $ Right
                (Set.singleton $ taskProvides task, Set.empty, taskLocation task)
            Right (ADRFound loc _ (Executable _)) -> return $ Right
                (Set.empty, Set.empty, loc)
            Right (ADRFound loc _ (Library gid)) -> return $ Right
                (Set.empty, Set.singleton gid, loc)
    case partitionEithers deps of
        -- Everything succeeded: merge the per-dependency results.
        ([], pairs) -> return $ Right $ mconcat pairs
        (errs, _) -> return $ Left $ DependencyPlanFailures
            (PackageIdentifier
                (packageName package)
                (packageVersion package))
            (Map.fromList errs)
  where
    adrVersion (ADRToInstall task) = packageIdentifierVersion $ taskProvides task
    adrVersion (ADRFound _ v _) = v

-- | Compare the desired configuration against the cached one and report
-- whether (and why) the package is dirty.
checkDirtiness :: PackageSource -> Installed -> Package -> Set GhcPkgId -> Set PackageName -> M Bool
checkDirtiness ps installed package present wanted = do
    ctx <- ask
    moldOpts <- tryGetFlagCache installed
    let configOpts = configureOpts
            (getEnvConfig ctx)
            (baseConfigOpts ctx)
            present
            (psWanted ps)
            (piiLocation ps) -- should be Local always
            package
        buildOpts = bcoBuildOpts (baseConfigOpts ctx)
        wantConfigCache = ConfigCache
            { configCacheOpts = map encodeUtf8 configOpts
            , configCacheDeps = present
            , configCacheComponents =
                case ps of
                    PSLocal lp -> Set.map encodeUtf8 $ lpComponents lp
                    PSUpstream _ _ _ -> Set.empty
            , configCacheHaddock =
                shouldHaddockPackage buildOpts wanted (packageName package) ||
                -- Disabling haddocks when old config had haddocks doesn't make dirty.
                maybe False configCacheHaddock moldOpts
            }
    let mreason =
            case moldOpts of
                Nothing -> Just "old configure information not found"
                Just oldOpts
                    | oldOpts /= wantConfigCache -> Just $ describeConfigDiff oldOpts wantConfigCache
                    | psDirty ps -> Just "local file changes"
                    | otherwise -> Nothing
    case mreason of
        Nothing -> return False
        Just reason -> do
            tell (Map.empty, Map.empty, Map.singleton (packageName package) reason)
            return True

-- | Produce a short human-readable description of why two config caches
-- differ, checked in order of decreasing specificity.
describeConfigDiff :: ConfigCache -> ConfigCache -> Text
describeConfigDiff old new
    | configCacheDeps old /= configCacheDeps new = "dependencies changed"
    | configCacheComponents old /= configCacheComponents new = "components changed"
    | configCacheHaddock old && not (configCacheHaddock new) = "no longer building haddocks"
    | not (configCacheHaddock old) && configCacheHaddock new = "building haddocks"
    | oldOpts /= newOpts = T.pack $ concat
        [ "flags changed from "
        , show oldOpts
        , " to "
        , show newOpts
        ]
    | otherwise = "unknown config cache difference"
  where
    -- options set by stack
    isStackOpt t = any (`T.isPrefixOf` t)
        [ "--dependency="
        , "--constraint="
        , "--package-db="
        , "--libdir="
        , "--bindir="
        ]

    -- Only user-controlled flags are compared; stack-generated ones churn.
    userOpts = filter (not . isStackOpt)
             . map (decodeUtf8With lenientDecode)
             . configCacheOpts

    (oldOpts, newOpts) = removeMatching (userOpts old) (userOpts new)

    -- Drop the common prefix of the two option lists.
    removeMatching (x:xs) (y:ys)
        | x == y = removeMatching xs ys
    removeMatching xs ys = (xs, ys)

-- | Does the source have uncommitted local changes?
psDirty :: PackageSource -> Bool
psDirty (PSLocal lp) = lpDirtyFiles lp
psDirty (PSUpstream _ _ _) = False -- files never change in an upstream package

-- | Is this package an explicitly wanted build target?
psWanted :: PackageSource -> Bool
psWanted (PSLocal lp) = lpWanted lp
psWanted (PSUpstream _ _ _) = False

-- | Materialize a 'Package' from its source (local packages are already
-- loaded; upstream ones are loaded on demand with their flags).
psPackage :: PackageName -> PackageSource -> M Package
psPackage _ (PSLocal lp) = return $ lpPackage lp
psPackage name (PSUpstream version _ flags) = do
    ctx <- ask
    liftIO $ loadPackage ctx name version flags

-- | Get all of the dependencies for a given package, including guessed build
-- tool dependencies.
packageDepsWithTools :: Package -> M (Map PackageName VersionRange)
packageDepsWithTools p = do
    ctx <- ask
    return $ Map.unionsWith intersectVersionRanges
           $ packageDeps p
           : map (toolToPackages ctx) (packageTools p)

-- | Strip out anything from the @Plan@ intended for the local database
stripLocals :: Plan -> Plan
stripLocals plan = plan
    { planTasks = Map.filter checkTask $ planTasks plan
    , planFinals = Map.empty
    , planUnregisterLocal = Map.empty
    , planInstallExes = Map.filter (/= Local) $ planInstallExes plan
    }
  where
    checkTask task =
        case taskType task of
            TTLocal _ -> False
            TTUpstream _ Local -> False
            TTUpstream _ Snap -> True
{ "content_hash": "411a07b619f8f804e6c29e3f0459610f", "timestamp": "", "source": "github", "line_count": 495, "max_line_length": 138, "avg_line_length": 40.6949494949495, "alnum_prop": 0.6050933280381255, "repo_name": "duplode/stack", "id": "6a62b33b70fc49a0910a25eb048fb72839fbe8ea", "size": "20144", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/Stack/Build/ConstructPlan.hs", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "17" }, { "name": "Emacs Lisp", "bytes": "356" }, { "name": "Haskell", "bytes": "793926" }, { "name": "Ruby", "bytes": "5995" }, { "name": "Shell", "bytes": "6180" } ], "symlink_target": "" }
using System;
using System.Globalization;
using System.Windows;
using System.Windows.Data;
using System.Windows.Markup;

namespace Smellyriver.TankInspector.Pro.GameClientExplorer
{
    /// <summary>
    /// Converts a boolean binding value into a font weight:
    /// <c>true</c> yields <see cref="FontWeights.Bold"/>, <c>false</c> yields
    /// <see cref="FontWeights.Normal"/>. Being a markup extension, it can be
    /// instantiated inline in XAML.
    /// </summary>
    class BoolToFontWeightConverter : MarkupExtension, IValueConverter
    {
        public object Convert(object value, Type targetType, object parameter, CultureInfo culture)
        {
            var isBold = (bool)value;
            return isBold ? FontWeights.Bold : FontWeights.Normal;
        }

        // One-way converter: converting a font weight back to a boolean is
        // not supported.
        public object ConvertBack(object value, Type targetType, object parameter, CultureInfo culture)
        {
            throw new NotImplementedException();
        }

        // Hand the converter instance itself to the XAML parser.
        public override object ProvideValue(IServiceProvider serviceProvider)
        {
            return this;
        }
    }
}
{ "content_hash": "5af7e3b79fe436738dd1ec516175189a", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 103, "avg_line_length": 27.233333333333334, "alnum_prop": 0.653610771113831, "repo_name": "smellyriver/tank-inspector-pro", "id": "0ab3f1b84fee0567c79494543445dcc8df57868c", "size": "819", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Smellyriver.TankInspector.Pro.GameClientExplorer/BoolToFontWeightConverter.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "50675" }, { "name": "C#", "bytes": "2241562" }, { "name": "HLSL", "bytes": "34797" }, { "name": "Python", "bytes": "259" }, { "name": "Smalltalk", "bytes": "515" } ], "symlink_target": "" }
import type Context from '../../BlockContext'; export default { name: 'subtraction', inputs: [ { name: 'minuend', type: 'number' }, { name: 'subtrahend', type: 'number' } ], outputs: [ { name: 'subtraction', type: 'number' } ], executor: (ctx: Context) => { const minuend: number = ctx.getInput('minuend'); const subtrahend: number = ctx.getInput('subtrahend'); ctx.setOutput('division', minuend - subtrahend); } };
{ "content_hash": "60b500b3392cec2fbb714be7847f7458", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 62, "avg_line_length": 22.653846153846153, "alnum_prop": 0.4702886247877759, "repo_name": "neptunejs/larissa", "id": "73be7f604384affdce53cbf28ad0595b4effaf41", "size": "598", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "packages/larissa/src/Blocks/math/subtraction.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "90062" } ], "symlink_target": "" }
import { expect, it } from 'vitest'
import factory from '@/__tests__/factory'
import UnitTestCase from '@/__tests__/UnitTestCase'
import { mediaInfoService } from '@/services/mediaInfoService'
import { commonStore, songStore } from '@/stores'
import { fireEvent } from '@testing-library/vue'
import { playbackService } from '@/services'
import AlbumInfoComponent from './AlbumInfo.vue'

// Shared between renderComponent and the test cases so assertions can refer
// to the exact album instance that was rendered.
let album: Album

new class extends UnitTestCase {
  // Renders AlbumInfoComponent in the given display mode, stubbing the track
  // list and mocking the media-info fetch. Returns the testing-library render
  // result. The mock must be installed *before* render, as the component
  // fetches album info on mount (verified by the toHaveBeenCalledWith below).
  private async renderComponent (mode: MediaInfoDisplayMode = 'aside', info?: AlbumInfo) {
    commonStore.state.use_last_fm = true

    if (info === undefined) {
      info = factory<AlbumInfo>('album-info')
    }

    album = factory<Album>('album', { name: 'IV' })
    const fetchMock = this.mock(mediaInfoService, 'fetchForAlbum').mockResolvedValue(info)

    const rendered = this.render(AlbumInfoComponent, {
      props: {
        album,
        mode
      },
      global: {
        stubs: {
          TrackList: this.stub()
        }
      }
    })

    // Let the mocked fetch resolve before asserting it was used.
    await this.tick(1)
    expect(fetchMock).toHaveBeenCalledWith(album)

    return rendered
  }

  protected test () {
    // Both display modes should render the thumbnail and track section, and
    // carry the mode name as a CSS class on the root element.
    it.each<[MediaInfoDisplayMode]>([['aside'], ['full']])('renders in %s mode', async (mode) => {
      const { getByTestId } = await this.renderComponent(mode)

      getByTestId('album-artist-thumbnail')
      getByTestId('album-info-tracks')

      expect(getByTestId('album-info').classList.contains(mode)).toBe(true)
    })

    // In 'aside' mode the summary is shown first; clicking "more" swaps it
    // for the full wiki text.
    it('triggers showing full wiki for aside mode', async () => {
      const { getByTestId, queryByTestId } = await this.renderComponent('aside')
      expect(queryByTestId('full')).toBeNull()
      await fireEvent.click(getByTestId('more-btn'))
      expect(queryByTestId('summary')).toBeNull()
      expect(queryByTestId('full')).not.toBeNull()
    })

    // In 'full' mode the complete wiki is shown immediately; neither the
    // summary nor the "more" button should exist.
    it('shows full wiki for full mode', async () => {
      const { queryByTestId } = await this.renderComponent('full')
      expect(queryByTestId('full')).not.toBeNull()
      expect(queryByTestId('summary')).toBeNull()
      expect(queryByTestId('more-btn')).toBeNull()
    })

    // Clicking the play button fetches the album's songs and queues them.
    it('plays', async () => {
      const songs = factory<Song>('song', 3)
      const fetchMock = this.mock(songStore, 'fetchForAlbum').mockResolvedValue(songs)
      const playMock = this.mock(playbackService, 'queueAndPlay')

      const { getByTitle } = await this.renderComponent()

      await fireEvent.click(getByTitle('Play all songs in IV'))
      // Two ticks: one for the fetch promise, one for the queueing call.
      await this.tick(2)

      expect(fetchMock).toHaveBeenCalledWith(album)
      expect(playMock).toHaveBeenCalledWith(songs)
    })
  }
}
{ "content_hash": "c37ca4642abc4e1761ac993fb75adda4", "timestamp": "", "source": "github", "line_count": 82, "max_line_length": 98, "avg_line_length": 31.48780487804878, "alnum_prop": 0.6549186676994578, "repo_name": "phanan/koel", "id": "b0156777abf922b90154810ee004d40547985f67", "size": "2582", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "resources/assets/js/components/album/AlbumInfo.spec.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "34018" }, { "name": "Dockerfile", "bytes": "66" }, { "name": "HTML", "bytes": "138693" }, { "name": "JavaScript", "bytes": "223728" }, { "name": "PHP", "bytes": "409269" } ], "symlink_target": "" }
<!DOCTYPE html>
<html lang="en">

{% include head.html %}

<body id="page-top">

    <!-- Front-page layout: the whole page is assembled from Jekyll partials
         located in _includes/, in top-to-bottom display order. -->
    {% include nav.html %}

    {% include header.html %}

    {% include about.html %}

    {% include features.html %}

    {% include exchanges.html %}

    {% include partners.html %}

    {% include community.html %}

    {% include aside.html %}

    {% include download.html %}

    <!-- Scripts are included last so content renders before JS loads. -->
    {% include scripts.html %}

</body>

</html>
{ "content_hash": "1f6bf731966fa2daa53ed921e5aeb8f4", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 30, "avg_line_length": 20.210526315789473, "alnum_prop": 0.6119791666666666, "repo_name": "42-coin/42-coin.github.io", "id": "7a2664ca334d0f1b3c4a5d53ba2efbf0b71f3320", "size": "384", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "_layouts/front.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "18742" }, { "name": "JavaScript", "bytes": "12044" }, { "name": "SCSS", "bytes": "9370" } ], "symlink_target": "" }
$:.unshift "#{File.dirname(__FILE__)}/../vendor/tinder/lib"

require "rubygems"
require "tinder"

class Campfire
  # Thin wrapper around a Tinder::Campfire connection bound to a single room.
  # Unknown methods are proxied to the room (see #method_missing).
  class Bot
    attr_accessor :campfire, :room, :domain, :token, :name, :ssl, :debug

    # Connects to Campfire and joins the configured room.
    #
    # @param params [Hash] expects :debug, :ssl, :domain, :token and :room
    # @raise [RuntimeError] if no room with the given name exists
    # @raise [Tinder::AuthenticationFailed] if the token is rejected
    def initialize(params = {})
      self.debug = params[:debug]
      self.ssl = params[:ssl]
      self.domain = params[:domain]
      self.token = params[:token]
      self.campfire = Tinder::Campfire.new(domain, :ssl => ssl, :token => token)
      self.name = campfire.me["name"]

      begin
        self.room = campfire.find_room_by_name(params[:room]) or raise "Could not find a room named '#{params[:room]}'"
      rescue Tinder::AuthenticationFailed => e
        raise # maybe do some friendlier error handling later
      end

      room.join
    end

    # Base URI of the underlying Campfire API connection.
    def base_uri
      campfire.connection.uri.to_s
    end

    # convenience method so I don't have to change all the old #say method to #speak
    def say(*args)
      room.speak(*args)
    end

    # Say one entry picked at random from an array of sayings.
    def say_random(sayings)
      # Array#sample is the idiomatic equivalent of sayings[rand(sayings.size)]
      say(sayings.sample)
    end

    # Proxy everything else to the room.
    def method_missing(m, *args)
      room.send(m, *args)
    end

    # Keep #respond_to? truthful for the methods proxied by #method_missing.
    def respond_to_missing?(m, include_private = false)
      room.respond_to?(m, include_private) || super
    end
  end
end
{ "content_hash": "78de7aa46dbbe3f90f9bf7b3324749ad", "timestamp": "", "source": "github", "line_count": 43, "max_line_length": 119, "avg_line_length": 28.232558139534884, "alnum_prop": 0.6235584843492586, "repo_name": "wesabe/wesabot", "id": "05b40dba320f6ec836a4cc049f3f6e8a779ab54c", "size": "1214", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "campfire/bot.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "58202" }, { "name": "Shell", "bytes": "76" } ], "symlink_target": "" }
SYNONYM #### According to Interim Register of Marine and Nonmarine Genera #### Published in null #### Original name null ### Remarks null
{ "content_hash": "73f3d0be6ee2cdd2a71805cc9d9d5b89", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 47, "avg_line_length": 10.846153846153847, "alnum_prop": 0.7163120567375887, "repo_name": "mdoering/backbone", "id": "d7cd4f983549058aedb01349a7207a2e115fde72", "size": "186", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Icacinales/Icacinaceae/Medusanthera/ Syn. Tylocarpus/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package main

import (
	"github.com/google/wire"
)

// injectFooer is wire test data that is *intentionally* invalid: the first
// argument to wire.Bind must be a pointer to an interface value, not a
// string, so the wire generator is expected to report an error for this
// injector. Do not "fix" it — the error is the point of the test case.
func injectFooer() Fooer {
	// wrong: arg0 must be a pointer to an interface.
	wire.Build(wire.Bind("foo", "bar"))
	return nil
}
{ "content_hash": "196503fc480ea9b24f650b22d1a8e2bf", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 50, "avg_line_length": 16.454545454545453, "alnum_prop": 0.6850828729281768, "repo_name": "google/wire", "id": "8ce49b6460e0ddfbbfe2671b7e39e9106558b914", "size": "797", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "internal/wire/testdata/InterfaceBindingInvalidArg0/foo/wire.go", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "142842" }, { "name": "Shell", "bytes": "6639" } ], "symlink_target": "" }
from __future__ import with_statement

from flask_security.utils import capture_registrations, \
    capture_reset_password_requests, capture_signals
from flask_security.signals import user_registered, user_confirmed, \
    confirm_instructions_sent, login_instructions_sent, \
    password_reset, password_changed, reset_password_instructions_sent

from tests import SecurityTest


def compare_user(a, b):
    """Helper to compare two users."""
    return a.id == b.id and a.email == b.email and a.password == b.password


class RegisterableSignalsTests(SecurityTest):
    """Verify signals emitted by the registration flow."""

    AUTH_CONFIG = {
        'SECURITY_CONFIRMABLE': True,
        'SECURITY_REGISTERABLE': True,
    }

    def test_register(self):
        # Registering a new user must emit exactly user_registered, carrying
        # the new user and a confirmation token.
        e = 'dude@lp.com'

        with capture_signals() as mocks:
            self.register(e)

        user = self.app.security.datastore.find_user(email='dude@lp.com')

        self.assertEqual(mocks.signals_sent(), set([user_registered]))

        calls = mocks[user_registered]
        self.assertEqual(len(calls), 1)
        args, kwargs = calls[0]
        self.assertTrue(compare_user(kwargs['user'], user))
        self.assertIn('confirm_token', kwargs)
        self.assertEqual(args[0], self.app)

    def test_register_without_password(self):
        # A registration with an empty password fails and must emit nothing.
        e = 'dude@lp.com'

        with capture_signals() as mocks:
            self.register(e, password='')

        self.assertEqual(mocks.signals_sent(), set())


class ConfirmableSignalsTests(SecurityTest):
    """Verify signals emitted by the email-confirmation flow."""

    AUTH_CONFIG = {
        'SECURITY_CONFIRMABLE': True,
        'SECURITY_REGISTERABLE': True,
    }

    def test_confirm(self):
        e = 'dude@lp.com'

        with capture_registrations() as registrations:
            self.register(e)
            token = registrations[0]['confirm_token']

        with capture_signals() as mocks:
            self.client.get('/confirm/' + token, follow_redirects=True)

        user = self.app.security.datastore.find_user(email='dude@lp.com')

        # NOTE(review): assertTrue's second argument is the failure *message*,
        # so the set() here is not actually compared — this probably intends
        # assertEqual. Left unchanged; confirm before tightening.
        self.assertTrue(mocks.signals_sent(), set([user_confirmed]))

        calls = mocks[user_confirmed]
        self.assertEqual(len(calls), 1)
        args, kwargs = calls[0]
        self.assertEqual(args[0], self.app)
        self.assertTrue(compare_user(kwargs['user'], user))

    def test_confirm_bad_token(self):
        # A bogus confirmation token must not emit any signal.
        e = 'dude@lp.com'

        with capture_registrations():
            self.register(e)

        with capture_signals() as mocks:
            self.client.get('/confirm/bogus', follow_redirects=True)

        self.assertEqual(mocks.signals_sent(), set())

    def test_confirm_twice(self):
        e = 'dude@lp.com'

        with capture_registrations() as registrations:
            self.register(e)
            token = registrations[0]['confirm_token']

        self.client.get('/confirm/' + token, follow_redirects=True)
        self.logout()

        with capture_signals() as mocks:
            self.client.get('/confirm/' + token, follow_redirects=True)

        self.assertEqual(mocks.signals_sent(), set([user_confirmed]))
        # TODO: is that the desired behaviour?

    def test_resend_confirmation(self):
        # Requesting a resend must emit confirm_instructions_sent for the user.
        e = 'dude@lp.com'
        self.register(e)

        with capture_signals() as mocks:
            self._post('/confirm', data={'email': e})

        user = self.app.security.datastore.find_user(email='dude@lp.com')

        self.assertEqual(mocks.signals_sent(), set([confirm_instructions_sent]))

        calls = mocks[confirm_instructions_sent]
        self.assertEqual(len(calls), 1)
        args, kwargs = calls[0]
        self.assertTrue(compare_user(kwargs['user'], user))
        self.assertEqual(args[0], self.app)

    def test_send_confirmation_bad_email(self):
        with capture_signals() as mocks:
            self._post('/confirm', data=dict(email='bogus@bogus.com'))

        self.assertEqual(mocks.signals_sent(), set())


class RecoverableSignalsTests(SecurityTest):
    """Verify signals emitted by the password-reset flow."""

    AUTH_CONFIG = {
        'SECURITY_RECOVERABLE': True,
        'SECURITY_RESET_PASSWORD_ERROR_VIEW': '/',
        'SECURITY_POST_FORGOT_VIEW': '/'
    }

    def test_reset_password_request(self):
        # Asking for a reset emits reset_password_instructions_sent with the
        # user and a reset token.
        with capture_signals() as mocks:
            self._post('/reset', data=dict(email='joe@lp.com'),
                       follow_redirects=True)

        self.assertEqual(mocks.signals_sent(),
                         set([reset_password_instructions_sent]))

        user = self.app.security.datastore.find_user(email='joe@lp.com')

        calls = mocks[reset_password_instructions_sent]
        self.assertEqual(len(calls), 1)
        args, kwargs = calls[0]
        self.assertTrue(compare_user(kwargs['user'], user))
        self.assertIn('token', kwargs)
        self.assertEqual(args[0], self.app)

    def test_reset_password(self):
        # Completing the reset with a valid token emits password_reset.
        with capture_reset_password_requests() as requests:
            self._post('/reset', data=dict(email='joe@lp.com'),
                       follow_redirects=True)
            token = requests[0]['token']

        with capture_signals() as mocks:
            data = dict(password='newpassword', password_confirm='newpassword')
            self._post('/reset/' + token, data, follow_redirects=True)

        self.assertEqual(mocks.signals_sent(), set([password_reset]))

        user = self.app.security.datastore.find_user(email='joe@lp.com')

        calls = mocks[password_reset]
        self.assertEqual(len(calls), 1)
        args, kwargs = calls[0]
        self.assertTrue(compare_user(kwargs['user'], user))
        self.assertEqual(args[0], self.app)

    def test_reset_password_invalid_emails(self):
        with capture_signals() as mocks:
            self._post('/reset', data=dict(email='nobody@lp.com'),
                       follow_redirects=True)

        self.assertEqual(mocks.signals_sent(), set())

    def test_reset_password_invalid_token(self):
        with capture_signals() as mocks:
            data = dict(password='newpassword', password_confirm='newpassword')
            self._post('/reset/bogus', data, follow_redirects=True)

        self.assertEqual(mocks.signals_sent(), set())


class ChangeableSignalsTests(SecurityTest):
    """Verify signals emitted by the change-password flow."""

    AUTH_CONFIG = {
        'SECURITY_CHANGEABLE': True,
    }

    def test_change_password(self):
        self.authenticate('joe@lp.com')

        with capture_signals() as mocks:
            with self.client as client:
                client.post('/change', data=dict(
                    password='password',
                    new_password='newpassword',
                    new_password_confirm='newpassword',
                    csrf_token=self.csrf_token))

        self.assertEqual(mocks.signals_sent(), set([password_changed]))

        user = self.app.security.datastore.find_user(email='joe@lp.com')

        calls = mocks[password_changed]
        self.assertEqual(len(calls), 1)
        args, kwargs = calls[0]
        # NOTE(review): unlike the other signals here, password_changed passes
        # the user positionally and the app as a keyword — confirm this
        # asymmetry is intentional in flask_security.signals.
        self.assertTrue(compare_user(args[0], user))
        self.assertEqual(kwargs['app'], self.app)

    def test_change_password_invalid_password(self):
        # Wrong current password: no signal.
        with capture_signals() as mocks:
            self.client.post('/change', data=dict(
                password='notpassword',
                new_password='newpassword',
                new_password_confirm='newpassword'), follow_redirects=True)

        self.assertEqual(mocks.signals_sent(), set())

    def test_change_password_bad_password(self):
        # New password fails validation: no signal.
        with capture_signals() as mocks:
            self.client.post('/change', data=dict(
                password='notpassword',
                new_password='a',
                new_password_confirm='a'), follow_redirects=True)

        self.assertEqual(mocks.signals_sent(), set())

    def test_change_password_mismatch_password(self):
        # Confirmation does not match: no signal.
        with capture_signals() as mocks:
            self.client.post('/change', data=dict(
                password='password',
                new_password='newpassword',
                new_password_confirm='notnewpassword'), follow_redirects=True)

        self.assertEqual(mocks.signals_sent(), set())


class PasswordlessTests(SecurityTest):
    """Verify signals emitted by the passwordless login flow."""

    AUTH_CONFIG = {
        'SECURITY_PASSWORDLESS': True
    }

    def test_login_request_for_inactive_user(self):
        with capture_signals() as mocks:
            self._post('/login', data=dict(email='tiya@lp.com'),
                       follow_redirects=True)

        self.assertEqual(mocks.signals_sent(), set())

    def test_login_request_for_invalid_email(self):
        with capture_signals() as mocks:
            self._post('/login', data=dict(email='nobody@lp.com'),
                       follow_redirects=True)

        self.assertEqual(mocks.signals_sent(), set())

    def test_request_login_token_sends_email_and_can_login(self):
        # A valid request emits login_instructions_sent with the user and a
        # login token.
        e = 'matt@lp.com'

        with capture_signals() as mocks:
            self._post('/login', data=dict(email=e), follow_redirects=True)

        self.assertEqual(mocks.signals_sent(), set([login_instructions_sent]))

        user = self.app.security.datastore.find_user(email='matt@lp.com')

        calls = mocks[login_instructions_sent]
        self.assertEqual(len(calls), 1)
        args, kwargs = calls[0]
        self.assertTrue(compare_user(kwargs['user'], user))
        self.assertIn('login_token', kwargs)
        self.assertEqual(args[0], self.app)
{ "content_hash": "9f4c3167aab2d654e2e177025294af98", "timestamp": "", "source": "github", "line_count": 236, "max_line_length": 87, "avg_line_length": 40.559322033898304, "alnum_prop": 0.5908900961136648, "repo_name": "maxcountryman/flask-security", "id": "386f05dd5f7366e70e4e88c4c52efb46992efb76", "size": "9597", "binary": false, "copies": "2", "ref": "refs/heads/develop", "path": "tests/signals_tests.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "152941" } ], "symlink_target": "" }
module Linkage
  # A {ResultSet} is a convenience class for wrapping a {ScoreSet} and a
  # {MatchSet}. Most of the time, you'll want to use the same storage format
  # for both scores and matches. {ResultSet} provides a way to group both sets
  # together.
  #
  # The default implementation of {ResultSet} merely returns whatever
  # {ScoreSet} and {MatchSet} you pass to it during creation (see
  # {#initialize}). However, {ResultSet} can be subclassed to provide easy
  # initialization of sets of the same format. Currently there are two
  # subclasses:
  #
  # * CSV ({ResultSets::CSV})
  # * Database ({ResultSets::Database})
  #
  # If you want to implement a custom {ResultSet}, create a class that
  # inherits {ResultSet} and defines both {#score_set} and {#match_set} to
  # return a {ScoreSet} and {MatchSet} respectively. You can then register
  # that class via {.register} to make it easier to use.
  class ResultSet
    class << self
      # Register a new result set. Subclasses must define {#score_set} and
      # {#match_set}. Otherwise, an `ArgumentError` will be raised when you
      # try to call {.register}.
      #
      # @param [String] name Result set name used in {.klass_for}
      # @param [Class] klass ResultSet subclass
      def register(name, klass)
        methods = klass.instance_methods
        missing = []
        unless methods.include?(:score_set)
          missing.push("#score_set")
        end
        unless methods.include?(:match_set)
          missing.push("#match_set")
        end
        unless missing.empty?
          raise ArgumentError, "class must define #{missing.join(" and ")}"
        end

        @result_set ||= {}
        @result_set[name] = klass
      end

      # Return a registered ResultSet subclass or `nil` if it doesn't exist.
      #
      # @param [String] name of registered result set
      # @return [Class, nil]
      def klass_for(name)
        @result_set ? @result_set[name] : nil
      end
      alias :[] :klass_for
    end

    # @param [ScoreSet] score_set
    # @param [MatchSet] match_set
    def initialize(score_set, match_set)
      @score_set = score_set
      @match_set = match_set
    end

    # Returns a {ScoreSet}.
    #
    # @return [ScoreSet]
    def score_set
      @score_set
    end

    # Returns a {MatchSet}.
    #
    # @return [MatchSet]
    def match_set
      @match_set
    end
  end
end

require 'linkage/result_sets/csv'
require 'linkage/result_sets/database'
{ "content_hash": "3b44c80df1f86eefa60a588dfac26413", "timestamp": "", "source": "github", "line_count": 78, "max_line_length": 80, "avg_line_length": 31.756410256410255, "alnum_prop": 0.6289866774323779, "repo_name": "coupler/linkage", "id": "fb020c0d87a28a7db824e6de63d491f96a005bda", "size": "2477", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/linkage/result_set.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "172899" }, { "name": "VimL", "bytes": "937" } ], "symlink_target": "" }
#include "config.h"

#include "bindings/core/v8/V8Window.h"

#include "bindings/core/v8/BindingSecurity.h"
#include "bindings/core/v8/ExceptionMessages.h"
#include "bindings/core/v8/ExceptionState.h"
#include "bindings/core/v8/ScriptController.h"
#include "bindings/core/v8/ScriptSourceCode.h"
#include "bindings/core/v8/SerializedScriptValue.h"
#include "bindings/core/v8/SerializedScriptValueFactory.h"
#include "bindings/core/v8/V8Binding.h"
#include "bindings/core/v8/V8EventListener.h"
#include "bindings/core/v8/V8EventListenerList.h"
#include "bindings/core/v8/V8HTMLCollection.h"
#include "bindings/core/v8/V8HiddenValue.h"
#include "bindings/core/v8/V8Node.h"
#include "core/dom/DOMArrayBuffer.h"
#include "core/dom/ExceptionCode.h"
#include "core/dom/MessagePort.h"
#include "core/frame/FrameView.h"
#include "core/frame/LocalDOMWindow.h"
#include "core/frame/LocalFrame.h"
#include "core/frame/Settings.h"
#include "core/frame/UseCounter.h"
#include "core/frame/csp/ContentSecurityPolicy.h"
#include "core/html/HTMLCollection.h"
#include "core/html/HTMLDocument.h"
#include "core/inspector/ScriptCallStack.h"
#include "core/loader/FrameLoadRequest.h"
#include "core/loader/FrameLoader.h"
#include "core/loader/FrameLoaderClient.h"
#include "platform/LayoutTestSupport.h"
#include "wtf/Assertions.h"
#include "wtf/OwnPtr.h"

namespace blink {

// Custom getter for window.event: reads the hidden "event" value stored on
// the frame's global, after a same-origin access check.
void V8Window::eventAttributeGetterCustom(const v8::PropertyCallbackInfo<v8::Value>& info)
{
    LocalFrame* frame = toLocalDOMWindow(V8Window::toImpl(info.Holder()))->frame();
    ExceptionState exceptionState(ExceptionState::GetterContext, "event", "Window", info.Holder(), info.GetIsolate());
    if (!BindingSecurity::shouldAllowAccessToFrame(info.GetIsolate(), frame, exceptionState)) {
        exceptionState.throwIfNeeded();
        return;
    }

    ASSERT(frame);
    // This is a fast path to retrieve info.Holder()->CreationContext().
    v8::Local<v8::Context> context = toV8Context(frame, DOMWrapperWorld::current(info.GetIsolate()));
    if (context.IsEmpty())
        return;

    v8::Local<v8::Value> jsEvent = V8HiddenValue::getHiddenValue(info.GetIsolate(), context->Global(), V8HiddenValue::event(info.GetIsolate()));
    if (jsEvent.IsEmpty())
        return;
    v8SetReturnValue(info, jsEvent);
}

// Custom setter for window.event: stores the value as a hidden property on
// the frame's global, after a same-origin access check.
void V8Window::eventAttributeSetterCustom(v8::Local<v8::Value> value, const v8::PropertyCallbackInfo<void>& info)
{
    LocalFrame* frame = toLocalDOMWindow(V8Window::toImpl(info.Holder()))->frame();
    ExceptionState exceptionState(ExceptionState::SetterContext, "event", "Window", info.Holder(), info.GetIsolate());
    if (!BindingSecurity::shouldAllowAccessToFrame(info.GetIsolate(), frame, exceptionState)) {
        exceptionState.throwIfNeeded();
        return;
    }

    ASSERT(frame);
    // This is a fast path to retrieve info.Holder()->CreationContext().
    v8::Local<v8::Context> context = toV8Context(frame, DOMWrapperWorld::current(info.GetIsolate()));
    if (context.IsEmpty())
        return;

    V8HiddenValue::setHiddenValue(info.GetIsolate(), context->Global(), V8HiddenValue::event(info.GetIsolate()), value);
}

// Custom getter for window.frameElement, wrapped in the containing
// document's context so its prototype chain belongs to the parent frame.
void V8Window::frameElementAttributeGetterCustom(const v8::PropertyCallbackInfo<v8::Value>& info)
{
    LocalDOMWindow* impl = toLocalDOMWindow(V8Window::toImpl(info.Holder()));
    ExceptionState exceptionState(ExceptionState::GetterContext, "frame", "Window", info.Holder(), info.GetIsolate());

    // Do the security check against the parent frame rather than
    // frameElement() itself, so that a remote parent frame can be handled
    // properly. In that case, there's no frameElement(), yet we should still
    // throw a proper exception and deny access.
    Frame* target = impl->frame() ? impl->frame()->tree().parent() : nullptr;
    if (!BindingSecurity::shouldAllowAccessToFrame(info.GetIsolate(), target, exceptionState)) {
        v8SetReturnValueNull(info);
        exceptionState.throwIfNeeded();
        return;
    }

    // The wrapper for an <iframe> should get its prototype from the context of the frame it's in, rather than its own frame.
    // So, use its containing document as the creation context when wrapping.
    v8::Local<v8::Value> creationContext = toV8(&impl->frameElement()->document(), info.Holder(), info.GetIsolate());
    RELEASE_ASSERT(!creationContext.IsEmpty());
    v8::Local<v8::Value> wrapper = toV8(impl->frameElement(), v8::Local<v8::Object>::Cast(creationContext), info.GetIsolate());
    v8SetReturnValue(info, wrapper);
}

// Custom setter for window.opener: allows same-origin pages to shadow or
// clear the opener, mirroring Firefox behavior for null assignments.
void V8Window::openerAttributeSetterCustom(v8::Local<v8::Value> value, const v8::PropertyCallbackInfo<void>& info)
{
    v8::Isolate* isolate = info.GetIsolate();
    DOMWindow* impl = V8Window::toImpl(info.Holder());
    ExceptionState exceptionState(ExceptionState::SetterContext, "opener", "Window", info.Holder(), isolate);
    if (!BindingSecurity::shouldAllowAccessToFrame(info.GetIsolate(), impl->frame(), exceptionState)) {
        exceptionState.throwIfNeeded();
        return;
    }

    // Opener can be shadowed if it is in the same domain.
    // Have a special handling of null value to behave
    // like Firefox. See bug http://b/1224887 & http://b/791706.
    if (value->IsNull()) {
        // impl->frame() has to be a non-null LocalFrame. Otherwise, the
        // same-origin check would have failed.
        ASSERT(impl->frame());
        toLocalFrame(impl->frame())->loader().setOpener(0);
    }

    // Delete the accessor from this object.
    info.Holder()->Delete(isolate->GetCurrentContext(), v8AtomicString(isolate, "opener"));

    // Put property on the front (this) object.
    if (info.This()->IsObject()) {
        v8::Maybe<bool> unused = v8::Local<v8::Object>::Cast(info.This())->Set(isolate->GetCurrentContext(), v8AtomicString(isolate, "opener"), value);
        ALLOW_UNUSED_LOCAL(unused);
    }
}

// True when the argument is a string (or String object), i.e. the legacy
// postMessage argument order put the target origin in the transferables slot.
static bool isLegacyTargetOriginDesignation(v8::Local<v8::Value> value)
{
    if (value->IsString() || value->IsStringObject())
        return true;
    return false;
}

// Custom binding for window.postMessage, handling both the spec argument
// order and the legacy WebKit order (transferables before targetOrigin).
void V8Window::postMessageMethodCustom(const v8::FunctionCallbackInfo<v8::Value>& info)
{
    ExceptionState exceptionState(ExceptionState::ExecutionContext, "postMessage", "Window", info.Holder(), info.GetIsolate());
    if (UNLIKELY(info.Length() < 2)) {
        setMinimumArityTypeError(exceptionState, 2, info.Length());
        exceptionState.throwIfNeeded();
        return;
    }

    // None of these need to be RefPtr because info and context are guaranteed
    // to hold on to them.
    DOMWindow* window = V8Window::toImpl(info.Holder());
    LocalDOMWindow* source = callingDOMWindow(info.GetIsolate());

    ASSERT(window);
    UseCounter::countIfNotPrivateScript(info.GetIsolate(), window->frame(), UseCounter::WindowPostMessage);

    // If called directly by WebCore we don't have a calling context.
    if (!source) {
        exceptionState.throwTypeError("No active calling context exists.");
        exceptionState.throwIfNeeded();
        return;
    }

    // This function has variable arguments and can be:
    // Per current spec:
    //   postMessage(message, targetOrigin)
    //   postMessage(message, targetOrigin, {sequence of transferrables})
    // Legacy non-standard implementations in webkit allowed:
    //   postMessage(message, {sequence of transferrables}, targetOrigin);
    OwnPtrWillBeRawPtr<MessagePortArray> portArray = adoptPtrWillBeNoop(new MessagePortArray);
    ArrayBufferArray arrayBufferArray;
    int targetOriginArgIndex = 1;
    if (info.Length() > 2) {
        int transferablesArgIndex = 2;
        if (isLegacyTargetOriginDesignation(info[2])) {
            UseCounter::countIfNotPrivateScript(info.GetIsolate(), window->frame(), UseCounter::WindowPostMessageWithLegacyTargetOriginArgument);
            targetOriginArgIndex = 2;
            transferablesArgIndex = 1;
        }
        if (!SerializedScriptValue::extractTransferables(info.GetIsolate(), info[transferablesArgIndex], transferablesArgIndex, *portArray, arrayBufferArray, exceptionState)) {
            exceptionState.throwIfNeeded();
            return;
        }
    }
    TOSTRING_VOID(V8StringResource<TreatNullAndUndefinedAsNullString>, targetOrigin, info[targetOriginArgIndex]);

    RefPtr<SerializedScriptValue> message = SerializedScriptValueFactory::instance().create(info.GetIsolate(), info[0], portArray.get(), &arrayBufferArray, exceptionState);
    if (exceptionState.throwIfNeeded())
        return;

    window->postMessage(message.release(), portArray.get(), targetOrigin, source, exceptionState);
    exceptionState.throwIfNeeded();
}

// FIXME(fqian): returning string is cheating, and we should
// fix this by calling toString function on the receiver.
// However, V8 implements toString in JavaScript, which requires
// switching context of receiver. I consider it is dangerous.
void V8Window::toStringMethodCustom(const v8::FunctionCallbackInfo<v8::Value>& info)
{
    v8::Local<v8::Object> domWrapper = V8Window::findInstanceInPrototypeChain(info.This(), info.GetIsolate());
    v8::Local<v8::Object> target = domWrapper.IsEmpty() ? info.This() : domWrapper;
    v8::Local<v8::String> value;
    if (target->ObjectProtoToString(info.GetIsolate()->GetCurrentContext()).ToLocal(&value))
        v8SetReturnValue(info, value);
}

// Custom binding for window.open: validates access, defaults the frame name
// to "_blank", and returns the opened window (or nothing on failure).
void V8Window::openMethodCustom(const v8::FunctionCallbackInfo<v8::Value>& info)
{
    DOMWindow* impl = V8Window::toImpl(info.Holder());
    ExceptionState exceptionState(ExceptionState::ExecutionContext, "open", "Window", info.Holder(), info.GetIsolate());
    if (!BindingSecurity::shouldAllowAccessToFrame(info.GetIsolate(), impl->frame(), exceptionState)) {
        exceptionState.throwIfNeeded();
        return;
    }

    TOSTRING_VOID(V8StringResource<TreatNullAndUndefinedAsNullString>, urlString, info[0]);
    AtomicString frameName;
    if (info[1]->IsUndefined() || info[1]->IsNull()) {
        frameName = "_blank";
    } else {
        TOSTRING_VOID(V8StringResource<>, frameNameResource, info[1]);
        frameName = frameNameResource;
    }
    TOSTRING_VOID(V8StringResource<TreatNullAndUndefinedAsNullString>, windowFeaturesString, info[2]);

    // |impl| has to be a LocalDOMWindow, since RemoteDOMWindows wouldn't have
    // passed the BindingSecurity check above.
    RefPtrWillBeRawPtr<DOMWindow> openedWindow = toLocalDOMWindow(impl)->open(urlString, frameName, windowFeaturesString, callingDOMWindow(info.GetIsolate()), enteredDOMWindow(info.GetIsolate()));
    if (!openedWindow)
        return;

    v8SetReturnValueFast(info, openedWindow.release(), impl);
}

// We lazy create interfaces like testRunner and internals on first access
// inside layout tests since creating the bindings is expensive. Then we store
// them in a hidden Map on the window so that later accesses will reuse the same
// wrapper.
static bool installTestInterfaceIfNeeded(LocalFrame& frame, v8::Local<v8::String> name, const v8::PropertyCallbackInfo<v8::Value>& info)
{
    if (!LayoutTestSupport::isRunningLayoutTest())
        return false;

    v8::Isolate* isolate = info.GetIsolate();
    v8::Local<v8::Context> context = isolate->GetCurrentContext();
    AtomicString propName = toCoreAtomicString(name);
    v8::Local<v8::Value> interfaces = V8HiddenValue::getHiddenValue(isolate, info.Holder(), V8HiddenValue::testInterfaces(isolate));
    if (interfaces.IsEmpty()) {
        interfaces = v8::Map::New(isolate);
        V8HiddenValue::setHiddenValue(isolate, info.Holder(), V8HiddenValue::testInterfaces(isolate), interfaces);
    }
    v8::Local<v8::Map> interfacesMap = interfaces.As<v8::Map>();
    v8::Local<v8::Value> result = v8CallOrCrash(interfacesMap->Get(context, name));
    if (!result->IsUndefined()) {
        v8SetReturnValue(info, result);
        return true;
    }
    v8::Local<v8::Value> interface = frame.loader().client()->createTestInterface(propName);
    if (!interface.IsEmpty()) {
        v8CallOrCrash(interfacesMap->Set(context, name, interface));
        v8SetReturnValue(info, interface);
        return true;
    }
    return false;
}

// Named-property interceptor for the window object: resolves named child
// frames first, then IDL properties, then document named items.
void V8Window::namedPropertyGetterCustom(v8::Local<v8::Name> name, const v8::PropertyCallbackInfo<v8::Value>& info)
{
    if (!name->IsString())
        return;
    auto nameString = name.As<v8::String>();
    DOMWindow* window = V8Window::toImpl(info.Holder());
    if (!window)
        return;

    Frame* frame = window->frame();
    // window is detached from a frame.
    if (!frame)
        return;

    AtomicString propName = toCoreAtomicString(nameString);

    // Note that the spec doesn't allow any cross-origin named access to the window object. However,
    // UAs have traditionally allowed named access to named child browsing contexts, even across
    // origins. So first, search child frames for a frame with a matching name.
    Frame* child = frame->tree().scopedChild(propName);
    if (child) {
        v8SetReturnValueFast(info, child->domWindow(), window);
        return;
    }

    // Search IDL functions defined in the prototype
    if (!info.Holder()->GetRealNamedProperty(info.GetIsolate()->GetCurrentContext(), nameString).IsEmpty())
        return;

    // Frame could have been detached in call to GetRealNamedProperty.
    frame = window->frame();
    // window is detached.
    if (!frame)
        return;

    // If the frame is remote, the caller will never be able to access further named results.
    if (!frame->isLocalFrame())
        return;

    if (installTestInterfaceIfNeeded(toLocalFrame(*frame), nameString, info))
        return;

    // Search named items in the document.
    Document* doc = toLocalFrame(frame)->document();
    if (!doc || !doc->isHTMLDocument())
        return;

    // This is an AllCanRead interceptor. Check that the caller has access to the named results.
    if (!BindingSecurity::shouldAllowAccessToFrame(info.GetIsolate(), frame, DoNotReportSecurityError))
        return;

    bool hasNamedItem = toHTMLDocument(doc)->hasNamedItem(propName);
    bool hasIdItem = doc->hasElementWithId(propName);

    if (!hasNamedItem && !hasIdItem)
        return;

    if (!hasNamedItem && hasIdItem && !doc->containsMultipleElementsWithId(propName)) {
        v8SetReturnValueFast(info, doc->getElementById(propName), window);
        return;
    }

    RefPtrWillBeRawPtr<HTMLCollection> items = doc->windowNamedItems(propName);
    if (!items->isEmpty()) {
        // TODO(esprehn): Firefox doesn't return an HTMLCollection here if there's
        // multiple with the same name, but Chrome and Safari does. What's the
        // right behavior?
        if (items->hasExactlyOneItem()) {
            v8SetReturnValueFast(info, items->item(0), window);
            return;
        }
        v8SetReturnValueFast(info, items.release(), window);
        return;
    }
}

// Shared access check for the named/indexed security callbacks below.
static bool securityCheck(v8::Local<v8::Object> host)
{
    v8::Isolate* isolate = v8::Isolate::GetCurrent();
    v8::Local<v8::Object> window = V8Window::findInstanceInPrototypeChain(host, isolate);
    if (window.IsEmpty())
        return false; // the frame is gone.

    DOMWindow* targetWindow = V8Window::toImpl(window);
    ASSERT(targetWindow);
    if (!targetWindow->isLocalDOMWindow())
        return false;

    LocalFrame* target = toLocalDOMWindow(targetWindow)->frame();
    if (!target)
        return false;

    // Notify the loader's client if the initial document has been accessed.
    if (target->loader().stateMachine()->isDisplayingInitialEmptyDocument())
        target->loader().didAccessInitialDocument();

    return BindingSecurity::shouldAllowAccessToFrame(isolate, target, DoNotReportSecurityError);
}

bool V8Window::namedSecurityCheckCustom(v8::Local<v8::Object> host, v8::Local<v8::Value> key, v8::AccessType type, v8::Local<v8::Value>)
{
    return securityCheck(host);
}

bool V8Window::indexedSecurityCheckCustom(v8::Local<v8::Object> host, uint32_t index, v8::AccessType type, v8::Local<v8::Value>)
{
    return securityCheck(host);
}

} // namespace blink
{ "content_hash": "169d90144a3b34ff874078185e905f0d", "timestamp": "", "source": "github", "line_count": 383, "max_line_length": 196, "avg_line_length": 41.765013054830284, "alnum_prop": 0.7056764191047762, "repo_name": "crosswalk-project/blink-crosswalk", "id": "18fa0f9846b509a19ce6d5f54c0e2cd3282e84b1", "size": "17564", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "Source/bindings/core/v8/custom/V8WindowCustom.cpp", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.admanager.jaxws.v202211; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; /** * * A list of all errors associated with the Range constraint. * * * <p>Java class for RangeError complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="RangeError"> * &lt;complexContent> * &lt;extension base="{https://www.google.com/apis/ads/publisher/v202211}ApiError"> * &lt;sequence> * &lt;element name="reason" type="{https://www.google.com/apis/ads/publisher/v202211}RangeError.Reason" minOccurs="0"/> * &lt;/sequence> * &lt;/extension> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "RangeError", propOrder = { "reason" }) public class RangeError extends ApiError { @XmlSchemaType(name = "string") protected RangeErrorReason reason; /** * Gets the value of the reason property. * * @return * possible object is * {@link RangeErrorReason } * */ public RangeErrorReason getReason() { return reason; } /** * Sets the value of the reason property. * * @param value * allowed object is * {@link RangeErrorReason } * */ public void setReason(RangeErrorReason value) { this.reason = value; } }
{ "content_hash": "ba5626e70a899e629b92c83eab42a327", "timestamp": "", "source": "github", "line_count": 82, "max_line_length": 128, "avg_line_length": 27.109756097560975, "alnum_prop": 0.6603688708951867, "repo_name": "googleads/googleads-java-lib", "id": "4afca5fc0226986df706aa8b560083638ed2c8b3", "size": "2223", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/dfp_appengine/src/main/java/com/google/api/ads/admanager/jaxws/v202211/RangeError.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "81068791" } ], "symlink_target": "" }
package com.devicehive.model.rpc;

import com.devicehive.shim.api.Action;
import com.devicehive.shim.api.Body;

/**
 * RPC response body carrying a single count value, tagged with the
 * COUNT_RESPONSE action.
 */
public class CountResponse extends Body {

    // Number of matched entities reported back to the caller.
    private long count;

    public CountResponse(long count) {
        super(Action.COUNT_RESPONSE);
        this.count = count;
    }

    public long getCount() {
        return count;
    }

    public void setCount(long count) {
        this.count = count;
    }
}
{ "content_hash": "598d6a00d4325743d528bc2a22013681", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 41, "avg_line_length": 17.791666666666668, "alnum_prop": 0.6487119437939111, "repo_name": "devicehive/devicehive-java-server", "id": "f0418c198c69eb227cb930f7d8a45383f6e71b58", "size": "1073", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "devicehive-common/src/main/java/com/devicehive/model/rpc/CountResponse.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "3007" }, { "name": "HTML", "bytes": "16626" }, { "name": "Java", "bytes": "1519376" }, { "name": "PLpgSQL", "bytes": "8639" }, { "name": "Shell", "bytes": "12940" } ], "symlink_target": "" }
package com.innovaee.eorder.dao.hibernate;

import com.innovaee.eorder.dao.MemberShipDao;
import com.innovaee.eorder.entity.MemberShip;

/**
 * @Title: MemberShipDao
 * @Description: Hibernate implementation of the membership data-access
 *               interface; all CRUD behavior is inherited from
 *               HibernateBaseDao.
 *
 * @version V1.0
 */
public class HibernateMemberShipDao extends HibernateBaseDao<MemberShip>
        implements MemberShipDao {

}
{ "content_hash": "db92ca65599866b74865d4fd537d7d6c", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 72, "avg_line_length": 19.166666666666668, "alnum_prop": 0.7652173913043478, "repo_name": "aaronluo/cuoxiazi", "id": "6246a8f76b36a2cabe5d8a721d433b2c0530f755", "size": "618", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "03_release/build_v2.0_20150330/code/eorder-ws/src/main/java/com/innovaee/eorder/dao/hibernate/HibernateMemberShipDao.java", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "504471" }, { "name": "HTML", "bytes": "112444" }, { "name": "Java", "bytes": "5247385" }, { "name": "JavaScript", "bytes": "1383858" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_45) on Sat Apr 09 10:10:56 EDT 2016 --> <title>StreamHook (apache-cassandra API)</title> <meta name="date" content="2016-04-09"> <link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="StreamHook (apache-cassandra API)"; } } catch(err) { } //--> var methods = {"i0":17,"i1":6,"i2":6,"i3":6}; var tabs = {65535:["t0","All Methods"],1:["t1","Static Methods"],2:["t2","Instance Methods"],4:["t3","Abstract Methods"],16:["t5","Default Methods"]}; var altColor = "altColor"; var rowColor = "rowColor"; var tableTab = "tableTab"; var activeTableTab = "activeTableTab"; </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="class-use/StreamHook.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../org/apache/cassandra/streaming/StreamException.html" title="class in org.apache.cassandra.streaming"><span 
class="typeNameLink">Prev&nbsp;Class</span></a></li> <li><a href="../../../../org/apache/cassandra/streaming/StreamManager.html" title="class in org.apache.cassandra.streaming"><span class="typeNameLink">Next&nbsp;Class</span></a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?org/apache/cassandra/streaming/StreamHook.html" target="_top">Frames</a></li> <li><a href="StreamHook.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li><a href="#field.summary">Field</a>&nbsp;|&nbsp;</li> <li>Constr&nbsp;|&nbsp;</li> <li><a href="#method.summary">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li><a href="#field.detail">Field</a>&nbsp;|&nbsp;</li> <li>Constr&nbsp;|&nbsp;</li> <li><a href="#method.detail">Method</a></li> </ul> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <!-- ======== START OF CLASS DATA ======== --> <div class="header"> <div class="subTitle">org.apache.cassandra.streaming</div> <h2 title="Interface StreamHook" class="title">Interface StreamHook</h2> </div> <div class="contentContainer"> <div class="description"> <ul class="blockList"> <li class="blockList"> <hr> <br> <pre>public interface <span class="typeNameLabel">StreamHook</span></pre> </li> </ul> </div> <div class="summary"> <ul class="blockList"> <li class="blockList"> <!-- =========== FIELD SUMMARY =========== --> <ul class="blockList"> <li class="blockList"><a name="field.summary"> <!-- --> </a> <h3>Field Summary</h3> <table class="memberSummary" 
border="0" cellpadding="3" cellspacing="0" summary="Field Summary table, listing fields, and an explanation"> <caption><span>Fields</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Field and Description</th> </tr> <tr class="altColor"> <td class="colFirst"><code>static <a href="../../../../org/apache/cassandra/streaming/StreamHook.html" title="interface in org.apache.cassandra.streaming">StreamHook</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/streaming/StreamHook.html#instance">instance</a></span></code>&nbsp;</td> </tr> </table> </li> </ul> <!-- ========== METHOD SUMMARY =========== --> <ul class="blockList"> <li class="blockList"><a name="method.summary"> <!-- --> </a> <h3>Method Summary</h3> <table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation"> <caption><span id="t0" class="activeTableTab"><span>All Methods</span><span class="tabEnd">&nbsp;</span></span><span id="t1" class="tableTab"><span><a href="javascript:show(1);">Static Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t2" class="tableTab"><span><a href="javascript:show(2);">Instance Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t3" class="tableTab"><span><a href="javascript:show(4);">Abstract Methods</a></span><span class="tabEnd">&nbsp;</span></span><span id="t5" class="tableTab"><span><a href="javascript:show(16);">Default Methods</a></span><span class="tabEnd">&nbsp;</span></span></caption> <tr> <th class="colFirst" scope="col">Modifier and Type</th> <th class="colLast" scope="col">Method and Description</th> </tr> <tr id="i0" class="altColor"> <td class="colFirst"><code>static <a href="../../../../org/apache/cassandra/streaming/StreamHook.html" title="interface in 
org.apache.cassandra.streaming">StreamHook</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/streaming/StreamHook.html#createHook--">createHook</a></span>()</code>&nbsp;</td> </tr> <tr id="i1" class="rowColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/streaming/StreamHook.html#reportIncomingFile-org.apache.cassandra.db.ColumnFamilyStore-org.apache.cassandra.io.sstable.SSTableMultiWriter-org.apache.cassandra.streaming.StreamSession-int-">reportIncomingFile</a></span>(<a href="../../../../org/apache/cassandra/db/ColumnFamilyStore.html" title="class in org.apache.cassandra.db">ColumnFamilyStore</a>&nbsp;cfs, <a href="../../../../org/apache/cassandra/io/sstable/SSTableMultiWriter.html" title="interface in org.apache.cassandra.io.sstable">SSTableMultiWriter</a>&nbsp;writer, <a href="../../../../org/apache/cassandra/streaming/StreamSession.html" title="class in org.apache.cassandra.streaming">StreamSession</a>&nbsp;session, int&nbsp;sequenceNumber)</code>&nbsp;</td> </tr> <tr id="i2" class="altColor"> <td class="colFirst"><code><a href="../../../../org/apache/cassandra/streaming/messages/OutgoingFileMessage.html" title="class in org.apache.cassandra.streaming.messages">OutgoingFileMessage</a></code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/streaming/StreamHook.html#reportOutgoingFile-org.apache.cassandra.streaming.StreamSession-org.apache.cassandra.io.sstable.format.SSTableReader-org.apache.cassandra.streaming.messages.OutgoingFileMessage-">reportOutgoingFile</a></span>(<a href="../../../../org/apache/cassandra/streaming/StreamSession.html" title="class in org.apache.cassandra.streaming">StreamSession</a>&nbsp;session, <a href="../../../../org/apache/cassandra/io/sstable/format/SSTableReader.html" title="class in 
org.apache.cassandra.io.sstable.format">SSTableReader</a>&nbsp;sstable, <a href="../../../../org/apache/cassandra/streaming/messages/OutgoingFileMessage.html" title="class in org.apache.cassandra.streaming.messages">OutgoingFileMessage</a>&nbsp;message)</code>&nbsp;</td> </tr> <tr id="i3" class="rowColor"> <td class="colFirst"><code>void</code></td> <td class="colLast"><code><span class="memberNameLink"><a href="../../../../org/apache/cassandra/streaming/StreamHook.html#reportStreamFuture-org.apache.cassandra.streaming.StreamSession-org.apache.cassandra.streaming.StreamResultFuture-">reportStreamFuture</a></span>(<a href="../../../../org/apache/cassandra/streaming/StreamSession.html" title="class in org.apache.cassandra.streaming">StreamSession</a>&nbsp;session, <a href="../../../../org/apache/cassandra/streaming/StreamResultFuture.html" title="class in org.apache.cassandra.streaming">StreamResultFuture</a>&nbsp;future)</code>&nbsp;</td> </tr> </table> </li> </ul> </li> </ul> </div> <div class="details"> <ul class="blockList"> <li class="blockList"> <!-- ============ FIELD DETAIL =========== --> <ul class="blockList"> <li class="blockList"><a name="field.detail"> <!-- --> </a> <h3>Field Detail</h3> <a name="instance"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>instance</h4> <pre>static final&nbsp;<a href="../../../../org/apache/cassandra/streaming/StreamHook.html" title="interface in org.apache.cassandra.streaming">StreamHook</a> instance</pre> </li> </ul> </li> </ul> <!-- ============ METHOD DETAIL ========== --> <ul class="blockList"> <li class="blockList"><a name="method.detail"> <!-- --> </a> <h3>Method Detail</h3> <a name="reportOutgoingFile-org.apache.cassandra.streaming.StreamSession-org.apache.cassandra.io.sstable.format.SSTableReader-org.apache.cassandra.streaming.messages.OutgoingFileMessage-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>reportOutgoingFile</h4> <pre><a 
href="../../../../org/apache/cassandra/streaming/messages/OutgoingFileMessage.html" title="class in org.apache.cassandra.streaming.messages">OutgoingFileMessage</a>&nbsp;reportOutgoingFile(<a href="../../../../org/apache/cassandra/streaming/StreamSession.html" title="class in org.apache.cassandra.streaming">StreamSession</a>&nbsp;session, <a href="../../../../org/apache/cassandra/io/sstable/format/SSTableReader.html" title="class in org.apache.cassandra.io.sstable.format">SSTableReader</a>&nbsp;sstable, <a href="../../../../org/apache/cassandra/streaming/messages/OutgoingFileMessage.html" title="class in org.apache.cassandra.streaming.messages">OutgoingFileMessage</a>&nbsp;message)</pre> </li> </ul> <a name="reportStreamFuture-org.apache.cassandra.streaming.StreamSession-org.apache.cassandra.streaming.StreamResultFuture-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>reportStreamFuture</h4> <pre>void&nbsp;reportStreamFuture(<a href="../../../../org/apache/cassandra/streaming/StreamSession.html" title="class in org.apache.cassandra.streaming">StreamSession</a>&nbsp;session, <a href="../../../../org/apache/cassandra/streaming/StreamResultFuture.html" title="class in org.apache.cassandra.streaming">StreamResultFuture</a>&nbsp;future)</pre> </li> </ul> <a name="reportIncomingFile-org.apache.cassandra.db.ColumnFamilyStore-org.apache.cassandra.io.sstable.SSTableMultiWriter-org.apache.cassandra.streaming.StreamSession-int-"> <!-- --> </a> <ul class="blockList"> <li class="blockList"> <h4>reportIncomingFile</h4> <pre>void&nbsp;reportIncomingFile(<a href="../../../../org/apache/cassandra/db/ColumnFamilyStore.html" title="class in org.apache.cassandra.db">ColumnFamilyStore</a>&nbsp;cfs, <a href="../../../../org/apache/cassandra/io/sstable/SSTableMultiWriter.html" title="interface in org.apache.cassandra.io.sstable">SSTableMultiWriter</a>&nbsp;writer, <a href="../../../../org/apache/cassandra/streaming/StreamSession.html" title="class in 
org.apache.cassandra.streaming">StreamSession</a>&nbsp;session, int&nbsp;sequenceNumber)</pre> </li> </ul> <a name="createHook--"> <!-- --> </a> <ul class="blockListLast"> <li class="blockList"> <h4>createHook</h4> <pre>static&nbsp;<a href="../../../../org/apache/cassandra/streaming/StreamHook.html" title="interface in org.apache.cassandra.streaming">StreamHook</a>&nbsp;createHook()</pre> </li> </ul> </li> </ul> </li> </ul> </div> </div> <!-- ========= END OF CLASS DATA ========= --> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li><a href="package-summary.html">Package</a></li> <li class="navBarCell1Rev">Class</li> <li><a href="class-use/StreamHook.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../org/apache/cassandra/streaming/StreamException.html" title="class in org.apache.cassandra.streaming"><span class="typeNameLink">Prev&nbsp;Class</span></a></li> <li><a href="../../../../org/apache/cassandra/streaming/StreamManager.html" title="class in org.apache.cassandra.streaming"><span class="typeNameLink">Next&nbsp;Class</span></a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?org/apache/cassandra/streaming/StreamHook.html" target="_top">Frames</a></li> <li><a href="StreamHook.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> 
</ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <div> <ul class="subNavList"> <li>Summary:&nbsp;</li> <li>Nested&nbsp;|&nbsp;</li> <li><a href="#field.summary">Field</a>&nbsp;|&nbsp;</li> <li>Constr&nbsp;|&nbsp;</li> <li><a href="#method.summary">Method</a></li> </ul> <ul class="subNavList"> <li>Detail:&nbsp;</li> <li><a href="#field.detail">Field</a>&nbsp;|&nbsp;</li> <li>Constr&nbsp;|&nbsp;</li> <li><a href="#method.detail">Method</a></li> </ul> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &copy; 2016 The Apache Software Foundation</small></p> </body> </html>
{ "content_hash": "e6646d8b8e0d699747322e474785785a", "timestamp": "", "source": "github", "line_count": 309, "max_line_length": 659, "avg_line_length": 48.773462783171524, "alnum_prop": 0.6626633932718466, "repo_name": "jasonwee/videoOnCloud", "id": "d9939a4f563489ad92461fb7781cd60f1f6b284a", "size": "15071", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ahkl/apache-cassandra-3.5/javadoc/org/apache/cassandra/streaming/StreamHook.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "116270" }, { "name": "C", "bytes": "2209717" }, { "name": "C++", "bytes": "375267" }, { "name": "CSS", "bytes": "1134648" }, { "name": "Dockerfile", "bytes": "1656" }, { "name": "HTML", "bytes": "306558398" }, { "name": "Java", "bytes": "1465506" }, { "name": "JavaScript", "bytes": "9028509" }, { "name": "Jupyter Notebook", "bytes": "30907" }, { "name": "Less", "bytes": "107003" }, { "name": "PHP", "bytes": "856" }, { "name": "PowerShell", "bytes": "77807" }, { "name": "Pug", "bytes": "2968" }, { "name": "Python", "bytes": "1001861" }, { "name": "R", "bytes": "7390" }, { "name": "Roff", "bytes": "3553" }, { "name": "Shell", "bytes": "206191" }, { "name": "Thrift", "bytes": "80564" }, { "name": "XSLT", "bytes": "4740" } ], "symlink_target": "" }
GitHub Issue Reporter Module for XBMC
=====================================

This addon provides an exported module to report issues in your addon to GitHub.

Usage
-----

* Add `<import addon="script.module.githubissuereporter"/>` in the `<requires>` section of your addon's addon XML file
* Wrap your addon code within a try / except block
* Import and configure your IssueReporter somewhere, for example:

```python
from issue_reporter import IssueReporter

issue_reporter = IssueReporter({
    'github_api_url': 'https://api.github.com/repos/myorg/myaddon',
    'addon_name': 'My Addon Name',
    'addon_id': 'plugin.video.myaddon',
    'addon_version': '1.2.3'
})
```

* When an exception occurs, use an XBMC dialog to ask the user for consent to report the error, and call `issue_reporter.report_issue(traceback)` where traceback is the python traceback string.

Projects using this module
--------------------------

* [ABC iView Addon](https://github.com/andybotting/xbmc-addon-abc-iview) (planned)
* [TenPlay Addon](https://github.com/xbmc-catchuptv-au/plugin.video.catchuptv.au.ten) (planned)
{ "content_hash": "437b9dba368f0d3f7c04d4ed2e440a86", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 193, "avg_line_length": 39.357142857142854, "alnum_prop": 0.6978221415607986, "repo_name": "xbmc-catchuptv-au/script.module.githubissuereporter", "id": "23c2632c4fd6f3a9493a6ecc54f9122d2b0595c8", "size": "1102", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "11474" } ], "symlink_target": "" }
The MIT License (MIT) Copyright (c) [2014] [Wasim Halani 'washal'] Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
{ "content_hash": "70fe571fa1c8a1055565cbea34e88be4", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 78, "avg_line_length": 51.95238095238095, "alnum_prop": 0.8001833180568286, "repo_name": "washal/etcpasswd", "id": "a0b3cc15690bc9466f9cb3c1218433f39b218d29", "size": "1091", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "LICENSE.md", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "16877" }, { "name": "PHP", "bytes": "3130" } ], "symlink_target": "" }
/**
 * @requires OpenLayers/Marker.js
 */

/**
 * Class: OpenLayers.Marker.Box
 * A marker rendered as a rectangular, bordered DIV that covers a
 * geographic bounding box, rather than as an icon at a point.
 *
 * Inherits from:
 *  - <OpenLayers.Marker>
 */
OpenLayers.Marker.Box = OpenLayers.Class(OpenLayers.Marker, {

    /**
     * Property: bounds
     * {<OpenLayers.Bounds>} The geographic extent covered by the box.
     */
    bounds: null,

    /**
     * Property: div
     * {DOMElement} The DIV element that visually represents the box.
     */
    div: null,

    /**
     * Constructor: OpenLayers.Marker.Box
     *
     * Parameters:
     * bounds - {<OpenLayers.Bounds>} Extent the box should cover.
     * borderColor - {String} CSS border color, defaults to "red".
     * borderWidth - {int} Border width in pixels, defaults to 2.
     */
    initialize: function (bounds, borderColor, borderWidth) {
        this.bounds = bounds;
        // Build the DOM node first; the events object is attached to it.
        var boxDiv = OpenLayers.Util.createDiv();
        boxDiv.style.overflow = 'hidden';
        this.div = boxDiv;
        this.events = new OpenLayers.Events(this, this.div);
        this.setBorder(borderColor, borderWidth);
    },

    /**
     * Method: destroy
     * Release references held by this marker, then defer to the
     * superclass destroy.
     */
    destroy: function () {
        this.bounds = null;
        this.div = null;
        OpenLayers.Marker.prototype.destroy.apply(this, arguments);
    },

    /**
     * Method: setBorder
     * Change the box's border color and width.
     *
     * Parameters:
     * color - {String} Defaults to "red" when falsy.
     * width - {int} Defaults to 2 when falsy.
     */
    setBorder: function (color, width) {
        this.div.style.border = (width || 2) + "px solid " + (color || "red");
    },

    /**
     * Method: draw
     * Position and size the box's DIV.
     *
     * Parameters:
     * px - {<OpenLayers.Pixel>}
     * sz - {<OpenLayers.Size>}
     *
     * Returns:
     * {DOMElement} The marker's DIV, updated in place.
     */
    draw: function (px, sz) {
        OpenLayers.Util.modifyDOMElement(this.div, null, px, sz);
        return this.div;
    },

    /**
     * Method: onScreen
     *
     * Returns:
     * {Boolean} Whether or not the marker is currently visible on screen.
     */
    onScreen: function () {
        if (!this.map) {
            return false;
        }
        return this.map.getExtent().containsBounds(this.bounds, true, true);
    },

    /**
     * Method: display
     * Hide or show the box.
     *
     * Parameters:
     * display - {Boolean} True to show, false to hide.
     */
    display: function (display) {
        this.div.style.display = display ? "" : "none";
    },

    CLASS_NAME: "OpenLayers.Marker.Box"
});
{ "content_hash": "db56b9e7bd157327e13cbd132f974d87", "timestamp": "", "source": "github", "line_count": 117, "max_line_length": 76, "avg_line_length": 21.45299145299145, "alnum_prop": 0.5310756972111553, "repo_name": "girc/dmis", "id": "510320cd565ea00d016109b69c527c82766e0d35", "size": "2761", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "common/asset-files/openlayers/ol2/lib/OpenLayers/Marker/Box.js", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ApacheConf", "bytes": "838" }, { "name": "CSS", "bytes": "220710" }, { "name": "Emacs Lisp", "bytes": "2410" }, { "name": "HTML", "bytes": "1616675" }, { "name": "JavaScript", "bytes": "33905102" }, { "name": "PHP", "bytes": "1202081" }, { "name": "Python", "bytes": "76407" }, { "name": "Shell", "bytes": "3444" } ], "symlink_target": "" }
package ca.uhn.fhir.rest.api.server;

import org.hl7.fhir.instance.model.api.IBaseResource;

/**
 * This object is an abstraction for a server response that is going to
 * return one or more resources to the user. This can be used by interceptors
 * to make decisions about whether a resource should be visible or not
 * to the user making the request.
 */
public interface IPreResourceAccessDetails {

	/**
	 * @return the number of resources that are candidates for inclusion in
	 *         the response (i.e. the valid index range for the other methods)
	 */
	int size();

	/**
	 * Returns the resource at the given index.
	 *
	 * @param theIndex the index of the resource to retrieve (must be less
	 *                 than {@link #size()})
	 * @return the resource at the given index
	 */
	IBaseResource getResource(int theIndex);

	/**
	 * Marks the resource at the given index so that it will not be returned
	 * to the client in the response.
	 *
	 * @param theIndex the index of the resource to suppress (must be less
	 *                 than {@link #size()})
	 */
	void setDontReturnResourceAtIndex(int theIndex);

}
{ "content_hash": "389838241ef96747675dd82009ebad3a", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 77, "avg_line_length": 24.523809523809526, "alnum_prop": 0.7689320388349514, "repo_name": "SingingTree/hapi-fhir", "id": "8d53d02068de6312d90d7f3c0220947ccfddfa28", "size": "1181", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/api/server/IPreResourceAccessDetails.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "3861" }, { "name": "CSS", "bytes": "7305" }, { "name": "Dockerfile", "bytes": "130" }, { "name": "GAP", "bytes": "25037" }, { "name": "HTML", "bytes": "244839" }, { "name": "Java", "bytes": "23821620" }, { "name": "JavaScript", "bytes": "31583" }, { "name": "Kotlin", "bytes": "3972" }, { "name": "Ruby", "bytes": "230674" }, { "name": "Shell", "bytes": "46254" } ], "symlink_target": "" }
// Compile-time "static JSON" facility: a json<> object derives from one
// field<> base per member, each pairing a name (supplied as a NameProvider
// type carrying a static C-string `value`) with a value type. Field lookup
// by name is resolved entirely at compile time via the type_list
// metafunctions in `detail`.
//
// NOTE(review): str() serializes as `{ name: value, ... }` without quoting
// names or string values, and stream_set() parses by skipping to ':' / '}'
// delimiters — a simplified, round-trippable format rather than strict JSON.
namespace helene
{
namespace static_json
{

namespace detail
{

// Variadic bag of types; the basic currency of the metafunctions below.
template <class...>
struct type_list
{
};


////////////////////////////////////////////////////////////////////////////////
// get first type of a type_list
template <class TypeList>
struct head;

template <class First, class... Rest>
struct head<type_list<First, Rest...>>
{
    typedef First type;
};

template <class TypeList>
using head_t = typename head<TypeList>::type;


////////////////////////////////////////////////////////////////////////////////
// get tail of a type_list as another type_list
template <class TypeList>
struct tail;

template <class First, class... Rest>
struct tail<type_list<First, Rest...>>
{
    typedef type_list<Rest...> type;
};

template <class TypeList>
using tail_t = typename tail<TypeList>::type;


////////////////////////////////////////////////////////////////////////////////
// returns length of a type_list
template <class TypeList>
struct length;

template <class... Types>
struct length<type_list<Types...>>
{
    static const std::size_t value = sizeof...(Types);
};

template <class TypeList>
constexpr std::size_t length_v = length<TypeList>::value;


////////////////////////////////////////////////////////////////////////////////
// returns type at index in a type_list
// (recurses on the tail, decrementing Index until it hits 0)
template <class TypeList, std::size_t Index>
struct type_at_index : type_at_index<tail_t<TypeList>, Index - 1>
{
    static_assert(length_v<TypeList>> Index, "out of bounds access");
};

// Base case: index 0 selects the head of the list.
template <class TypeList>
struct type_at_index<TypeList, 0>
{
    typedef head_t<TypeList> type;
};

template <class TypeList, std::size_t Index>
using type_at_index_t = typename type_at_index<TypeList, Index>::type;


////////////////////////////////////////////////////////////////////////////////
// return index of first type encountered that matches T
// (walks the list, incrementing Index, until the head equals T)
template <class TypeList, class T, std::size_t Index = 0>
struct index_of_type;

template <class First, class... Rest, class T, std::size_t Index>
struct index_of_type<type_list<First, Rest...>, T, Index>
    : index_of_type<type_list<Rest...>, T, Index + 1>
{
};

// Base case: head matches T, so the current Index is the answer.
template <class... Rest, class T, std::size_t Index>
struct index_of_type<type_list<T, Rest...>, T, Index>
{
    static const std::size_t value = Index;
};

template <class TypeList, class T>
constexpr std::size_t index_of_type_v = index_of_type<TypeList, T>::value;


// Compile-time strlen of a NameProvider's static string: recurse over
// characters until '\0' is reached.
// NOTE(review): not referenced elsewhere in this header's visible code —
// confirm whether it is still needed.
template <class NameProvider, char C, std::size_t P>
struct static_string_length_
    : static_string_length_<NameProvider, NameProvider::value[P], P + 1>
{
};

template <class NameProvider, std::size_t P>
struct static_string_length_<NameProvider, '\0', P>
{
    static const std::size_t value = P;
};

template <class NameProvider>
constexpr std::size_t static_string_length_v =
    static_string_length_<NameProvider, NameProvider::value[0], 1>::value;

} // namespace detail


// Generic field: serializes its value with std::to_string (arithmetic types).
template <class NameProvider, class Type>
struct field
{
    Type value;

    // Field name, taken from the NameProvider's static string.
    std::string name() const
    {
        return NameProvider::value;
    }

    // Value rendered as text for serialization.
    std::string str() const
    {
        return std::to_string(value);
    }
};

// bool specialization: renders as the literals "true" / "false".
template <class NameProvider>
struct field<NameProvider, bool>
{
    bool value;

    std::string name() const
    {
        return NameProvider::value;
    }

    std::string str() const
    {
        if(value)
        {
            return "true";
        }

        return "false";
    }
};

// std::string specialization: rendered verbatim (unquoted — see file note).
template <class NameProvider>
struct field<NameProvider, std::string>
{
    std::string value;

    std::string name() const
    {
        return NameProvider::value;
    }

    std::string str() const
    {
        return value;
    }
};


template <class... Fields>
class json;

// Forward declaration of the nested-object field specialization (defined
// after json, since it stores a json<> by value).
template <class NameProvider, class... Subfields>
struct field<NameProvider, json<Subfields...>>;


// The json object itself: inherits privately-accessible field<> bases, one
// per (NameProvider, Type) pair, and offers compile-time access by name.
template <class... NameProviders, class... Types>
class json<field<NameProviders, Types>...> : field<NameProviders, Types>...
{
public:
    // Maps a NameProvider to the value type of its field: the position of
    // the name in NameProviders... indexes into Types...
    template <class NameProvider>
    struct type_from_name
    {
        typedef detail::type_at_index_t<
            detail::type_list<Types...>,
            detail::index_of_type_v<detail::type_list<NameProviders...>,
                                    NameProvider>>
            type;
    };

    template <class NameProvider>
    using type_from_name_t = typename type_from_name<NameProvider>::type;

private:
    // Recursive per-field dispatcher used by both serialization (str) and
    // deserialization (stream_set). The non-last case appends/consumes a
    // separator and recurses on the remaining fields.
    template <class FirstField, class... RestFields>
    struct serialize_helper
    {
        static std::string
        dispatch_str(const json<field<NameProviders, Types>...>* self)
        {
            // Upcast to the field base to reach its name()/str().
            const auto field_ptr = static_cast<const FirstField*>(self);

            return field_ptr->name() + ": " + field_ptr->str() + ", " +
                   serialize_helper<RestFields...>::dispatch_str(self);
        };

        static void
        dispatch_stream_set(std::istream& in,
                            json<field<NameProviders, Types>...>* self)
        {
            in >> static_cast<FirstField*>(self)->value;
            // Skip ahead to the next field's value (past its "name:").
            in.ignore(std::numeric_limits<std::streamsize>::max(), ':');
            serialize_helper<RestFields...>::dispatch_stream_set(in, self);
        }
    };

    // Base case: last field emits no trailing separator and stops recursing.
    template <class LastField>
    struct serialize_helper<LastField>
    {
        static std::string
        dispatch_str(const json<field<NameProviders, Types>...>* self)
        {
            const auto field_ptr = static_cast<const LastField*>(self);

            return field_ptr->name() + ": " + field_ptr->str();
        };

        static void
        dispatch_stream_set(std::istream& in,
                            json<field<NameProviders, Types>...>* self)
        {
            in >> static_cast<LastField*>(self)->value;
        }
    };

public:
    // Access a field's value by its NameProvider type; resolved at compile
    // time by upcasting to the matching field base.
    template <class Name>
    type_from_name_t<Name>& get()
    {
        return static_cast<field<Name, type_from_name_t<Name>>*>(this)->value;
    }

    template <class Name>
    const type_from_name_t<Name>& get() const
    {
        return static_cast<const field<Name, type_from_name_t<Name>>*>(this)
            ->value;
    }

    // Serialize all fields as "{ name: value, ... }" (see file note on
    // format).
    std::string str() const
    {
        std::string out("{ ");

        out.append(
            serialize_helper<field<NameProviders, Types>...>::dispatch_str(
                this));

        out.append(" }");

        return out;
    }

    // Populate all fields from a stream produced in the same format.
    void stream_set(std::istream& in)
    {
        // strip beginning of json stream ie. "{ somename:"
        in.ignore(std::numeric_limits<std::streamsize>::max(), ':');

        serialize_helper<field<NameProviders, Types>...>::dispatch_stream_set(
            in, this);

        // ensure whole json message is consumed
        in.ignore(std::numeric_limits<std::streamsize>::max(), '}');
    }

private:
};


// Nested-object field: holds a sub-json by value and delegates rendering to
// it, so objects nest recursively in the output.
template <class NameProvider, class... Subfields>
struct field<NameProvider, json<Subfields...>>
{
    json<Subfields...> value;

    std::string name() const
    {
        return NameProvider::value;
    }

    std::string str() const
    {
        return value.str();
    }
};


// Stream insertion: writes the serialized form of the object.
template <class... Fields>
std::ostream& operator<<(std::ostream& out, const json<Fields...>& obj)
{
    out << obj.str();

    return out;
}

// Stream extraction: parses a serialized object back into obj.
template <class... Fields>
std::istream& operator>>(std::istream& in, json<Fields...>& obj)
{
    obj.stream_set(in);

    return in;
}

} // namespace static_json
} // namespace helene
{ "content_hash": "a5626e0108b8c12dc2d2710657f18ca8", "timestamp": "", "source": "github", "line_count": 326, "max_line_length": 80, "avg_line_length": 22.447852760736197, "alnum_prop": 0.57310740639519, "repo_name": "bergesenha/helene", "id": "6a59161bdf902fd1a13d00ab21be1605f09d5562", "size": "7373", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "include/static_json.hpp", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "152952" }, { "name": "CMake", "bytes": "1339" } ], "symlink_target": "" }
package net.eusashead.bjugquerydsl.controller;

import static net.eusashead.hateoas.converter.hal.HalHttpMessageConverter.HAL_JSON;
import static net.eusashead.hateoas.converter.hal.HalHttpMessageConverter.HAL_XML;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import static org.springframework.test.web.servlet.setup.MockMvcBuilders.webAppContextSetup;

import java.io.IOException;
import java.io.StringReader;

import junit.framework.Assert;
import net.eusashead.bjugquerydsl.config.WebConfig;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MvcResult;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.context.WebApplicationContext;

import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.theoryinpractise.halbuilder.DefaultRepresentationFactory;
import com.theoryinpractise.halbuilder.api.ReadableRepresentation;

/**
 * Tests for a Spring MVC controller
 * using QueryDSL to create JSON
 * endpoints from JPA entities.
 *
 * Each test drives the controller through MockMvc against the real
 * application context ({@link WebConfig}) and then parses the HAL
 * response body to verify its contents.
 *
 * @author patrickvk
 */
@RunWith(SpringJUnit4ClassRunner.class)
@WebAppConfiguration
@ContextConfiguration(classes={WebConfig.class})
public class SkuControllerTest {

	@Autowired
	private WebApplicationContext context;

	// NOTE(review): injected but not referenced anywhere in this class —
	// confirm whether it can be removed.
	@Autowired
	private ObjectMapper mapper;

	/**
	 * A paged search (page 0, size 3, sorted by skuId descending) should
	 * return HAL+JSON with exactly 3 embedded "content" resources.
	 */
	@Test
	@Transactional
	public void testSkuSearchPaging() throws Exception {
		MvcResult result = webAppContextSetup(context)
				.build()
				.perform(get("http://localhost/sku/?page=0&size=3&sort=skuId,desc")
						.contentType(HAL_JSON).accept(HAL_JSON))
				.andExpect(status().isOk())
				.andExpect(content().contentType(HAL_JSON))
				.andReturn();

		// Verify result
		ReadableRepresentation skus = getContent(result);
		Assert.assertEquals(Integer.valueOf(3), Integer.valueOf(skus.getResourcesByRel("content").size()));
	}

	/**
	 * A search combining a price range with dynamic attribute filters
	 * (neck=maple, colour=sonic blue) should return HAL+XML with exactly
	 * one matching "content" resource.
	 */
	@Test
	@Transactional
	public void testSkuSearchWithAttr() throws Exception {
		MvcResult result = webAppContextSetup(context)
				.build()
				.perform(get("http://localhost/sku/?price.min=1000&price.max=1150&attr.neck=maple&attr.colour=sonic blue")
						.contentType(HAL_XML).accept(HAL_XML))
				.andExpect(status().isOk())
				.andExpect(content().contentType(HAL_XML))
				.andReturn();

		// Verify result
		ReadableRepresentation skus = getContent(result);
		Assert.assertEquals(Integer.valueOf(1), Integer.valueOf(skus.getResourcesByRel("content").size()));
	}

	/**
	 * Fetching a single SKU by id as HAL+JSON should return the resource
	 * whose skuId property matches the requested id.
	 */
	@Test
	@Transactional
	public void testFindOneJson() throws Exception {
		MvcResult result = webAppContextSetup(context)
				.build()
				.perform(get("http://localhost/sku/1")
						.contentType(HAL_JSON).accept(HAL_JSON))
				.andExpect(status().isOk())
				.andExpect(content().contentType(HAL_JSON))
				.andReturn();

		// Verify result
		ReadableRepresentation sku = getContent(result);
		Assert.assertEquals("1", sku.getValue("skuId").toString());
	}

	/**
	 * Fetching a single SKU by id as HAL+XML should return the resource
	 * whose skuId property matches the requested id.
	 */
	@Test
	@Transactional
	public void testFindOneXml() throws Exception {
		MvcResult result = webAppContextSetup(context)
				.build()
				.perform(get("http://localhost/sku/1")
						.contentType(HAL_XML).accept(HAL_XML))
				.andExpect(status().isOk())
				.andExpect(content().contentType(HAL_XML))
				.andReturn();

		// Verify result
		ReadableRepresentation sku = getContent(result);
		Assert.assertEquals("1", sku.getValue("skuId").toString());
	}

	/**
	 * Extract a SKU from the response by parsing the response body as a
	 * HAL representation.
	 *
	 * @param result the MockMvc result whose body should be parsed
	 * @return the parsed HAL representation
	 * @throws IOException
	 * @throws JsonParseException
	 * @throws JsonMappingException
	 */
	private ReadableRepresentation getContent(MvcResult result) throws IOException, JsonParseException, JsonMappingException {
		return new DefaultRepresentationFactory().readRepresentation(new StringReader(result.getResponse().getContentAsString()));
	}

}
{ "content_hash": "a27ee38e687ca56fa339c42b7e18bfee", "timestamp": "", "source": "github", "line_count": 130, "max_line_length": 124, "avg_line_length": 33.707692307692305, "alnum_prop": 0.7676859881332725, "repo_name": "patrickvankann/bjug-querydsl", "id": "b1c073fcf0937399ba9790a298347cb2237fde3b", "size": "4382", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/test/java/net/eusashead/bjugquerydsl/controller/SkuControllerTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "198183" } ], "symlink_target": "" }
<?php
// View: outpatient (IRJ) polyclinic list with an autocomplete search box.
// rjvheader.php / rjvfooter.php provide the head() and foot() helpers used
// below (head() presumably loads jQuery and the autocomplete plugin before
// this inline script runs — TODO confirm).
include('rjvheader.php');
include('rjvfooter.php');
?>
<html>
	<?php head(); ?>
	<script type='text/javascript'>
		jQuery.noConflict(); // release the global $ so other libraries can use it
		var site = "<?php echo site_url();?>";
		// BUGFIX: after noConflict() the global $ no longer refers to jQuery,
		// so the ready handler must receive jQuery as its $ parameter for the
		// $(...) calls inside onSelect to work.
		jQuery(function($) {
			jQuery('.auto_search_poli').autocomplete({
				// serviceUrl is the URL of the controller/function that handles our request
				serviceUrl: site+'/irj/rjcautocomplete/data_poli',
				// this function runs when the user picks one of the suggestions
				onSelect: function (suggestion) {
					$('#nm_poli').val(''+suggestion.nm_poli+' ('+suggestion.id_poli+')');
					$('#id_poli').val(''+suggestion.id_poli);
					// NOTE(review): no #kd_ruang element exists in this view's
					// markup, so this call is a no-op here — confirm whether a
					// hidden field is rendered elsewhere or this can be removed.
					$('#kd_ruang').val(''+suggestion.kd_ruang);
				}
			});
		});
	</script>
<body class="hold-transition skin-blue sidebar-mini">
	<div class="wrapper">
		<?php include('rjvnav.php'); ?>
		<div class="content-wrapper">
			<div class="container-fluid"><br/>
				<div class="row">
					<?php echo form_open('irj/rjcpelayanan/pasien_poli');?>
					<div class="col-lg-12">
						<div class="input-group">
							<input type="search" class="auto_search_poli form-control" name="nm_poli" id="nm_poli" placeholder="Cari Poli" required>
							<input type="hidden" class="form-control" name="id_poli" id="id_poli" required>
							<span class="input-group-btn">
								<button class="btn btn-primary" type="submit">Cari</button>
							</span>
						</div><!-- /input-group -->
					</div><!-- /.col-lg-6 -->
					<?php echo form_close();?>
				</div><!-- /.row --><br/>
				<div style="display:block;overflow:auto;">
					<table class="table table-hover table-striped">
						<?php
						// One row per polyclinic; the badge shows the number of
						// waiting patients when non-zero.
						// NOTE(review): nm_poli/id_poli are echoed without HTML
						// escaping — consider htmlspecialchars() if these values
						// can contain markup.
						foreach($poliklinik as $row){
						?>
						<tr>
							<td>
								<a href="<?php echo site_url('irj/rjcpelayanan/kunj_pasien_poli/'.$row->id_poli)?>">
									<?php
									echo $row->nm_poli.' ('.$row->id_poli.')';
									if($row->counter>0){
										echo '<span class="label label-danger pull-right">'.$row->counter.'</span>';
									}
									?>
								</a>
							</td>
						</tr>
						<?php
						}
						?>
					</table>
				</div>
			</div>
		</div><!-- content-wrapper -->
		<?php foot(); ?>
	</div><!-- wrapper -->
</body>
</html>
{ "content_hash": "47237fdbb1a32bedd47e93c787c6cb4d", "timestamp": "", "source": "github", "line_count": 75, "max_line_length": 124, "avg_line_length": 29.786666666666665, "alnum_prop": 0.5608773500447628, "repo_name": "asepmulyadi011/SistemRumahSakit", "id": "b424d612d71690d05301ec8c78078ccdce5d5cee", "size": "2234", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "application/views/irj/rjvlistpoli.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "366" }, { "name": "CSS", "bytes": "144232" }, { "name": "HTML", "bytes": "8139111" }, { "name": "JavaScript", "bytes": "560449" }, { "name": "PHP", "bytes": "1852280" } ], "symlink_target": "" }
This list is sorted in alphabetical order.

* [Alexey Kuzmin](https://github.com/alexeykuzmin)
  * Grunt 0.4 support
  * Made `update_url` optional
* [Bryan Ehrlich](https://github.com/adotout)
  * Windows compatibility
* [Changwoo Park](https://github.com/pismute)
  * Various bugfixes
* [Patrick Williams](https://github.com/pwmckenna)
  * Node 0.6 and 0.8 API deprecation cleanup
{ "content_hash": "943ee74523aa674c8d332ccac08d7c4f", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 50, "avg_line_length": 34.81818181818182, "alnum_prop": 0.7389033942558747, "repo_name": "graydon/stxt", "id": "3b97740bb10e24e6bfeea11b06fac41c2f1e2fbe", "size": "409", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "node_modules/grunt-crx/CONTRIBUTORS.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "9430" }, { "name": "JavaScript", "bytes": "229962" }, { "name": "Shell", "bytes": "187" } ], "symlink_target": "" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/> <meta http-equiv="X-UA-Compatible" content="IE=9"/> <meta name="generator" content="Doxygen 1.8.9.1"/> <title>V8 API Reference Guide for node.js v0.4.0: Member List</title> <link href="tabs.css" rel="stylesheet" type="text/css"/> <script type="text/javascript" src="jquery.js"></script> <script type="text/javascript" src="dynsections.js"></script> <link href="search/search.css" rel="stylesheet" type="text/css"/> <script type="text/javascript" src="search/searchdata.js"></script> <script type="text/javascript" src="search/search.js"></script> <script type="text/javascript"> $(document).ready(function() { init_search(); }); </script> <link href="doxygen.css" rel="stylesheet" type="text/css" /> </head> <body> <div id="top"><!-- do not remove this div, it is closed by doxygen! 
--> <div id="titlearea"> <table cellspacing="0" cellpadding="0"> <tbody> <tr style="height: 56px;"> <td style="padding-left: 0.5em;"> <div id="projectname">V8 API Reference Guide for node.js v0.4.0 </div> </td> </tr> </tbody> </table> </div> <!-- end header part --> <!-- Generated by Doxygen 1.8.9.1 --> <script type="text/javascript"> var searchBox = new SearchBox("searchBox", "search",false,'Search'); </script> <div id="navrow1" class="tabs"> <ul class="tablist"> <li><a href="index.html"><span>Main&#160;Page</span></a></li> <li><a href="namespaces.html"><span>Namespaces</span></a></li> <li class="current"><a href="annotated.html"><span>Classes</span></a></li> <li><a href="files.html"><span>Files</span></a></li> <li><a href="examples.html"><span>Examples</span></a></li> <li> <div id="MSearchBox" class="MSearchBoxInactive"> <span class="left"> <img id="MSearchSelect" src="search/mag_sel.png" onmouseover="return searchBox.OnSearchSelectShow()" onmouseout="return searchBox.OnSearchSelectHide()" alt=""/> <input type="text" id="MSearchField" value="Search" accesskey="S" onfocus="searchBox.OnSearchFieldFocus(true)" onblur="searchBox.OnSearchFieldFocus(false)" onkeyup="searchBox.OnSearchFieldChange(event)"/> </span><span class="right"> <a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="search/close.png" alt=""/></a> </span> </div> </li> </ul> </div> <div id="navrow2" class="tabs2"> <ul class="tablist"> <li><a href="annotated.html"><span>Class&#160;List</span></a></li> <li><a href="classes.html"><span>Class&#160;Index</span></a></li> <li><a href="hierarchy.html"><span>Class&#160;Hierarchy</span></a></li> <li><a href="functions.html"><span>Class&#160;Members</span></a></li> </ul> </div> <!-- window showing the filter options --> <div id="MSearchSelectWindow" onmouseover="return searchBox.OnSearchSelectShow()" onmouseout="return searchBox.OnSearchSelectHide()" onkeydown="return searchBox.OnSearchSelectKey(event)"> 
</div> <!-- iframe showing the search results (closed by default) --> <div id="MSearchResultsWindow"> <iframe src="javascript:void(0)" frameborder="0" name="MSearchResults" id="MSearchResults"> </iframe> </div> <div id="nav-path" class="navpath"> <ul> <li class="navelem"><a class="el" href="namespacev8.html">v8</a></li><li class="navelem"><a class="el" href="classv8_1_1_cpu_profiler.html">CpuProfiler</a></li> </ul> </div> </div><!-- top --> <div class="header"> <div class="headertitle"> <div class="title">v8::CpuProfiler Member List</div> </div> </div><!--header--> <div class="contents"> <p>This is the complete list of members for <a class="el" href="classv8_1_1_cpu_profiler.html">v8::CpuProfiler</a>, including all inherited members.</p> <table class="directory"> <tr class="even"><td class="entry"><a class="el" href="classv8_1_1_cpu_profiler.html#a8165ab53a0d9669757e64efd80d4f5d0">FindProfile</a>(unsigned uid, Handle&lt; Value &gt; security_token=Handle&lt; Value &gt;())</td><td class="entry"><a class="el" href="classv8_1_1_cpu_profiler.html">v8::CpuProfiler</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr><td class="entry"><a class="el" href="classv8_1_1_cpu_profiler.html#aca7acdfb0f9a4e7744638dddf97d5ada">GetProfile</a>(int index, Handle&lt; Value &gt; security_token=Handle&lt; Value &gt;())</td><td class="entry"><a class="el" href="classv8_1_1_cpu_profiler.html">v8::CpuProfiler</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr class="even"><td class="entry"><a class="el" href="classv8_1_1_cpu_profiler.html#a44a4ffe2e95237dfeb892972d14d344e">GetProfilesCount</a>()</td><td class="entry"><a class="el" href="classv8_1_1_cpu_profiler.html">v8::CpuProfiler</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr><td class="entry"><a class="el" href="classv8_1_1_cpu_profiler.html#a04c07069ea985ee67a9bcd7a97ab17fc">StartProfiling</a>(Handle&lt; String &gt; title)</td><td class="entry"><a class="el" 
href="classv8_1_1_cpu_profiler.html">v8::CpuProfiler</a></td><td class="entry"><span class="mlabel">static</span></td></tr> <tr class="even"><td class="entry"><a class="el" href="classv8_1_1_cpu_profiler.html#ac2702be592e9218e6584f562a9ad7dd8">StopProfiling</a>(Handle&lt; String &gt; title, Handle&lt; Value &gt; security_token=Handle&lt; Value &gt;())</td><td class="entry"><a class="el" href="classv8_1_1_cpu_profiler.html">v8::CpuProfiler</a></td><td class="entry"><span class="mlabel">static</span></td></tr> </table></div><!-- contents --> <!-- start footer part --> <hr class="footer"/><address class="footer"><small> Generated on Tue Aug 11 2015 23:46:54 for V8 API Reference Guide for node.js v0.4.0 by &#160;<a href="http://www.doxygen.org/index.html"> <img class="footer" src="doxygen.png" alt="doxygen"/> </a> 1.8.9.1 </small></address> </body> </html>
{ "content_hash": "9d7f9d9ee4901e92585f4cd45ea08d76", "timestamp": "", "source": "github", "line_count": 111, "max_line_length": 391, "avg_line_length": 55.873873873873876, "alnum_prop": 0.6623669783940664, "repo_name": "v8-dox/v8-dox.github.io", "id": "902387fa2b0766fe16728771fe19a5ab8a28dbf8", "size": "6202", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "583f2e5/html/classv8_1_1_cpu_profiler-members.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (1.8.0_45) on Sat Apr 09 10:11:04 EDT 2016 --> <title>org.apache.cassandra.index.internal (apache-cassandra API)</title> <meta name="date" content="2016-04-09"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <h1 class="bar"><a href="../../../../../org/apache/cassandra/index/internal/package-summary.html" target="classFrame">org.apache.cassandra.index.internal</a></h1> <div class="indexContainer"> <h2 title="Interfaces">Interfaces</h2> <ul title="Interfaces"> <li><a href="CassandraIndexFunctions.html" title="interface in org.apache.cassandra.index.internal" target="classFrame"><span class="interfaceName">CassandraIndexFunctions</span></a></li> </ul> <h2 title="Classes">Classes</h2> <ul title="Classes"> <li><a href="CassandraIndex.html" title="class in org.apache.cassandra.index.internal" target="classFrame">CassandraIndex</a></li> <li><a href="CassandraIndexSearcher.html" title="class in org.apache.cassandra.index.internal" target="classFrame">CassandraIndexSearcher</a></li> <li><a href="CollatedViewIndexBuilder.html" title="class in org.apache.cassandra.index.internal" target="classFrame">CollatedViewIndexBuilder</a></li> <li><a href="IndexEntry.html" title="class in org.apache.cassandra.index.internal" target="classFrame">IndexEntry</a></li> </ul> </div> </body> </html>
{ "content_hash": "080e61f912eeb07a71ee34d1ca29acd7", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 187, "avg_line_length": 59, "alnum_prop": 0.7175141242937854, "repo_name": "jasonwee/videoOnCloud", "id": "10e28691d9a154dfc063d5418a43c5a3c8200a3f", "size": "1593", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ahkl/apache-cassandra-3.5/javadoc/org/apache/cassandra/index/internal/package-frame.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "116270" }, { "name": "C", "bytes": "2209717" }, { "name": "C++", "bytes": "375267" }, { "name": "CSS", "bytes": "1134648" }, { "name": "Dockerfile", "bytes": "1656" }, { "name": "HTML", "bytes": "306558398" }, { "name": "Java", "bytes": "1465506" }, { "name": "JavaScript", "bytes": "9028509" }, { "name": "Jupyter Notebook", "bytes": "30907" }, { "name": "Less", "bytes": "107003" }, { "name": "PHP", "bytes": "856" }, { "name": "PowerShell", "bytes": "77807" }, { "name": "Pug", "bytes": "2968" }, { "name": "Python", "bytes": "1001861" }, { "name": "R", "bytes": "7390" }, { "name": "Roff", "bytes": "3553" }, { "name": "Shell", "bytes": "206191" }, { "name": "Thrift", "bytes": "80564" }, { "name": "XSLT", "bytes": "4740" } ], "symlink_target": "" }
<?xml version="1.0" ?><!DOCTYPE TS><TS language="el_GR" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Cubits</source> <translation>Σχετικά με το Cubits</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Cubits&lt;/b&gt; version</source> <translation>Έκδοση Cubits</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.</source> <translation> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation>Πνευματική ιδιοκτησία </translation> </message> <message> <location line="+0"/> <source>Dr. Kimoto Chan</source> <translation>Dr. 
Kimoto Chan</translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Βιβλίο Διευθύνσεων</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Διπλό-κλικ για επεξεργασία της διεύθυνσης ή της ετικέτας</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Δημιούργησε νέα διεύθυνση</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Αντέγραψε την επιλεγμένη διεύθυνση στο πρόχειρο του συστήματος</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Νέα διεύθυνση</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Cubits addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Αυτές είναι οι Cubits διευθύνσεις σας για να λαμβάνετε πληρωμές. 
Δίνοντας μία ξεχωριστή διεύθυνση σε κάθε αποστολέα, θα μπορείτε να ελέγχετε ποιος σας πληρώνει.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Αντιγραφή διεύθυνσης</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Δείξε &amp;QR κωδικα</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Cubits address</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως σας ανήκει μια συγκεκριμένη διεύθυνση Cubits</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>&amp;Υπέγραψε το μήνυμα</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Αντιγραφη της επιλεγμενης διεύθυνσης στο πρόχειρο του συστηματος</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation>Εξαγωγή δεδομένων καρτέλας σε αρχείο</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation>&amp;Εξαγωγή</translation> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Cubits address</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως ανήκει μια συγκεκριμένη διεύθυνση Cubits</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Επιβεβαίωση μηνύματος</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Διαγραφή</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Cubits addresses for sending payments. 
Always check the amount and the receiving address before sending coins.</source> <translation>Αυτές είναι οι Cubits διευθύνσεις σας για να λαμβάνετε πληρωμές. Δίνοντας μία ξεχωριστή διεύθυνση σε κάθε αποστολέα, θα μπορείτε να ελέγχετε ποιος σας πληρώνει.</translation> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Αντιγραφή &amp;επιγραφής</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Επεξεργασία</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation>Αποστολή νομισμάτων</translation> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Εξαγωγή Δεδομενων Βιβλίου Διευθύνσεων</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Αρχείο οριοθετημένο με κόμματα (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Εξαγωγή λαθών</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Αδυναμία εγγραφής στο αρχείο %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Ετικέτα</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Διεύθυνση</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(χωρίς ετικέτα)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Φράση πρόσβασης </translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Βάλτε κωδικό πρόσβασης</translation> </message> <message> <location 
line="+14"/> <source>New passphrase</source> <translation>Νέος κωδικός πρόσβασης</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Επανέλαβε τον νέο κωδικό πρόσβασης</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Εισάγετε τον νέο κωδικό πρόσβασης στον πορτοφόλι &lt;br/&gt; Παρακαλώ χρησιμοποιείστε ένα κωδικό με &lt;b&gt; 10 ή περισσότερους τυχαίους χαρακτήρες&lt;/b&gt; ή &lt;b&gt; οχτώ ή παραπάνω λέξεις&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Κρυπτογράφησε το πορτοφόλι</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Αυτη η ενεργεία χρειάζεται τον κωδικό του πορτοφολιού για να ξεκλειδώσει το πορτοφόλι.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Ξεκλειδωσε το πορτοφολι</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Αυτη η ενεργεια χρειάζεται τον κωδικο του πορτοφολιου για να αποκρυπτογραφησειι το πορτοφολι.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Αποκρυπτογράφησε το πορτοφολι</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Άλλαξε κωδικο πρόσβασης</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Εισάγετε τον παλιό και τον νεο κωδικο στο πορτοφολι.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet 
encryption</source> <translation>Επιβεβαίωσε την κρυπτογραφηση του πορτοφολιού</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR BOUNTYCOINS&lt;/b&gt;!</source> <translation>Προσοχη: Εαν κρυπτογραφησεις το πορτοφολι σου και χάσεις τον κωδικο σου θα χάσεις &lt;b&gt; ΟΛΑ ΣΟΥ ΤΑ BOUNTYCOINS&lt;/b&gt;! Είσαι σίγουρος ότι θέλεις να κρυπτογραφησεις το πορτοφολι;</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Είστε σίγουροι ότι θέλετε να κρυπτογραφήσετε το πορτοφόλι σας;</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>ΣΗΜΑΝΤΙΚΟ: Τα προηγούμενα αντίγραφα ασφαλείας που έχετε κάνει από το αρχείο του πορτοφόλιου σας θα πρέπει να αντικατασταθουν με το νέο που δημιουργείται, κρυπτογραφημένο αρχείο πορτοφόλιου. Για λόγους ασφαλείας, τα προηγούμενα αντίγραφα ασφαλείας του μη κρυπτογραφημένου αρχείου πορτοφόλιου θα καταστουν άχρηστα μόλις αρχίσετε να χρησιμοποιείτε το νέο κρυπτογραφημένο πορτοφόλι. </translation> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Προσοχη: το πλήκτρο Caps Lock είναι ενεργο.</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Κρυπτογραφημενο πορτοφολι</translation> </message> <message> <location line="-56"/> <source>Cubits will close now to finish the encryption process. 
Remember that encrypting your wallet cannot fully protect your Cubits from being stolen by malware infecting your computer.</source> <translation>Το Cubits θα κλεισει τώρα για να τελειώσει την διαδικασία κρυπτογραφησης. Θυμησου ότι κρυπτογραφώντας το πορτοφολι σου δεν μπορείς να προστατέψεις πλήρως τα Cubits σου από κλοπή στην περίπτωση όπου μολυνθεί ο υπολογιστής σου με κακόβουλο λογισμικο.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Η κρυπτογραφηση του πορτοφολιού απέτυχε</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Η κρυπτογράφηση του πορτοφολιού απέτυχε λογω εσωτερικού σφάλματος. Το πορτοφολι δεν κρυπτογραφηθηκε.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Οι εισαχθέντες κωδικοί δεν ταιριάζουν.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>το ξεκλείδωμα του πορτοφολιού απέτυχε</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Ο κωδικος που εισήχθη για την αποκρυπτογραφηση του πορτοφολιού ήταν λαθος.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Η αποκρυπτογραφηση του πορτοφολιού απέτυχε</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Ο κωδικος του πορτοφολιού άλλαξε με επιτυχία.</translation> </message> </context> <context> <name>CubitsGUI</name> <message> <location filename="../Cubitsgui.cpp" line="+233"/> <source>Sign &amp;message...</source> 
<translation>Υπογραφή &amp;Μηνύματος...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Συγχρονισμός με το δίκτυο...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Επισκόπηση</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Εμφάνισε γενική εικονα του πορτοφολιού</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Συναλλαγές</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Περιήγηση στο ιστορικο συνναλαγων</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Εξεργασια της λιστας των αποθηκευμενων διευθύνσεων και ετικετων</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Εμφάνισε την λίστα των διευθύνσεων για την παραλαβή πληρωμων</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>Έ&amp;ξοδος</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Εξοδος από την εφαρμογή</translation> </message> <message> <location line="+4"/> <source>Show information about Cubits</source> <translation>Εμφάνισε πληροφορίες σχετικά με το Cubits</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Σχετικά με &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Εμφάνισε πληροφορίες σχετικά με Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Επιλογές...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt 
Wallet...</source> <translation>&amp;Κρυπτογράφησε το πορτοφόλι</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Αντίγραφο ασφαλείας του πορτοφολιού</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Άλλαξε κωδικο πρόσβασης</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation>Εισαγωγή μπλοκ από τον σκληρο δίσκο ... </translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation>Φόρτωση ευρετηρίου μπλοκ στον σκληρο δισκο...</translation> </message> <message> <location line="-347"/> <source>Send coins to a Cubits address</source> <translation>Στείλε νομισματα σε μια διεύθυνση Cubits</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for Cubits</source> <translation>Επεργασία ρυθμισεων επιλογών για το Cubits</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>Δημιουργία αντιγράφου ασφαλείας πορτοφολιού σε άλλη τοποθεσία</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Αλλαγή του κωδικού κρυπτογράφησης του πορτοφολιού</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>&amp;Παράθυρο αποσφαλμάτωσης</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Άνοιγμα κονσόλας αποσφαλμάτωσης και διαγνωστικών</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>&amp;Επιβεβαίωση μηνύματος</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>Cubits</source> <translation>Cubits</translation> </message> <message> <location 
line="-530"/> <source>Wallet</source> <translation>Πορτοφόλι</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation>&amp;Αποστολή</translation> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation>&amp;Παραλαβή </translation> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation>&amp;Διεύθυνσεις</translation> </message> <message> <location line="+22"/> <source>&amp;About Cubits</source> <translation>&amp;Σχετικα:Cubits</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Εμφάνισε/Κρύψε</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation>Εμφάνιση ή αποκρύψη του κεντρικου παράθυρου </translation> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation>Κρυπτογραφήστε τα ιδιωτικά κλειδιά που ανήκουν στο πορτοφόλι σας </translation> </message> <message> <location line="+7"/> <source>Sign messages with your Cubits addresses to prove you own them</source> <translation>Υπογράψτε ένα μήνυμα για να βεβαιώσετε πως είστε ο κάτοχος αυτής της διεύθυνσης</translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Cubits addresses</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως ανήκει μια συγκεκριμένη διεύθυνση Cubits</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Αρχείο</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Ρυθμίσεις</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Βοήθεια</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Εργαλειοθήκη καρτελών</translation> </message> <message> 
<location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+47"/> <source>Cubits client</source> <translation>Πελάτης Cubits</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to Cubits network</source> <translation><numerusform>%n ενεργή σύνδεση στο δίκτυο Cubits</numerusform><numerusform>%n ενεργές συνδέσεις στο δίκτυο Βitcoin</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation>Η πηγή του μπλοκ δεν ειναι διαθέσιμη... </translation> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation>Μεταποιημένα %1 απο % 2 (κατ &apos;εκτίμηση) μπλοκ της ιστορίας της συναλλαγής. </translation> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>Έγινε λήψη %1 μπλοκ ιστορικού συναλλαγών</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation><numerusform>%n ώρες </numerusform><numerusform>%n ώρες </numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>%n ημέρες </numerusform><numerusform>%n ημέρες </numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation><numerusform>%n εβδομαδες</numerusform><numerusform>%n εβδομαδες</numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation>%1 πίσω</translation> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation>Το τελευταίο μπλοκ που ελήφθη δημιουργήθηκε %1 πριν.</translation> </message> <message> <location line="+2"/> <source>Transactions after this will not yet 
be visible.</source> <translation>Οι συναλλαγές μετά από αυτό δεν θα είναι ακόμη ορατες.</translation> </message> <message> <location line="+22"/> <source>Error</source> <translation>Σφάλμα</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation>Προειδοποίηση</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation>Πληροφορία</translation> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>Η συναλλαγή ξεπερνάει το όριο. Μπορεί να ολοκληρωθεί με μια αμοιβή των %1, η οποία αποδίδεται στους κόμβους που επεξεργάζονται τις συναλλαγές και βοηθούν στην υποστήριξη του δικτύου. Θέλετε να συνεχίσετε;</translation> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Ενημερωμένο</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Ενημέρωση...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Επιβεβαίωση αμοιβής συναλλαγής</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Η συναλλαγή απεστάλη</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Εισερχόμενη συναλλαγή</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Ημερομηνία: %1 Ποσό: %2 Τύπος: %3 Διεύθυνση: %4 </translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>Χειρισμός URI</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! 
This can be caused by an invalid Cubits address or malformed URI parameters.</source> <translation>Το URI δεν μπορεί να αναλυθεί! Αυτό μπορεί να προκληθεί από μια μη έγκυρη διεύθυνση Cubits ή ακατάλληλη παραμέτρο URI.</translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Το πορτοφόλι είναι &lt;b&gt;κρυπτογραφημένο&lt;/b&gt; και &lt;b&gt;ξεκλείδωτο&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Το πορτοφόλι είναι &lt;b&gt;κρυπτογραφημένο&lt;/b&gt; και &lt;b&gt;κλειδωμένο&lt;/b&gt;</translation> </message> <message> <location filename="../Cubits.cpp" line="+111"/> <source>A fatal error occurred. Cubits can no longer continue safely and will quit.</source> <translation>Παρουσιάστηκε ανεπανόρθωτο σφάλμα. Το Cubits δεν μπορεί πλέον να συνεχίσει με ασφάλεια και θα τερματισθει.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation>Ειδοποίηση Δικτύου</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Επεξεργασία Διεύθυνσης</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Επιγραφή</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Η επιγραφή που σχετίζεται με αυτή την καταχώρηση του βιβλίου διευθύνσεων</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Διεύθυνση</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. 
This can only be modified for sending addresses.</source> <translation>Η διεύθυνση που σχετίζεται με αυτή την καταχώρηση του βιβλίου διευθύνσεων. Μπορεί να τροποποιηθεί μόνο για τις διευθύνσεις αποστολής.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Νέα διεύθυνση λήψης</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Νέα διεύθυνση αποστολής</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Επεξεργασία διεύθυνσης λήψης</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Επεξεργασία διεύθυνσης αποστολής</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Η διεύθυνση &quot;%1&quot; βρίσκεται ήδη στο βιβλίο διευθύνσεων.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Cubits address.</source> <translation>Η διεύθυνση &quot;%1&quot; δεν είναι έγκυρη Cubits διεύθυνση.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Δεν είναι δυνατό το ξεκλείδωμα του πορτοφολιού.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Η δημιουργία νέου κλειδιού απέτυχε.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Cubits-Qt</source> <translation>Cubits-qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>έκδοση</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Χρήση:</translation> </message> <message> <location 
line="+1"/> <source>command-line options</source> <translation>επιλογής γραμμής εντολών</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>επιλογές UI</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Όρισε γλώσσα, για παράδειγμα &quot;de_DE&quot;(προεπιλογή:τοπικές ρυθμίσεις)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Έναρξη ελαχιστοποιημένο</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Εμφάνισε την οθόνη εκκίνησης κατά την εκκίνηση(προεπιλογή:1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Ρυθμίσεις</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Κύριο</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation>Η προαιρετική αμοιβή για κάθε kB επισπεύδει την επεξεργασία των συναλλαγών σας. Οι περισσότερες συναλλαγές είναι 1 kB. 
</translation> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Αμοιβή &amp;συναλλαγής</translation> </message> <message> <location line="+31"/> <source>Automatically start Cubits after logging in to the system.</source> <translation>Αυτόματη εκκίνηση του Cubits μετά την εισαγωγή στο σύστημα</translation> </message> <message> <location line="+3"/> <source>&amp;Start Cubits on system login</source> <translation>&amp;Έναρξη του Cubits κατά την εκκίνηση του συστήματος</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation>Επαναφορά όλων των επιλογών του πελάτη σε default.</translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation>Επαναφορά ρυθμίσεων</translation> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>&amp;Δίκτυο</translation> </message> <message> <location line="+6"/> <source>Automatically open the Cubits client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Αυτόματο άνοιγμα των θυρών Cubits στον δρομολογητή. Λειτουργεί μόνο αν ο δρομολογητής σας υποστηρίζει τη λειτουργία UPnP.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Απόδοση θυρών με χρήση &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the Cubits network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Σύνδεση στο Cubits δίκτυο μέσω διαμεσολαβητή SOCKS (π.χ. 
για σύνδεση μέσω Tor)</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Σύνδεση μέσω διαμεσολαβητή SOCKS</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP διαμεσολαβητή:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>Διεύθυνση IP του διαμεσολαβητή (π.χ. 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Θύρα:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Θύρα διαμεσολαβητή</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>SOCKS &amp;Έκδοση:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>SOCKS εκδοση του διαμεσολαβητη (e.g. 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Παράθυρο</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Εμφάνιση μόνο εικονιδίου στην περιοχή ειδοποιήσεων κατά την ελαχιστοποίηση</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Ελαχιστοποίηση στην περιοχή ειδοποιήσεων αντί της γραμμής εργασιών</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. 
When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Ελαχιστοποίηση αντί για έξοδο κατά το κλείσιμο του παραθύρου</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>Ε&amp;λαχιστοποίηση κατά το κλείσιμο</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Απεικόνιση</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>Γλώσσα περιβάλλοντος εργασίας: </translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting Cubits.</source> <translation>Εδώ μπορεί να ρυθμιστεί η γλώσσα διεπαφής χρήστη. Αυτή η ρύθμιση θα ισχύσει μετά την επανεκκίνηση του Cubits.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Μονάδα μέτρησης:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Διαλέξτε την προεπιλεγμένη υποδιαίρεση που θα εμφανίζεται όταν στέλνετε νομίσματα.</translation> </message> <message> <location line="+9"/> <source>Whether to show Cubits addresses in the transaction list or not.</source> <translation>Επιλέξτε αν θέλετε να εμφανίζονται οι διευθύνσεις Cubits στη λίστα συναλλαγών.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>Εμφάνιση διευθύνσεων στη λίστα συναλλαγών</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;ΟΚ</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Ακύρωση</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> 
<translation>&amp;Εφαρμογή</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>προεπιλογή</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation>Επιβεβαιώση των επιλογων επαναφοράς </translation> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation>Για ορισμένες ρυθμίσεις πρεπει η επανεκκίνηση να τεθεί σε ισχύ.</translation> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation>Θέλετε να προχωρήσετε;</translation> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Προειδοποίηση</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Cubits.</source> <translation>Αυτή η ρύθμιση θα ισχύσει μετά την επανεκκίνηση του Cubits.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Φόρμα</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Cubits network after a connection is established, but this process has not completed yet.</source> <translation>Οι πληροφορίες που εμφανίζονται μπορεί να είναι ξεπερασμένες. Το πορτοφόλι σας συγχρονίζεται αυτόματα με το δίκτυο Cubits μετά από μια σύνδεση, αλλά αυτή η διαδικασία δεν έχει ακόμη ολοκληρωθεί. 
</translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Υπόλοιπο</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Ανεπιβεβαίωτες</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Πορτοφόλι</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation>Ανώριμος</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Εξορυγμενο υπόλοιπο που δεν έχει ακόμα ωριμάσει </translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Πρόσφατες συναλλαγές&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>Το τρέχον υπόλοιπο</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Το άθροισμα των συναλλαγών που δεν έχουν ακόμα επιβεβαιωθεί και δεν προσμετρώνται στο τρέχον υπόλοιπό σας</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>εκτός συγχρονισμού</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start Cubits: click-to-pay handler</source> <translation>Δεν είναι δυνατή η εκκίνηση του Cubits: click-to-pay handler</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>Κώδικας QR</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Αίτηση πληρωμής</translation> </message> 
<message> <location line="+56"/> <source>Amount:</source> <translation>Ποσό:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Επιγραφή:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Μήνυμα:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Αποθήκευση ως...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Σφάλμα κατά την κωδικοποίηση του URI σε κώδικα QR</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>Το αναγραφόμενο ποσό δεν είναι έγκυρο, παρακαλούμε να το ελέγξετε.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Το αποτέλεσμα της διεύθυνσης είναι πολύ μεγάλο. 
Μειώστε το μέγεθος για το κείμενο της ετικέτας/ μηνύματος.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Αποθήκευση κώδικα QR</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>Εικόνες PNG (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Όνομα Πελάτη</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>Μη διαθέσιμο</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Έκδοση Πελάτη</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Πληροφορία</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Χρησιμοποιηση της OpenSSL εκδοσης</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Χρόνος εκκίνησης</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Δίκτυο</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Αριθμός συνδέσεων</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>Στο testnet</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>αλυσίδα εμποδισμού</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Τρέχον αριθμός μπλοκ</translation> </message> <message> <location line="+23"/> 
<source>Estimated total blocks</source> <translation>Κατ&apos; εκτίμηση συνολικά μπλοκς</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Χρόνος τελευταίου μπλοκ</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Άνοιγμα</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>επιλογής γραμμής εντολών</translation> </message> <message> <location line="+7"/> <source>Show the Cubits-Qt help message to get a list with possible Cubits command-line options.</source> <translation>Εμφανιση του Cubits-Qt μήνυματος βοήθειας για να πάρετε μια λίστα με τις πιθανές επιλογές Cubits γραμμής εντολών.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Εμφάνιση</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Κονσόλα</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Ημερομηνία κατασκευής</translation> </message> <message> <location line="-104"/> <source>Cubits - Debug window</source> <translation>Cubits - Παράθυρο αποσφαλμάτωσης</translation> </message> <message> <location line="+25"/> <source>Cubits Core</source> <translation>Cubits Core</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Αρχείο καταγραφής εντοπισμού σφαλμάτων </translation> </message> <message> <location line="+7"/> <source>Open the Cubits debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Ανοίξτε το αρχείο καταγραφής εντοπισμού σφαλμάτων από τον τρέχοντα κατάλογο δεδομένων. Αυτό μπορεί να πάρει μερικά δευτερόλεπτα για τα μεγάλα αρχεία καταγραφής. 
</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Καθαρισμός κονσόλας</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Cubits RPC console.</source> <translation>Καλώς ήρθατε στην Cubits RPC κονσόλα.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Χρησιμοποιήστε το πάνω και κάτω βέλος για να περιηγηθείτε στο ιστορικο, και &lt;b&gt;Ctrl-L&lt;/b&gt; για εκκαθαριση οθονης.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Γράψτε &lt;b&gt;βοήθεια&lt;/b&gt; για μια επισκόπηση των διαθέσιμων εντολών</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Αποστολή νομισμάτων</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Αποστολή σε πολλούς αποδέκτες ταυτόχρονα</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>&amp;Προσθήκη αποδέκτη</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Διαγραφή όλων των πεδίων συναλλαγής</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Καθαρισμός &amp;Όλων</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Υπόλοιπο:</translation> </message> <message> <location line="+10"/> <source>123.456 
BTC</source> <translation>123,456 BTC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Επιβεβαίωση αποστολής</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>Αποστολη</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; σε %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Επιβεβαίωση αποστολής νομισμάτων</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Είστε βέβαιοι για την αποστολή %1;</translation> </message> <message> <location line="+0"/> <source> and </source> <translation>και</translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>Η διεύθυνση του αποδέκτη δεν είναι σωστή. Παρακαλώ ελέγξτε ξανά.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>Το ποσό πληρωμής πρέπει να είναι μεγαλύτερο από 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Το ποσό ξεπερνάει το διαθέσιμο υπόλοιπο</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Το σύνολο υπερβαίνει το υπόλοιπό σας όταν συμπεριληφθεί και η αμοιβή %1</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Βρέθηκε η ίδια διεύθυνση δύο φορές. 
Επιτρέπεται μία μόνο εγγραφή για κάθε διεύθυνση, σε κάθε διαδικασία αποστολής.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation>Σφάλμα: Η δημιουργία της συναλλαγής απέτυχε</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Σφάλμα: Η συναλλαγή απερρίφθη. Αυτό ενδέχεται να συμβαίνει αν κάποια από τα νομίσματα έχουν ήδη ξοδευθεί, όπως αν χρησιμοποιήσατε αντίγραφο του wallet.dat και τα νομίσματα ξοδεύθηκαν εκεί.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Φόρμα</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>&amp;Ποσό:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Πληρωμή &amp;σε:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Διεύθυνση αποστολής της πληρωμής (e.g. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Εισάγετε μια επιγραφή για αυτή τη διεύθυνση ώστε να καταχωρηθεί στο βιβλίο διευθύνσεων</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Επιγραφή</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Επιλογή διεύθυνσης από το βιβλίο διευθύνσεων</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Επικόλληση διεύθυνσης από το πρόχειρο</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Αφαίρεση αποδέκτη</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Cubits address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Εισάγετε μια διεύθυνση Cubits (π.χ. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Υπογραφές - Είσοδος / Επαλήθευση μήνυματος </translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Υπογραφή Μηνύματος</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. 
Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Μπορείτε να υπογράφετε μηνύματα με τις διευθύνσεις σας, ώστε ν&apos; αποδεικνύετε πως αυτές σας ανήκουν. Αποφεύγετε να υπογράφετε κάτι αόριστο καθώς ενδέχεται να εξαπατηθείτε. Υπογράφετε μόνο πλήρης δηλώσεις με τις οποίες συμφωνείτε.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Εισάγετε μια διεύθυνση Cubits (π.χ. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Επιλογή διεύθυνσης από το βιβλίο διευθύνσεων</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Επικόλληση διεύθυνσης από το βιβλίο διευθύνσεων</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Εισάγετε εδώ το μήνυμα που θέλετε να υπογράψετε</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Υπογραφή</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>Αντέγραφη της επιλεγμενης διεύθυνσης στο πρόχειρο του συστηματος</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Cubits address</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως σας ανήκει μια συγκεκριμένη διεύθυνση Cubits</translation> </message> 
<message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Υπογραφη μήνυματος</translation> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>Επαναφορά όλων των πεδίων μήνυματος</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Καθαρισμός &amp;Όλων</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>&amp;Επιβεβαίωση μηνύματος</translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Πληκτρολογήστε την υπογραφή διεύθυνσης, μήνυμα (βεβαιωθείτε ότι έχετε αντιγράψει τις αλλαγές γραμμής, κενά, tabs, κ.λπ. ακριβώς) και την υπογραφή παρακάτω, για να ελέγξει το μήνυμα. Να είστε προσεκτικοί για να μην διαβάσετε περισσότερα στην υπογραφή ό, τι είναι στην υπογραφή ίδιο το μήνυμα , για να μην εξαπατηθούν από έναν άνθρωπο -in - the-middle επίθεση.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Εισάγετε μια διεύθυνση Cubits (π.χ. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Cubits address</source> <translation>Υπογράψτε ένα μήνυμα για ν&apos; αποδείξετε πως υπογραφθηκε απο μια συγκεκριμένη διεύθυνση Cubits</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation>Επιβεβαίωση μηνύματος</translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>Επαναφορά όλων επαλήθευμενων πεδίων μήνυματος </translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Cubits address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Εισάγετε μια διεύθυνση Cubits (π.χ. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Κάντε κλικ στο &quot;Υπογραφή Μηνύματος&quot; για να λάβετε την υπογραφή</translation> </message> <message> <location line="+3"/> <source>Enter Cubits signature</source> <translation>Εισαγωγή υπογραφής Cubits</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>Η διεύθυνση που εισήχθη είναι λάθος.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Παρακαλούμε ελέγξτε την διεύθυνση και δοκιμάστε ξανά.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>Η διεύθυνση που έχει εισαχθεί δεν αναφέρεται σε ένα πλήκτρο.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>το 
ξεκλείδωμα του πορτοφολιού απέτυχε</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>Το προσωπικό κλειδί εισαγμενης διευθυνσης δεν είναι διαθέσιμο.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Η υπογραφή του μηνύματος απέτυχε.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Μήνυμα υπεγράφη.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>Η υπογραφή δεν μπόρεσε να αποκρυπτογραφηθεί.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Παρακαλούμε ελέγξτε την υπογραφή και δοκιμάστε ξανά.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>Η υπογραφή δεν ταιριάζει με το μήνυμα. </translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Η επιβεβαίωση του μηνύματος απέτυχε</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Μήνυμα επιβεβαιώθηκε.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>Dr. Kimoto Chan</source> <translation>Dr. 
Kimoto Chan</translation> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Ανοιχτό μέχρι %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1/χωρίς σύνδεση;</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/χωρίς επιβεβαίωση</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 επιβεβαιώσεις</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Κατάσταση</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, έχει μεταδοθεί μέσω %n κόμβων</numerusform><numerusform>, έχει μεταδοθεί μέσω %n κόμβων</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Ημερομηνία</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Πηγή</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Δημιουργία </translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Από</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>Προς</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation> δική σας διεύθυνση </translation> </message> <message> <location line="-2"/> <source>label</source> <translation>eπιγραφή</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location 
line="+30"/> <source>Credit</source> <translation>Πίστωση </translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>ωρίμανση σε %n επιπλέον μπλοκ</numerusform><numerusform>ωρίμανση σε %n επιπλέον μπλοκ</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>μη αποδεκτό</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Χρέωση</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Τέλος συναλλαγής </translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Καθαρό ποσό</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Μήνυμα</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Σχόλιο:</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID Συναλλαγής:</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 50 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Πρέπει να περιμένετε 50 μπλοκ πριν μπορέσετε να χρησιμοποιήσετε τα νομίσματα που έχετε δημιουργήσει. Το μπλοκ που δημιουργήσατε μεταδόθηκε στο δίκτυο για να συμπεριληφθεί στην αλυσίδα των μπλοκ. Αν δεν μπει σε αυτή θα μετατραπεί σε &quot;μη αποδεκτό&quot; και δε θα μπορεί να καταναλωθεί. 
Αυτό συμβαίνει σπάνια όταν κάποιος άλλος κόμβος δημιουργήσει ένα μπλοκ λίγα δευτερόλεπτα πριν από εσάς.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Πληροφορίες αποσφαλμάτωσης</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Συναλλαγή</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation>εισροές </translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Ποσό</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>αληθής</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>αναληθής </translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, δεν έχει ακόμα μεταδοθεί μ&apos; επιτυχία</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation><numerusform>Ανοιχτό για %n μπλοκ</numerusform><numerusform>Ανοιχτό για %n μπλοκ</numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>άγνωστο</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Λεπτομέρειες συναλλαγής</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Αυτό το παράθυρο δείχνει μια λεπτομερή περιγραφή της συναλλαγής</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Ημερομηνία</translation> </message> <message> <location line="+0"/> <source>Type</source> 
<translation>Τύπος</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Διεύθυνση</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Ποσό</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation><numerusform>Ανοιχτό για %n μπλοκ</numerusform><numerusform>Ανοιχτό για %n μπλοκ</numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Ανοιχτό μέχρι %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Χωρίς σύνδεση (%1 επικυρώσεις)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Χωρίς επιβεβαίωση (%1 από %2 επικυρώσεις)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Επικυρωμένη (%1 επικυρώσεις)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation><numerusform>Το υπόλοιπο από την εξόρυξη θα είναι διαθέσιμο μετά από %n μπλοκ</numerusform><numerusform>Το υπόλοιπο από την εξόρυξη θα είναι διαθέσιμο μετά από %n μπλοκ</numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Αυτό το μπλοκ δεν έχει παραληφθεί από κανέναν άλλο κόμβο και κατά πάσα πιθανότητα θα απορριφθεί!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Δημιουργήθηκε αλλά απορρίφθηκε</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Παραλαβή με</translation> </message> <message> <location line="+2"/> <source>Received from</source> 
<translation>Ελήφθη από</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Αποστολή προς</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Πληρωμή προς εσάς</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Εξόρυξη</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(δ/α)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Κατάσταση συναλλαγής. Πηγαίνετε το ποντίκι πάνω από αυτό το πεδίο για να δείτε τον αριθμό των επικυρώσεων</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Ημερομηνία κι ώρα λήψης της συναλλαγής.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Είδος συναλλαγής.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Διεύθυνση αποστολής της συναλλαγής.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Ποσό που αφαιρέθηκε ή προστέθηκε στο υπόλοιπο.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Όλα</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Σήμερα</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Αυτή την εβδομάδα</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Αυτόν τον μήνα</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Τον 
προηγούμενο μήνα</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Αυτό το έτος</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Έκταση...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Ελήφθη με</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Απεστάλη προς</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Προς εσάς</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Εξόρυξη</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Άλλο</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Αναζήτηση με βάση τη διεύθυνση ή την επιγραφή</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Ελάχιστο ποσό</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Αντιγραφή διεύθυνσης</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Αντιγραφή επιγραφής</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Αντιγραφή ποσού</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Αντιγραφη του ID Συναλλαγής</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Επεξεργασία επιγραφής</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Εμφάνιση λεπτομερειών συναλλαγής</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Εξαγωγή Στοιχείων Συναλλαγών</translation> </message> <message> <location line="+1"/> 
<source>Comma separated file (*.csv)</source> <translation>Αρχείο οριοθετημένο με κόμματα (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Επικυρωμένες</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Ημερομηνία</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Τύπος</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Επιγραφή</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Διεύθυνση</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Ποσό</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Σφάλμα εξαγωγής</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Αδυναμία εγγραφής στο αρχείο %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Έκταση:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>έως</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Αποστολή νομισμάτων</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation>&amp;Εξαγωγή</translation> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>Εξαγωγή δεδομένων καρτέλας σε αρχείο</translation> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation>Αντίγραφο ασφαλείας του πορτοφολιού</translation> </message> <message> 
<location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Αρχεία δεδομένων πορτοφολιού (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Αποτυχία κατά τη δημιουργία αντιγράφου</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Παρουσιάστηκε σφάλμα κατά την αποθήκευση των δεδομένων πορτοφολιού στη νέα τοποθεσία.</translation> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation>Η δημιουργια αντιγραφου ασφαλειας πετυχε</translation> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation>Τα δεδομένα πορτοφόλιου αποθηκεύτηκαν με επιτυχία στη νέα θέση. </translation> </message> </context> <context> <name>Cubits-core</name> <message> <location filename="../Cubitstrings.cpp" line="+94"/> <source>Cubits version</source> <translation>Έκδοση Cubits</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Χρήση:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or Cubitsd</source> <translation>Αποστολή εντολής στον εξυπηρετητή ή στο Cubitsd</translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Λίστα εντολών</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Επεξήγηση εντολής</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Επιλογές:</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: Cubits.conf)</source> <translation>Ορίστε αρχείο ρυθμίσεων (προεπιλογή: Cubits.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: Cubitsd.pid)</source> <translation>Ορίστε αρχείο 
pid (προεπιλογή: Cubitsd.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Ορισμός φακέλου δεδομένων</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Όρισε το μέγεθος της βάσης προσωρινής αποθήκευσης σε megabytes(προεπιλογή:25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 7951 or testnet: 17951)</source> <translation>Εισερχόμενες συνδέσεις στη θύρα &lt;port&gt; (προεπιλογή: 7951 ή στο testnet: 17951)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Μέγιστες αριθμός συνδέσεων με τους peers &lt;n&gt; (προεπιλογή: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Σύνδεση σε έναν κόμβο για την ανάκτηση διευθύνσεων από ομοτίμους, και αποσυνδέσh</translation> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation>Διευκρινίστε τη δικιά σας δημόσια διεύθυνση.</translation> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Όριο αποσύνδεσης προβληματικών peers (προεπιλογή: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Δευτερόλεπτα πριν επιτραπεί ξανά η σύνδεση των προβληματικών peers (προεπιλογή: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Ένα σφάλμα συνέβη καθώς προετοιμαζόταν η πόρτα RPC %u για αναμονή IPv4: %s</translation> </message> <message> <location line="+27"/> 
<source>Listen for JSON-RPC connections on &lt;port&gt; (default: 7950 or testnet: 17950)</source> <translation>Εισερχόμενες συνδέσεις JSON-RPC στη θύρα &lt;port&gt; (προεπιλογή: 7950 or testnet: 17950)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Αποδοχή εντολών κονσόλας και JSON-RPC</translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Εκτέλεση στο παρασκήνιο κι αποδοχή εντολών</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Χρήση του δοκιμαστικού δικτύου</translation> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Να δέχεσαι συνδέσεις από έξω(προεπιλογή:1)</translation> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=Cubitsrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Cubits Alert&quot; admin@foo.com </source> <translation>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=Cubitsrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. 
It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Cubits Alert&quot; admin@foo.com </translation> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Ένα σφάλμα συνέβη καθώς προετοιμαζόταν η υποδοχη RPC %u για αναμονη του IPv6, επεσε πισω στο IPv4:%s</translation> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Αποθηκευση σε συγκεκριμένη διεύθυνση. Χρησιμοποιήστε τα πλήκτρα [Host] : συμβολισμός θύρα για IPv6</translation> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Cubits is probably already running.</source> <translation>Αδυναμία κλειδώματος του φακέλου δεδομένων %s. Πιθανώς το Cubits να είναι ήδη ενεργό.</translation> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Σφάλμα: Η συναλλαγή απορρίφθηκε. Αυτό ίσως οφείλεται στο ότι τα νομίσματά σας έχουν ήδη ξοδευτεί, π.χ. 
με την αντιγραφή του wallet.dat σε άλλο σύστημα και την χρήση τους εκεί, χωρίς η συναλλαγή να έχει καταγραφεί στο παρόν σύστημα.</translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Σφάλμα: Αυτή η συναλλαγή απαιτεί αμοιβή συναλλαγής τουλάχιστον %s λόγω του μεγέθους, πολυπλοκότητας ή της χρήσης πρόσφατης παραλαβής κεφαλαίου</translation> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation>Εκτέλεση της εντολής όταν το καλύτερο μπλοκ αλλάξει(%s στην εντολή αντικαθίσταται από το hash του μπλοκ)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Εκτέλεσε την εντολή όταν το καλύτερο μπλοκ αλλάξει(%s στην εντολή αντικαθίσταται από το hash του μπλοκ)</translation> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Ορίστε το μέγιστο μέγεθος των high-priority/low-fee συναλλαγων σε bytes (προεπιλογή: 27000)</translation> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation>Αυτό είναι ένα προ-τεστ κυκλοφορίας - χρησιμοποιήστε το με δική σας ευθύνη - δεν χρησιμοποιείτε για εξόρυξη ή για αλλες εφαρμογές</translation> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Προειδοποίηση: Η παράμετρος -paytxfee είναι πολύ υψηλή. 
Πρόκειται για την αμοιβή που θα πληρώνετε για κάθε συναλλαγή που θα στέλνετε.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Προειδοποίηση: Εμφανίσεις συναλλαγων δεν μπορεί να είναι σωστες! Μπορεί να χρειαστεί να αναβαθμίσετε, ή άλλοι κόμβοι μπορεί να χρειαστεί να αναβαθμίστουν. </translation> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Cubits will not work properly.</source> <translation>Προειδοποίηση: Παρακαλώ βεβαιωθείτε πως η ημερομηνία κι ώρα του συστήματός σας είναι σωστές. Αν το ρολόι του υπολογιστή σας πάει λάθος, ενδέχεται να μη λειτουργεί σωστά το Cubits.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Προειδοποίηση : Σφάλμα wallet.dat κατα την ανάγνωση ! Όλα τα κλειδιά αναγνωρισθηκαν σωστά, αλλά τα δεδομένα των συναλλαγών ή καταχωρήσεις στο βιβλίο διευθύνσεων μπορεί να είναι ελλιπείς ή λανθασμένα. </translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Προειδοποίηση : το αρχειο wallet.dat ειναι διεφθαρμένο, τα δεδομένα σώζονται ! Original wallet.dat αποθηκεύονται ως πορτοφόλι { timestamp } bak στο % s ? . . 
Αν το υπόλοιπό σας ή οι συναλλαγές σας είναι λανθασμένες, θα πρέπει να κάνετε επαναφορά από αντίγραφο ασφαλείας</translation> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Προσπάθεια ανάκτησης ιδιωτικών κλειδιών από κατεστραμμένο αρχείο wallet.dat</translation> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation>Επιλογές δημιουργίας μπλοκ:</translation> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Σύνδεση μόνο με ορισμένους κόμβους</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation>Εντοπίστηκε κατεστραμμένη βάση δεδομένων μπλοκ</translation> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Ανακάλυψη της δικής σας διεύθυνσης IP (προεπιλογή: 1 όταν γίνεται ακρόαση και χωρίς -externalip)</translation> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation>Θέλετε να αναδημιουργηθεί τώρα η βάση δεδομένων των μπλοκ; 
</translation> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation>Σφάλμα κατά την ενεργοποίηση της βάσης δεδομένων μπλοκ</translation> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation>Σφάλμα κατά την ενεργοποίηση της βάσης δεδομένων πορτοφόλιου %s!</translation> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation>Σφάλμα φόρτωσης της βάσης δεδομένων των μπλοκ</translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation>Σφάλμα ανοίγματος της βάσης δεδομένων των μπλοκ</translation> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation>Σφάλμα: Ο χώρος στο δίσκο είναι χαμηλός!</translation> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Σφάλμα: το πορτοφόλι είναι κλειδωμένο, δεν μπορεί να δημιουργηθεί συναλλαγή</translation> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation>Σφάλμα: σφάλμα συστήματος: </translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Αποτυχία ακρόασης σε οποιαδήποτε θύρα. Χρησιμοποιήστε -listen=0 αν το επιθυμείτε.</translation> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation>Αποτυχία ανάγνωσης των πληροφοριών μπλοκ</translation> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation>Η ανάγνωση του μπλοκ απέτυχε</translation> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation>Ο συγχρονισμός του ευρετηρίου μπλοκ απέτυχε</translation> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation>Η εγγραφή του ευρετηρίου μπλοκ απέτυχε</translation> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation>Η εγγραφή των πληροφοριών μπλοκ απέτυχε</translation> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation>Η εγγραφή του μπλοκ απέτυχε</translation> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation>Αδυναμία εγγραφής πληροφοριών αρχείου</translation> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation>Αποτυχία εγγραφής στη βάση δεδομένων νομίσματος</translation> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation>Αποτυχία εγγραφής δείκτη συναλλαγών</translation> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation>Αποτυχία εγγραφής δεδομένων αναίρεσης</translation> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation>Εύρεση ομότιμων κόμβων μέσω αναζήτησης DNS (προεπιλογή: 1 εκτός αν χρησιμοποιείται το -connect)</translation> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation>Δημιουργία νομισμάτων (προεπιλογή: 0)</translation> </message> <message> <location line="+2"/> 
<source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Πόσα μπλοκ να ελέγχθουν κατά την εκκίνηση (προεπιλογή:288,0=όλα)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation>Πόσο εξονυχιστική να είναι η επιβεβαίωση του μπλοκ(0-4, προεπιλογή:3)</translation> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation>Δεν ειναι αρκετες περιγραφες αρχείων διαθέσιμες.</translation> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Εισαγωγή μπλοκ από εξωτερικό αρχείο blk000?.dat</translation> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation>Ορίσμος του αριθμόυ θεματων στην υπηρεσία κλήσεων RPC (προεπιλογή: 4) </translation> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation>Επαλήθευση των μπλοκ... </translation> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation>Επαλήθευση πορτοφολιου... 
</translation> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Εισαγωγή μπλοκ από εξωτερικό αρχείο blk000?.dat</translation> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation>Ορίσμος του αριθμό των νημάτων ελέγχου σεναρίου (μέχρι 16, 0 = auto, &lt;0 = αφήνουν τους πολλους πυρήνες δωρεάν, default: 0)</translation> </message> <message> <location line="+77"/> <source>Information</source> <translation>Πληροφορία</translation> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Μη έγκυρο ποσό για την παράμετρο -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Μη έγκυρο ποσό για την παράμετρο -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation>Διατηρήση ένος πλήρες ευρετήριου συναλλαγών (προεπιλογή: 0) </translation> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Μέγιστος buffer λήψης ανά σύνδεση, &lt;n&gt;*1000 bytes (προεπιλογή: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Μέγιστος buffer αποστολής ανά σύνδεση, &lt;n&gt;*1000 bytes (προεπιλογή: 1000)</translation> </message> <message> <location line="+2"/> <source>Only 
accept block chain matching built-in checkpoints (default: 1)</source> <translation>Μονο αποδοχη αλυσίδας μπλοκ που ταιριάζει με τα ενσωματωμένα σημεία ελέγχου (προεπιλογή: 1) </translation> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation> Συνδέση μόνο σε κόμβους του δικτύου &lt;net&gt; (IPv4, IPv6 ή Tor) </translation> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Έξοδος επιπλέον πληροφοριών εντοπισμού σφαλμάτων</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Έξοδος επιπλέον πληροφοριών εντοπισμού σφαλμάτων</translation> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Χρονοσφραγίδα πληροφοριών εντοπισμού σφαλμάτων</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the Cubits Wiki for SSL setup instructions)</source> <translation>Ρυθμίσεις SSL: (ανατρέξτε στο Cubits Wiki για οδηγίες ρυθμίσεων SSL)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Επιλέξτε την έκδοση του διαμεσολαβητη για να χρησιμοποιήσετε (4-5 , προεπιλογή: 5)</translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Αποστολή πληροφοριών εντοπισμού σφαλμάτων στην κονσόλα αντί του αρχείου debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Αποστολή πληροφοριών εντοπισμού σφαλμάτων στον debugger</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Ορίσμος του μέγιστου μέγεθος block σε bytes (προεπιλογή: 
250000)</translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Ορίστε το μέγιστο μέγεθος block σε bytes (προεπιλογή: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Συρρίκνωση του αρχείο debug.log κατα την εκκίνηση του πελάτη (προεπιλογή: 1 όταν δεν-debug)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation>Η υπογραφή συναλλαγής απέτυχε </translation> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Ορισμός λήξης χρονικού ορίου σε χιλιοστά του δευτερολέπτου(προεπιλογή:5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation>Λάθος Συστήματος:</translation> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation>Το ποσό της συναλλαγής είναι πολύ μικρο </translation> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation>Τα ποσά των συναλλαγών πρέπει να είναι θετικα</translation> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation>Η συναλλαγή ειναι πολύ μεγάλη </translation> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Χρησιμοποίηση του UPnP για την χρήση της πόρτας αναμονής (προεπιλογή:0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Χρησιμοποίηση του UPnP για την χρήση της πόρτας αναμονής (προεπιλογή:1)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Χρήση διακομιστή 
μεσολάβησης για την επίτευξη των Tor κρυμμένων υπηρεσιων (προεπιλογή: ίδιο με το-proxy) </translation> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Όνομα χρήστη για τις συνδέσεις JSON-RPC</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation>Προειδοποίηση</translation> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Προειδοποίηση: Αυτή η έκδοση είναι ξεπερασμένη, απαιτείται αναβάθμιση </translation> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation>Θα πρέπει να ξαναχτίστουν οι βάσεις δεδομένων που χρησιμοποιούντε-Αναδημιουργία αλλάγων-txindex </translation> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation>Το αρχειο wallet.dat ειναι διεφθαρμένο, η διάσωση απέτυχε</translation> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Κωδικός για τις συνδέσεις JSON-RPC</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Αποδοχή συνδέσεων JSON-RPC από συγκεκριμένη διεύθυνση IP</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Αποστολή εντολών στον κόμβο &lt;ip&gt; (προεπιλογή: 127.0.0.1)</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Εκτέλεσε την εντολή όταν το καλύτερο μπλοκ αλλάξει(%s στην εντολή αντικαθίσταται από το hash του μπλοκ)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Αναβάθμισε το πορτοφόλι στην 
τελευταία έκδοση</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Όριο πλήθους κλειδιών pool &lt;n&gt; (προεπιλογή: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Επανέλεγχος της αλυσίδας μπλοκ για απούσες συναλλαγές</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Χρήση του OpenSSL (https) για συνδέσεις JSON-RPC</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Αρχείο πιστοποιητικού του διακομιστή (προεπιλογή: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Προσωπικό κλειδί του διακομιστή (προεπιλογή: server.pem)</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Αποδεκτά κρυπτογραφήματα (προεπιλογή: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Αυτό το κείμενο βοήθειας</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Αδύνατη η σύνδεση με τη θύρα %s αυτού του υπολογιστή (bind returned error %d, %s) </translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Σύνδεση μέσω διαμεσολαβητή socks</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Να επιτρέπονται οι έλεγχοι DNS για προσθήκη και σύνδεση κόμβων</translation> </message> <message> <location 
line="+55"/> <source>Loading addresses...</source> <translation>Φόρτωση διευθύνσεων...</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Σφάλμα φόρτωσης wallet.dat: Κατεστραμμένο Πορτοφόλι</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Cubits</source> <translation>Σφάλμα φόρτωσης wallet.dat: Το Πορτοφόλι απαιτεί μια νεότερη έκδοση του Cubits</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart Cubits to complete</source> <translation>Απαιτείται η επανεγγραφή του Πορτοφολιού, η οποία θα ολοκληρωθεί στην επανεκκίνηση του Cubits</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation>Σφάλμα φόρτωσης αρχείου wallet.dat</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Δεν είναι έγκυρη η διεύθυνση διαμεσολαβητή: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Άγνωστo δίκτυο ορίζεται σε onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>Άγνωστo δίκτυο ορίζεται: %i</translation> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Μη έγκυρο ποσό για την 
παράμετρο -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Λάθος ποσότητα</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Ανεπαρκές κεφάλαιο</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Φόρτωση ευρετηρίου μπλοκ...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Προσέθεσε ένα κόμβο για σύνδεση και προσπάθησε να κρατήσεις την σύνδεση ανοιχτή</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. Cubits is probably already running.</source> <translation>Αδύνατη η σύνδεση με τη θύρα %s αυτού του υπολογιστή. Το Cubits είναι πιθανώς ήδη ενεργό.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Αμοιβή ανά KB που θα προστίθεται στις συναλλαγές που στέλνεις</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Φόρτωση πορτοφολιού...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation>Δεν μπορώ να υποβαθμίσω το πορτοφόλι</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>Δεν μπορώ να γράψω την προεπιλεγμένη διεύθυνση</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Ανίχνευση...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Η φόρτωση ολοκληρώθηκε</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation>Χρήση της %s επιλογής</translation> </message> <message> <location line="-74"/> 
<source>Error</source> <translation>Σφάλμα</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Πρέπει να βάλεις ένα κωδικό στο αρχείο παραμέτρων: %s Εάν το αρχείο δεν υπάρχει, δημιούργησε το με δικαιώματα μόνο για ανάγνωση από τον δημιουργό</translation> </message> </context> </TS>
{ "content_hash": "7a8bb439f5d06e17ca7fafb0c3776f4a", "timestamp": "", "source": "github", "line_count": 2941, "max_line_length": 428, "avg_line_length": 40.48112886773206, "alnum_prop": 0.6392927638486414, "repo_name": "scificrypto/CubitsV3", "id": "ef62627717a7125bbebdc80a4207ab74da665487", "size": "137897", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/qt/locale/Cubits_el_GR.ts", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "7217" }, { "name": "C++", "bytes": "1571705" }, { "name": "Makefile", "bytes": "3687" }, { "name": "NSIS", "bytes": "5877" }, { "name": "Objective-C", "bytes": "747" }, { "name": "Objective-C++", "bytes": "2451" }, { "name": "Python", "bytes": "2879" }, { "name": "QMake", "bytes": "12277" }, { "name": "Shell", "bytes": "1144" } ], "symlink_target": "" }
package by.istin.android.xcore.processor; import android.content.Context; import by.istin.android.xcore.XCoreHelper.IAppServiceKey; import by.istin.android.xcore.source.DataSourceRequest; import by.istin.android.xcore.source.IDataSource; public interface IProcessor<Result, DataSourceResult> extends IAppServiceKey { public Result execute(DataSourceRequest dataSourceRequest, IDataSource<DataSourceResult> dataSource, DataSourceResult dataSourceResult) throws Exception; public void cache(Context context, DataSourceRequest dataSourceRequest, Result result) throws Exception; }
{ "content_hash": "4c2927ecd23550b253e8cbfca4792c0d", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 154, "avg_line_length": 39.2, "alnum_prop": 0.8452380952380952, "repo_name": "IstiN/android_xcore", "id": "431270fe8db52de7422dd6f4e238e377e48f8ed3", "size": "588", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "xcore-library/xcore/src/main/java/by/istin/android/xcore/processor/IProcessor.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Groovy", "bytes": "10742" }, { "name": "Java", "bytes": "1018102" } ], "symlink_target": "" }
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package practica3.medicos; import java.util.concurrent.Semaphore; /** * * @author joseangel */ public class Practica3Medicos { /** * @param args the command line arguments */ public static void main(String[] args) { // TODO code application logic here int i; Semaphore sillas = new Semaphore(5, true); Semaphore boxes = new Semaphore(3, true); Semaphore medico = new Semaphore(0, true); Semaphore enfermo = new Semaphore(0, true); Semaphore pantalla = new Semaphore(1, true); for(i=0; i<20; i++) { (new Paciente(i, sillas, boxes, medico, enfermo, pantalla)).start(); } for (int j=0; j<2; j++){ (new Medico(j, medico, enfermo, pantalla)).start(); } } }
{ "content_hash": "4a148b68c274d543b587eb72b7611169", "timestamp": "", "source": "github", "line_count": 40, "max_line_length": 80, "avg_line_length": 25.525, "alnum_prop": 0.5837414299706171, "repo_name": "josegury/Concurrencia-en-Java", "id": "4c2b3ae21d0bdb81657adcf50509c61b242855ab", "size": "1021", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Semaforos/Practica3-Medicos/src/practica3/medicos/Practica3Medicos.java", "mode": "33261", "license": "mit", "language": [ { "name": "Java", "bytes": "42010" }, { "name": "Rust", "bytes": "415" } ], "symlink_target": "" }
<?php /* Safe sample input : backticks interpretation, reading the file /tmp/tainted.txt sanitize : check if there is only letters and/or numbers construction : use of sprintf via a %s with simple quote */ /*Copyright 2015 Bertrand STIVALET Permission is hereby granted, without written agreement or royalty fee, to use, copy, modify, and distribute this software and its documentation for any purpose, provided that the above copyright notice and the following three paragraphs appear in all copies of this software. IN NO EVENT SHALL AUTHORS BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF AUTHORS HAVE BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. AUTHORS SPECIFICALLY DISCLAIM ANY WARRANTIES INCLUDING, BUT NOT LIMITED TO THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. THE SOFTWARE IS PROVIDED ON AN "AS-IS" BASIS AND AUTHORS HAVE NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.*/ $tainted = `cat /tmp/tainted.txt`; $re = "/^[a-zA-Z0-9]*$/"; if(preg_match($re, $tainted) == 1){ $tainted = $tainted; } else{ $tainted = ""; } $var = http_redirect(sprintf("'%s'.php", $tainted)); ?>
{ "content_hash": "686cf1254f848f5646995ff828620290", "timestamp": "", "source": "github", "line_count": 60, "max_line_length": 75, "avg_line_length": 22.1, "alnum_prop": 0.7496229260935143, "repo_name": "stivalet/PHP-Vulnerability-test-suite", "id": "61ca2bd79446076bc2c9169a372ae336d4ce2478", "size": "1326", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "URF/CWE_601/safe/CWE_601__backticks__func_preg_match-letters_numbers__http_redirect_file_name-sprintf_%s_simple_quote.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "64184004" } ], "symlink_target": "" }
<?php defined('BASEPATH') OR exit('No direct script access allowed'); /* |-------------------------------------------------------------------------- | Base Site URL |-------------------------------------------------------------------------- | | URL to your CodeIgniter root. Typically this will be your base URL, | WITH a trailing slash: | | http://example.com/ | | WARNING: You MUST set this value! | | If it is not set, then CodeIgniter will try guess the protocol and path | your installation, but due to security concerns the hostname will be set | to $_SERVER['SERVER_ADDR'] if available, or localhost otherwise. | The auto-detection mechanism exists only for convenience during | development and MUST NOT be used in production! | | If you need to allow multiple domains, remember that this file is still | a PHP script and you can easily do that on your own. | */ $config['base_url'] = "http://".$_SERVER['HTTP_HOST']; /* |-------------------------------------------------------------------------- | Index File |-------------------------------------------------------------------------- | | Typically this will be your index.php file, unless you've renamed it to | something else. If you are using mod_rewrite to remove the page set this | variable so that it is blank. | */ $config['index_page'] = ''; /* |-------------------------------------------------------------------------- | URI PROTOCOL |-------------------------------------------------------------------------- | | This item determines which server global should be used to retrieve the | URI string. The default setting of 'REQUEST_URI' works for most servers. | If your links do not seem to work, try one of the other delicious flavors: | | 'REQUEST_URI' Uses $_SERVER['REQUEST_URI'] | 'QUERY_STRING' Uses $_SERVER['QUERY_STRING'] | 'PATH_INFO' Uses $_SERVER['PATH_INFO'] | | WARNING: If you set this to 'PATH_INFO', URIs will always be URL-decoded! 
*/ $config['uri_protocol'] = 'REQUEST_URI'; /* |-------------------------------------------------------------------------- | URL suffix |-------------------------------------------------------------------------- | | This option allows you to add a suffix to all URLs generated by CodeIgniter. | For more information please see the user guide: | | https://codeigniter.com/user_guide/general/urls.html */ $config['url_suffix'] = ''; /* |-------------------------------------------------------------------------- | Default Language |-------------------------------------------------------------------------- | | This determines which set of language files should be used. Make sure | there is an available translation if you intend to use something other | than english. | */ $config['language'] = 'english'; /* |-------------------------------------------------------------------------- | Default Character Set |-------------------------------------------------------------------------- | | This determines which character set is used by default in various methods | that require a character set to be provided. | | See http://php.net/htmlspecialchars for a list of supported charsets. | */ $config['charset'] = 'UTF-8'; /* |-------------------------------------------------------------------------- | Enable/Disable System Hooks |-------------------------------------------------------------------------- | | If you would like to use the 'hooks' feature you must enable it by | setting this variable to TRUE (boolean). See the user guide for details. | */ $config['enable_hooks'] = FALSE; /* |-------------------------------------------------------------------------- | Class Extension Prefix |-------------------------------------------------------------------------- | | This item allows you to set the filename/classname prefix when extending | native libraries. 
For more information please see the user guide: | | https://codeigniter.com/user_guide/general/core_classes.html | https://codeigniter.com/user_guide/general/creating_libraries.html | */ $config['subclass_prefix'] = 'MY_'; /* |-------------------------------------------------------------------------- | Composer auto-loading |-------------------------------------------------------------------------- | | Enabling this setting will tell CodeIgniter to look for a Composer | package auto-loader script in application/vendor/autoload.php. | | $config['composer_autoload'] = TRUE; | | Or if you have your vendor/ directory located somewhere else, you | can opt to set a specific path as well: | | $config['composer_autoload'] = '/path/to/vendor/autoload.php'; | | For more information about Composer, please visit http://getcomposer.org/ | | Note: This will NOT disable or override the CodeIgniter-specific | autoloading (application/config/autoload.php) */ $config['composer_autoload'] = FALSE; /* |-------------------------------------------------------------------------- | Allowed URL Characters |-------------------------------------------------------------------------- | | This lets you specify which characters are permitted within your URLs. | When someone tries to submit a URL with disallowed characters they will | get a warning message. | | As a security measure you are STRONGLY encouraged to restrict URLs to | as few characters as possible. By default only these are allowed: a-z 0-9~%.:_- | | Leave blank to allow all characters -- but only if you are insane. | | The configured value is actually a regular expression character group | and it will be executed as: ! preg_match('/^[<permitted_uri_chars>]+$/i | | DO NOT CHANGE THIS UNLESS YOU FULLY UNDERSTAND THE REPERCUSSIONS!! 
| */ $config['permitted_uri_chars'] = 'a-z 0-9~%.:_\-'; /* |-------------------------------------------------------------------------- | Enable Query Strings |-------------------------------------------------------------------------- | | By default CodeIgniter uses search-engine friendly segment based URLs: | example.com/who/what/where/ | | By default CodeIgniter enables access to the $_GET array. If for some | reason you would like to disable it, set 'allow_get_array' to FALSE. | | You can optionally enable standard query string based URLs: | example.com?who=me&what=something&where=here | | Options are: TRUE or FALSE (boolean) | | The other items let you set the query string 'words' that will | invoke your controllers and its functions: | example.com/index.php?c=controller&m=function | | Please note that some of the helpers won't work as expected when | this feature is enabled, since CodeIgniter is designed primarily to | use segment based URLs. | */ $config['allow_get_array'] = TRUE; $config['enable_query_strings'] = FALSE; $config['controller_trigger'] = 'c'; $config['function_trigger'] = 'm'; $config['directory_trigger'] = 'd'; /* |-------------------------------------------------------------------------- | Error Logging Threshold |-------------------------------------------------------------------------- | | You can enable error logging by setting a threshold over zero. The | threshold determines what gets logged. Threshold options are: | | 0 = Disables logging, Error logging TURNED OFF | 1 = Error Messages (including PHP errors) | 2 = Debug Messages | 3 = Informational Messages | 4 = All Messages | | You can also pass an array with threshold levels to show individual error types | | array(2) = Debug Messages, without Error Messages | | For a live site you'll usually only enable Errors (1) to be logged otherwise | your log files will fill up very fast. 
| */ $config['log_threshold'] = 0; /* |-------------------------------------------------------------------------- | Error Logging Directory Path |-------------------------------------------------------------------------- | | Leave this BLANK unless you would like to set something other than the default | application/logs/ directory. Use a full server path with trailing slash. | */ $config['log_path'] = ''; /* |-------------------------------------------------------------------------- | Log File Extension |-------------------------------------------------------------------------- | | The default filename extension for log files. The default 'php' allows for | protecting the log files via basic scripting, when they are to be stored | under a publicly accessible directory. | | Note: Leaving it blank will default to 'php'. | */ $config['log_file_extension'] = ''; /* |-------------------------------------------------------------------------- | Log File Permissions |-------------------------------------------------------------------------- | | The file system permissions to be applied on newly created log files. | | IMPORTANT: This MUST be an integer (no quotes) and you MUST use octal | integer notation (i.e. 0700, 0644, etc.) */ $config['log_file_permissions'] = 0644; /* |-------------------------------------------------------------------------- | Date Format for Logs |-------------------------------------------------------------------------- | | Each item that is logged has an associated date. You can use PHP date | codes to set your own date formatting | */ $config['log_date_format'] = 'Y-m-d H:i:s'; /* |-------------------------------------------------------------------------- | Error Views Directory Path |-------------------------------------------------------------------------- | | Leave this BLANK unless you would like to set something other than the default | application/views/errors/ directory. Use a full server path with trailing slash. 
| */ $config['error_views_path'] = ''; /* |-------------------------------------------------------------------------- | Cache Directory Path |-------------------------------------------------------------------------- | | Leave this BLANK unless you would like to set something other than the default | application/cache/ directory. Use a full server path with trailing slash. | */ $config['cache_path'] = ''; /* |-------------------------------------------------------------------------- | Cache Include Query String |-------------------------------------------------------------------------- | | Whether to take the URL query string into consideration when generating | output cache files. Valid options are: | | FALSE = Disabled | TRUE = Enabled, take all query parameters into account. | Please be aware that this may result in numerous cache | files generated for the same page over and over again. | array('q') = Enabled, but only take into account the specified list | of query parameters. | */ $config['cache_query_string'] = FALSE; /* |-------------------------------------------------------------------------- | Encryption Key |-------------------------------------------------------------------------- | | If you use the Encryption class, you must set an encryption key. | See the user guide for more info. | | https://codeigniter.com/user_guide/libraries/encryption.html | */ $config['encryption_key'] = "(00|_3n(rYp+!0n_k3Y"; /* |-------------------------------------------------------------------------- | Session Variables |-------------------------------------------------------------------------- | | 'sess_driver' | | The storage driver to use: files, database, redis, memcached | | 'sess_cookie_name' | | The session cookie name, must contain only [0-9a-z_-] characters | | 'sess_expiration' | | The number of SECONDS you want the session to last. | Setting to 0 (zero) means expire when the browser is closed. 
| | 'sess_save_path' | | The location to save sessions to, driver dependent. | | For the 'files' driver, it's a path to a writable directory. | WARNING: Only absolute paths are supported! | | For the 'database' driver, it's a table name. | Please read up the manual for the format with other session drivers. | | IMPORTANT: You are REQUIRED to set a valid save path! | | 'sess_match_ip' | | Whether to match the user's IP address when reading the session data. | | WARNING: If you're using the database driver, don't forget to update | your session table's PRIMARY KEY when changing this setting. | | 'sess_time_to_update' | | How many seconds between CI regenerating the session ID. | | 'sess_regenerate_destroy' | | Whether to destroy session data associated with the old session ID | when auto-regenerating the session ID. When set to FALSE, the data | will be later deleted by the garbage collector. | | Other session cookie settings are shared with the rest of the application, | except for 'cookie_prefix' and 'cookie_httponly', which are ignored here. | */ $config['sess_driver'] = 'files'; $config['sess_cookie_name'] = 'ci_session'; $config['sess_expiration'] = 0; $config['sess_save_path'] = NULL; $config['sess_match_ip'] = FALSE; $config['sess_time_to_update'] = 300; $config['sess_regenerate_destroy'] = FALSE; /* |-------------------------------------------------------------------------- | Cookie Related Variables |-------------------------------------------------------------------------- | | 'cookie_prefix' = Set a cookie name prefix if you need to avoid collisions | 'cookie_domain' = Set to .your-domain.com for site-wide cookies | 'cookie_path' = Typically will be a forward slash | 'cookie_secure' = Cookie will only be set if a secure HTTPS connection exists. | 'cookie_httponly' = Cookie will only be accessible via HTTP(S) (no javascript) | | Note: These settings (with the exception of 'cookie_prefix' and | 'cookie_httponly') will also affect sessions. 
| */ $config['cookie_prefix'] = ''; $config['cookie_domain'] = ''; $config['cookie_path'] = '/'; $config['cookie_secure'] = FALSE; $config['cookie_httponly'] = FALSE; /* |-------------------------------------------------------------------------- | Standardize newlines |-------------------------------------------------------------------------- | | Determines whether to standardize newline characters in input data, | meaning to replace \r\n, \r, \n occurrences with the PHP_EOL value. | | This is particularly useful for portability between UNIX-based OSes, | (usually \n) and Windows (\r\n). | */ $config['standardize_newlines'] = FALSE; /* |-------------------------------------------------------------------------- | Global XSS Filtering |-------------------------------------------------------------------------- | | Determines whether the XSS filter is always active when GET, POST or | COOKIE data is encountered | | WARNING: This feature is DEPRECATED and currently available only | for backwards compatibility purposes! | */ $config['global_xss_filtering'] = TRUE; /* |-------------------------------------------------------------------------- | Cross Site Request Forgery |-------------------------------------------------------------------------- | Enables a CSRF cookie token to be set. When set to TRUE, token will be | checked on a submitted form. If you are accepting user data, it is strongly | recommended CSRF protection be enabled. | | 'csrf_token_name' = The token name | 'csrf_cookie_name' = The cookie name | 'csrf_expire' = The number in seconds the token should expire. 
| 'csrf_regenerate' = Regenerate token on every submission | 'csrf_exclude_uris' = Array of URIs which ignore CSRF checks */ $config['csrf_protection'] = FALSE; $config['csrf_token_name'] = 'csrf_test_name'; $config['csrf_cookie_name'] = 'csrf_cookie_name'; $config['csrf_expire'] = 7200; $config['csrf_regenerate'] = TRUE; $config['csrf_exclude_uris'] = array(); /* |-------------------------------------------------------------------------- | Output Compression |-------------------------------------------------------------------------- | | Enables Gzip output compression for faster page loads. When enabled, | the output class will test whether your server supports Gzip. | Even if it does, however, not all browsers support compression | so enable only if you are reasonably sure your visitors can handle it. | | Only used if zlib.output_compression is turned off in your php.ini. | Please do not use it together with httpd-level output compression. | | VERY IMPORTANT: If you are getting a blank page when compression is enabled it | means you are prematurely outputting something to your browser. It could | even be a line of whitespace at the end of one of your scripts. For | compression to work, nothing can be sent before the output buffer is called | by the output class. Do not 'echo' any values with compression enabled. | */ $config['compress_output'] = FALSE; /* |-------------------------------------------------------------------------- | Master Time Reference |-------------------------------------------------------------------------- | | Options are 'local' or any PHP supported timezone. This preference tells | the system whether to use your server's local time as the master 'now' | reference, or convert it to the configured one timezone. See the 'date | helper' page of the user guide for information regarding date handling. 
| */ $config['time_reference'] = 'local'; /* |-------------------------------------------------------------------------- | Rewrite PHP Short Tags |-------------------------------------------------------------------------- | | If your PHP installation does not have short tag support enabled CI | can rewrite the tags on-the-fly, enabling you to utilize that syntax | in your view files. Options are TRUE or FALSE (boolean) | | Note: You need to have eval() enabled for this to work. | */ $config['rewrite_short_tags'] = FALSE; /* |-------------------------------------------------------------------------- | Reverse Proxy IPs |-------------------------------------------------------------------------- | | If your server is behind a reverse proxy, you must whitelist the proxy | IP addresses from which CodeIgniter should trust headers such as | HTTP_X_FORWARDED_FOR and HTTP_CLIENT_IP in order to properly identify | the visitor's IP address. | | You can use both an array or a comma-separated list of proxy addresses, | as well as specifying whole subnets. Here are a few examples: | | Comma-separated: '10.0.1.200,192.168.5.0/24' | Array: array('10.0.1.200', '192.168.5.0/24') */ $config['proxy_ips'] = '';
{ "content_hash": "12e09a64ff101c2ba4813bf1b847e472", "timestamp": "", "source": "github", "line_count": 513, "max_line_length": 83, "avg_line_length": 35.40350877192982, "alnum_prop": 0.5453694527034467, "repo_name": "barbarybaer/LampBeltExam", "id": "e8654dbf1a660bfc7c123e6f9aa43d39742ad148", "size": "18162", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "application/config/config.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "1466" }, { "name": "CSS", "bytes": "14726" }, { "name": "HTML", "bytes": "8139111" }, { "name": "JavaScript", "bytes": "56182" }, { "name": "PHP", "bytes": "1747500" } ], "symlink_target": "" }
module JmPlayer module Resources VERSION = 10022014 end end
{ "content_hash": "3bd4dce0ad708ac286f92530c86fbcc3", "timestamp": "", "source": "github", "line_count": 5, "max_line_length": 22, "avg_line_length": 13.6, "alnum_prop": 0.7352941176470589, "repo_name": "gregory/jm_player", "id": "777c983963b83ae78f7785dae29f15313ba27825", "size": "68", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/jm_player/resources/version.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "16701" }, { "name": "Shell", "bytes": "226" } ], "symlink_target": "" }
-------------------------------- -- set the impulse that break the constraint -- @function [parent=#Physics3DConstraint] setBreakingImpulse -- @param self -- @param #float impulse -- @return Physics3DConstraint#Physics3DConstraint self (return value: cc.Physics3DConstraint) -------------------------------- -- get user data -- @function [parent=#Physics3DConstraint] getUserData -- @param self -- @return void#void ret (return value: void) -------------------------------- -- get the impulse that break the constraint -- @function [parent=#Physics3DConstraint] getBreakingImpulse -- @param self -- @return float#float ret (return value: float) -------------------------------- -- get rigid body a -- @function [parent=#Physics3DConstraint] getBodyA -- @param self -- @return Physics3DRigidBody#Physics3DRigidBody ret (return value: cc.Physics3DRigidBody) -------------------------------- -- is it enabled -- @function [parent=#Physics3DConstraint] isEnabled -- @param self -- @return bool#bool ret (return value: bool) -------------------------------- -- get override number of solver iterations -- @function [parent=#Physics3DConstraint] getOverrideNumSolverIterations -- @param self -- @return int#int ret (return value: int) -------------------------------- -- get rigid body b -- @function [parent=#Physics3DConstraint] getBodyB -- @param self -- @return Physics3DRigidBody#Physics3DRigidBody ret (return value: cc.Physics3DRigidBody) -------------------------------- -- override the number of constraint solver iterations used to solve this constraint, -1 will use the default number of iterations, as specified in SolverInfo.m_numIterations -- @function [parent=#Physics3DConstraint] setOverrideNumSolverIterations -- @param self -- @param #int overrideNumIterations -- @return Physics3DConstraint#Physics3DConstraint self (return value: cc.Physics3DConstraint) -------------------------------- -- get constraint type -- @function [parent=#Physics3DConstraint] getConstraintType -- @param 
self -- @return int#int ret (return value: int) -------------------------------- -- get user data -- @function [parent=#Physics3DConstraint] setUserData -- @param self -- @param #void userData -- @return Physics3DConstraint#Physics3DConstraint self (return value: cc.Physics3DConstraint) -------------------------------- -- -- @function [parent=#Physics3DConstraint] getbtContraint -- @param self -- @return btTypedConstraint#btTypedConstraint ret (return value: btTypedConstraint) return nil
{ "content_hash": "8dcd0c59d66c222dd26992f147311981", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 174, "avg_line_length": 36.74647887323944, "alnum_prop": 0.6331927941740131, "repo_name": "oregamikiller/cocos2dPod", "id": "379d08b92a7bb787254b2bb06264de5ddd041639", "size": "2954", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "cocos/scripting/lua-bindings/auto/api/Physics3DConstraint.lua", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1361424" }, { "name": "C++", "bytes": "27103220" }, { "name": "CMake", "bytes": "39237" }, { "name": "GLSL", "bytes": "64406" }, { "name": "Java", "bytes": "324989" }, { "name": "JavaScript", "bytes": "2735830" }, { "name": "Lua", "bytes": "1899745" }, { "name": "Makefile", "bytes": "43351" }, { "name": "Objective-C", "bytes": "420064" }, { "name": "Objective-C++", "bytes": "351566" }, { "name": "Ruby", "bytes": "1730" } ], "symlink_target": "" }
package org.apache.geode.cache; import static org.apache.geode.internal.cache.tier.sockets.CacheClientNotifier.getInstance; import static org.apache.geode.test.dunit.Assert.assertEquals; import static org.apache.geode.test.dunit.Assert.assertFalse; import static org.apache.geode.test.dunit.Assert.assertNotNull; import static org.apache.geode.test.dunit.Assert.assertNull; import static org.apache.geode.test.dunit.Assert.assertTrue; import static org.apache.geode.test.dunit.Assert.fail; import static org.junit.runners.MethodSorters.NAME_ASCENDING; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import org.junit.FixMethodOrder; import org.junit.Ignore; import org.junit.Test; import org.junit.experimental.categories.Category; import org.apache.geode.CancelException; import org.apache.geode.DataSerializable; import org.apache.geode.LogWriter; import org.apache.geode.cache.client.NoAvailableServersException; import org.apache.geode.cache.client.Pool; import org.apache.geode.cache.client.PoolManager; import org.apache.geode.cache.client.internal.Endpoint; import org.apache.geode.cache.client.internal.PoolImpl; import org.apache.geode.cache.server.CacheServer; import org.apache.geode.cache.util.CacheListenerAdapter; import org.apache.geode.cache30.CacheSerializableRunnable; import org.apache.geode.cache30.CertifiableTestCacheListener; import org.apache.geode.cache30.ClientServerTestCase; import org.apache.geode.cache30.TestCacheLoader; import org.apache.geode.cache30.TestCacheWriter; import org.apache.geode.distributed.DistributedMember; import org.apache.geode.distributed.internal.ClusterDistributionManager; import org.apache.geode.distributed.internal.InternalDistributedSystem; import org.apache.geode.internal.Assert; import 
org.apache.geode.internal.cache.CacheServerImpl; import org.apache.geode.internal.cache.EntryExpiryTask; import org.apache.geode.internal.cache.LocalRegion; import org.apache.geode.internal.cache.PoolStats; import org.apache.geode.internal.cache.tier.sockets.CacheClientNotifier; import org.apache.geode.internal.cache.tier.sockets.CacheClientNotifierStats; import org.apache.geode.internal.cache.tier.sockets.ClientProxyMembershipID; import org.apache.geode.internal.logging.InternalLogWriter; import org.apache.geode.internal.logging.LocalLogWriter; import org.apache.geode.test.awaitility.GeodeAwaitility; import org.apache.geode.test.dunit.AsyncInvocation; import org.apache.geode.test.dunit.Host; import org.apache.geode.test.dunit.Invoke; import org.apache.geode.test.dunit.NetworkUtils; import org.apache.geode.test.dunit.SerializableRunnable; import org.apache.geode.test.dunit.ThreadUtils; import org.apache.geode.test.dunit.VM; import org.apache.geode.test.dunit.Wait; import org.apache.geode.test.dunit.WaitCriterion; import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase; import org.apache.geode.test.junit.categories.ClientServerTest; /** * This class tests the client connection pool in GemFire. It does so by creating a cache server * with a cache and a pre-defined region and a data loader. The client creates the same region with * a pool (this happens in the controller VM). the client then spins up 10 different threads and * issues gets on keys. The server data loader returns the data to the client. * * Test uses Groboutils TestRunnable objects to achieve multi threading behavior in the test. 
 */
@Category({ClientServerTest.class})
@FixMethodOrder(NAME_ASCENDING)
public class ConnectionPoolDUnitTest extends JUnit4CacheTestCase {

  /** The port on which the cache server was started in this VM */
  private static int bridgeServerPort;

  protected static int port = 0;
  protected static int port2 = 0;

  // Counters updated by client-side listeners; reset at the start of each client setup.
  protected static int numberOfAfterInvalidates;
  protected static int numberOfAfterCreates;
  protected static int numberOfAfterUpdates;

  // Event-type tags recorded by ControlListener into EventWrapper entries.
  protected static final int TYPE_CREATE = 0;
  protected static final int TYPE_UPDATE = 1;
  protected static final int TYPE_INVALIDATE = 2;
  protected static final int TYPE_DESTROY = 3;

  @Override
  public final void postSetUp() throws Exception {
    // avoid IllegalStateException from HandShake by connecting all vms to
    // system before creating pool
    getSystem();
    Invoke.invokeInEveryVM(new SerializableRunnable("getSystem") {
      public void run() {
        getSystem();
      }
    });
    postSetUpConnectionPoolDUnitTest();
  }

  // Template hook for subclasses; intentionally empty here.
  protected void postSetUpConnectionPoolDUnitTest() throws Exception {}

  @Override
  public final void postTearDownCacheTestCase() throws Exception {
    // Every VM must have released its pools by the time a test finishes.
    Invoke.invokeInEveryVM(new SerializableRunnable() {
      public void run() {
        Map pools = PoolManager.getAll();
        if (!pools.isEmpty()) {
          org.apache.geode.test.dunit.LogWriterUtils.getLogWriter()
              .warning("found pools remaining after teardown: " + pools);
          assertEquals(0, pools.size());
        }
      }
    });
    postTearDownConnectionPoolDUnitTest();
  }

  // Template hook for subclasses; intentionally empty here.
  protected void postTearDownConnectionPoolDUnitTest() throws Exception {}

  /**
   * Returns the connection pool backing the given region, or null when the region has no pool
   * name configured.
   */
  protected/* GemStoneAddition */ static PoolImpl getPool(Region r) {
    PoolImpl result = null;
    String poolName = r.getAttributes().getPoolName();
    if (poolName != null) {
      result = (PoolImpl) PoolManager.find(poolName);
    }
    return result;
  }

  /** Returns the region's cache writer, cast to the TestCacheWriter installed by these tests. */
  protected static TestCacheWriter getTestWriter(Region r) {
    return (TestCacheWriter) r.getAttributes().getCacheWriter();
  }

  /**
   * Create a cache server on the given port without starting it.
   *
   * @since GemFire 5.0.2
   */
  protected void createBridgeServer(int port) throws IOException {
    CacheServer bridge = getCache().addCacheServer();
    bridge.setPort(port);
    bridge.setMaxThreads(getMaxThreads());
    // Record the configured port so tests can retrieve it later.
    bridgeServerPort = bridge.getPort();
  }

  /**
   * Starts a cache server on the given port, using the given deserializeValues and
   * notifyBySubscription to serve up the given region.
   *
   * @since GemFire 4.0
   */
  protected void startBridgeServer(int port) throws IOException {
    startBridgeServer(port, -1);
  }

  // Overload using the default load poll interval.
  protected void startBridgeServer(int port, int socketBufferSize) throws IOException {
    startBridgeServer(port, socketBufferSize, CacheServer.DEFAULT_LOAD_POLL_INTERVAL);
  }

  protected void startBridgeServer(int port, int socketBufferSize, long loadPollInterval)
      throws IOException {
    Cache cache = getCache();
    CacheServer bridge = cache.addCacheServer();
    bridge.setPort(port);
    if (socketBufferSize != -1) {
      // -1 means: keep the server's default socket buffer size.
      bridge.setSocketBufferSize(socketBufferSize);
    }
    bridge.setMaxThreads(getMaxThreads());
    bridge.setLoadPollInterval(loadPollInterval);
    bridge.start();
    // Record the port actually bound (tests commonly pass 0 for an ephemeral port).
    bridgeServerPort = bridge.getPort();
  }

  /**
   * By default return 0 which turns off selector and gives thread per cnx. Test subclasses can
   * override to run with selector.
   *
   * @since GemFire 5.1
   */
  protected int getMaxThreads() {
    return 0;
  }

  /**
   * Stops the cache server that serves up the given cache.
   *
   * @since GemFire 4.0
   */
  void stopBridgeServer(Cache cache) {
    // Only the first registered server is stopped (these tests start one per VM).
    CacheServer bridge = cache.getCacheServers().iterator().next();
    bridge.stop();
    assertFalse(bridge.isRunning());
  }

  // Stops every cache server registered with the given cache.
  void stopBridgeServers(Cache cache) {
    CacheServer bridge = null;
    for (Iterator bsI = cache.getCacheServers().iterator(); bsI.hasNext();) {
      bridge = (CacheServer) bsI.next();
      bridge.stop();
      assertFalse(bridge.isRunning());
    }
  }

  // Restarts every cache server registered with the given cache.
  private void restartBridgeServers(Cache cache) throws IOException {
    CacheServer bridge = null;
    for (Iterator bsI = cache.getCacheServers().iterator(); bsI.hasNext();) {
      bridge = (CacheServer) bsI.next();
      bridge.start();
      assertTrue(bridge.isRunning());
    }
  }

  // Disconnects from the current distributed system and returns a fresh loner (standalone) DS.
  protected InternalDistributedSystem createLonerDS() {
    disconnectFromDS();
    InternalDistributedSystem ds = getLonerSystem();
    assertEquals(0, ds.getDistributionManager().getOtherDistributionManagerIds().size());
    return ds;
  }

  /**
   * Returns region attributes for a <code>LOCAL</code> region
   */
  protected RegionAttributes getRegionAttributes() {
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.LOCAL);
    factory.setConcurrencyChecksEnabled(false); // test validation expects this behavior
    return factory.create();
  }

  // Builds a "name0=host:port0,name1=host:port1,..." endpoint list for a bridge client.
  private static String createBridgeClientConnection(String host, int[] ports) {
    StringBuffer sb = new StringBuffer();
    for (int i = 0; i < ports.length; i++) {
      if (i > 0)
        sb.append(",");
      sb.append("name" + i + "=");
      sb.append(host + ":" + ports[i]);
    }
    return sb.toString();
  }

  // Immutable snapshot of an EntryEvent plus the TYPE_* tag under which it was recorded.
  private class EventWrapper {
    public final EntryEvent event;
    public final Object key;
    public final Object val;
    public final Object arg;
    public final int type;

    public EventWrapper(EntryEvent ee, int type) {
      this.event = ee;
      this.key = ee.getKey();
      this.val = ee.getNewValue();
      this.arg = ee.getCallbackArgument();
      this.type = type;
    }

    public String toString() {
      return "EventWrapper: event=" + event + ", type=" + type;
    }
  }

  // Listener that queues every callback as an EventWrapper and lets tests wait for events.
  protected class ControlListener extends CacheListenerAdapter {
    public final LinkedList events = new
        LinkedList();
    public final Object CONTROL_LOCK = new Object();

    /**
     * Waits up to sleepMs milliseconds for at least eventCount events to be recorded.
     * NOTE: returns true as soon as ANY event has arrived, even if fewer than eventCount.
     */
    public boolean waitWhileNotEnoughEvents(long sleepMs, int eventCount) {
      long maxMillis = System.currentTimeMillis() + sleepMs;
      synchronized (this.CONTROL_LOCK) {
        try {
          while (this.events.size() < eventCount) {
            long waitMillis = maxMillis - System.currentTimeMillis();
            if (waitMillis < 10) {
              break; // out of time
            }
            this.CONTROL_LOCK.wait(waitMillis);
          }
        } catch (InterruptedException abort) {
          fail("interrupted");
        }
        return !this.events.isEmpty();
      }
    }

    // Each callback records the event and wakes any thread blocked in waitWhileNotEnoughEvents.
    public void afterCreate(EntryEvent e) {
      // System.out.println("afterCreate: " + e);
      synchronized (this.CONTROL_LOCK) {
        this.events.add(new EventWrapper(e, TYPE_CREATE));
        this.CONTROL_LOCK.notifyAll();
      }
    }

    public void afterUpdate(EntryEvent e) {
      // System.out.println("afterUpdate: " + e);
      synchronized (this.CONTROL_LOCK) {
        this.events.add(new EventWrapper(e, TYPE_UPDATE));
        this.CONTROL_LOCK.notifyAll();
      }
    }

    public void afterInvalidate(EntryEvent e) {
      // System.out.println("afterInvalidate: " + e);
      synchronized (this.CONTROL_LOCK) {
        this.events.add(new EventWrapper(e, TYPE_INVALIDATE));
        this.CONTROL_LOCK.notifyAll();
      }
    }

    public void afterDestroy(EntryEvent e) {
      // System.out.println("afterDestroy: " + e);
      synchronized (this.CONTROL_LOCK) {
        this.events.add(new EventWrapper(e, TYPE_DESTROY));
        this.CONTROL_LOCK.notifyAll();
      }
    }
  }

  /**
   * Create a fake EntryEvent that returns the provided region for {@link CacheEvent#getRegion()}
   * and returns {@link org.apache.geode.cache.Operation#LOCAL_LOAD_CREATE} for
   * {@link CacheEvent#getOperation()}
   *
   * @return fake entry event
   */
  protected static EntryEvent createFakeyEntryEvent(final Region r) {
    return new EntryEvent() {
      public Operation getOperation() {
        return Operation.LOCAL_LOAD_CREATE; // fake out pool to exit early
      }

      public Region getRegion() {
        return r;
      }

      public Object getKey() {
        return null;
      }

      public Object getOldValue() {
        return null;
      }

      public boolean isOldValueAvailable() {
        return true;
      }

      public Object getNewValue() {
        return null;
      }

      public boolean isLocalLoad() {
        return false;
      }

      public boolean isNetLoad() {
        return false;
      }

      public boolean isLoad() {
        return true;
      }

      public boolean isNetSearch() {
        return false;
      }

      public TransactionId getTransactionId() {
        return null;
      }

      public Object getCallbackArgument() {
        return null;
      }

      public boolean isCallbackArgumentAvailable() {
        return true;
      }

      public boolean isOriginRemote() {
        return false;
      }

      public DistributedMember getDistributedMember() {
        return null;
      }

      public boolean isExpiration() {
        return false;
      }

      public boolean isDistributed() {
        return false;
      }

      public boolean isBridgeEvent() {
        return hasClientOrigin();
      }

      public boolean hasClientOrigin() {
        return false;
      }

      public ClientProxyMembershipID getContext() {
        return null;
      }

      public SerializedCacheValue getSerializedOldValue() {
        return null;
      }

      public SerializedCacheValue getSerializedNewValue() {
        return null;
      }
    };
  }

  /**
   * Waits until the pool is connected to expectedServer servers and every endpoint holds exactly
   * expectedConsPerServer connections, then asserts that balance.
   */
  public void verifyBalanced(final PoolImpl pool, int expectedServer,
      final int expectedConsPerServer) {
    verifyServerCount(pool, expectedServer);
    WaitCriterion ev = new WaitCriterion() {
      public boolean done() {
        return balanced(pool, expectedConsPerServer);
      }

      public String description() {
        return "expected " + expectedConsPerServer + " but endpoints=" + outOfBalanceReport(pool);
      }
    };
    GeodeAwaitility.await().untilAsserted(ev);
    assertEquals("expected " + expectedConsPerServer + " but endpoints=" + outOfBalanceReport(pool),
        true, balanced(pool, expectedConsPerServer));
  }

  /** Returns true iff every endpoint in the pool has exactly expectedConsPerServer connections. */
  protected boolean balanced(PoolImpl pool, int expectedConsPerServer) {
    Iterator it = pool.getEndpointMap().values().iterator();
    while (it.hasNext()) {
      Endpoint ep = (Endpoint) it.next();
      if (ep.getStats().getConnections() != expectedConsPerServer) {
        return false;
      }
    }
    return true;
  }

  /** Builds a human-readable "&lt;ep=... conCount=..., ...&gt;" per-endpoint connection summary. */
  protected String outOfBalanceReport(PoolImpl pool) {
    StringBuffer result = new StringBuffer();
    Iterator it = pool.getEndpointMap().values().iterator();
    result.append("<");
    while (it.hasNext()) {
      Endpoint ep = (Endpoint) it.next();
      result.append("ep=" + ep);
      result.append(" conCount=" + ep.getStats().getConnections());
      if (it.hasNext()) {
        result.append(", ");
      }
    }
    result.append(">");
    return result.toString();
  }

  /** Waits until the pool's denylist of servers is empty, then asserts that it is. */
  public void waitForDenylistToClear(final PoolImpl pool) {
    WaitCriterion ev = new WaitCriterion() {
      public boolean done() {
        return pool.getDenylistedServers().size() == 0;
      }

      public String description() {
        return null;
      }
    };
    GeodeAwaitility.await().untilAsserted(ev);
    assertEquals("unexpected denylistedServers=" + pool.getDenylistedServers(), 0,
        pool.getDenylistedServers().size());
  }

  /** Waits until the pool reports exactly expectedCount connected servers. */
  public void verifyServerCount(final PoolImpl pool, final int expectedCount) {
    getCache().getLogger().info("verifyServerCount expects=" + expectedCount);
    WaitCriterion ev = new WaitCriterion() {
      String excuse;

      public boolean done() {
        int actual = pool.getConnectedServerCount();
        if (actual == expectedCount) {
          return true;
        }
        excuse = "Found only " + actual + " servers, expected " + expectedCount;
        return false;
      }

      public String description() {
        return excuse;
      }
    };
    GeodeAwaitility.await().untilAsserted(ev);
  }

  /**
   * Tests that the callback argument is sent to the server
   */
  @Test
  public void test001CallbackArg() throws CacheException {
    final String name = this.getName();
    final Host host = Host.getHost(0);
    VM vm0 = host.getVM(0); // server
    VM vm1 = host.getVM(1); // client

    final Object createCallbackArg = "CREATE CALLBACK ARG";
    final Object updateCallbackArg = "PUT CALLBACK ARG";

    // Server VM: install a writer that checks the callback argument arriving from the client.
    vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
      public void run2() throws CacheException {
        CacheWriter cw = new TestCacheWriter() {
          public final void beforeUpdate2(EntryEvent event) throws CacheWriterException {
            Object beca = event.getCallbackArgument();
            assertEquals(updateCallbackArg, beca);
          }

          public void beforeCreate2(EntryEvent event) throws CacheWriterException {
            Object beca = event.getCallbackArgument();
            assertEquals(createCallbackArg, beca);
          }
        };
        AttributesFactory factory = getBridgeServerRegionAttributes(null, cw);
        createRegion(name, factory.create());
        // pause(1000);
        try {
          startBridgeServer(0); // 0 = pick an ephemeral port
        } catch (Exception ex) {
          org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
        }
      }
    });
    final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
    // NOTE(review): host0 is unused below; the pool resolves the host name itself.
    final String host0 = NetworkUtils.getServerHostName(vm0.getHost());

    // Client VM: create a LOCAL region backed by a pool pointing at the server.
    SerializableRunnable create = new CacheSerializableRunnable("Create region") {
      public void run2() throws CacheException {
        getLonerSystem();
        getCache();
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.LOCAL);
        factory.setConcurrencyChecksEnabled(false);
        ClientServerTestCase.configureConnectionPool(factory,
            NetworkUtils.getServerHostName(host), port, -1, true, -1, -1, null);
        createRegion(name, factory.create());
      }
    };

    vm1.invoke(create);

    // Client VM: creates then puts, each carrying its callback argument to the server writer.
    vm1.invoke(new CacheSerializableRunnable("Add entries") {
      public void run2() throws CacheException {
        Region region = getRootRegion().getSubregion(name);
        for (int i = 0; i < 10; i++) {
          region.create(new Integer(i), "old" + i, createCallbackArg);
        }
        for (int i = 0; i < 10; i++) {
          region.put(new Integer(i), "new" + i, updateCallbackArg);
        }
      }
    });

    // Server VM: the writer must have been driven by the client's operations.
    vm0.invoke(new CacheSerializableRunnable("Check cache writer") {
      public void run2() throws CacheException {
        Region region = getRootRegion().getSubregion(name);
        TestCacheWriter writer = getTestWriter(region);
        assertTrue(writer.wasInvoked());
      }
    });

    SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
      public void run2() throws CacheException {
        Region region = getRootRegion().getSubregion(name);
        region.localDestroyRegion();
      }
    };

    vm1.invoke(close);

    vm0.invoke(new SerializableRunnable("Stop CacheServer") {
      public void run() {
        stopBridgeServer(getCache());
      }
    });
  }

  /**
   * Tests that consecutive puts have the callback assigned appropriately.
   */
  @Test
  public void test002CallbackArg2() throws CacheException {
    final String name = this.getName();
    final Host host = Host.getHost(0);
    VM vm0 = host.getVM(0); // server
    VM vm1 = host.getVM(1); // client

    final Object createCallbackArg = "CREATE CALLBACK ARG";
    // final Object updateCallbackArg = "PUT CALLBACK ARG";

    // Server VM: even keys must carry the callback argument, odd keys must carry none.
    vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
      public void run2() throws CacheException {
        CacheWriter cw = new TestCacheWriter() {
          public void beforeCreate2(EntryEvent event) throws CacheWriterException {
            Integer key = (Integer) event.getKey();
            if (key.intValue() % 2 == 0) {
              Object beca = event.getCallbackArgument();
              assertEquals(createCallbackArg, beca);
            } else {
              Object beca = event.getCallbackArgument();
              assertNull(beca);
            }
          }
        };
        AttributesFactory factory = getBridgeServerRegionAttributes(null, cw);
        createRegion(name, factory.create());
        // pause(1000);
        try {
          startBridgeServer(0);
        } catch (Exception ex) {
          org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
        }
      }
    });
    final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
    final String host0 = NetworkUtils.getServerHostName(vm0.getHost());

    SerializableRunnable create = new CacheSerializableRunnable("Create region") {
      public void run2() throws CacheException {
        getLonerSystem();
        getCache();
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.LOCAL);
        factory.setConcurrencyChecksEnabled(false);
        ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
        createRegion(name, factory.create());
      }
    };

    vm1.invoke(create);

    // Client VM: alternate creates with and without the callback argument.
    vm1.invoke(new CacheSerializableRunnable("Add entries") {
      public void run2() throws CacheException {
        Region region = getRootRegion().getSubregion(name);
        for (int i = 0; i < 10; i++) {
          if (i % 2 == 0) {
            region.create(new Integer(i), "old" + i, createCallbackArg);
          } else {
            region.create(new Integer(i), "old" + i);
          }
        }
      }
    });

    SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
      public void run2() throws CacheException {
        Region region = getRootRegion().getSubregion(name);
        region.localDestroyRegion();
      }
    };

    vm1.invoke(close);

    vm0.invoke(new CacheSerializableRunnable("Check cache writer") {
      public void run2() throws CacheException {
        Region region = getRootRegion().getSubregion(name);
        TestCacheWriter writer = getTestWriter(region);
        assertTrue(writer.wasInvoked());
      }
    });

    vm0.invoke(new SerializableRunnable("Stop CacheServer") {
      public void run() {
        stopBridgeServer(getCache());
      }
    });
  }

  /**
   * Tests for bug 36684 by having two cache servers with cacheloaders that should always return a
   * value and one client connected to each server reading values. If the bug exists, the clients
   * will get null sometimes.
   *
   */
  @Test
  public void test003Bug36684() throws CacheException, InterruptedException {
    final String name = this.getName();
    final Host host = Host.getHost(0);
    VM vm0 = host.getVM(0); // server 1
    VM vm1 = host.getVM(1); // server 2
    VM vm2 = host.getVM(2); // client 1
    VM vm3 = host.getVM(3); // client 2

    // Create the cache servers with distributed, mirrored region
    SerializableRunnable createServer = new CacheSerializableRunnable("Create Cache Server") {
      public void run2() throws CacheException {
        // Loader echoes the key back, so a get() must never observe null.
        CacheLoader cl = new CacheLoader() {
          public Object load(LoaderHelper helper) {
            return helper.getKey();
          }

          public void close() {}
        };
        AttributesFactory factory = getBridgeServerMirroredAckRegionAttributes(cl, null);
        createRegion(name, factory.create());
        // pause(1000);
        try {
          startBridgeServer(0);
        } catch (Exception ex) {
          org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
        }
      }
    };
    getSystem().getLogWriter().info("before create server");
    vm0.invoke(createServer);
    vm1.invoke(createServer);

    // Create cache server clients
    final int numberOfKeys = 1000;
    final String host0 = NetworkUtils.getServerHostName(host);
    final int vm0Port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
    final int vm1Port = vm1.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
    SerializableRunnable createClient =
        new CacheSerializableRunnable("Create Cache Server Client") {
          public void run2() throws CacheException {
            // reset all static listener variables in case this is being rerun in a subclass
            numberOfAfterInvalidates = 0;
            numberOfAfterCreates = 0;
            numberOfAfterUpdates = 0;
            // create the region
            getLonerSystem();
            AttributesFactory factory = new AttributesFactory();
            factory.setScope(Scope.LOCAL);
            factory.setConcurrencyChecksEnabled(false); // test validation expects this behavior
            // create bridge writer connected to both servers
            ClientServerTestCase.configureConnectionPool(factory, host0, vm0Port, vm1Port, true,
                -1, -1, null);
            createRegion(name, factory.create());
          }
        };
    getSystem().getLogWriter().info("before create client");
    vm2.invoke(createClient);
    vm3.invoke(createClient);

    // Initialize each client with entries (so that afterInvalidate is called)
    SerializableRunnable initializeClient = new CacheSerializableRunnable("Initialize Client") {
      public void run2() throws CacheException {
        // StringBuffer errors = new StringBuffer();
        numberOfAfterInvalidates = 0;
        numberOfAfterCreates = 0;
        numberOfAfterUpdates = 0;
        LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
        // Every get must come back non-null: the server loader echoes the key (bug 36684 check).
        for (int i = 0; i < numberOfKeys; i++) {
          String expected = "key-" + i;
          String actual = (String) region.get("key-" + i);
          assertEquals(expected, actual);
        }
      }
    };
    getSystem().getLogWriter().info("before initialize client");
    // Run both clients concurrently to provoke the race described in bug 36684.
    AsyncInvocation inv2 = vm2.invokeAsync(initializeClient);
    AsyncInvocation inv3 = vm3.invokeAsync(initializeClient);

    ThreadUtils.join(inv2, 30 * 1000);
    ThreadUtils.join(inv3, 30 * 1000);

    if (inv2.exceptionOccurred()) {
      org.apache.geode.test.dunit.Assert.fail("Error occurred in vm2", inv2.getException());
    }
    if (inv3.exceptionOccurred()) {
      org.apache.geode.test.dunit.Assert.fail("Error occurred in vm3", inv3.getException());
    }
  }

  /**
   * Test for client connection loss with CacheLoader Exception on the server.
   */
  @Test
  public void test004ForCacheLoaderException() throws CacheException, InterruptedException {
    final String name = this.getName();
    final Host host = Host.getHost(0);
    VM server = host.getVM(0);
    VM client = host.getVM(1);

    // Create the cache servers with distributed, mirrored region
    SerializableRunnable createServer = new CacheSerializableRunnable("Create Cache Server") {
      public void run2() throws CacheException {
        // Loader always throws so every client get() fails on the server side.
        CacheLoader cl = new CacheLoader() {
          public Object load(LoaderHelper helper) {
            System.out.println("### CALLING CACHE LOADER....");
            throw new CacheLoaderException(
                "Test for CahceLoaderException causing Client connection to disconnect.");
          }

          public void close() {}
        };
        AttributesFactory factory = getBridgeServerMirroredAckRegionAttributes(cl, null);
        createRegion(name, factory.create());
        // pause(1000);
        try {
          startBridgeServer(0);
        } catch (Exception ex) {
          org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
        }
      }
    };
    getSystem().getLogWriter().info("before create server");
    server.invoke(createServer);

    // Create cache server clients
    final int numberOfKeys = 10;
    final String host0 = NetworkUtils.getServerHostName(host);
    final int[] port =
        new int[] {server.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort())};
    final String poolName = "myPool";

    SerializableRunnable createClient =
        new CacheSerializableRunnable("Create Cache Server Client") {
          public void run2() throws CacheException {
            getLonerSystem();
            AttributesFactory factory = new AttributesFactory();
            factory.setScope(Scope.LOCAL);
            factory.setConcurrencyChecksEnabled(false);
            // create bridge writer
            ClientServerTestCase.configureConnectionPoolWithName(factory, host0, port, true, -1,
                -1, null, poolName);
            createRegion(name, factory.create());
          }
        };
    getSystem().getLogWriter().info("before create client");
    client.invoke(createClient);

    // Initialize each client with entries (so that afterInvalidate is called)
    // (identifier typo "Laoder" kept: renaming would be a code change)
    SerializableRunnable invokeServerCacheLaoder = new CacheSerializableRunnable(
        "Initialize Client") {
      public void run2() throws CacheException {
        LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
        PoolStats stats = ((PoolImpl) PoolManager.find(poolName)).getStats();
        int oldConnects = stats.getConnects();
        int oldDisConnects = stats.getDisConnects();
        try {
          for (int i = 0; i < numberOfKeys; i++) {
            String actual = (String) region.get("key-" + i);
          }
        } catch (Exception ex) {
          if (!(ex.getCause() instanceof CacheLoaderException)) {
            fail(
                "UnExpected Exception, expected to receive CacheLoaderException from server, instead found: "
                    + ex.getCause().getClass());
          }
        }
        // A server-side CacheLoaderException must NOT cost the client its connection.
        int newConnects = stats.getConnects();
        int newDisConnects = stats.getDisConnects();
        // System.out.println("#### new connects/disconnects :" + newConnects + ":" +
        // newDisConnects);
        // NOTE(review): uses && — this only fails when BOTH counters changed; confirm that
        // an || was not intended here.
        if (newConnects != oldConnects && newDisConnects != oldDisConnects) {
          fail("New connection has created for Server side CacheLoaderException.");
        }
      }
    };
    getSystem().getLogWriter().info("before initialize client");
    AsyncInvocation inv2 = client.invokeAsync(invokeServerCacheLaoder);
    ThreadUtils.join(inv2, 30 * 1000);

    SerializableRunnable stopServer = new SerializableRunnable("stop CacheServer") {
      public void run() {
        stopBridgeServer(getCache());
      }
    };
    server.invoke(stopServer);
  }

  // Asserts this VM is connected to exactly one DS and is not the dedicated admin VM.
  protected void validateDS() {
    List l = InternalDistributedSystem.getExistingSystems();
    if (l.size() > 1) {
      getSystem().getLogWriter().info("validateDS: size=" + l.size() + " isDedicatedAdminVM="
          + ClusterDistributionManager.isDedicatedAdminVM() + " l=" + l);
    }
    assertFalse(ClusterDistributionManager.isDedicatedAdminVM());
    assertEquals(1, l.size());
  }

  /**
   * Tests the basic operations of the {@link Pool}
   *
   * @since GemFire 3.5
   */
  @Test
  public void test006Pool() throws CacheException {
    final String name = this.getName();
    final Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    VM vm2 = Host.getHost(0).getVM(2);

    vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
public void run2() throws CacheException { AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.DISTRIBUTED_ACK); factory.setConcurrencyChecksEnabled(false); factory.setCacheLoader(new CacheLoader() { public Object load(LoaderHelper helper) { // System.err.println("CacheServer data loader called"); return helper.getKey().toString(); } public void close() { } }); createRegion(name, factory.create()); // pause(1000); try { startBridgeServer(0); } catch (Exception ex) { org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex); } } }); final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort()); final String host0 = NetworkUtils.getServerHostName(vm0.getHost()); SerializableRunnable create = new CacheSerializableRunnable("Create region") { public void run2() throws CacheException { getLonerSystem(); getCache(); validateDS(); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); factory.setConcurrencyChecksEnabled(false); ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null); createRegion(name, factory.create()); } }; vm1.invoke(create); vm1.invoke(new CacheSerializableRunnable("Get values") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { Object value = region.get(new Integer(i)); assertEquals(String.valueOf(i), value); } } }); vm1.invoke(new CacheSerializableRunnable("Update values") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { region.put(new Integer(i), new Integer(i)); } } }); vm2.invoke(create); vm2.invoke(new CacheSerializableRunnable("Validate values") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { Object value = region.get(new Integer(i)); assertNotNull(value); assertTrue(value instanceof Integer); 
assertEquals(i, ((Integer) value).intValue()); } } }); vm1.invoke(new CacheSerializableRunnable("Close Pool") { // do some special close validation here public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); String pName = region.getAttributes().getPoolName(); PoolImpl p = (PoolImpl) PoolManager.find(pName); assertEquals(false, p.isDestroyed()); assertEquals(1, p.getAttachCount()); try { p.destroy(); fail("expected IllegalStateException"); } catch (IllegalStateException expected) { } region.localDestroyRegion(); assertEquals(false, p.isDestroyed()); assertEquals(0, p.getAttachCount()); } }); vm0.invoke(new SerializableRunnable("Stop CacheServer") { public void run() { stopBridgeServer(getCache()); } }); } /** * Tests the BridgeServer failover (bug 31832). */ @Test public void test007BridgeServerFailoverCnx1() throws CacheException { disconnectAllFromDS(); basicTestBridgeServerFailover(1); } /** * Test BridgeServer failover with connectionsPerServer set to 0 */ @Test public void test008BridgeServerFailoverCnx0() throws CacheException { basicTestBridgeServerFailover(0); } private void basicTestBridgeServerFailover(final int cnxCount) throws CacheException { final String name = this.getName(); final Host host = Host.getHost(0); VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); VM vm2 = host.getVM(2); // Create two cache servers SerializableRunnable createCacheServer = new CacheSerializableRunnable("Create Cache Server") { public void run2() throws CacheException { AttributesFactory factory = getBridgeServerRegionAttributes(null, null); createRegion(name, factory.create()); // pause(1000); try { startBridgeServer(0); } catch (Exception ex) { org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex); } } }; vm0.invoke(createCacheServer); vm1.invoke(createCacheServer); final int port0 = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort()); final String host0 = NetworkUtils.getServerHostName(vm0.getHost()); 
final int port1 = vm1.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort()); // final String host1 = getServerHostName(vm1.getHost()); // Create one bridge client in this VM SerializableRunnable create = new CacheSerializableRunnable("Create region") { public void run2() throws CacheException { getLonerSystem(); getCache(); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); factory.setConcurrencyChecksEnabled(false); ClientServerTestCase.configureConnectionPool(factory, host0, port0, port1, true, -1, cnxCount, null, 100); Region region = createRegion(name, factory.create()); // force connections to form region.put("keyInit", new Integer(0)); region.put("keyInit2", new Integer(0)); } }; vm2.invoke(create); // Launch async thread that puts objects into cache. This thread will execute until // the test has ended (which is why the RegionDestroyedException and CacheClosedException // are caught and ignored. If any other exception occurs, the test will fail. See // the putAI.exceptionOccurred() assertion below. 
AsyncInvocation putAI = vm2.invokeAsync(new CacheSerializableRunnable("Put objects") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); try { for (int i = 0; i < 100000; i++) { region.put("keyAI", new Integer(i)); try { Thread.sleep(100); } catch (InterruptedException ie) { fail("interrupted"); } } } catch (NoAvailableServersException ignore) { /* ignore */ } catch (RegionDestroyedException e) { // will be thrown when the test ends /* ignore */ } catch (CancelException e) { // will be thrown when the test ends /* ignore */ } } }); SerializableRunnable verify1Server = new CacheSerializableRunnable("verify1Server") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); PoolImpl pool = getPool(region); verifyServerCount(pool, 1); } }; SerializableRunnable verify2Servers = new CacheSerializableRunnable("verify2Servers") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); PoolImpl pool = getPool(region); verifyServerCount(pool, 2); } }; vm2.invoke(verify2Servers); SerializableRunnable stopCacheServer = new SerializableRunnable("Stop CacheServer") { public void run() { stopBridgeServer(getCache()); } }; final String expected = "java.io.IOException"; final String addExpected = "<ExpectedException action=add>" + expected + "</ExpectedException>"; final String removeExpected = "<ExpectedException action=remove>" + expected + "</ExpectedException>"; vm2.invoke(new SerializableRunnable() { public void run() { LogWriter bgexecLogger = new LocalLogWriter(InternalLogWriter.ALL_LEVEL, System.out); bgexecLogger.info(addExpected); } }); try { // make sure we removeExpected // Bounce the non-current server (I know that VM1 contains the non-current server // because ... 
vm1.invoke(stopCacheServer); vm2.invoke(verify1Server); final int restartPort = port1; vm1.invoke(new SerializableRunnable("Restart CacheServer") { public void run() { try { Region region = getRootRegion().getSubregion(name); assertNotNull(region); startBridgeServer(restartPort); } catch (Exception e) { getSystem().getLogWriter().fine(new Exception(e)); org.apache.geode.test.dunit.Assert.fail("Failed to start CacheServer", e); } } }); // Pause long enough for the monitor to realize the server has been bounced // and reconnect to it. vm2.invoke(verify2Servers); } finally { vm2.invoke(new SerializableRunnable() { public void run() { LogWriter bgexecLogger = new LocalLogWriter(InternalLogWriter.ALL_LEVEL, System.out); bgexecLogger.info(removeExpected); } }); } // Stop the other cache server vm0.invoke(stopCacheServer); // Run awhile vm2.invoke(verify1Server); org.apache.geode.test.dunit.LogWriterUtils.getLogWriter() .info("FIXME: this thread does not terminate"); // FIXME // // Verify that no exception has occurred in the putter thread // join(putAI, 5 * 60 * 1000, getLogWriter()); // //assertTrue("Exception occurred while invoking " + putAI, !putAI.exceptionOccurred()); // if (putAI.exceptionOccurred()) { // fail("While putting entries: ", putAI.getException()); // } // Close Pool vm2.invoke(new CacheSerializableRunnable("Close Pool") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); region.localDestroyRegion(); } }); // Stop the last cache server vm1.invoke(stopCacheServer); } /** * Make sure cnx lifetime expiration working on thread local cnxs. * * @author darrel */ @Test public void test009LifetimeExpireOnTL() throws CacheException { basicTestLifetimeExpire(true); } /** * Make sure cnx lifetime expiration working on thread local cnxs. 
   *
   * @author darrel
   */
  @Test
  public void test010LifetimeExpireOnPoolCnx() throws CacheException {
    basicTestLifetimeExpire(false);
  }

  // Cross-VM coordination state for basicTestLifetimeExpire. Written in vm2 (putter loops)
  // and read/updated by the verify runnables; volatile so the putter threads observe the
  // stop flag promptly.
  protected static volatile boolean stopTestLifetimeExpire = false;
  protected static volatile int baselineLifetimeCheck;
  protected static volatile int baselineLifetimeExtensions;
  protected static volatile int baselineLifetimeConnect;
  protected static volatile int baselineLifetimeDisconnect;

  // Common driver for test009/test010: with one live server, connection lifetime expiry
  // should only extend existing connections (no connect/disconnect churn); after the second
  // server starts, expiry should cause some connections to migrate (load conditioning).
  private void basicTestLifetimeExpire(final boolean threadLocal) throws CacheException {
    final String name = this.getName();
    final Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    VM vm2 = host.getVM(2);
    AsyncInvocation putAI = null;
    AsyncInvocation putAI2 = null;

    try {
      // Create two cache servers
      SerializableRunnable createCacheServer =
          new CacheSerializableRunnable("Create Cache Server") {
            public void run2() throws CacheException {
              AttributesFactory factory = getBridgeServerRegionAttributes(null, null);
              // DelayListener slows server-side ops so connection lifetimes actually expire
              // while work is in flight.
              factory.setCacheListener(new DelayListener(25));
              createRegion(name, factory.create());
              try {
                startBridgeServer(0);
              } catch (Exception ex) {
                org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
              }
            }
          };

      vm0.invoke(createCacheServer);
      final int port0 = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
      final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
      vm1.invoke(createCacheServer);
      final int port1 = vm1.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
      SerializableRunnable stopCacheServer = new SerializableRunnable("Stop CacheServer") {
        public void run() {
          stopBridgeServer(getCache());
        }
      };
      // we only had to stop it to reserve a port
      vm1.invoke(stopCacheServer);

      // Create one bridge client in this VM
      SerializableRunnable create = new CacheSerializableRunnable("Create region") {
        public void run2() throws CacheException {
          getLonerSystem();
          getCache();
          AttributesFactory factory = new AttributesFactory();
          factory.setScope(Scope.LOCAL);
          factory.setConcurrencyChecksEnabled(false);
          // lifetime=500ms so expirations happen quickly during the test
          ClientServerTestCase.configureConnectionPool(factory, host0, port0, port1,
              false/* queue */, -1, 0, null, 100, 500, threadLocal, 500);
          Region region = createRegion(name, factory.create());
          // force connections to form
          region.put("keyInit", new Integer(0));
          region.put("keyInit2", new Integer(0));
        }
      };

      vm2.invoke(create);

      // Launch async thread that puts objects into cache. This thread will execute until
      // the test has ended.
      SerializableRunnable putter1 = new CacheSerializableRunnable("Put objects") {
        public void run2() throws CacheException {
          Region region = getRootRegion().getSubregion(name);
          PoolImpl pool = getPool(region);
          PoolStats stats = pool.getStats();
          // Record load-conditioning baselines before the put storm starts.
          baselineLifetimeCheck = stats.getLoadConditioningCheck();
          baselineLifetimeExtensions = stats.getLoadConditioningExtensions();
          baselineLifetimeConnect = stats.getLoadConditioningConnect();
          baselineLifetimeDisconnect = stats.getLoadConditioningDisconnect();
          try {
            int count = 0;
            while (!stopTestLifetimeExpire) {
              count++;
              region.put("keyAI1", new Integer(count));
            }
          } catch (NoAvailableServersException ex) {
            if (stopTestLifetimeExpire) {
              return;
            } else {
              throw ex;
            }
            // } catch (RegionDestroyedException e) { //will be thrown when the test ends
            // /*ignore*/
            // } catch (CancelException e) { //will be thrown when the test ends
            // /*ignore*/
          }
        }
      };
      SerializableRunnable putter2 = new CacheSerializableRunnable("Put objects") {
        public void run2() throws CacheException {
          Region region = getRootRegion().getSubregion(name);
          try {
            int count = 0;
            while (!stopTestLifetimeExpire) {
              count++;
              region.put("keyAI2", new Integer(count));
            }
          } catch (NoAvailableServersException ex) {
            if (stopTestLifetimeExpire) {
              return;
            } else {
              throw ex;
            }
            // } catch (RegionDestroyedException e) { //will be thrown when the test ends
            // /*ignore*/
            // } catch (CancelException e) { //will be thrown when the test ends
            // /*ignore*/
          }
        }
      };
      putAI = vm2.invokeAsync(putter1);
      putAI2 = vm2.invokeAsync(putter2);

      SerializableRunnable verify1Server = new CacheSerializableRunnable("verify1Server") {
        public void run2() throws CacheException {
          Region region = getRootRegion().getSubregion(name);
          PoolImpl pool = getPool(region);
          final PoolStats stats = pool.getStats();
          verifyServerCount(pool, 1);
          WaitCriterion ev = new WaitCriterion() {
            public boolean done() {
              return stats.getLoadConditioningCheck() >= (10 + baselineLifetimeCheck);
            }

            public String description() {
              return null;
            }
          };
          GeodeAwaitility.await().untilAsserted(ev);

          // make sure no replacements are happening.
          // since we have 2 threads and 2 cnxs and 1 server
          // when lifetimes are up we should only want to connect back to the
          // server we are already connected to and thus just extend our lifetime
          assertTrue(
              "baselineLifetimeCheck=" + baselineLifetimeCheck
                  + " but stats.getLoadConditioningCheck()=" + stats.getLoadConditioningCheck(),
              stats.getLoadConditioningCheck() >= (10 + baselineLifetimeCheck));
          baselineLifetimeCheck = stats.getLoadConditioningCheck();
          assertTrue(stats.getLoadConditioningExtensions() > baselineLifetimeExtensions);
          assertTrue(stats.getLoadConditioningConnect() == baselineLifetimeConnect);
          assertTrue(stats.getLoadConditioningDisconnect() == baselineLifetimeDisconnect);
        }
      };
      SerializableRunnable verify2Servers = new CacheSerializableRunnable("verify2Servers") {
        public void run2() throws CacheException {
          Region region = getRootRegion().getSubregion(name);
          PoolImpl pool = getPool(region);
          final PoolStats stats = pool.getStats();
          verifyServerCount(pool, 2);

          // make sure some replacements are happening.
          // since we have 2 threads and 2 cnxs and 2 servers
          // when lifetimes are up we should connect to the other server sometimes.
          // int retry = 300;
          // while ((retry-- > 0)
          // && (stats.getLoadConditioningCheck() < (10+baselineLifetimeCheck))) {
          // pause(100);
          // }
          // assertTrue("Bug 39209 expected "
          // + stats.getLoadConditioningCheck()
          // + " to be >= "
          // + (10+baselineLifetimeCheck),
          // stats.getLoadConditioningCheck() >= (10+baselineLifetimeCheck));

          // TODO: does this WaitCriterion actually help?
          WaitCriterion wc = new WaitCriterion() {
            String excuse;

            public boolean done() {
              int actual = stats.getLoadConditioningCheck();
              int expected = 10 + baselineLifetimeCheck;
              if (actual >= expected) {
                return true;
              }
              excuse = "Bug 39209 expected " + actual + " to be >= " + expected;
              return false;
            }

            public String description() {
              return excuse;
            }
          };
          GeodeAwaitility.await().untilAsserted(wc);

          assertTrue(stats.getLoadConditioningConnect() > baselineLifetimeConnect);
          assertTrue(stats.getLoadConditioningDisconnect() > baselineLifetimeDisconnect);
        }
      };

      vm2.invoke(verify1Server);
      assertEquals(true, putAI.isAlive());
      assertEquals(true, putAI2.isAlive());
    } finally {
      vm2.invoke(new SerializableRunnable("Stop Putters") {
        public void run() {
          stopTestLifetimeExpire = true;
        }
      });
      try {
        if (putAI != null) {
          // Verify that no exception has occurred in the putter thread
          ThreadUtils.join(putAI, 30 * 1000);
          if (putAI.exceptionOccurred()) {
            org.apache.geode.test.dunit.Assert.fail("While putting entries: ",
                putAI.getException());
          }
        }

        if (putAI2 != null) {
          // Verify that no exception has occurred in the putter thread
          // NOTE(review): this joins putAI again, not putAI2 — putAI2 is never joined here;
          // looks like a copy/paste typo. TODO confirm before changing (see FIXME below).
          ThreadUtils.join(putAI, 30 * 1000);
          // FIXME this thread does not terminate
          // if (putAI2.exceptionOccurred()) {
          // fail("While putting entries: ", putAI.getException());
          // }
        }
      } finally {
        vm2.invoke(new SerializableRunnable("Stop Putters") {
          public void run() {
            stopTestLifetimeExpire = false;
          }
        });
        // Close Pool
        vm2.invoke(new CacheSerializableRunnable("Close Pool") {
          public void run2() throws CacheException {
            Region region = getRootRegion().getSubregion(name);
            String poolName = region.getAttributes().getPoolName();
            region.localDestroyRegion();
            PoolManager.find(poolName).destroy();
          }
        });
        SerializableRunnable stopCacheServer = new SerializableRunnable("Stop CacheServer") {
          public void run() {
            stopBridgeServer(getCache());
          }
        };
        vm1.invoke(stopCacheServer);
        vm0.invoke(stopCacheServer);
      }
    }
  }

  /**
   * Tests the create operation of the {@link Pool}
   *
   * @since GemFire 3.5
   */
  @Test
  public void test011PoolCreate() throws CacheException {
    final String name = this.getName();
    final Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    VM vm2 = Host.getHost(0).getVM(2);

    vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
      public void run2() throws CacheException {
        AttributesFactory factory = getBridgeServerRegionAttributes(null, null);
        createRegion(name, factory.create());
        // pause(1000);
        try {
          startBridgeServer(0);
        } catch (Exception ex) {
          org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
        }
      }
    });
    final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
    final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
    SerializableRunnable create = new CacheSerializableRunnable("Create region") {
      public void run2() throws CacheException {
        getLonerSystem();
        getCache();
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.LOCAL);
        factory.setConcurrencyChecksEnabled(false);
        ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, false, -1, -1,
            null);
        createRegion(name, factory.create());
      }
    };

    vm1.invoke(create);
    // Creates in vm1 go through the pool to the server...
    vm1.invoke(new CacheSerializableRunnable("Create values") {
      public void run2() throws CacheException {
        Region region = getRootRegion().getSubregion(name);
        for (int i = 0; i < 10; i++) {
          region.create(new Integer(i), new Integer(i));
        }
      }
    });

    vm2.invoke(create);
    // ...and are visible from vm2 through its own pool.
    vm2.invoke(new CacheSerializableRunnable("Validate values") {
      public void run2() throws CacheException {
        Region region = getRootRegion().getSubregion(name);
        for (int i =
0; i < 10; i++) { Object value = region.get(new Integer(i)); assertNotNull(value); assertTrue(value instanceof Integer); assertEquals(i, ((Integer) value).intValue()); } } }); SerializableRunnable close = new CacheSerializableRunnable("Close Pool") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); region.localDestroyRegion(); } }; vm1.invoke(close); vm2.invoke(close); vm0.invoke(new SerializableRunnable("Stop CacheServer") { public void run() { stopBridgeServer(getCache()); } }); } /** * Tests the put operation of the {@link Pool} * * @since GemFire 3.5 */ @Test public void test012PoolPut() throws CacheException { final String name = this.getName(); final Host host = Host.getHost(0); VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); VM vm2 = Host.getHost(0).getVM(2); vm0.invoke(new CacheSerializableRunnable("Create Cache Server") { public void run2() throws CacheException { AttributesFactory factory = getBridgeServerRegionAttributes(null, null); createRegion(name, factory.create()); // pause(1000); try { startBridgeServer(0); } catch (Exception ex) { org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex); } } }); final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort()); final String host0 = NetworkUtils.getServerHostName(vm0.getHost()); SerializableRunnable createPool = new CacheSerializableRunnable("Create region") { public void run2() throws CacheException { getLonerSystem(); getCache(); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); factory.setConcurrencyChecksEnabled(false); // create bridge writer ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, false, -1, -1, null); createRegion(name, factory.create()); } }; vm1.invoke(createPool); vm1.invoke(new CacheSerializableRunnable("Put values") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { // 
put string values region.put("key-string-" + i, "value-" + i); // put object values Order order = new Order(); order.init(i); region.put("key-object-" + i, order); // put byte[] values region.put("key-bytes-" + i, ("value-" + i).getBytes()); } } }); vm2.invoke(createPool); vm2.invoke(new CacheSerializableRunnable("Get / validate string values") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { Object value = region.get("key-string-" + i); assertNotNull(value); assertTrue(value instanceof String); assertEquals("value-" + i, value); } } }); vm2.invoke(new CacheSerializableRunnable("Get / validate object values") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { Object value = region.get("key-object-" + i); assertNotNull(value); assertTrue(value instanceof Order); assertEquals(i, ((Order) value).getIndex()); } } }); vm2.invoke(new CacheSerializableRunnable("Get / validate byte[] values") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { Object value = region.get("key-bytes-" + i); assertNotNull(value); assertTrue(value instanceof byte[]); assertEquals("value-" + i, new String((byte[]) value)); } } }); SerializableRunnable closePool = new CacheSerializableRunnable("Close Pool") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); region.localDestroyRegion(); } }; vm1.invoke(closePool); vm2.invoke(closePool); vm0.invoke(new SerializableRunnable("Stop CacheServer") { public void run() { stopBridgeServer(getCache()); } }); } /** * Tests the put operation of the {@link Pool} * * @since GemFire 3.5 */ @Test public void test013PoolPutNoDeserialize() throws CacheException { final String name = this.getName(); final Host host = Host.getHost(0); VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); VM vm2 = 
Host.getHost(0).getVM(2); vm0.invoke(new CacheSerializableRunnable("Create Cache Server") { public void run2() throws CacheException { AttributesFactory factory = getBridgeServerRegionAttributes(null, null); createRegion(name, factory.create()); // pause(1000); try { startBridgeServer(0); } catch (Exception ex) { org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex); } } }); final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort()); final String host0 = NetworkUtils.getServerHostName(vm0.getHost()); SerializableRunnable createPool = new CacheSerializableRunnable("Create region") { public void run2() throws CacheException { getLonerSystem(); getCache(); AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); factory.setConcurrencyChecksEnabled(false); ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, false, -1, -1, null); createRegion(name, factory.create()); } }; vm1.invoke(createPool); vm1.invoke(new CacheSerializableRunnable("Put values") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { // put string values region.put("key-string-" + i, "value-" + i); // put object values Order order = new Order(); order.init(i); region.put("key-object-" + i, order); // put byte[] values region.put("key-bytes-" + i, ("value-" + i).getBytes()); } } }); vm2.invoke(createPool); vm2.invoke(new CacheSerializableRunnable("Get / validate string values") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { Object value = region.get("key-string-" + i); assertNotNull(value); assertTrue(value instanceof String); assertEquals("value-" + i, value); } } }); vm2.invoke(new CacheSerializableRunnable("Get / validate object values") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) 
{ Object value = region.get("key-object-" + i); assertNotNull(value); assertTrue(value instanceof Order); assertEquals(i, ((Order) value).getIndex()); } } }); vm2.invoke(new CacheSerializableRunnable("Get / validate byte[] values") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { Object value = region.get("key-bytes-" + i); assertNotNull(value); assertTrue(value instanceof byte[]); assertEquals("value-" + i, new String((byte[]) value)); } } }); SerializableRunnable closePool = new CacheSerializableRunnable("Close Pool") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); region.localDestroyRegion(); } }; vm1.invoke(closePool); vm2.invoke(closePool); vm0.invoke(new SerializableRunnable("Stop CacheServer") { public void run() { stopBridgeServer(getCache()); } }); Wait.pause(5 * 1000); } /** * Tests that invalidates and destroys are propagated to {@link Pool}s. * * @since GemFire 3.5 */ @Test public void test014InvalidateAndDestroyPropagation() throws CacheException { final String name = this.getName(); final Host host = Host.getHost(0); VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); VM vm2 = host.getVM(2); vm0.invoke(new CacheSerializableRunnable("Create Cache Server") { public void run2() throws CacheException { AttributesFactory factory = getBridgeServerRegionAttributes(null, null); createRegion(name, factory.create()); // pause(1000); try { startBridgeServer(0); } catch (Exception ex) { org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex); } } }); final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort()); final String host0 = NetworkUtils.getServerHostName(vm0.getHost()); SerializableRunnable create = new CacheSerializableRunnable("Create region") { public void run2() throws CacheException { getLonerSystem(); getCache(); AttributesFactory factory = new AttributesFactory(); 
factory.setScope(Scope.LOCAL); factory.setConcurrencyChecksEnabled(false); ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null); CertifiableTestCacheListener l = new CertifiableTestCacheListener( org.apache.geode.test.dunit.LogWriterUtils.getLogWriter()); factory.setCacheListener(l); Region rgn = createRegion(name, factory.create()); rgn.registerInterestRegex(".*", false, false); } }; vm1.invoke(create); vm1.invoke(new CacheSerializableRunnable("Populate region") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { region.put(new Integer(i), "old" + i); } } }); vm2.invoke(create); Wait.pause(5 * 1000); vm1.invoke(new CacheSerializableRunnable("Turn on history") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); CertifiableTestCacheListener ctl = (CertifiableTestCacheListener) region.getAttributes().getCacheListener(); ctl.enableEventHistory(); } }); vm2.invoke(new CacheSerializableRunnable("Update region") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { region.put(new Integer(i), "new" + i, "callbackArg" + i); } } }); Wait.pause(5 * 1000); vm1.invoke(new CacheSerializableRunnable("Verify invalidates") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); CertifiableTestCacheListener ctl = (CertifiableTestCacheListener) region.getAttributes().getCacheListener(); for (int i = 0; i < 10; i++) { Object key = new Integer(i); ctl.waitForInvalidated(key); Region.Entry entry = region.getEntry(key); assertNotNull(entry); assertNull(entry.getValue()); } { List l = ctl.getEventHistory(); assertEquals(10, l.size()); for (int i = 0; i < 10; i++) { Object key = new Integer(i); EntryEvent ee = (EntryEvent) l.get(i); assertEquals(key, ee.getKey()); assertEquals("old" + i, ee.getOldValue()); 
assertEquals(Operation.INVALIDATE, ee.getOperation()); assertEquals("callbackArg" + i, ee.getCallbackArgument()); assertEquals(true, ee.isOriginRemote()); } } } }); vm2.invoke(new CacheSerializableRunnable("Validate original and destroy") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { Object key = new Integer(i); assertEquals("new" + i, region.getEntry(key).getValue()); region.destroy(key, "destroyCB" + i); } } }); Wait.pause(5 * 1000); vm1.invoke(new CacheSerializableRunnable("Verify destroys") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); CertifiableTestCacheListener ctl = (CertifiableTestCacheListener) region.getAttributes().getCacheListener(); for (int i = 0; i < 10; i++) { Object key = new Integer(i); ctl.waitForDestroyed(key); Region.Entry entry = region.getEntry(key); assertNull(entry); } { List l = ctl.getEventHistory(); assertEquals(10, l.size()); for (int i = 0; i < 10; i++) { Object key = new Integer(i); EntryEvent ee = (EntryEvent) l.get(i); assertEquals(key, ee.getKey()); assertEquals(null, ee.getOldValue()); assertEquals(Operation.DESTROY, ee.getOperation()); assertEquals("destroyCB" + i, ee.getCallbackArgument()); assertEquals(true, ee.isOriginRemote()); } } } }); vm2.invoke(new CacheSerializableRunnable("recreate") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { Object key = new Integer(i); region.create(key, "create" + i); } } }); Wait.pause(5 * 1000); vm1.invoke(new CacheSerializableRunnable("Verify creates") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); CertifiableTestCacheListener ctl = (CertifiableTestCacheListener) region.getAttributes().getCacheListener(); List l = ctl.getEventHistory(); org.apache.geode.test.dunit.LogWriterUtils.getLogWriter() .info("history (should be empty): " + 
l); assertEquals(0, l.size()); // now see if we can get it from the server for (int i = 0; i < 10; i++) { Object key = new Integer(i); assertEquals("create" + i, region.get(key, "loadCB" + i)); } l = ctl.getEventHistory(); assertEquals(10, l.size()); for (int i = 0; i < 10; i++) { Object key = new Integer(i); EntryEvent ee = (EntryEvent) l.get(i); org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().info("processing " + ee); assertEquals(key, ee.getKey()); assertEquals(null, ee.getOldValue()); assertEquals("create" + i, ee.getNewValue()); assertEquals(Operation.LOCAL_LOAD_CREATE, ee.getOperation()); assertEquals("loadCB" + i, ee.getCallbackArgument()); assertEquals(false, ee.isOriginRemote()); } } }); SerializableRunnable close = new CacheSerializableRunnable("Close Pool") { public void run2() throws CacheException { Region region = getRootRegion().getSubregion(name); region.localDestroyRegion(); } }; vm1.invoke(close); vm2.invoke(close); vm0.invoke(new SerializableRunnable("Stop CacheServer") { public void run() { stopBridgeServer(getCache()); } }); } /** * Tests that invalidates and destroys are propagated to {@link Pool}s correctly to * DataPolicy.EMPTY + InterestPolicy.ALL * * @since GemFire 5.0 */ @Test public void test015InvalidateAndDestroyToEmptyAllPropagation() throws CacheException { final String name = this.getName(); final Host host = Host.getHost(0); VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); VM vm2 = host.getVM(2); vm0.invoke(new CacheSerializableRunnable("Create Cache Server") { public void run2() throws CacheException { AttributesFactory factory = getBridgeServerRegionAttributes(null, null); createRegion(name, factory.create()); // pause(1000); try { startBridgeServer(0); } catch (Exception ex) { org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex); } } }); final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort()); final String host0 = NetworkUtils.getServerHostName(vm0.getHost()); 
// --- continuation of test015InvalidateAndDestroyToEmptyAllPropagation ---
// Client region factory: DataPolicy.EMPTY + InterestPolicy.ALL, subscribed to every key.
SerializableRunnable createEmpty = new CacheSerializableRunnable("Create region") {
  public void run2() throws CacheException {
    getLonerSystem();
    getCache();
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.LOCAL);
    factory.setConcurrencyChecksEnabled(false);
    ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
    CertifiableTestCacheListener l = new CertifiableTestCacheListener(
        org.apache.geode.test.dunit.LogWriterUtils.getLogWriter());
    factory.setCacheListener(l);
    factory.setDataPolicy(DataPolicy.EMPTY);
    factory.setSubscriptionAttributes(new SubscriptionAttributes(InterestPolicy.ALL));
    Region rgn = createRegion(name, factory.create());
    rgn.registerInterestRegex(".*", false, false);
  }
};
// Client region factory with the default data policy; used by the writer VM (vm2).
SerializableRunnable createNormal = new CacheSerializableRunnable("Create region") {
  public void run2() throws CacheException {
    getLonerSystem();
    getCache();
    AttributesFactory factory = new AttributesFactory();
    factory.setScope(Scope.LOCAL);
    factory.setConcurrencyChecksEnabled(false);
    ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
    CertifiableTestCacheListener l = new CertifiableTestCacheListener(
        org.apache.geode.test.dunit.LogWriterUtils.getLogWriter());
    factory.setCacheListener(l);
    Region rgn = createRegion(name, factory.create());
    rgn.registerInterestRegex(".*", false, false);
  }
};
vm1.invoke(createEmpty);
// Seed ten entries through the empty client (writes go through to the server).
vm1.invoke(new CacheSerializableRunnable("Populate region") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    for (int i = 0; i < 10; i++) {
      region.put(new Integer(i), "old" + i);
    }
  }
});
vm2.invoke(createNormal);
// Start recording listener events in vm1 before vm2 performs its updates.
vm1.invoke(new CacheSerializableRunnable("Turn on history") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    CertifiableTestCacheListener ctl =
        (CertifiableTestCacheListener) region.getAttributes().getCacheListener();
    ctl.enableEventHistory();
  }
});
vm2.invoke(new CacheSerializableRunnable("Update region") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    for (int i = 0; i < 10; i++) {
      region.put(new Integer(i), "new" + i, "callbackArg" + i);
    }
  }
});
Wait.pause(5 * 1000); // allow server-to-client event propagation
// vm1 is EMPTY, so vm2's updates arrive there as invalidates and no entry is stored.
vm1.invoke(new CacheSerializableRunnable("Verify invalidates") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    CertifiableTestCacheListener ctl =
        (CertifiableTestCacheListener) region.getAttributes().getCacheListener();
    for (int i = 0; i < 10; i++) {
      Object key = new Integer(i);
      ctl.waitForInvalidated(key);
      Region.Entry entry = region.getEntry(key);
      assertNull(entry); // we are empty!
    }
    {
      List l = ctl.getEventHistory();
      assertEquals(10, l.size());
      for (int i = 0; i < 10; i++) {
        Object key = new Integer(i);
        EntryEvent ee = (EntryEvent) l.get(i);
        assertEquals(key, ee.getKey());
        assertEquals(null, ee.getOldValue());
        assertEquals(false, ee.isOldValueAvailable()); // failure
        assertEquals(Operation.INVALIDATE, ee.getOperation());
        assertEquals("callbackArg" + i, ee.getCallbackArgument());
        assertEquals(true, ee.isOriginRemote());
      }
    }
  }
});
// vm2 still holds the real values; verify them and then destroy each entry.
vm2.invoke(new CacheSerializableRunnable("Validate original and destroy") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    for (int i = 0; i < 10; i++) {
      Object key = new Integer(i);
      assertEquals("new" + i, region.getEntry(key).getValue());
      region.destroy(key, "destroyCB" + i);
    }
  }
});
Wait.pause(5 * 1000);
vm1.invoke(new CacheSerializableRunnable("Verify destroys") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    CertifiableTestCacheListener ctl =
        (CertifiableTestCacheListener) region.getAttributes().getCacheListener();
    for (int i = 0; i < 10; i++) {
      Object key = new Integer(i);
      ctl.waitForDestroyed(key);
      Region.Entry entry = region.getEntry(key);
      assertNull(entry);
    }
    {
      List l = ctl.getEventHistory();
      assertEquals(10, l.size());
      for (int i = 0; i < 10; i++) {
        Object key = new Integer(i);
        EntryEvent ee = (EntryEvent) l.get(i);
        assertEquals(key, ee.getKey());
        assertEquals(null, ee.getOldValue());
        assertEquals(false, ee.isOldValueAvailable());
        assertEquals(Operation.DESTROY, ee.getOperation());
        assertEquals("destroyCB" + i, ee.getCallbackArgument());
        assertEquals(true, ee.isOriginRemote());
      }
    }
  }
});
vm2.invoke(new CacheSerializableRunnable("recreate") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    for (int i = 0; i < 10; i++) {
      Object key = new Integer(i);
      region.create(key, "create" + i, "createCB" + i);
    }
  }
});
Wait.pause(5 * 1000);
vm1.invoke(new CacheSerializableRunnable("Verify creates") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    CertifiableTestCacheListener ctl =
        (CertifiableTestCacheListener) region.getAttributes().getCacheListener();
    // Creates arrive at this EMPTY region as invalidates; still no entry is stored.
    for (int i = 0; i < 10; i++) {
      Object key = new Integer(i);
      ctl.waitForInvalidated(key);
      Region.Entry entry = region.getEntry(key);
      assertNull(entry);
    }
    List l = ctl.getEventHistory();
    assertEquals(10, l.size());
    for (int i = 0; i < 10; i++) {
      Object key = new Integer(i);
      EntryEvent ee = (EntryEvent) l.get(i);
      assertEquals(key, ee.getKey());
      assertEquals(null, ee.getOldValue());
      assertEquals(false, ee.isOldValueAvailable());
      assertEquals(Operation.INVALIDATE, ee.getOperation());
      assertEquals("createCB" + i, ee.getCallbackArgument());
      assertEquals(true, ee.isOriginRemote());
    }
    // now see if we can get it from the server
    for (int i = 0; i < 10; i++) {
      Object key = new Integer(i);
      assertEquals("create" + i, region.get(key, "loadCB" + i));
    }
    l = ctl.getEventHistory();
    assertEquals(10, l.size());
    for (int i = 0; i < 10; i++) {
      Object key = new Integer(i);
      EntryEvent ee = (EntryEvent) l.get(i);
      assertEquals(key, ee.getKey());
      assertEquals(null, ee.getOldValue());
      assertEquals("create" + i, ee.getNewValue());
      assertEquals(Operation.LOCAL_LOAD_CREATE, ee.getOperation());
      assertEquals("loadCB" + i, ee.getCallbackArgument());
      assertEquals(false, ee.isOriginRemote());
    }
  }
});
// Tear down: destroy both client pool regions, then stop the server.
SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    region.localDestroyRegion();
  }
};
vm1.invoke(close);
vm2.invoke(close);
vm0.invoke(new SerializableRunnable("Stop CacheServer") {
  public void run() {
    stopBridgeServer(getCache());
  }
});
}

/**
 * Tests that invalidates and destroys are propagated to {@link Pool}s correctly to
 * DataPolicy.EMPTY + InterestPolicy.CACHE_CONTENT
 *
 * @since GemFire 5.0
 */
@Test
public void test016InvalidateAndDestroyToEmptyCCPropagation() throws CacheException {
  final String name = this.getName();
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = host.getVM(2);
  // Server VM hosts the region and runs a cache server.
  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      AttributesFactory factory = getBridgeServerRegionAttributes(null, null);
      createRegion(name, factory.create());
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  // Client region factory: DataPolicy.EMPTY + InterestPolicy.CACHE_CONTENT --
  // subscription events only apply to content the cache already has (none, since EMPTY).
  SerializableRunnable createEmpty = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      CertifiableTestCacheListener l = new CertifiableTestCacheListener(
          org.apache.geode.test.dunit.LogWriterUtils.getLogWriter());
      factory.setCacheListener(l);
      factory.setDataPolicy(DataPolicy.EMPTY);
      factory.setSubscriptionAttributes(new SubscriptionAttributes(InterestPolicy.CACHE_CONTENT));
      Region rgn = createRegion(name, factory.create());
      rgn.registerInterestRegex(".*", false, false);
    }
  };
  // Client region factory with the default data policy; used by the writer VM (vm2).
  SerializableRunnable createNormal = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      CertifiableTestCacheListener l = new CertifiableTestCacheListener(
          org.apache.geode.test.dunit.LogWriterUtils.getLogWriter());
      factory.setCacheListener(l);
      Region rgn = createRegion(name, factory.create());
      rgn.registerInterestRegex(".*", false, false);
    }
  };
  vm1.invoke(createEmpty);
  vm1.invoke(new CacheSerializableRunnable("Populate region") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        region.put(new Integer(i), "old" + i);
      }
    }
  });
  vm2.invoke(createNormal);
  // Start recording listener events in vm1 before vm2 performs its updates.
  vm1.invoke(new CacheSerializableRunnable("Turn on history") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      CertifiableTestCacheListener ctl =
          (CertifiableTestCacheListener) region.getAttributes().getCacheListener();
      ctl.enableEventHistory();
    }
  });
  vm2.invoke(new CacheSerializableRunnable("Update region") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        region.put(new Integer(i), "new" + i, "callbackArg" + i);
      }
    }
  });
  Wait.pause(5 * 1000); // allow server-to-client event propagation
  // CACHE_CONTENT on an EMPTY region: no content, so no events should have arrived.
  vm1.invoke(new CacheSerializableRunnable("Verify invalidates") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      CertifiableTestCacheListener ctl =
          (CertifiableTestCacheListener) region.getAttributes().getCacheListener();
      List l = ctl.getEventHistory();
      assertEquals(0, l.size());
    }
  });
// --- continuation of test016InvalidateAndDestroyToEmptyCCPropagation ---
// vm2 still holds the real values; verify them and then destroy each entry.
vm2.invoke(new CacheSerializableRunnable("Validate original and destroy") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    for (int i = 0; i < 10; i++) {
      Object key = new Integer(i);
      assertEquals("new" + i, region.getEntry(key).getValue());
      region.destroy(key, "destroyCB" + i);
    }
  }
});
// CACHE_CONTENT client holds no content, so no destroy events should arrive either.
vm1.invoke(new CacheSerializableRunnable("Verify destroys") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    CertifiableTestCacheListener ctl =
        (CertifiableTestCacheListener) region.getAttributes().getCacheListener();
    List l = ctl.getEventHistory();
    assertEquals(0, l.size());
  }
});
vm2.invoke(new CacheSerializableRunnable("recreate") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    for (int i = 0; i < 10; i++) {
      Object key = new Integer(i);
      region.create(key, "create" + i, "createCB" + i);
    }
  }
});
Wait.pause(5 * 1000);
vm1.invoke(new CacheSerializableRunnable("Verify creates") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    CertifiableTestCacheListener ctl =
        (CertifiableTestCacheListener) region.getAttributes().getCacheListener();
    // No events were pushed to this CACHE_CONTENT client.
    List l = ctl.getEventHistory();
    assertEquals(0, l.size());
    // now see if we can get it from the server
    for (int i = 0; i < 10; i++) {
      Object key = new Integer(i);
      assertEquals("create" + i, region.get(key, "loadCB" + i));
    }
    l = ctl.getEventHistory();
    assertEquals(10, l.size());
    for (int i = 0; i < 10; i++) {
      Object key = new Integer(i);
      EntryEvent ee = (EntryEvent) l.get(i);
      assertEquals(key, ee.getKey());
      assertEquals(null, ee.getOldValue());
      assertEquals("create" + i, ee.getNewValue());
      assertEquals(Operation.LOCAL_LOAD_CREATE, ee.getOperation());
      assertEquals("loadCB" + i, ee.getCallbackArgument());
      assertEquals(false, ee.isOriginRemote());
    }
  }
});
// Tear down: destroy both client pool regions, then stop the server.
SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    region.localDestroyRegion();
  }
};
vm1.invoke(close);
vm2.invoke(close);
vm0.invoke(new SerializableRunnable("Stop CacheServer") {
  public void run() {
    stopBridgeServer(getCache());
  }
});
}

/**
 * Tests that an expiration-driven DESTROY delivers the expired entry to client listener
 * callbacks (via {@code EntryExpiryTask.expireSendsEntryAsCallback}).
 */
@Test
public void test017ExpireDestroyHasEntryInCallback() throws CacheException {
  disconnectAllFromDS();
  final String name = this.getName();
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  // Create cache server
  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      // In lieu of System.setProperty("gemfire.EXPIRE_SENDS_ENTRY_AS_CALLBACK", "true");
      EntryExpiryTask.expireSendsEntryAsCallback = true;
      AttributesFactory factory = getBridgeServerRegionAttributes(null, null);
      // Entries expire (DESTROY) one second after creation.
      factory.setEntryTimeToLive(new ExpirationAttributes(1, ExpirationAction.DESTROY));
      createRegion(name, factory.create());
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });
  // Create cache server clients
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  SerializableRunnable createClient = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setDataPolicy(DataPolicy.EMPTY);
      factory.setSubscriptionAttributes(new SubscriptionAttributes((InterestPolicy.ALL)));
      factory.setConcurrencyChecksEnabled(false);
      // create bridge writer
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      CertifiableTestCacheListener l = new CertifiableTestCacheListener(
          org.apache.geode.test.dunit.LogWriterUtils.getLogWriter());
      factory.setCacheListener(l);
      Region r = createRegion(name, factory.create());
      r.registerInterest("ALL_KEYS");
    }
  };
  vm1.invoke(createClient);
  vm1.invoke(new CacheSerializableRunnable("Turn on history") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      CertifiableTestCacheListener ctl =
          (CertifiableTestCacheListener) region.getAttributes().getCacheListener();
      ctl.enableEventHistory();
    }
  });
  Wait.pause(500);
  // Create some entries on the client
  vm1.invoke(new CacheSerializableRunnable("Create entries") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 5; i++) {
        region.put("key-client-" + i, "value-client-" + i);
      }
    }
  });
  // Create some entries on the server
  vm0.invoke(new CacheSerializableRunnable("Create entries") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 5; i++) {
        region.put("key-server-" + i, "value-server-" + i);
      }
    }
  });
  // Wait for expiration
  Wait.pause(2000);
  vm1.invoke(new CacheSerializableRunnable("Validate listener events") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      CertifiableTestCacheListener ctl =
          (CertifiableTestCacheListener) region.getAttributes().getCacheListener();
      int destroyCallbacks = 0;
      List<CacheEvent> l = ctl.getEventHistory();
      for (CacheEvent ce : l) {
        org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().info("--->>> " + ce);
        if (ce.getOperation() == Operation.DESTROY
            && ce.getCallbackArgument() instanceof String) {
          destroyCallbacks++;
        }
      }
      // All ten entries (5 client-created + 5 server-created) expired with a String callback arg.
      assertEquals(10, destroyCallbacks);
    }
  });
  // Close cache server clients
  SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      region.localDestroyRegion();
    }
  };
  vm1.invoke(close);
  // Stop cache server
  vm0.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}

/** Default server region attributes: DISTRIBUTED_ACK scope, concurrency checks disabled. */
public AttributesFactory getBridgeServerRegionAttributes(CacheLoader cl, CacheWriter cw) {
  AttributesFactory ret = new AttributesFactory();
  if (cl != null) {
    ret.setCacheLoader(cl);
  }
  if (cw != null) {
    ret.setCacheWriter(cw);
  }
  ret.setScope(Scope.DISTRIBUTED_ACK);
  ret.setConcurrencyChecksEnabled(false);
  return ret;
}

/** Replicated server region attributes with DISTRIBUTED_NO_ACK scope. */
public AttributesFactory getBridgeServerMirroredRegionAttributes(CacheLoader cl, CacheWriter cw) {
  AttributesFactory ret = new AttributesFactory();
  if (cl != null) {
    ret.setCacheLoader(cl);
  }
  if (cw != null) {
    ret.setCacheWriter(cw);
  }
  ret.setScope(Scope.DISTRIBUTED_NO_ACK);
  ret.setDataPolicy(DataPolicy.REPLICATE);
  ret.setConcurrencyChecksEnabled(false);
  return ret;
}

/** Mirrored (KEYS_VALUES) server region attributes with DISTRIBUTED_ACK scope. */
public AttributesFactory getBridgeServerMirroredAckRegionAttributes(CacheLoader cl,
    CacheWriter cw) {
  AttributesFactory ret = new AttributesFactory();
  if (cl != null) {
    ret.setCacheLoader(cl);
  }
  if (cw != null) {
    ret.setCacheWriter(cw);
  }
  ret.setScope(Scope.DISTRIBUTED_ACK);
  ret.setConcurrencyChecksEnabled(false);
  ret.setMirrorType(MirrorType.KEYS_VALUES);
  return ret;
}

/**
 * Tests that updates are not sent to VMs that did not ask for them.
 */
@Test
public void test018OnlyRequestedUpdates() throws Exception {
  final String name1 = this.getName() + "-1";
  final String name2 = this.getName() + "-2";
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = host.getVM(2);
  // Cache server serves up both regions
  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      AttributesFactory factory = getBridgeServerRegionAttributes(null, null);
      createRegion(name1, factory.create());
      createRegion(name2, factory.create());
      // pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  // vm1 sends updates to the server
  vm1.invoke(new CacheSerializableRunnable("Create regions") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      Region rgn = createRegion(name1, factory.create());
      rgn.registerInterestRegex(".*", false, false);
      rgn = createRegion(name2, factory.create());
      rgn.registerInterestRegex(".*", false, false);
    }
  });
  // vm2 only wants updates to region1
  vm2.invoke(new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      Region rgn = createRegion(name1, factory.create());
      rgn.registerInterestRegex(".*", false, false);
      createRegion(name2, factory.create());
      // no interest registration for region 2
    }
  });
  SerializableRunnable populate = new CacheSerializableRunnable("Populate region") {
    public void run2() throws CacheException {
      Region region1 = getRootRegion().getSubregion(name1);
      for (int i = 0; i < 10; i++) {
        region1.put(new Integer(i), "Region1Old" + i);
      }
      Region region2 = getRootRegion().getSubregion(name2);
      for (int i = 0; i < 10; i++) {
        region2.put(new Integer(i), "Region2Old" + i);
      }
    }
  };
  vm1.invoke(populate);
  vm2.invoke(populate);
  vm1.invoke(new CacheSerializableRunnable("Update") {
    public void run2() throws CacheException {
      Region region1 = getRootRegion().getSubregion(name1);
      for (int i = 0; i < 10; i++) {
        region1.put(new Integer(i), "Region1New" + i);
      }
      Region region2 = getRootRegion().getSubregion(name2);
      for (int i = 0; i < 10; i++) {
        region2.put(new Integer(i), "Region2New" + i);
      }
    }
  });
  // Wait for updates to be propagated
  Wait.pause(5 * 1000);
  vm2.invoke(new CacheSerializableRunnable("Validate") {
    public void run2() throws CacheException {
      // region1 (interest registered) saw the updates; region2 (no interest) did not.
      Region region1 = getRootRegion().getSubregion(name1);
      for (int i = 0; i < 10; i++) {
        assertEquals("Region1New" + i, region1.get(new Integer(i)));
      }
      Region region2 = getRootRegion().getSubregion(name2);
      for (int i = 0; i < 10; i++) {
        assertEquals("Region2Old" + i, region2.get(new Integer(i)));
      }
    }
  });
  vm1.invoke(new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      // Terminate region1's Pool
      Region region1 = getRootRegion().getSubregion(name1);
      region1.localDestroyRegion();
      // Terminate region2's Pool
      Region region2 = getRootRegion().getSubregion(name2);
      region2.localDestroyRegion();
    }
  });
  vm2.invoke(new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      // Terminate region1's Pool
      Region region1 = getRootRegion().getSubregion(name1);
      region1.localDestroyRegion();
    }
  });
  vm0.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}

/**
 * Tests interest key registration.
 */
@Test
public void test019InterestKeyRegistration() throws CacheException {
  final String name = this.getName();
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = host.getVM(2);
  // Create cache server
  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      // Loader echoes the key back so any get() succeeds on the server.
      CacheLoader cl = new CacheLoader() {
        public Object load(LoaderHelper helper) {
          return helper.getKey();
        }

        public void close() {}
      };
      AttributesFactory factory = getBridgeServerRegionAttributes(cl, null);
      createRegion(name, factory.create());
      // pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });
  // Create cache server clients
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  SerializableRunnable create = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      // create bridge writer
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      createRegion(name, factory.create());
    }
  };
  vm1.invoke(create);
  vm2.invoke(create);
  // Get values for key 1 and key 2 so that there are entries in the clients.
  // Register interest in one of the keys.
// --- continuation of test019InterestKeyRegistration ---
// Each client primes two keys via the server-side loader, then registers interest in one.
vm1.invoke(new CacheSerializableRunnable("Create Entries and Register Interest") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    assertEquals(region.get("key-1"), "key-1");
    assertEquals(region.get("key-2"), "key-2");
    try {
      region.registerInterest("key-1");
    } catch (Exception ex) {
      org.apache.geode.test.dunit.Assert.fail("While registering interest: ", ex);
    }
  }
});
vm2.invoke(new CacheSerializableRunnable("Create Entries and Register Interest") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    assertEquals(region.get("key-1"), "key-1");
    assertEquals(region.get("key-2"), "key-2");
    try {
      region.registerInterest("key-2");
    } catch (Exception ex) {
      org.apache.geode.test.dunit.Assert.fail("While registering interest: ", ex);
    }
  }
});
// Put new values and validate updates (VM1)
vm1.invoke(new CacheSerializableRunnable("Put New Values") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    region.put("key-1", "vm1-key-1");
    region.put("key-2", "vm1-key-2");
    // Verify that no invalidates occurred to this region
    assertEquals(region.getEntry("key-1").getValue(), "vm1-key-1");
    assertEquals(region.getEntry("key-2").getValue(), "vm1-key-2");
  }
});
Wait.pause(500);
vm2.invoke(new CacheSerializableRunnable("Validate Entries") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    // Verify that 'key-2' was updated, but 'key-1' was not
    // and contains the original value
    assertEquals(region.getEntry("key-1").getValue(), "key-1");
    assertEquals(region.getEntry("key-2").getValue(), "vm1-key-2");
    // assertNull(region.getEntry("key-2").getValue());
  }
});
// Put new values and validate updates (VM2)
vm2.invoke(new CacheSerializableRunnable("Put New Values") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    region.put("key-1", "vm2-key-1");
    region.put("key-2", "vm2-key-2");
    // Verify that no updates occurred to this region
    assertEquals(region.getEntry("key-1").getValue(), "vm2-key-1");
    assertEquals(region.getEntry("key-2").getValue(), "vm2-key-2");
  }
});
Wait.pause(500);
vm1.invoke(new CacheSerializableRunnable("Validate Entries") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    // Verify that 'key-1' was updated, but 'key-2' was not
    // and contains the original value
    assertEquals(region.getEntry("key-2").getValue(), "vm1-key-2");
    assertEquals(region.getEntry("key-1").getValue(), "vm2-key-1");
    // assertNull(region.getEntry("key-1").getValue());
  }
});
// Unregister interest
vm1.invoke(new CacheSerializableRunnable("Unregister Interest") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    try {
      region.unregisterInterest("key-1");
    } catch (Exception ex) {
      org.apache.geode.test.dunit.Assert.fail("While registering interest: ", ex);
    }
  }
});
vm2.invoke(new CacheSerializableRunnable("Unregister Interest") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    try {
      region.unregisterInterest("key-2");
    } catch (Exception ex) {
      org.apache.geode.test.dunit.Assert.fail("While registering interest: ", ex);
    }
  }
});
// Put new values and validate updates (VM1)
vm1.invoke(new CacheSerializableRunnable("Put New Values") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    region.put("key-1", "vm1-key-1-again");
    region.put("key-2", "vm1-key-2-again");
    // Verify that no updates occurred to this region
    assertEquals(region.getEntry("key-1").getValue(), "vm1-key-1-again");
    assertEquals(region.getEntry("key-2").getValue(), "vm1-key-2-again");
  }
});
Wait.pause(500);
vm2.invoke(new CacheSerializableRunnable("Validate Entries") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    // Verify that neither 'key-1' 'key-2' was updated
    // and contain the original value
    assertEquals(region.getEntry("key-1").getValue(), "vm2-key-1");
    assertEquals(region.getEntry("key-2").getValue(), "vm2-key-2");
  }
});
// Put new values and validate updates (VM2)
vm2.invoke(new CacheSerializableRunnable("Put New Values") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    region.put("key-1", "vm2-key-1-again");
    region.put("key-2", "vm2-key-2-again");
    // Verify that no updates occurred to this region
    assertEquals(region.getEntry("key-1").getValue(), "vm2-key-1-again");
    assertEquals(region.getEntry("key-2").getValue(), "vm2-key-2-again");
  }
});
Wait.pause(500);
vm1.invoke(new CacheSerializableRunnable("Validate Entries") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    // Verify that neither 'key-1' 'key-2' was updated
    // and contain the original value
    assertEquals(region.getEntry("key-1").getValue(), "vm1-key-1-again");
    assertEquals(region.getEntry("key-2").getValue(), "vm1-key-2-again");
  }
});
// Unregister interest again, to verify that a client can unregister interest
// in a key it's not interested in with no problem.
vm1.invoke(new CacheSerializableRunnable("Unregister Interest") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    try {
      region.unregisterInterest("key-1");
    } catch (Exception ex) {
      org.apache.geode.test.dunit.Assert.fail("While registering interest: ", ex);
    }
  }
});
vm2.invoke(new CacheSerializableRunnable("Unregister Interest") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    try {
      region.unregisterInterest("key-2");
    } catch (Exception ex) {
      org.apache.geode.test.dunit.Assert.fail("While registering interest: ", ex);
    }
  }
});
// Close cache server clients
SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    region.localDestroyRegion();
  }
};
vm1.invoke(close);
vm2.invoke(close);
// Stop cache server
vm0.invoke(new SerializableRunnable("Stop CacheServer") {
  public void run() {
    stopBridgeServer(getCache());
  }
});
}

/**
 * Tests interest list registration.
 */
@Test
public void test020InterestListRegistration() throws CacheException {
  final String name = this.getName();
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = Host.getHost(0).getVM(2);
  // Create cache server
  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      // Loader echoes the key back so any get() succeeds on the server.
      CacheLoader cl = new CacheLoader() {
        public Object load(LoaderHelper helper) {
          return helper.getKey();
        }

        public void close() {}
      };
      AttributesFactory factory = getBridgeServerRegionAttributes(cl, null);
      createRegion(name, factory.create());
      // pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });
  // Create cache server clients
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  SerializableRunnable create = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      // create bridge writer
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      createRegion(name, factory.create());
    }
  };
  vm1.invoke(create);
  vm2.invoke(create);
  // Get values for key 1 and key 6 so that there are entries in the clients.
  // Register interest in a list of keys.
// --- continuation of test020InterestListRegistration ---
// vm1 primes key-1/key-6 via the server-side loader and registers interest in key-1..key-5.
vm1.invoke(new CacheSerializableRunnable("Create Entries and Register Interest") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    assertEquals(region.get("key-1"), "key-1");
    assertEquals(region.get("key-6"), "key-6");
    try {
      List list = new ArrayList();
      list.add("key-1");
      list.add("key-2");
      list.add("key-3");
      list.add("key-4");
      list.add("key-5");
      region.registerInterest(list);
    } catch (Exception ex) {
      org.apache.geode.test.dunit.Assert.fail("While registering interest: ", ex);
    }
  }
});
// vm2 primes the same keys but registers no interest.
vm2.invoke(new CacheSerializableRunnable("Create Entries and Register Interest") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    assertEquals(region.get("key-1"), "key-1");
    assertEquals(region.get("key-6"), "key-6");
  }
});
// Put new values and validate updates (VM2)
vm2.invoke(new CacheSerializableRunnable("Put New Values") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    region.put("key-1", "vm2-key-1");
    region.put("key-6", "vm2-key-6");
    // Verify that no updates occurred to this region
    assertEquals(region.getEntry("key-1").getValue(), "vm2-key-1");
    assertEquals(region.getEntry("key-6").getValue(), "vm2-key-6");
  }
});
Wait.pause(5 * 1000);
vm1.invoke(new CacheSerializableRunnable("Validate Entries") {
  public void run2() throws CacheException {
    LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
    // Verify that 'key-1' was updated
    assertEquals(region.getEntry("key-1").getValue(), "vm2-key-1");
    // Verify that 'key-6' was not invalidated
    assertEquals(region.getEntry("key-6").getValue(), "key-6");
  }
});
// Close cache server clients
SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
  public void run2() throws CacheException {
    Region region = getRootRegion().getSubregion(name);
    region.localDestroyRegion();
  }
};
vm1.invoke(close);
vm2.invoke(close);
// Stop cache server
vm0.invoke(new SerializableRunnable("Stop CacheServer") {
  public void run() {
    stopBridgeServer(getCache());
  }
});
}

// Simple serializable key holder used as a test value/key by pool tests.
protected class ConnectionPoolDUnitTestSerializable2 implements java.io.Serializable {
  protected ConnectionPoolDUnitTestSerializable2(String key) {
    _key = key;
  }

  public String getKey() {
    return _key;
  }

  // the wrapped key string
  protected String _key;
}

/**
 * Accessed by reflection DO NOT REMOVE
 *
 */
protected static int getCacheServerPort() {
  return bridgeServerPort;
}

// NOTE(review): the counters below are presumably also read by reflection from
// remote VMs (like getCacheServerPort above) — verify before removing.
protected static long getNumberOfAfterCreates() {
  return numberOfAfterCreates;
}

protected static long getNumberOfAfterUpdates() {
  return numberOfAfterUpdates;
}

protected static long getNumberOfAfterInvalidates() {
  return numberOfAfterInvalidates;
}

// private class GetKey extends TestRunnable {
//
// private String key;
// private Object result;
// private String name;
// ConnectionPoolDUnitTest test;
// int repCount;
// private AtomicBoolean timeToStop; // if non-null then ignroe repCount
//
// protected GetKey(String objectName, ConnectionPoolDUnitTest t, String name, AtomicBoolean
// timeToStop) {
// this.key = objectName;
// this.test = t;
// this.name = name;
// this.timeToStop = timeToStop;
// }
//
// protected GetKey(String objectName, ConnectionPoolDUnitTest t, String name, int repCount) {
// this.key = objectName;
// this.test = t;
// this.name = name;
// this.repCount=repCount;
// }
// public void runTest() throws Throwable {
// if (this.timeToStop != null) {
// getUntilStopped();
// } else {
// getForRepCount();
// }
// //test.close();
// }
//
// private void getForRepCount() throws Throwable {
//// boolean killed = false;
// final Region r = test.getRootRegion().getSubregion(this.name);
// final PoolImpl pi = (PoolImpl)PoolManager.find(r.getAttributes().getPoolName());
// try {
// for (int i=0;i<repCount;i++) {
// try {
// String key = this.key + i;
// if (r.getEntry(key) != null) {
// r.localInvalidate(key);
// }
// result = r.get(key);
// assertTrue("GetKey after get " + key + " result=" + result, pi.getConnectedServerCount() >= 1);
// Thread.sleep(10);
// }
// catch(InterruptedException ie) {
// fail("interrupted");
// }
// catch(ServerConnectivityException sce) {
// fail("While getting value for ACK region", sce);
// }
// catch(TimeoutException te) {
// fail("While getting value for ACK region", te);
// }
// }
// assertTrue(pi.getConnectedServerCount() >= 1);
// } finally {
// pi.releaseThreadLocalConnection();
// }
// }
// private void getUntilStopped() throws Throwable {
//// boolean killed = false;
// final Region r = test.getRootRegion().getSubregion(this.name);
// final PoolImpl pi = (PoolImpl)PoolManager.find(r.getAttributes().getPoolName());
// try {
// int i=0;
// while (!timeToStop.get()) {
// i++;
// try {
// String key = this.key + i;
// if (r.getEntry(key) != null) {
// r.localInvalidate(key);
// }
// result = r.get(key);
// assertTrue("GetKey after get " + key + " result=" + result, pi.getConnectedServerCount() >= 1);
// Thread.sleep(10);
// }
// catch(InterruptedException ie) {
// fail("interrupted");
// }
// catch(ServerConnectivityException sce) {
// fail("While getting value for ACK region", sce);
// }
// catch(TimeoutException te) {
// fail("While getting value for ACK region", te);
// }
// }
// assertTrue(pi.getConnectedServerCount() >= 1);
// } finally {
// pi.releaseThreadLocalConnection();
// }
// }
// }

/**
 * Creates a "loner" distributed system that has dynamic region creation enabled.
 *
 * @since GemFire 4.3
 */
protected Cache createDynamicRegionCache(String testName, String connectionPoolName) {
  // note that clients use non-persistent dr factories.
DynamicRegionFactory.get()
    .open(new DynamicRegionFactory.Config(null, connectionPoolName, false, true));
org.apache.geode.test.dunit.LogWriterUtils.getLogWriter().info("CREATED IT");
Cache z = getCache();
return z;
}

/**
 * A handy method to poll for arrival of non-null/non-invalid entries
 *
 * @param r the Region to poll
 * @param key the key of the Entry to poll for
 */
public static void waitForEntry(final Region r, final Object key) {
  WaitCriterion ev = new WaitCriterion() {
    public boolean done() {
      return r.containsValueForKey(key);
    }

    public String description() {
      return "Waiting for entry " + key + " on region " + r;
    }
  };
  GeodeAwaitility.await().untilAsserted(ev);
}

// Polls until the named subregion of r appears, then returns it.
public static Region waitForSubRegion(final Region r, final String subRegName) {
  // final long start = System.currentTimeMillis();
  final long MAXWAIT = 10000;
  WaitCriterion ev = new WaitCriterion() {
    public boolean done() {
      return r.getSubregion(subRegName) != null;
    }

    public String description() {
      return "Waiting for subregion " + subRegName;
    }
  };
  GeodeAwaitility.await().untilAsserted(ev);
  Region result = r.getSubregion(subRegName);
  return result;
}

// Server-side loader that echoes the key back; if the loader argument is an
// Integer it first sleeps that many milliseconds (used to simulate slow loads).
public static class CacheServerCacheLoader extends TestCacheLoader implements Declarable {
  public CacheServerCacheLoader() {}

  public Object load2(LoaderHelper helper) {
    if (helper.getArgument() instanceof Integer) {
      try {
        Thread.sleep(((Integer) helper.getArgument()).intValue());
      } catch (InterruptedException ugh) {
        fail("interrupted");
      }
    }
    return helper.getKey();
  }

  public void init(Properties props) {}
}

/**
 * Create a server that has a value for every key queried and a unique key/value in the specified
 * Region that uniquely identifies each instance.
 *
 * @param vm the VM on which to create the server
 * @param rName the name of the Region to create on the server
 * @param port the TCP port on which the server should listen
 */
public void createBridgeServer(VM vm, final String rName, final int port,
    final boolean notifyBySubscription) {
  vm.invoke(new CacheSerializableRunnable("Create Region on Server") {
    public void run2() {
      try {
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.DISTRIBUTED_ACK);
        factory.setConcurrencyChecksEnabled(false);
        factory.setCacheLoader(new CacheServerCacheLoader());
        // Record the region/server definition in cache.xml as well.
        beginCacheXml();
        createRegion(rName, factory.create());
        startBridgeServer(port);
        finishCacheXml(rName + "-" + port);

        Region region = getRootRegion().getSubregion(rName);
        assertNotNull(region);
        assertNotNull(getRootRegion().getSubregion(rName));
        region.put("BridgeServer", new Integer(port)); // A unique key/value to identify the
                                                       // BridgeServer
      } catch (Exception e) {
        getSystem().getLogWriter().severe(e);
        fail("Failed to start CacheServer " + e);
      }
    }
  });
}

// test for bug 35884
@Test
public void test021ClientGetOfInvalidServerEntry() throws CacheException {
  final String regionName1 = this.getName() + "-1";

  final Host host = Host.getHost(0);
  VM server1 = host.getVM(0);
  VM client = host.getVM(2);

  SerializableRunnable createServer = new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.DISTRIBUTED_ACK);
      factory.setDataPolicy(DataPolicy.REPLICATE);
      factory.setConcurrencyChecksEnabled(false);
      createRegion(regionName1, factory.create());
      Wait.pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  };

  // Create server1.
  server1.invoke(createServer);

  final int port = server1.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(server1.getHost());

  // Init values at server.
  server1.invoke(new CacheSerializableRunnable("Create values") {
    public void run2() throws CacheException {
      Region region1 = getRootRegion().getSubregion(regionName1);
      // create it invalid
      region1.create("key-string-1", null);
    }
  });

  // now try it with a local scope
  SerializableRunnable createPool2 = new CacheSerializableRunnable("Create region 2") {
    public void run2() throws CacheException {
      // Region region1 = getRootRegion().getSubregion(regionName1);
      // region1.localDestroyRegion();
      getLonerSystem();
      AttributesFactory regionFactory = new AttributesFactory();
      regionFactory.setScope(Scope.LOCAL);
      regionFactory.setConcurrencyChecksEnabled(false);
      org.apache.geode.test.dunit.LogWriterUtils.getLogWriter()
          .info("ZZZZZ host0:" + host0 + " port:" + port);
      ClientServerTestCase.configureConnectionPool(regionFactory, host0, port, -1, false, -1, -1,
          null);
      org.apache.geode.test.dunit.LogWriterUtils.getLogWriter()
          .info("ZZZZZDone host0:" + host0 + " port:" + port);
      createRegion(regionName1, regionFactory.create());
    }
  };
  client.invoke(createPool2);

  // get the invalid entry on the client.
// The server entry was created invalid, so the client must see an entry with
// a null value rather than a loader-supplied one.
client.invoke(new CacheSerializableRunnable("get values on client") {
  public void run2() throws CacheException {
    Region region1 = getRootRegion().getSubregion(regionName1);
    assertEquals(null, region1.getEntry("key-string-1"));
    assertEquals(null, region1.get("key-string-1"));
  }
});

server1.invoke(new SerializableRunnable("Stop CacheServer") {
  public void run() {
    stopBridgeServer(getCache());
  }
});
}

// Verifies that after a client destroys its region and pool, the server's
// notifier stats show register and unregister request counts in balance.
@Test
public void test022ClientRegisterUnregisterRequests() throws CacheException {
  final String regionName1 = this.getName() + "-1";

  final Host host = Host.getHost(0);
  VM server1 = host.getVM(0);
  VM client = host.getVM(2);

  SerializableRunnable createServer = new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.DISTRIBUTED_ACK);
      factory.setDataPolicy(DataPolicy.REPLICATE);
      factory.setConcurrencyChecksEnabled(false);
      createRegion(regionName1, factory.create());
      // pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  };

  // Create server1.
  server1.invoke(createServer);

  final int port = server1.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(server1.getHost());

  SerializableRunnable createPool = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      Region region1 = null;
      AttributesFactory regionFactory = new AttributesFactory();
      regionFactory.setScope(Scope.LOCAL);
      regionFactory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(regionFactory, host0, port, -1, true, -1, -1,
          null);
      region1 = createRegion(regionName1, regionFactory.create());
      region1.getAttributesMutator().addCacheListener(new CertifiableTestCacheListener(
          org.apache.geode.test.dunit.LogWriterUtils.getLogWriter()));
    }
  };

  // Create client.
  client.invoke(createPool);

  // Init values at server.
  server1.invoke(new CacheSerializableRunnable("Create values") {
    public void run2() throws CacheException {
      Region region1 = getRootRegion().getSubregion(regionName1);
      for (int i = 0; i < 20; i++) {
        region1.put("key-string-" + i, "value-" + i);
      }
    }
  });

  // Put some values on the client.
  client.invoke(new CacheSerializableRunnable("Put values client") {
    public void run2() throws CacheException {
      Region region1 = getRootRegion().getSubregion(regionName1);
      for (int i = 0; i < 10; i++) {
        region1.put("key-string-" + i, "client-value-" + i);
      }
    }
  });

  // Tear down the client region and its pool so the server sees the unregister.
  SerializableRunnable closePool = new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      Region region1 = getRootRegion().getSubregion(regionName1);
      String pName = region1.getAttributes().getPoolName();
      region1.localDestroyRegion();
      PoolImpl p = (PoolImpl) PoolManager.find(pName);
      p.destroy();
    }
  };
  client.invoke(closePool);

  SerializableRunnable validateClientRegisterUnRegister =
      new CacheSerializableRunnable("validate Client Register UnRegister") {
        public void run2() throws CacheException {
          for (Iterator bi = getCache().getCacheServers().iterator(); bi.hasNext();) {
            CacheServerImpl bsi = (CacheServerImpl) bi.next();
            final CacheClientNotifierStats ccnStats =
                bsi.getAcceptor().getCacheClientNotifier().getStats();
            // Wait until the unregister has been processed, then assert balance.
            WaitCriterion ev = new WaitCriterion() {
              public boolean done() {
                return ccnStats.getClientRegisterRequests() == ccnStats
                    .getClientUnRegisterRequests();
              }

              public String description() {
                return null;
              }
            };
            GeodeAwaitility.await().untilAsserted(ev);
            assertEquals("HealthMonitor Client Register/UnRegister mismatch.",
                ccnStats.getClientRegisterRequests(), ccnStats.getClientUnRegisterRequests());
          }
        }
      };

  server1.invoke(validateClientRegisterUnRegister);

  server1.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}

/**
 * Tests the containsKeyOnServer operation of the {@link Pool}
 *
 *
 * @since GemFire 5.0.2
 */
@Test
public void test023ContainsKeyOnServer() throws CacheException {
  final String name = this.getName();
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = host.getVM(2);

  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.DISTRIBUTED_ACK);
      factory.setConcurrencyChecksEnabled(false);
      createRegion(name, factory.create());
      // pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  SerializableRunnable create = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, false, -1, -1, null);
      createRegion(name, factory.create());
    }
  };
  vm1.invoke(create);
  vm2.invoke(create);

  // Two distinct keys that print the same: Integer 0 and String "0".
  final Integer key1 = new Integer(0);
  final String key2 = "0";

  // Before any puts, neither key exists on the server.
  vm2.invoke(new CacheSerializableRunnable("Contains key on server") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      boolean containsKey = false;
      containsKey = region.containsKeyOnServer(key1);
      assertFalse(containsKey);
      containsKey = region.containsKeyOnServer(key2);
      assertFalse(containsKey);
    }
  });

  vm1.invoke(new CacheSerializableRunnable("Put values") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      region.put(new Integer(0), new Integer(0));
      region.put("0", "0");
    }
  });

  // After vm1's puts, both keys must be visible to vm2 via the server.
  vm2.invoke(new CacheSerializableRunnable("Contains key on server") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      boolean containsKey = false;
      containsKey = region.containsKeyOnServer(key1);
      assertTrue(containsKey);
      containsKey = region.containsKeyOnServer(key2);
      assertTrue(containsKey);
    }
  });

  SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      region.localDestroyRegion();
    }
  };
  vm1.invoke(close);
  vm2.invoke(close);

  vm0.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}

/**
 * Tests that invoking {@link Region#create} with a <code>null</code> value does the right thing
 * with the {@link Pool}.
 *
 * @since GemFire 3.5
 */
@Test
public void test024CreateNullValue() throws CacheException {
  final String name = this.getName();
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = host.getVM(2);

  final Object createCallbackArg = "CREATE CALLBACK ARG";

  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      AttributesFactory factory = getBridgeServerRegionAttributes(null, null);
      createRegion(name, factory.create());
      // pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  SerializableRunnable create = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      createRegion(name, factory.create());
    }
  };
  vm1.invoke(create);
  vm2.invoke(create);

  // Creating with a null value produces invalid (null-valued) entries.
  vm2.invoke(new CacheSerializableRunnable("Create nulls") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        region.create(new Integer(i), null, createCallbackArg);
      }
    }
  });

  Wait.pause(1000); // Wait for updates to be propagated

  vm2.invoke(new CacheSerializableRunnable("Verify invalidates") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        Region.Entry entry = region.getEntry(new Integer(i));
        assertNotNull(entry);
        assertNull(entry.getValue());
      }
    }
  });

  vm1.invoke(new CacheSerializableRunnable("Attempt to create values") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        region.create(new Integer(i), "new" + i);
      }
    }
  });

  Wait.pause(1000); // Wait for updates to be propagated

  // vm2's entries must still be invalid; vm1's creates do not overwrite them here.
  vm2.invoke(new CacheSerializableRunnable("Verify invalidates") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        Region.Entry entry = region.getEntry(new Integer(i));
        assertNotNull(entry);
        assertNull(entry.getValue());
      }
    }
  });

  SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      region.localDestroyRegion();
    }
  };
  vm1.invoke(close);
  vm2.invoke(close);

  vm0.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}

/**
 * Tests that a {@link Region#localDestroy} is not propagated to the server and that a
 * {@link Region#destroy} is. Also makes sure that callback arguments are passed correctly.
 */
@Test
public void test025Destroy() throws CacheException {
  final String name = this.getName();

  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = host.getVM(2);

  final Object callbackArg = "DESTROY CALLBACK";

  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      // Writer asserts that the client's destroy callback argument reaches the server.
      CacheWriter cw = new TestCacheWriter() {
        public void beforeCreate2(EntryEvent event) throws CacheWriterException {}

        public void beforeDestroy2(EntryEvent event) throws CacheWriterException {
          Object beca = event.getCallbackArgument();
          assertEquals(callbackArg, beca);
        }
      };
      AttributesFactory factory = getBridgeServerRegionAttributes(null, cw);
      createRegion(name, factory.create());
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  SerializableRunnable create = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      Region rgn = createRegion(name, factory.create());
      // Subscribe to everything so server-side destroys reach this client.
      rgn.registerInterestRegex(".*", false, false);
    }
  };
  vm1.invoke(create);
  vm1.invoke(new CacheSerializableRunnable("Populate region") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        region.put(new Integer(i), String.valueOf(i));
      }
    }
  });
  vm2.invoke(create);
  vm2.invoke(new CacheSerializableRunnable("Load region") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        assertEquals(String.valueOf(i), region.get(new Integer(i)));
      }
    }
  });

  // localDestroy must stay local: vm2 and the server keep their entries.
  vm1.invoke(new CacheSerializableRunnable("Local destroy") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        region.localDestroy(new Integer(i));
      }
    }
  });

  vm2.invoke(new CacheSerializableRunnable("No destroy propagate") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        assertEquals(String.valueOf(i), region.get(new Integer(i)));
      }
    }
  });

  // vm1 can re-fetch the values from the server after its local destroys.
  vm1.invoke(new CacheSerializableRunnable("Fetch from server") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        assertEquals(String.valueOf(i), region.get(new Integer(i)));
      }
    }
  });

  vm0.invoke(new CacheSerializableRunnable("Check no server cache writer") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      TestCacheWriter writer = getTestWriter(region);
      writer.wasInvoked();
    }
  });

  // A distributed destroy (with callback arg) must reach the server and vm2.
  vm1.invoke(new CacheSerializableRunnable("Distributed destroy") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        region.destroy(new Integer(i), callbackArg);
      }
    }
  });
  Wait.pause(1000); // Wait for destroys to propagate

  vm1.invoke(new CacheSerializableRunnable("Attempt get from server") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        assertNull(region.getEntry(new Integer(i)));
      }
    }
  });

  vm2.invoke(new CacheSerializableRunnable("Validate destroy propagate") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        assertNull(region.getEntry(new Integer(i)));
      }
    }
  });

  SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      region.localDestroyRegion();
    }
  };
  vm1.invoke(close);
  vm2.invoke(close);

  vm0.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}

/**
 * Tests that a {@link Region#localDestroyRegion} is not propagated to the server and that a
 * {@link Region#destroyRegion} is. Also makes sure that callback arguments are passed correctly.
 */
@Ignore("TODO")
@Test
public void testDestroyRegion() throws CacheException {
  final String name = this.getName();

  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = host.getVM(2);

  final Object callbackArg = "DESTROY CALLBACK";

  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      // Writer verifies the region-destroy callback argument on the server side.
      CacheWriter cw = new TestCacheWriter() {
        public void beforeCreate2(EntryEvent event) throws CacheWriterException {}

        public void beforeRegionDestroy2(RegionEvent event) throws CacheWriterException {
          assertEquals(callbackArg, event.getCallbackArgument());
        }
      };
      AttributesFactory factory = getBridgeServerRegionAttributes(null, cw);
      createRegion(name, factory.create());
      // pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());

  SerializableRunnable create = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      createRegion(name, factory.create());
    }
  };
  vm1.invoke(create);
  vm2.invoke(create);

  vm1.invoke(new CacheSerializableRunnable("Local destroy region") {
    public
void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      region.localDestroyRegion();
      assertNull(getRootRegion().getSubregion(name));
      // close the bridge writer to prevent callbacks on the connections
      // Not necessary since locally destroying the region takes care of this.
      // getPoolClient(region).close();
    }
  });

  // A local region destroy must not remove vm2's region.
  vm2.invoke(new CacheSerializableRunnable("No destroy propagate") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      assertNotNull(region);
    }
  });

  vm0.invoke(new CacheSerializableRunnable("Check no server cache writer") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      TestCacheWriter writer = getTestWriter(region);
      writer.wasInvoked();
    }
  });

  // Re-create vm1's region so the distributed destroy can be exercised.
  vm1.invoke(create);

  vm1.invoke(new CacheSerializableRunnable("Distributed destroy region") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      assertNotNull(region);
      region.destroyRegion(callbackArg);
      assertNull(getRootRegion().getSubregion(name));
      // close the bridge writer to prevent callbacks on the connections
      // Not necessary since locally destroying the region takes care of this.
      // getPoolClient(region).close();
    }
  });
  Wait.pause(1000); // Wait for destroys to propagate

  vm2.invoke(new CacheSerializableRunnable("Verify destroy propagate") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      assertNull(region);
      // todo close the bridge writer
      // Not necessary since locally destroying the region takes care of this.
    }
  });

  vm0.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}

/**
 * Tests interest list registration with callback arg with DataPolicy.EMPTY and InterestPolicy.ALL
 */
@Test
public void test026DPEmptyInterestListRegistrationWithCallbackArg() throws CacheException {
  final String name = this.getName();
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = Host.getHost(0).getVM(2);

  // Create cache server
  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      // Loader echoes the key back so server gets always succeed.
      CacheLoader cl = new CacheLoader() {
        public Object load(LoaderHelper helper) {
          return helper.getKey();
        }

        public void close() {}
      };
      AttributesFactory factory = getBridgeServerRegionAttributes(cl, null);
      createRegion(name, factory.create());
      Wait.pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });

  // Create cache server clients
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  // Subscriber region: DataPolicy.EMPTY with InterestPolicy.ALL, so events
  // arrive even though nothing is stored locally.
  SerializableRunnable create = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      // create bridge writer
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      factory.addCacheListener(new ControlListener());
      factory.setDataPolicy(DataPolicy.EMPTY);
      factory.setSubscriptionAttributes(new SubscriptionAttributes(InterestPolicy.ALL));
      createRegion(name, factory.create());
    }
  };
  SerializableRunnable createPublisher =
      new CacheSerializableRunnable("Create publisher region") {
        public void run2() throws CacheException {
          getLonerSystem();
          AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.LOCAL);
          factory.setConcurrencyChecksEnabled(false);
          // create bridge writer
          ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1,
              null);
          factory.addCacheListener(new ControlListener());
          factory.setDataPolicy(DataPolicy.EMPTY); // make sure empty works with client publishers
          createRegion(name, factory.create());
        }
      };
  vm1.invoke(create);
  vm2.invoke(createPublisher);

  // VM1 Register interest
  vm1.invoke(new CacheSerializableRunnable("Create Entries and Register Interest") {
    public void run2() throws CacheException {
      LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
      try {
        // This call will cause no value to be put into the region
        region.registerInterest("key-1", InterestResultPolicy.NONE);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While registering interest: ", ex);
      }
    }
  });

  // VM2 Put entry (this will cause a create event in both VM1 and VM2)
  vm2.invoke(new CacheSerializableRunnable("Put Value") {
    public void run2() throws CacheException {
      LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
      region.create("key-1", "key-1-create", "key-1-create");
    }
  });

  // VM2 Put entry (this will cause an update event in both VM1 and VM2)
  vm2.invoke(new CacheSerializableRunnable("Put Value") {
    public void run2() throws CacheException {
      LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
      region.put("key-1", "key-1-update", "key-1-update");
    }
  });

  // VM2 Destroy entry (this will cause a destroy event)
  vm2.invoke(new CacheSerializableRunnable("Destroy Entry") {
    public void run2() throws CacheException {
      LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
      region.destroy("key-1", "key-1-destroy");
    }
  });

  // Expect exactly create, update, destroy — in order — each carrying its
  // callback argument; old values are unavailable because the region is EMPTY.
  final SerializableRunnable assertEvents = new CacheSerializableRunnable("Verify events") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      ControlListener listener = (ControlListener) region.getAttributes().getCacheListeners()[0];
      int eventCount = 3;
      listener.waitWhileNotEnoughEvents(60000, eventCount);
      assertEquals(eventCount, listener.events.size());
      {
        EventWrapper ew = (EventWrapper) listener.events.get(0);
        assertEquals(TYPE_CREATE, ew.type);
        Object key = "key-1";
        assertEquals(key, ew.event.getKey());
        assertEquals(null, ew.event.getOldValue());
        assertEquals(false, ew.event.isOldValueAvailable()); // failure
        assertEquals("key-1-create", ew.event.getNewValue());
        assertEquals(Operation.CREATE, ew.event.getOperation());
        assertEquals("key-1-create", ew.event.getCallbackArgument());
        assertEquals(true, ew.event.isOriginRemote());

        ew = (EventWrapper) listener.events.get(1);
        assertEquals(TYPE_UPDATE, ew.type);
        assertEquals(key, ew.event.getKey());
        assertEquals(null, ew.event.getOldValue());
        assertEquals(false, ew.event.isOldValueAvailable());
        assertEquals("key-1-update", ew.event.getNewValue());
        assertEquals(Operation.UPDATE, ew.event.getOperation());
        assertEquals("key-1-update", ew.event.getCallbackArgument());
        assertEquals(true, ew.event.isOriginRemote());

        ew = (EventWrapper) listener.events.get(2);
        assertEquals(TYPE_DESTROY, ew.type);
        assertEquals("key-1-destroy", ew.arg);
        assertEquals(key, ew.event.getKey());
        assertEquals(null, ew.event.getOldValue());
        assertEquals(false, ew.event.isOldValueAvailable());
        assertEquals(null, ew.event.getNewValue());
        assertEquals(Operation.DESTROY, ew.event.getOperation());
        assertEquals("key-1-destroy", ew.event.getCallbackArgument());
        assertEquals(true, ew.event.isOriginRemote());
      }
    }
  };
  vm1.invoke(assertEvents);

  // Close cache server clients
  SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      region.localDestroyRegion();
    }
  };
  vm1.invoke(close);
  vm2.invoke(close);

  // Stop cache server
  vm0.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}
/**
 * Tests interest list registration with callback arg with DataPolicy.EMPTY and
 * InterestPolicy.CACHE_CONTENT.
 *
 * <p>With DataPolicy.EMPTY plus InterestPolicy.CACHE_CONTENT the subscribing client caches no
 * content, so it must receive no events at all for the registered key (asserted below).
 */
@Test
public void test027DPEmptyCCInterestListRegistrationWithCallbackArg() throws CacheException {
  final String name = this.getName();
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0); // cache server
  VM vm1 = host.getVM(1); // subscribing client
  VM vm2 = Host.getHost(0).getVM(2); // publishing client

  // Create cache server
  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      // Loader simply echoes the key back as the value.
      CacheLoader cl = new CacheLoader() {
        public Object load(LoaderHelper helper) {
          return helper.getKey();
        }

        public void close() {}
      };
      AttributesFactory factory = getBridgeServerRegionAttributes(cl, null);
      createRegion(name, factory.create());
      // pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });

  // Create cache server clients
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  // Subscriber region: EMPTY data policy + CACHE_CONTENT interest policy.
  SerializableRunnable create = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      factory.setCacheListener(new ControlListener());
      factory.setDataPolicy(DataPolicy.EMPTY);
      factory.setSubscriptionAttributes(new SubscriptionAttributes(InterestPolicy.CACHE_CONTENT));
      createRegion(name, factory.create());
    }
  };
  SerializableRunnable createPublisher = new CacheSerializableRunnable("Create publisher region") {
    public void run2() throws CacheException {
      getLonerSystem();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      // create bridge writer
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      factory.setCacheListener(new ControlListener());
      factory.setDataPolicy(DataPolicy.EMPTY); // make sure empty works with client publishers
      createRegion(name, factory.create());
    }
  };
  vm1.invoke(create);
  vm2.invoke(createPublisher);

  // VM1 Register interest
  vm1.invoke(new CacheSerializableRunnable("Create Entries and Register Interest") {
    public void run2() throws CacheException {
      LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
      try {
        // This call will cause no value to be put into the region
        region.registerInterest("key-1", InterestResultPolicy.NONE);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While registering interest: ", ex);
      }
    }
  });

  // VM2 Put entry (this will cause a create event in both VM1 and VM2)
  vm2.invoke(new CacheSerializableRunnable("Put Value") {
    public void run2() throws CacheException {
      LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
      region.create("key-1", "key-1-create", "key-1-create");
    }
  });

  // VM2 Put entry (this will cause an update event in both VM1 and VM2)
  vm2.invoke(new CacheSerializableRunnable("Put Value") {
    public void run2() throws CacheException {
      LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
      region.put("key-1", "key-1-update", "key-1-update");
    }
  });

  // VM2 Destroy entry (this will cause a destroy event)
  vm2.invoke(new CacheSerializableRunnable("Destroy Entry") {
    public void run2() throws CacheException {
      LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
      region.destroy("key-1", "key-1-destroy");
    }
  });

  final SerializableRunnable assertEvents = new CacheSerializableRunnable("Verify events") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      ControlListener listener = (ControlListener) region.getAttributes().getCacheListeners()[0];
      Wait.pause(1000); // we should not get any events but give some time for the server to send
                        // them
      assertEquals(0, listener.events.size());
    }
  };
  vm1.invoke(assertEvents);

  // Close cache server clients
  SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      region.localDestroyRegion();
    }
  };
  vm1.invoke(close);
  vm2.invoke(close);

  // Stop cache server
  vm0.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}

/**
 * Test dynamic region creation instantiated from a bridge client causing regions to be created on
 * two different cache servers.
 *
 * Also tests the reverse situation, a dynamic region is created on the cache server expecting
 * the same region to be created on the client.
 *
 * Note: This test re-creates Distributed Systems for its own purposes and uses a Loner
 * distributed systems to isolate the Bridge Client.
 *
 */
@Test
public void test028DynamicRegionCreation() throws Exception {
  final String name = this.getName();
  final Host host = Host.getHost(0);
  final VM client1 = host.getVM(0);
  // VM client2 = host.getVM(1);
  final VM srv1 = host.getVM(2);
  final VM srv2 = host.getVM(3);

  final String k1 = name + "-key1";
  final String v1 = name + "-val1";
  final String k2 = name + "-key2";
  final String v2 = name + "-val2";
  final String k3 = name + "-key3";
  final String v3 = name + "-val3";

  // Isolate this test from any distributed system created by earlier tests.
  client1.invoke(() -> disconnectFromDS());
  srv1.invoke(() -> disconnectFromDS());
  srv2.invoke(() -> disconnectFromDS());
  try {
    // setup servers
    CacheSerializableRunnable ccs = new CacheSerializableRunnable("Create Cache Server") {
      public void run2() throws CacheException {
        createDynamicRegionCache(name, (String) null); // Creates a new DS and Cache
        assertTrue(DynamicRegionFactory.get().isOpen());
        try {
          startBridgeServer(0);
        } catch (IOException ugh) {
          fail("cache server startup failed");
        }
        AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
        factory.setDataPolicy(DataPolicy.REPLICATE);
        factory.setConcurrencyChecksEnabled(false);
        Region region = createRootRegion(name, factory.create());
        // Seed the root region so clients can validate initial state.
        region.put(k1, v1);
        Assert.assertTrue(region.get(k1).equals(v1));
      }
    };
    srv1.invoke(ccs);
    srv2.invoke(ccs);
    final String srv1Host = NetworkUtils.getServerHostName(srv1.getHost());
    final int srv1Port = srv1.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
    final int srv2Port = srv2.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
    // final String srv2Host = getServerHostName(srv2.getHost());

    // setup clients, do basic tests to make sure pool with notifier work as advertised
    client1.invoke(new CacheSerializableRunnable("Create Cache Client") {
      public void run2() throws CacheException {
        createLonerDS();
        AttributesFactory factory = new AttributesFactory();
        factory.setConcurrencyChecksEnabled(false);
        Pool cp = ClientServerTestCase.configureConnectionPool(factory, srv1Host, srv1Port,
            srv2Port, true, -1, -1, null);
        {
          final PoolImpl pool = (PoolImpl) cp;
          // Wait until the pool has elected a primary and at least one redundant server.
          WaitCriterion ev = new WaitCriterion() {
            public boolean done() {
              if (pool.getPrimary() == null) {
                return false;
              }
              if (pool.getRedundants().size() < 1) {
                return false;
              }
              return true;
            }

            public String description() {
              return null;
            }
          };
          GeodeAwaitility.await().untilAsserted(ev);
          assertNotNull(pool.getPrimary());
          assertTrue("backups=" + pool.getRedundants() + " expected=" + 1,
              pool.getRedundants().size() >= 1);
        }
        createDynamicRegionCache(name, "testPool");
        assertTrue(DynamicRegionFactory.get().isOpen());
        factory.setScope(Scope.LOCAL);
        factory.setConcurrencyChecksEnabled(false);
        factory.setCacheListener(new CertifiableTestCacheListener(
            org.apache.geode.test.dunit.LogWriterUtils.getLogWriter()));
        Region region = createRootRegion(name, factory.create());
        assertNull(region.getEntry(k1));
        region.registerInterestRegex(".*", InterestResultPolicy.KEYS_VALUES); // this should match
                                                                              // the key
        assertEquals(v1, region.getEntry(k1).getValue()); // Update via registered interest
        assertNull(region.getEntry(k2));
        region.put(k2, v2); // use the Pool
        assertEquals(v2, region.getEntry(k2).getValue()); // Ensure that the notifier didn't un-do
                                                          // the put, bug 35355
        region.put(k3, v3); // setup a key for invalidation from a notifier
      }
    });

    srv1.invoke(new CacheSerializableRunnable("Validate Server1 update") {
      public void run2() throws CacheException {
        CacheClientNotifier ccn = getInstance();
        final CacheClientNotifierStats ccnStats = ccn.getStats();
        final int eventCount = ccnStats.getEvents();
        Region r = getRootRegion(name);
        assertNotNull(r);
        assertEquals(v2, r.getEntry(k2).getValue()); // Validate the Pool worked, getEntry works
                                                     // because of the mirror
        assertEquals(v3, r.getEntry(k3).getValue()); // Make sure we have the other entry to use
                                                     // for notification
        r.put(k3, v1); // Change k3, sending some data to the client notifier

        // Wait for the update to propagate to the clients
        // NOTE(review): maxTime is only referenced by the commented-out polling code below.
        final int maxTime = 20000;
        // long start = System.currentTimeMillis();
        WaitCriterion ev = new WaitCriterion() {
          public boolean done() {
            return ccnStats.getEvents() > eventCount;
          }

          public String description() {
            return "waiting for ccnStat";
          }
        };
        GeodeAwaitility.await().untilAsserted(ev);
        // Set prox = ccn.getClientProxies();
        // assertIndexDetailsEquals(1, prox.size());
        // for (Iterator cpi = prox.iterator(); cpi.hasNext(); ) {
        // CacheClientProxy ccp = (CacheClientProxy) cpi.next();
        // start = System.currentTimeMillis();
        // while (ccp.getMessagesProcessed() < 1) {
        // assertTrue("Waited more than " + maxTime + "ms for client notification",
        // (System.currentTimeMillis() - start) < maxTime);
        // try {
        // Thread.sleep(100);
        // } catch (InterruptedException ine) { fail("Interrupted while waiting for client
        // notifier to complete"); }
        // }
        // }
      }
    });

    srv2.invoke(new CacheSerializableRunnable("Validate Server2 update") {
      public void run2() throws CacheException {
        Region r = getRootRegion(name);
        assertNotNull(r);
        assertEquals(v2, r.getEntry(k2).getValue()); // Validate the Pool worked, getEntry works
                                                     // because of the mirror
        assertEquals(v1, r.getEntry(k3).getValue()); // From peer update
      }
    });

    client1.invoke(new CacheSerializableRunnable("Validate Client notification") {
      public void run2() throws CacheException {
        Region r = getRootRegion(name);
        assertNotNull(r);
        CertifiableTestCacheListener ctl =
            (CertifiableTestCacheListener) r.getAttributes().getCacheListener();
        ctl.waitForUpdated(k3);
        assertEquals(v1, r.getEntry(k3).getValue()); // Ensure that the notifier updated the entry
      }
    });

    // Ok, now we are ready to do some dynamic region action!
    final String v1Dynamic = v1 + "dynamic";
    final String dynFromClientName = name + "-dynamic-client";
    final String dynFromServerName = name + "-dynamic-server";
    client1.invoke(new CacheSerializableRunnable("Client dynamic region creation") {
      public void run2() throws CacheException {
        assertTrue(DynamicRegionFactory.get().isOpen());
        Region r = getRootRegion(name);
        assertNotNull(r);
        Region dr = DynamicRegionFactory.get().createDynamicRegion(name, dynFromClientName);
        assertNull(dr.get(k1)); // This should be enough to validate the creation on the server
        dr.put(k1, v1Dynamic);
        assertEquals(v1Dynamic, dr.getEntry(k1).getValue());
      }
    });

    // Assert the servers have the dynamic region and the new value
    CacheSerializableRunnable valDR =
        new CacheSerializableRunnable("Validate dynamic region creation on server") {
          public void run2() throws CacheException {
            Region r = getRootRegion(name);
            assertNotNull(r);
            long end = System.currentTimeMillis() + 10000;
            Region dr = null;
            // Poll (up to ~10s) for the dynamic region to appear on this server.
            for (;;) {
              try {
                dr = r.getSubregion(dynFromClientName);
                assertNotNull(dr);
                assertNotNull(getCache().getRegion(name + Region.SEPARATOR + dynFromClientName));
                break;
              } catch (AssertionError e) {
                if (System.currentTimeMillis() > end) {
                  throw e;
                }
              }
            }
            assertEquals(v1Dynamic, dr.getEntry(k1).getValue());
          }
        };
    srv1.invoke(valDR);
    srv2.invoke(valDR);

    // now delete the dynamic region and see if it goes away on servers
    client1.invoke(new CacheSerializableRunnable("Client dynamic region destruction") {
      public void run2() throws CacheException {
        assertTrue(DynamicRegionFactory.get().isActive());
        Region r = getRootRegion(name);
        assertNotNull(r);
        String drName = r.getFullPath() + Region.SEPARATOR + dynFromClientName;
        assertNotNull(getCache().getRegion(drName));
        DynamicRegionFactory.get().destroyDynamicRegion(drName);
        assertNull(getCache().getRegion(drName));
      }
    });

    // Assert the servers no longer have the dynamic region
    CacheSerializableRunnable valNoDR =
        new CacheSerializableRunnable("Validate dynamic region destruction on server") {
          public void run2() throws CacheException {
            Region r = getRootRegion(name);
            assertNotNull(r);
            String drName = r.getFullPath() + Region.SEPARATOR + dynFromClientName;
            assertNull(getCache().getRegion(drName));
            // Destroying an already-destroyed dynamic region must fail.
            try {
              DynamicRegionFactory.get().destroyDynamicRegion(drName);
              fail("expected RegionDestroyedException");
            } catch (RegionDestroyedException expected) {
            }
          }
        };
    srv1.invoke(valNoDR);
    srv2.invoke(valNoDR);

    // Now try the reverse, create a dynamic region on the server and see if the client
    // has it
    srv2.invoke(new CacheSerializableRunnable("Server dynamic region creation") {
      public void run2() throws CacheException {
        Region r = getRootRegion(name);
        assertNotNull(r);
        Region dr = DynamicRegionFactory.get().createDynamicRegion(name, dynFromServerName);
        assertNull(dr.get(k1));
        dr.put(k1, v1Dynamic);
        assertEquals(v1Dynamic, dr.getEntry(k1).getValue());
      }
    });

    // Assert the servers have the dynamic region and the new value
    srv1.invoke(new CacheSerializableRunnable(
        "Validate dynamic region creation propagation to other server") {
      public void run2() throws CacheException {
        Region r = getRootRegion(name);
        assertNotNull(r);
        Region dr = waitForSubRegion(r, dynFromServerName);
        assertNotNull(dr);
        assertNotNull(getCache().getRegion(name + Region.SEPARATOR + dynFromServerName));
        waitForEntry(dr, k1);
        assertNotNull(dr.getEntry(k1));
        assertEquals(v1Dynamic, dr.getEntry(k1).getValue());
      }
    });

    // Assert the clients have the dynamic region and the new value
    client1.invoke(new CacheSerializableRunnable("Validate dynamic region creation on client") {
      public void run2() throws CacheException {
        Region r = getRootRegion(name);
        assertNotNull(r);
        long end = System.currentTimeMillis() + 10000;
        Region dr = null;
        // Poll (up to ~10s) for the server-created dynamic region to reach the client.
        for (;;) {
          try {
            dr = r.getSubregion(dynFromServerName);
            assertNotNull(dr);
            assertNotNull(getCache().getRegion(name + Region.SEPARATOR + dynFromServerName));
            break;
          } catch (AssertionError e) {
            if (System.currentTimeMillis() > end) {
              throw e;
            } else {
              Wait.pause(1000);
            }
          }
        }
        waitForEntry(dr, k1);
        assertNotNull(dr.getEntry(k1));
        assertEquals(v1Dynamic, dr.getEntry(k1).getValue());
      }
    });

    // now delete the dynamic region on a server and see if it goes away on client
    srv2.invoke(new CacheSerializableRunnable("Server dynamic region destruction") {
      public void run2() throws CacheException {
        assertTrue(DynamicRegionFactory.get().isActive());
        Region r = getRootRegion(name);
        assertNotNull(r);
        String drName = r.getFullPath() + Region.SEPARATOR + dynFromServerName;
        assertNotNull(getCache().getRegion(drName));
        DynamicRegionFactory.get().destroyDynamicRegion(drName);
        assertNull(getCache().getRegion(drName));
      }
    });
    srv1.invoke(
        new CacheSerializableRunnable("Validate dynamic region destruction on other server") {
          public void run2() throws CacheException {
            Region r = getRootRegion(name);
            assertNotNull(r);
            String drName = r.getFullPath() + Region.SEPARATOR + dynFromServerName;
            {
              // Poll (up to ~10s) for the region to disappear from this server.
              int retry = 100;
              while (retry-- > 0 && getCache().getRegion(drName) != null) {
                try {
                  Thread.sleep(100);
                } catch (InterruptedException ignore) {
                  fail("interrupted");
                }
              }
            }
            assertNull(getCache().getRegion(drName));
          }
        });
    // Assert the clients no longer have the dynamic region
    client1
        .invoke(new CacheSerializableRunnable("Validate dynamic region destruction on client") {
          public void run2() throws CacheException {
            Region r = getRootRegion(name);
            assertNotNull(r);
            String drName = r.getFullPath() + Region.SEPARATOR + dynFromServerName;
            {
              int retry = 100;
              while (retry-- > 0 && getCache().getRegion(drName) != null) {
                try {
                  Thread.sleep(100);
                } catch (InterruptedException ignore) {
                  fail("interrupted");
                }
              }
            }
            assertNull(getCache().getRegion(drName));
            // sleep to make sure that the dynamic region entry from the internal
            // region,dynamicRegionList in DynamicRegionFactory
            // ?
            try {
              Thread.sleep(10000);
            } catch (InterruptedException ignore) {
              fail("interrupted");
            }
            try {
              DynamicRegionFactory.get().destroyDynamicRegion(drName);
              fail("expected RegionDestroyedException");
            } catch (RegionDestroyedException expected) {
            }
          }
        });
  } finally {
    client1.invoke(() -> disconnectFromDS()); // clean-up loner
    srv1.invoke(() -> disconnectFromDS());
    srv2.invoke(() -> disconnectFromDS());
  }
}

/**
 * Test for bug 36279: an empty byte array created through a client must be readable as a
 * zero-length (non-null) value on both the client and the server.
 */
@Test
public void test029EmptyByteArray() throws CacheException {
  final String name = this.getName();
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);

  final Object createCallbackArg = "CREATE CALLBACK ARG";

  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      AttributesFactory factory = getBridgeServerRegionAttributes(null, null);
      createRegion(name, factory.create());
      // pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  SerializableRunnable create = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1,
true, -1, -1, null);
      createRegion(name, factory.create());
    }
  };
  vm1.invoke(create);

  vm1.invoke(new CacheSerializableRunnable("Create empty byte array") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      // A zero-length byte[] must round-trip through the server unchanged.
      for (int i = 0; i < 1; i++) {
        region.create(new Integer(i), new byte[0], createCallbackArg);
      }
    }
  });

  vm1.invoke(new CacheSerializableRunnable("Verify values on client") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 1; i++) {
        Region.Entry entry = region.getEntry(new Integer(i));
        assertNotNull(entry);
        byte[] value = (byte[]) entry.getValue();
        assertNotNull(value);
        assertEquals(0, value.length);
      }
    }
  });

  vm0.invoke(new CacheSerializableRunnable("Verify values on server") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 1; i++) {
        Region.Entry entry = region.getEntry(new Integer(i));
        assertNotNull(entry);
        byte[] value = (byte[]) entry.getValue();
        assertNotNull(value);
        assertEquals(0, value.length);
      }
    }
  });

  SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      region.localDestroyRegion();
    }
  };
  vm1.invoke(close);
  vm0.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}

/**
 * Tests interest list registration with callback arg.
 *
 * <p>Unlike test027, the subscribing client here caches content, so it must observe the full
 * create/update/destroy event sequence with the publisher's callback arguments.
 */
@Test
public void test030InterestListRegistrationWithCallbackArg() throws CacheException {
  final String name = this.getName();
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = Host.getHost(0).getVM(2);

  // Create cache server
  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      // Loader simply echoes the key back as the value.
      CacheLoader cl = new CacheLoader() {
        public Object load(LoaderHelper helper) {
          return helper.getKey();
        }

        public void close() {}
      };
      AttributesFactory factory = getBridgeServerRegionAttributes(cl, null);
      createRegion(name, factory.create());
      // pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });

  // Create cache server clients
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  SerializableRunnable create = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      // create bridge writer
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      factory.setCacheListener(new ControlListener());
      createRegion(name, factory.create());
    }
  };
  vm1.invoke(create);
  vm2.invoke(create);

  // VM1 Register interest
  vm1.invoke(new CacheSerializableRunnable("Create Entries and Register Interest") {
    public void run2() throws CacheException {
      LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
      try {
        // This call will cause no value to be put into the region
        region.registerInterest("key-1", InterestResultPolicy.NONE);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While registering interest: ", ex);
      }
    }
  });

  // VM2 Put entry (this will cause a create event in both VM1 and VM2)
  vm2.invoke(new CacheSerializableRunnable("Put Value") {
    public void run2() throws CacheException {
      LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
      region.create("key-1", "key-1-create", "key-1-create");
    }
  });

  // VM2 Put entry (this will cause an update event in both VM1 and VM2)
  vm2.invoke(new CacheSerializableRunnable("Put Value") {
    public void run2() throws CacheException {
      LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
      region.put("key-1", "key-1-update", "key-1-update");
    }
  });

  // VM2 Destroy entry (this will cause a destroy event)
  vm2.invoke(new CacheSerializableRunnable("Destroy Entry") {
    public void run2() throws CacheException {
      LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
      region.destroy("key-1", "key-1-destroy");
    }
  });

  final SerializableRunnable assertEvents = new CacheSerializableRunnable("Verify events") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      ControlListener listener = (ControlListener) region.getAttributes().getCacheListeners()[0];
      int eventCount = 3;
      listener.waitWhileNotEnoughEvents(60000, eventCount);
      assertEquals(eventCount, listener.events.size());
      {
        // NOTE(review): these assertEquals calls pass (actual, expected) — reversed relative to
        // the JUnit convention used elsewhere in this file.
        EventWrapper ew = (EventWrapper) listener.events.get(0);
        assertEquals(ew.type, TYPE_CREATE);
        Object key = "key-1";
        assertEquals(key, ew.event.getKey());
        assertEquals(null, ew.event.getOldValue());
        assertEquals("key-1-create", ew.event.getNewValue());
        assertEquals(Operation.CREATE, ew.event.getOperation());
        assertEquals("key-1-create", ew.event.getCallbackArgument());
        assertEquals(true, ew.event.isOriginRemote());

        ew = (EventWrapper) listener.events.get(1);
        assertEquals(ew.type, TYPE_UPDATE);
        assertEquals(key, ew.event.getKey());
        assertEquals("key-1-create", ew.event.getOldValue());
        assertEquals("key-1-update", ew.event.getNewValue());
        assertEquals(Operation.UPDATE, ew.event.getOperation());
        assertEquals("key-1-update", ew.event.getCallbackArgument());
        assertEquals(true, ew.event.isOriginRemote());

        ew = (EventWrapper) listener.events.get(2);
        assertEquals(ew.type, TYPE_DESTROY);
        assertEquals("key-1-destroy", ew.arg);
        assertEquals(key, ew.event.getKey());
        assertEquals("key-1-update", ew.event.getOldValue());
        assertEquals(null, ew.event.getNewValue());
        assertEquals(Operation.DESTROY, ew.event.getOperation());
        assertEquals("key-1-destroy", ew.event.getCallbackArgument());
        assertEquals(true, ew.event.isOriginRemote());
      }
    }
  };
  vm1.invoke(assertEvents);

  // Close cache server clients
  SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      region.localDestroyRegion();
    }
  };
  vm1.invoke(close);
  vm2.invoke(close);

  // Stop cache server
  vm0.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}

/**
 * Tests the keySetOnServer operation of the {@link Pool}
 *
 * @since GemFire 5.0.2
 */
@Test
public void test031KeySetOnServer() throws CacheException {
  final String name = this.getName();
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  VM vm2 = host.getVM(2);

  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.DISTRIBUTED_ACK);
      factory.setConcurrencyChecksEnabled(false);
      createRegion(name, factory.create());
      // pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  SerializableRunnable create = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      createRegion(name, factory.create());
    }
  };
  vm1.invoke(create);
  vm2.invoke(create);

  // The server-side key set starts out empty.
  vm2.invoke(new CacheSerializableRunnable("Get keys on server") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      Set keySet = region.keySetOnServer();
      assertNotNull(keySet);
      assertEquals(0, keySet.size());
    }
  });
vm1.invoke(new CacheSerializableRunnable("Put values") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      for (int i = 0; i < 10; i++) {
        region.put(new Integer(i), new Integer(i));
      }
    }
  });

  // After ten puts from the other client, keySetOnServer must report ten keys.
  vm2.invoke(new CacheSerializableRunnable("Get keys on server") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      Set keySet = region.keySetOnServer();
      assertNotNull(keySet);
      assertEquals(10, keySet.size());
    }
  });

  SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      region.localDestroyRegion();
    }
  };
  vm1.invoke(close);
  vm2.invoke(close);
  vm0.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}

// this test doesn't do anything so I commented it out
// /**
// * Tests that new connections update client notification connections.
// */
// public void test032NewConnections() throws Exception {
// final String name = this.getName();
// final Host host = Host.getHost(0);
// VM vm0 = host.getVM(0);
// VM vm1 = host.getVM(1);
// VM vm2 = host.getVM(2);
// // Cache server serves up the region
// vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
// public void run2() throws CacheException {
// AttributesFactory factory = getBridgeServerRegionAttributes(null,null);
// Region region = createRegion(name, factory.create());
// pause(1000);
// try {
// startBridgeServer(0);
// } catch (Exception ex) {
// fail("While starting CacheServer", ex);
// }
// }
// });
// final int port =
// vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
// final String host0 = getServerHostName(vm0.getHost());
// SerializableRunnable create =
// new CacheSerializableRunnable("Create region") {
// public void run2() throws CacheException {
// getCache();
// AttributesFactory factory = new AttributesFactory();
// factory.setScope(Scope.LOCAL);
// ClientServerTestCase.configureConnectionPool(factory,host0,port,-1,true,-1,-1, null);
// createRegion(name, factory.create());
// }
// };
// vm1.invoke(create);
// vm2.invoke(create);
// vm1.invoke(new CacheSerializableRunnable("Create new connection") {
// public void run2() throws CacheException {
// Region region = getRootRegion().getSubregion(name);
// BridgeClient writer = getPoolClient(region);
// Endpoint[] endpoints = (Endpoint[])writer.getEndpoints();
// for (int i=0; i<endpoints.length; i++) endpoints[i].addNewConnection();
// }
// });
// SerializableRunnable close =
// new CacheSerializableRunnable("Close Pool") {
// public void run2() throws CacheException {
// Region region = getRootRegion().getSubregion(name);
// region.localDestroyRegion();
// }
// };
// vm1.invoke(close);
// vm2.invoke(close);
// vm0.invoke(new SerializableRunnable("Stop CacheServer") {
// public void run() {
// stopBridgeServer(getCache());
// }
// });
// }

/**
 * Tests that creating, putting and getting a non-serializable key or value throws the correct
 * (NotSerializableException) exception.
*/
@Test
public void test033NotSerializableException() throws CacheException {
  final String name = this.getName();
  final Host host = Host.getHost(0);
  VM vm0 = host.getVM(0);
  VM vm1 = host.getVM(1);
  // VM vm2 = host.getVM(2);

  vm0.invoke(new CacheSerializableRunnable("Create Cache Server") {
    public void run2() throws CacheException {
      AttributesFactory factory = getBridgeServerRegionAttributes(null, null);
      createRegion(name, factory.create());
      // pause(1000);
      try {
        startBridgeServer(0);
      } catch (Exception ex) {
        org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
      }
    }
  });
  final int port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
  final String host0 = NetworkUtils.getServerHostName(vm0.getHost());
  SerializableRunnable create = new CacheSerializableRunnable("Create region") {
    public void run2() throws CacheException {
      getLonerSystem();
      getCache();
      AttributesFactory factory = new AttributesFactory();
      factory.setScope(Scope.LOCAL);
      factory.setConcurrencyChecksEnabled(false);
      ClientServerTestCase.configureConnectionPool(factory, host0, port, -1, true, -1, -1, null);
      createRegion(name, factory.create());
    }
  };
  vm1.invoke(create);

  // Each operation below must fail with a cause of java.io.NotSerializableException.
  vm1.invoke(new CacheSerializableRunnable("Attempt to create a non-serializable value") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      try {
        region.create(new Integer(1), new ConnectionPoolTestNonSerializable());
        fail("Should not have been able to create a ConnectionPoolTestNonSerializable");
      } catch (Exception e) {
        if (!(e.getCause() instanceof java.io.NotSerializableException))
          fail("Unexpected exception while creating a non-serializable value " + e);
      }
    }
  });

  vm1.invoke(new CacheSerializableRunnable("Attempt to put a non-serializable value") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      try {
        region.put(new Integer(1), new ConnectionPoolTestNonSerializable());
        fail("Should not have been able to put a ConnectionPoolTestNonSerializable");
      } catch (Exception e) {
        if (!(e.getCause() instanceof java.io.NotSerializableException))
          fail("Unexpected exception while putting a non-serializable value " + e);
      }
    }
  });

  vm1.invoke(new CacheSerializableRunnable("Attempt to get a non-serializable key") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      try {
        region.get(new ConnectionPoolTestNonSerializable());
        fail("Should not have been able to get a ConnectionPoolTestNonSerializable");
      } catch (Exception e) {
        if (!(e.getCause() instanceof java.io.NotSerializableException))
          fail("Unexpected exception while getting a non-serializable key " + e);
      }
    }
  });

  SerializableRunnable close = new CacheSerializableRunnable("Close Pool") {
    public void run2() throws CacheException {
      Region region = getRootRegion().getSubregion(name);
      region.localDestroyRegion();
    }
  };
  vm1.invoke(close);
  vm0.invoke(new SerializableRunnable("Stop CacheServer") {
    public void run() {
      stopBridgeServer(getCache());
    }
  });
}

// Deliberately NOT Serializable: used by test033 to provoke NotSerializableException.
protected class ConnectionPoolTestNonSerializable {
  protected ConnectionPoolTestNonSerializable() {}
}

/**
 * Tests 'notify-all' client updates.
This test verifies that: - only invalidates are sent as part * of the 'notify-all' mode of client updates - originators of updates are not sent invalidates - * non-originators of updates are sent invalidates - multiple invalidates are not sent for the * same update */ @Test public void test034NotifyAllUpdates() throws CacheException { final String name = this.getName(); final Host host = Host.getHost(0); VM vm0 = host.getVM(0); VM vm1 = host.getVM(1); VM vm2 = host.getVM(2); VM vm3 = host.getVM(3); disconnectAllFromDS(); // Create the cache servers with distributed, mirrored region SerializableRunnable createServer = new CacheSerializableRunnable("Create Cache Server") { public void run2() throws CacheException { CacheLoader cl = new CacheLoader() { public Object load(LoaderHelper helper) { return helper.getKey(); } public void close() { } }; AttributesFactory factory = getBridgeServerMirroredAckRegionAttributes(cl, null); createRegion(name, factory.create()); // pause(1000); try { startBridgeServer(0); } catch (Exception ex) { org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex); } } }; getSystem().getLogWriter().info("before create server"); vm0.invoke(createServer); vm1.invoke(createServer); // Create cache server clients final int numberOfKeys = 10; final String host0 = NetworkUtils.getServerHostName(host); final int vm0Port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort()); final int vm1Port = vm1.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort()); SerializableRunnable createClient = new CacheSerializableRunnable("Create Cache Server Client") { public void run2() throws CacheException { // reset all static listener variables in case this is being rerun in a subclass numberOfAfterInvalidates = 0; numberOfAfterCreates = 0; numberOfAfterUpdates = 0; getLonerSystem(); // create the region AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.LOCAL); factory.setConcurrencyChecksEnabled(false); // 
create bridge writer ClientServerTestCase.configureConnectionPool(factory, host0, vm0Port, vm1Port, true, -1, -1, null); Region rgn = createRegion(name, factory.create()); } }; getSystem().getLogWriter().info("before create client"); vm2.invoke(createClient); vm3.invoke(createClient); // Initialize each client with entries (so that afterInvalidate is called) SerializableRunnable initializeClient = new CacheSerializableRunnable("Initialize Client") { public void run2() throws CacheException { numberOfAfterInvalidates = 0; numberOfAfterCreates = 0; numberOfAfterUpdates = 0; LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name); for (int i = 0; i < numberOfKeys; i++) { assertEquals("key-" + i, region.get("key-" + i)); } } }; getSystem().getLogWriter().info("before initialize client"); vm2.invoke(initializeClient); vm3.invoke(initializeClient); // Add a CacheListener to both vm2 and vm3 vm2.invoke(new CacheSerializableRunnable("Add CacheListener 1") { public void run2() throws CacheException { LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name); CacheListener listener = new CacheListenerAdapter() { public void afterCreate(EntryEvent e) { numberOfAfterCreates++; org.apache.geode.test.dunit.LogWriterUtils.getLogWriter() .info("vm2 numberOfAfterCreates: " + numberOfAfterCreates); } public void afterUpdate(EntryEvent e) { numberOfAfterUpdates++; org.apache.geode.test.dunit.LogWriterUtils.getLogWriter() .info("vm2 numberOfAfterUpdates: " + numberOfAfterUpdates); } public void afterInvalidate(EntryEvent e) { numberOfAfterInvalidates++; org.apache.geode.test.dunit.LogWriterUtils.getLogWriter() .info("vm2 numberOfAfterInvalidates: " + numberOfAfterInvalidates); } }; region.getAttributesMutator().addCacheListener(listener); region.registerInterestRegex(".*", false, false); } }); vm3.invoke(new CacheSerializableRunnable("Add CacheListener 2") { public void run2() throws CacheException { LocalRegion region = (LocalRegion) 
getRootRegion().getSubregion(name); CacheListener listener = new CacheListenerAdapter() { public void afterCreate(EntryEvent e) { numberOfAfterCreates++; // getLogWriter().info("vm3 numberOfAfterCreates: " + numberOfAfterCreates); } public void afterUpdate(EntryEvent e) { numberOfAfterUpdates++; // getLogWriter().info("vm3 numberOfAfterUpdates: " + numberOfAfterUpdates); } public void afterInvalidate(EntryEvent e) { numberOfAfterInvalidates++; // getLogWriter().info("vm3 numberOfAfterInvalidates: " + numberOfAfterInvalidates); } }; region.getAttributesMutator().addCacheListener(listener); region.registerInterestRegex(".*", false, false); } }); Wait.pause(3000); getSystem().getLogWriter().info("before puts"); // Use vm2 to put new values // This should cause 10 afterUpdates to vm2 and 10 afterInvalidates to vm3 vm2.invoke(new CacheSerializableRunnable("Put New Values") { public void run2() throws CacheException { LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name); for (int i = 0; i < 10; i++) { region.put("key-" + i, "key-" + i); } } }); getSystem().getLogWriter().info("after puts"); // Wait to make sure all the updates are received Wait.pause(1000); long vm2AfterCreates = vm2.invoke(() -> ConnectionPoolDUnitTest.getNumberOfAfterCreates()); long vm2AfterUpdates = vm2.invoke(() -> ConnectionPoolDUnitTest.getNumberOfAfterUpdates()); long vm2AfterInvalidates = vm2.invoke(() -> ConnectionPoolDUnitTest.getNumberOfAfterInvalidates()); long vm3AfterCreates = vm3.invoke(() -> ConnectionPoolDUnitTest.getNumberOfAfterCreates()); long vm3AfterUpdates = vm3.invoke(() -> ConnectionPoolDUnitTest.getNumberOfAfterUpdates()); long vm3AfterInvalidates = vm3.invoke(() -> ConnectionPoolDUnitTest.getNumberOfAfterInvalidates()); org.apache.geode.test.dunit.LogWriterUtils.getLogWriter() .info("vm2AfterCreates: " + vm2AfterCreates); org.apache.geode.test.dunit.LogWriterUtils.getLogWriter() .info("vm2AfterUpdates: " + vm2AfterUpdates); 
org.apache.geode.test.dunit.LogWriterUtils.getLogWriter() .info("vm2AfterInvalidates: " + vm2AfterInvalidates); org.apache.geode.test.dunit.LogWriterUtils.getLogWriter() .info("vm3AfterCreates: " + vm3AfterCreates); org.apache.geode.test.dunit.LogWriterUtils.getLogWriter() .info("vm3AfterUpdates: " + vm3AfterUpdates); org.apache.geode.test.dunit.LogWriterUtils.getLogWriter() .info("vm3AfterInvalidates: " + vm3AfterInvalidates); assertTrue("VM2 should not have received any afterCreate messages", vm2AfterCreates == 0); assertTrue("VM2 should not have received any afterInvalidate messages", vm2AfterInvalidates == 0); assertTrue("VM2 received " + vm2AfterUpdates + " afterUpdate messages. It should have received " + numberOfKeys, vm2AfterUpdates == numberOfKeys); assertTrue("VM3 should not have received any afterCreate messages", vm3AfterCreates == 0); assertTrue("VM3 should not have received any afterUpdate messages", vm3AfterUpdates == 0); assertTrue( "VM3 received " + vm3AfterInvalidates + " afterInvalidate messages. 
It should have received " + numberOfKeys, vm3AfterInvalidates == numberOfKeys); } /** * Test that the "notify by subscription" attribute is unique for each BridgeServer and Gateway * */ /* * public void test035NotifyBySubscriptionIsolation() throws Exception { final String name = * this.getName(); final Host host = Host.getHost(0); final VM server = host.getVM(3); final VM * client1 = host.getVM(1); final VM client2 = host.getVM(2); * * final int[] ports = AvailablePortHelper.getRandomAvailableTCPPorts(3); final int bs1Port = * ports[0]; final int bs2Port = ports[1]; final int gwPort = ports[2]; * * final String key1 = "key1-" + name; final String val1 = "val1-" + name; final String key2 = * "key2-" + name; final String val2 = "val2-" + name; * * try { server.invoke(new CacheSerializableRunnable("Setup BridgeServers and Gateway") { public * void run2() throws CacheException { Cache cache = getCache(); * * try { * * // Create a gateway (which sets notify-by-subscription to true) cache.setGatewayHub(name, * gwPort).start(); * * // Start the server that does not have notify-by-subscription (server2) CacheServer bridge2 = * cache.addCacheServer(); bridge2.setPort(bs2Port); bridge2.setNotifyBySubscription(false); * String[] noNotifyGroup = {"noNotifyGroup"}; bridge2.setGroups(noNotifyGroup); bridge2.start(); * assertFalse(bridge2.getNotifyBySubscription()); { BridgeServerImpl bsi = (BridgeServerImpl) * bridge2; AcceptorImpl aci = bsi.getAcceptor(); * * //assertFalse(aci.getCacheClientNotifier().getNotifyBySubscription()); } * * // Start the server that DOES have notify-by-subscription (server1) CacheServer bridge1 = * cache.addCacheServer(); bridge1.setPort(bs1Port); bridge1.setNotifyBySubscription(true); * String[] notifyGroup = {"notifyGroup"}; bridge1.setGroups(notifyGroup); bridge1.start(); * assertTrue(bridge1.getNotifyBySubscription()); { BridgeServerImpl bsi = (BridgeServerImpl) * bridge1; AcceptorImpl aci = bsi.getAcceptor(); * 
assertTrue(aci.getCacheClientNotifier().getNotifyBySubscription()); } * * } catch (IOException ioe) { fail("Setup of BridgeServer test " + name + " failed", ioe ); } * * Region r = createRootRegion(name, getRegionAttributes()); r.put(key1, val1); } }); * * client1.invoke(new * CacheSerializableRunnable("Test client1 to server with true notify-by-subscription") { public * void run2() throws CacheException { createLonerDS(); AttributesFactory factory = new * AttributesFactory(); factory.setScope(Scope.LOCAL); * ClientServerTestCase.configureConnectionPool(factory,getServerHostName(host),bs1Port,-1,true,-1 * ,-1, "notifyGroup"); factory.setCacheListener(new * CertifiableTestCacheListener(getLogWriter())); Region r = createRootRegion(name, * factory.create()); assertNull(r.getEntry(key1)); r.registerInterest(key1); * assertNotNull(r.getEntry(key1)); assertIndexDetailsEquals(val1, r.getEntry(key1).getValue()); * r.registerInterest(key2); assertNull(r.getEntry(key2)); } }); * * client2.invoke(new * CacheSerializableRunnable("Test client2 to server with false notify-by-subscription") { public * void run2() throws CacheException { createLonerDS(); AttributesFactory factory = new * AttributesFactory(); * ClientServerTestCase.configureConnectionPool(factory,getServerHostName(host),bs2Port,-1,true,-1 * ,-1, "noNotifyGroup"); * * factory.setScope(Scope.LOCAL); factory.setCacheListener(new * CertifiableTestCacheListener(getLogWriter())); Region r = createRootRegion(name, * factory.create()); assertNull(r.getEntry(key1)); assertIndexDetailsEquals(val1, r.get(key1)); * assertNull(r.getEntry(key2)); r.registerInterest(key2); assertNull(r.getEntry(key2)); } }); * * server.invoke(new * CacheSerializableRunnable("Update server with new values for client notification") { public * void run2() throws CacheException { Region r = getRootRegion(name); assertNotNull(r); * r.put(key2, val2); // Create a new entry r.put(key1, val2); // Change the first entry } }); * * client1.invoke(new * 
CacheSerializableRunnable("Test update from to server with true notify-by-subscription") { * public void run2() throws CacheException { Region r = getRootRegion(name); assertNotNull(r); * CertifiableTestCacheListener ctl = (CertifiableTestCacheListener) * r.getAttributes().getCacheListener(); * * ctl.waitForUpdated(key1); assertNotNull(r.getEntry(key1)); assertIndexDetailsEquals(val2, * r.getEntry(key1).getValue()); // new value should have been pushed * * ctl.waitForCreated(key2); assertNotNull(r.getEntry(key2)); // new entry should have been pushed * assertIndexDetailsEquals(val2, r.getEntry(key2).getValue()); } }); * * client2.invoke(new * CacheSerializableRunnable("Test update from server with false notify-by-subscription") { public * void run2() throws CacheException { Region r = getRootRegion(name); assertNotNull(r); * CertifiableTestCacheListener ctl = (CertifiableTestCacheListener) * r.getAttributes().getCacheListener(); ctl.waitForInvalidated(key1); * assertNotNull(r.getEntry(key1)); assertNull(r.getEntry(key1).getValue()); // Invalidate should * have been pushed assertIndexDetailsEquals(val2, r.get(key1)); // New value should be fetched * * assertNull(r.getEntry(key2)); // assertNull(r.getEntry(key2).getValue()); * assertIndexDetailsEquals(val2, r.get(key2)); // New entry should be fetched } }); tearDown(); } * finally { // HashSet destroyedRoots = new HashSet(); try { client1.invoke(() -> * CacheTestCase.remoteTearDown()); client1.invoke(() -> disconnectFromDS()); } finally { * client2.invoke(() -> CacheTestCase.remoteTearDown()); client2.invoke(() -> disconnectFromDS()); * } } } */ // disabled - per Sudhir we don't support multiple bridges in the same VM // public void test0362BridgeServersWithDiffGroupsInSameVM() throws Exception { // final String name = this.getName(); // final Host host = Host.getHost(0); // final VM server = host.getVM(3); // final VM client1 = host.getVM(1); // final VM client2 = host.getVM(2); // // final int[] ports = 
AvailablePortHelper.getRandomAvailableTCPPorts(3); // final int bs1Port = ports[0]; // final int bs2Port = ports[1]; // // try { // server.invoke(new CacheSerializableRunnable("Setup BridgeServers and Gateway") { // public void run2() throws CacheException // { // Cache cache = getCache(); // // try { // // // Start server in group 1 // CacheServer bridge1 = cache.addCacheServer(); // bridge1.setPort(bs1Port); // String[] group1 = {"zGroup1"}; // bridge1.setGroups(group1); // bridge1.start(); // // // start server in group 2 // CacheServer bridge2 = cache.addCacheServer(); // bridge2.setPort(bs2Port); // bridge2.setNotifyBySubscription(true); // String[] group2 = {"zGroup2"}; // bridge2.setGroups(group2); // bridge2.start(); // getLogWriter().info("zGroup1 port should be "+bs1Port+" zGroup2 port should be "+bs2Port); // } catch (IOException ioe) { // fail("Setup of BridgeServer test " + name + " failed", ioe ); // } // // createRootRegion(name, getRegionAttributes()); // } // }); // // client1.invoke(new CacheSerializableRunnable("Test client1 to zGroup2") { // public void run2() throws CacheException // { // createLonerDS(); // AttributesFactory factory = new AttributesFactory(); // factory.setScope(Scope.LOCAL); // ClientServerTestCase.configureConnectionPool(factory,getServerHostName(host),bs1Port,-1,true,-1,-1, // "zGroup2"); // Region r = createRootRegion(name, factory.create()); // r.registerInterest("whatever"); // } // }); // // client2.invoke(new CacheSerializableRunnable("Test client2 to zGroup1") { // public void run2() throws CacheException // { // createLonerDS(); // AttributesFactory factory = new AttributesFactory(); // ClientServerTestCase.configureConnectionPool(factory,getServerHostName(host),bs2Port,-1,true,-1,-1, // "zGroup1"); // // factory.setScope(Scope.LOCAL); // Region r = createRootRegion(name, factory.create()); // r.registerInterest("whatever"); // } // }); // // tearDown(); // } finally { // try { // client1.invoke(() -> 
CacheTestCase.remoteTearDown()); // client1.invoke(() -> disconnectFromDS()); // } finally { // client2.invoke(() -> CacheTestCase.remoteTearDown()); // client2.invoke(() -> disconnectFromDS()); // } // } // } public static class DelayListener extends CacheListenerAdapter { private final int delay; public DelayListener(int delay) { this.delay = delay; } private void delay() { try { Thread.sleep(this.delay); } catch (InterruptedException ignore) { fail("interrupted"); } } public void afterCreate(EntryEvent event) { delay(); } public void afterDestroy(EntryEvent event) { delay(); } public void afterInvalidate(EntryEvent event) { delay(); } public void afterRegionDestroy(RegionEvent event) { delay(); } public void afterRegionCreate(RegionEvent event) { delay(); } public void afterRegionInvalidate(RegionEvent event) { delay(); } public void afterUpdate(EntryEvent event) { delay(); } public void afterRegionClear(RegionEvent event) { delay(); } public void afterRegionLive(RegionEvent event) { delay(); } } /** * Make sure a tx done in a server on an empty region gets sent to clients who have registered * interest. 
   */
  @Test
  public void test037Bug39526part1() throws CacheException, InterruptedException {
    final String name = this.getName();
    final Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);

    // Create the cache servers with distributed, empty region
    SerializableRunnable createServer = new CacheSerializableRunnable("Create Cache Server") {
      public void run2() throws CacheException {
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.DISTRIBUTED_ACK);
        factory.setDataPolicy(DataPolicy.EMPTY);
        factory.setConcurrencyChecksEnabled(false);
        createRegion(name, factory.create());
        // pause(1000);
        try {
          startBridgeServer(0);
        } catch (Exception ex) {
          org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
        }
      }
    };
    getSystem().getLogWriter().info("before create server");
    vm0.invoke(createServer);

    // Create cache server client registered for all keys, so the committed
    // entries are pushed to it.
    final String host0 = NetworkUtils.getServerHostName(host);
    final int vm0Port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
    SerializableRunnable createClient =
        new CacheSerializableRunnable("Create Cache Server Client") {
          public void run2() throws CacheException {
            getLonerSystem();
            // create the region
            AttributesFactory factory = new AttributesFactory();
            factory.setScope(Scope.LOCAL);
            factory.setConcurrencyChecksEnabled(false);
            // create bridge writer
            ClientServerTestCase.configureConnectionPool(factory, host0, vm0Port, -1, true, -1, -1,
                null);
            createRegion(name, factory.create());
            LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
            region.registerInterestRegex(".*");
          }
        };
    getSystem().getLogWriter().info("before create client");
    vm1.invoke(createClient);

    // now do a tx in the server
    // NOTE(review): committing in a finally block commits even when a put
    // throws -- presumably intentional for this test; confirm before changing.
    SerializableRunnable doServerTx = new CacheSerializableRunnable("doServerTx") {
      public void run2() throws CacheException {
        LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
        Cache cache = getCache();
        CacheTransactionManager txmgr = cache.getCacheTransactionManager();
        txmgr.begin();
        try {
          region.put("k1", "v1");
          region.put("k2", "v2");
          region.put("k3", "v3");
        } finally {
          txmgr.commit();
        }
      }
    };
    getSystem().getLogWriter().info("before doServerTx");
    vm0.invoke(doServerTx);

    // now verify that the client receives the committed data
    SerializableRunnable validateClient =
        new CacheSerializableRunnable("Validate Cache Server Client") {
          public void run2() throws CacheException {
            final LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
            // wait for a while for us to have the correct number of entries
            WaitCriterion ev = new WaitCriterion() {
              public boolean done() {
                return region.size() == 3;
              }

              public String description() {
                return "waiting for region to be size 3";
              }
            };
            GeodeAwaitility.await().untilAsserted(ev);
            // assertIndexDetailsEquals(3, region.size());
            assertTrue(region.containsKey("k1"));
            assertTrue(region.containsKey("k2"));
            assertTrue(region.containsKey("k3"));
            assertEquals("v1", region.getEntry("k1").getValue());
            assertEquals("v2", region.getEntry("k2").getValue());
            assertEquals("v3", region.getEntry("k3").getValue());
          }
        };
    getSystem().getLogWriter().info("before confirmCommitOnClient");
    vm1.invoke(validateClient);
  }

  /**
   * Now confirm that a tx done in a peer of a server (the server having an empty region and wanting
   * all events) sends the tx to its clients
   */
  @Test
  public void test038Bug39526part2() throws CacheException, InterruptedException {
    disconnectAllFromDS();
    final String name = this.getName();
    final Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);
    VM vm2 = host.getVM(2);

    // Create the cache servers with distributed, empty region.
    // InterestPolicy.ALL makes the empty server receive all peer events.
    SerializableRunnable createServer = new CacheSerializableRunnable("Create Cache Server") {
      public void run2() throws CacheException {
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.DISTRIBUTED_ACK);
        factory.setConcurrencyChecksEnabled(false);
        factory.setDataPolicy(DataPolicy.EMPTY);
        factory.setSubscriptionAttributes(new SubscriptionAttributes(InterestPolicy.ALL));
        createRegion(name, factory.create());
        // pause(1000);
        try {
          startBridgeServer(0);
        } catch (Exception ex) {
          org.apache.geode.test.dunit.Assert.fail("While starting CacheServer", ex);
        }
      }
    };
    getSystem().getLogWriter().info("before create server");
    vm0.invoke(createServer);

    // Create cache server client
    final String host0 = NetworkUtils.getServerHostName(host);
    final int vm0Port = vm0.invoke(() -> ConnectionPoolDUnitTest.getCacheServerPort());
    SerializableRunnable createClient =
        new CacheSerializableRunnable("Create Cache Server Client") {
          public void run2() throws CacheException {
            getLonerSystem();
            // create the region
            AttributesFactory factory = new AttributesFactory();
            factory.setScope(Scope.LOCAL);
            factory.setConcurrencyChecksEnabled(false);
            // create bridge writer
            ClientServerTestCase.configureConnectionPool(factory, host0, vm0Port, -1, true, -1, -1,
                null);
            createRegion(name, factory.create());
            LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
            region.registerInterestRegex(".*");
          }
        };
    getSystem().getLogWriter().info("before create client");
    vm1.invoke(createClient);

    // A plain peer (no cache server) that will originate the transaction.
    SerializableRunnable createServerPeer = new CacheSerializableRunnable("Create Server Peer") {
      public void run2() throws CacheException {
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(Scope.DISTRIBUTED_ACK);
        factory.setDataPolicy(DataPolicy.EMPTY);
        factory.setConcurrencyChecksEnabled(false);
        createRegion(name, factory.create());
      }
    };
    getSystem().getLogWriter().info("before create server peer");
    vm2.invoke(createServerPeer);

    // now do a tx in the server
    SerializableRunnable doServerTx = new CacheSerializableRunnable("doServerTx") {
      public void run2() throws CacheException {
        LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
        Cache cache = getCache();
        CacheTransactionManager txmgr = cache.getCacheTransactionManager();
        txmgr.begin();
        try {
          region.put("k1", "v1");
          region.put("k2", "v2");
          region.put("k3", "v3");
        } finally {
          txmgr.commit();
        }
      }
    };
    getSystem().getLogWriter().info("before doServerTx");
    vm2.invoke(doServerTx);

    // @todo verify server received it but to do this need a listener in
    // the server

    // now verify that the client receives the committed data
    SerializableRunnable validateClient =
        new CacheSerializableRunnable("Validate Cache Server Client") {
          public void run2() throws CacheException {
            final LocalRegion region = (LocalRegion) getRootRegion().getSubregion(name);
            // wait for a while for us to have the correct number of entries
            WaitCriterion ev = new WaitCriterion() {
              public boolean done() {
                return region.size() == 3;
              }

              public String description() {
                return "waiting for region to be size 3";
              }
            };
            GeodeAwaitility.await().untilAsserted(ev);
            // assertIndexDetailsEquals(3, region.size());
            assertTrue(region.containsKey("k1"));
            assertTrue(region.containsKey("k2"));
            assertTrue(region.containsKey("k3"));
            assertEquals("v1", region.getEntry("k1").getValue());
            assertEquals("v2", region.getEntry("k2").getValue());
            assertEquals("v3", region.getEntry("k3").getValue());
          }
        };
    getSystem().getLogWriter().info("before confirmCommitOnClient");
    vm1.invoke(validateClient);
    disconnectAllFromDS();
  }

  /**
   * Trivial DataSerializable value type used by these tests; serializes only
   * its int index.
   */
  static class Order implements DataSerializable {
    int index;

    public Order() {}

    public void init(int index) {
      this.index = index;
    }

    public int getIndex() {
      return index;
    }

    @Override
    public void toData(DataOutput out) throws IOException {
      out.writeInt(index);
    }

    @Override
    public void fromData(DataInput in) throws IOException, ClassNotFoundException {
      index = in.readInt();
    }
  }
}
{ "content_hash": "711cd7c6dd2ec6a3998652f459a15211", "timestamp": "", "source": "github", "line_count": 5855, "max_line_length": 113, "avg_line_length": 36.55815542271563, "alnum_prop": 0.6332738451188519, "repo_name": "pdxrunner/geode", "id": "beab93201db0a94d625b36a61720afe6b5d71050", "size": "214837", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "geode-core/src/distributedTest/java/org/apache/geode/cache/ConnectionPoolDUnitTest.java", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "106708" }, { "name": "Dockerfile", "bytes": "16800" }, { "name": "Go", "bytes": "1205" }, { "name": "Groovy", "bytes": "5503" }, { "name": "HTML", "bytes": "3820747" }, { "name": "Java", "bytes": "28135686" }, { "name": "JavaScript", "bytes": "1781602" }, { "name": "Python", "bytes": "24351" }, { "name": "Ruby", "bytes": "6665" }, { "name": "Shell", "bytes": "134940" } ], "symlink_target": "" }
//
// GDataEntryYouTubeUpload.m
//
// This entry is used to upload to YouTube.
//
#import "GDataEntryYouTubeUpload.h"
#import "GDataEntryYouTubeVideo.h"
#import "GDataYouTubeElements.h"

@implementation GDataEntryYouTubeUpload

// Convenience constructor: returns an autoreleased upload entry carrying the
// video metadata (mediaGroup) plus the raw media bytes and the upload headers
// the service needs (MIME type and the Slug file name).
+ (GDataEntryYouTubeUpload *)uploadEntryWithMediaGroup:(GDataYouTubeMediaGroup *)mediaGroup
                                                  data:(NSData *)data
                                              MIMEType:(NSString *)mimeType
                                                  slug:(NSString *)fileName {

  GDataEntryYouTubeUpload *entry = [[[self alloc] init] autorelease];

  [entry setNamespaces:[GDataEntryYouTubeVideo youTubeNamespaces]];

  [entry setMediaGroup:mediaGroup];
  [entry setUploadData:data];
  [entry setUploadMIMEType:mimeType];
  [entry setUploadSlug:fileName];
  return entry;
}

#pragma mark -

- (void)addExtensionDeclarations {

  [super addExtensionDeclarations];

  Class entryClass = [self class];

  // YouTubeMediaGroup encapsulates YouTubeMediaContent
  [self addExtensionDeclarationForParentClass:entryClass
                                   childClass:[GDataYouTubeMediaGroup class]];
}

// Adds the media group to this entry's debugging description.
- (NSMutableArray *)itemsForDescription {

  NSMutableArray *items = [super itemsForDescription];

  [self addToArray:items objectDescriptionIfNonNil:[self mediaGroup] withName:@"mediaGroup"];

  return items;
}

// Note: a former -init override was removed here; it only called through to
// super and returned the result, which is exactly the inherited behavior.

#pragma mark -

// Typed accessors for the entry's GDataYouTubeMediaGroup extension element.
- (GDataYouTubeMediaGroup *)mediaGroup {
  return [self objectForExtensionClass:[GDataYouTubeMediaGroup class]];
}

- (void)setMediaGroup:(GDataYouTubeMediaGroup *)obj {
  [self setObject:obj forExtensionClass:[GDataYouTubeMediaGroup class]];
}

@end
{ "content_hash": "012a7cc7ba65355a7eaf198fbb58583b", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 93, "avg_line_length": 24.450704225352112, "alnum_prop": 0.6768433179723502, "repo_name": "number7/budgetminder", "id": "9065bb9fabc83ec7b049ce0bcfaa2865f80854ba", "size": "2318", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "GData/Clients/YouTube/GDataEntryYouTubeUpload.m", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "885" }, { "name": "CSS", "bytes": "6666" }, { "name": "HTML", "bytes": "6777" }, { "name": "JavaScript", "bytes": "37240" }, { "name": "Objective-C", "bytes": "2045995" }, { "name": "Python", "bytes": "9502" }, { "name": "Ruby", "bytes": "2465" }, { "name": "Shell", "bytes": "10983" } ], "symlink_target": "" }
// Page-level module exposed as the global `Index`.
var Index = (function () {

    var api = {

        // Entry point used to initialise the page module (currently a no-op).
        init: function () {
        }

    };

    return api;

})();
{ "content_hash": "0efb90d3e4e998e2820c621770a995f2", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 46, "avg_line_length": 10.846153846153847, "alnum_prop": 0.46099290780141844, "repo_name": "dshibko/clear", "id": "42731fcf74f9ded7e349f6a856c61c00c6d88288", "size": "141", "binary": false, "copies": "3", "ref": "refs/heads/develop", "path": "public/admin-assets/scripts/index.js", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "820284" }, { "name": "CoffeeScript", "bytes": "57172" }, { "name": "Go", "bytes": "6713" }, { "name": "JavaScript", "bytes": "2115651" }, { "name": "Makefile", "bytes": "397" }, { "name": "PHP", "bytes": "218761" }, { "name": "Python", "bytes": "5173" }, { "name": "Ruby", "bytes": "861" }, { "name": "Shell", "bytes": "1357" } ], "symlink_target": "" }
# psake task: store-api-key
# Prompts the user for their NuGet API key (typed into the password field of a
# credential prompt so it is never echoed on screen) and persists it under the
# key 'NuGetApiKey' in the settings file named by the required $SettingsPath
# variable. PromptUserForCredentialAndStorePassword is a project helper that
# performs the prompt and the encrypted save.
Task 'store-api-key' -requiredVariables SettingsPath {
    # Splatted arguments for the prompt-and-store helper.
    $promptForKeyCredParams = @{
        DestinationPath = $SettingsPath
        Message = 'Enter your NuGet API key in the password field'
        Key = 'NuGetApiKey'
    }

    PromptUserForCredentialAndStorePassword @promptForKeyCredParams

    # Emitted to the pipeline so the user sees where the key was written.
    "The NuGetApiKey has been stored in $SettingsPath"
}
{ "content_hash": "71bf2421b72cf368c170053485dee90f", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 74, "avg_line_length": 37.4, "alnum_prop": 0.679144385026738, "repo_name": "Cobster/psst", "id": "040d1c13f66dfadc82b340dd8e4337cd2dd7878b", "size": "374", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "psake/api/store-api-key.task.ps1", "mode": "33188", "license": "mit", "language": [ { "name": "PowerShell", "bytes": "64315" } ], "symlink_target": "" }
using System.Linq;
using Sprache;

namespace SharpFlame.Core.Parsers.Lev2
{
    /// Sprache combinator grammar for ".lev" (campaign/level list) files.
    public class Lev2Grammar
    {
        /// A "/* ... */" comment; the body must be non-empty. Trailing whitespace consumed by Token().
        public static readonly Parser<string> MultiLineComment = Parse.AnyChar.Except(Parse.String("*/")).AtLeastOnce().Text()
            .Contained(Parse.String("/*"), Parse.String("*/")).Token();

        /// End of input or a line terminator.
        /// NOTE(review): "\r" is tried before "\r\n", so a CRLF sequence matches as
        /// "\r" with the "\n" left for later consumption; the "\r\n" alternative is
        /// effectively unreachable -- confirm intent before reordering.
        public static readonly Parser<string> LineEnd = Parse.Return("").End()
            .XOr(Parse.String("\r").Text())
            .Or(Parse.String("\n").Text())
            .Or(Parse.String("\r\n")).Text();

        /// A "//" comment running to the end of the line.
        public static readonly Parser<string> SingleLineComment = Parse.String("//").Then(_ => Parse.AnyChar.Until(LineEnd)).Text().Token();

        /// Text between double quotes; must be non-empty, quotes not included in the result.
        public static readonly Parser<string> QuotedText = Parse.CharExcept('"').AtLeastOnce().Text()
            .Contained(Parse.Char('"'), Parse.Char('"'));

        //campaign MULTI_CAM_1
        // Yields the campaign name following the "campaign" keyword.
        // (The "Campaing" typo is kept: this is a public member; renaming would break callers.)
        public static readonly Parser<string> CampaingDirective =
            from directive in Parse.String("campaign").Token()
            from name in Parse.AnyChar.Until(Parse.WhiteSpace).Token().Text()
            select name;

        //data "wrf/basic.wrf"
        // Yields the quoted data path following the "data" keyword.
        public static readonly Parser<string> DataDirective =
            from directive in Parse.String("data").Token()
            from datapath in QuotedText.Token()
            select datapath;

        //campaign MULTI_T3_C2
        //data "wrf/vidmem2.wrf"
        //data "wrf/basic.wrf"
        //data "wrf/cam2.wrf"
        //data "wrf/audio.wrf"
        //data "wrf/piestats.wrf"
        //data "wrf/stats.wrf"
        //data "wrf/multires3.wrf"
        // A campaign block: one campaign directive followed by one or more data directives.
        public static readonly Parser<Campaign> Campaign =
            from campaignName in CampaingDirective
            from dataArray in DataDirective.AtLeastOnce()
            select new Campaign
            {
                Name = campaignName,
                Data = dataArray.ToArray()
            };

        //level Sk-ThePit-T2
        public static readonly Parser<string> LevelDirective =
            from directive in Parse.String("level").Token()
            from name in Parse.AnyChar.Until(Parse.WhiteSpace).Token().Text()
            select name;

        //players 4
        public static readonly Parser<int> PlayersDirective =
            from directive in Parse.String("players").Token()
            from numberStr in Parse.Number
            select int.Parse(numberStr);

        //type 18
        public static readonly Parser<int> TypeDirective =
            from directive in Parse.String("type").Token()
            from numberStr in Parse.Number
            select int.Parse(numberStr);

        //dataset MULTI_T2_C1
        public static readonly Parser<string> DatasetDirective =
            from directive in Parse.String("dataset").Token()
            from name in Parse.AnyChar.Until(Parse.WhiteSpace).Token().Text()
            select name;

        //game "multiplay/maps/4c-rush.gam"
        public static readonly Parser<string> GameDirective =
            from directive in Parse.String("game").Token()
            from gamepath in QuotedText.Token()
            select gamepath;

        //level Sk-Rush2-T2
        //players 4
        //type 18
        //dataset MULTI_T2_C1
        //game "multiplay/maps/4c-rush2.gam"
        // A level block: fixed directive order, with optional trailing data directives.
        public static readonly Parser<Level> Level =
            from level in LevelDirective
            from players in PlayersDirective
            from type in TypeDirective
            from dataset in DatasetDirective
            from game in GameDirective
            from data in DataDirective.AtLeastOnce().Optional()
            select new Level
            {
                Name = level,
                Players = players,
                Type = type,
                Dataset = dataset,
                Game = game,
                Data = data.IsDefined ? data.Get().ToArray() : null
            };

        // Whole file: repeated groups of (comments, campaigns, levels), flattened
        // into one Lev. Each element inside a group is optional, so empty groups
        // are tolerated.
        public static readonly Parser<Lev> Lev =
            from loop in (
                from multi_comments in MultiLineComment.Optional().AtLeastOnce()
                from single_comments in SingleLineComment.Optional().AtLeastOnce()
                from campaigns in Campaign.Optional().AtLeastOnce()
                from levels in Level.Optional().AtLeastOnce()
                //convert optionals to data array
                let campaignsArray = campaigns
                    .Where(c => c.IsDefined)
                    .Select(c => c.Get()).ToArray()
                let levelsArray = levels
                    .Where(l => l.IsDefined)
                    .Select(l => l.Get()).ToArray()
                select new
                {
                    Campaigns = campaignsArray,
                    Levels = levelsArray
                }
            ).AtLeastOnce() // loop
            select new Lev
            {
                Campaigns = loop.SelectMany(c => c.Campaigns).ToArray(),
                Levels = loop.SelectMany(l => l.Levels).ToArray()
            };
    }

    /// Parsed contents of a whole .lev file.
    public class Lev
    {
        public Campaign[] Campaigns { get; set; }
        public Level[] Levels { get; set; }
    }

    /// One "campaign" block with its data file paths.
    public class Campaign
    {
        public string Name { get; set; }
        public string[] Data { get; set; }
    }

    /// One "level" block.
    public class Level
    {
        public string Name { get; set; }
        public int Players { get; set; }
        public int Type { get; set; }
        public string Dataset { get; set; }
        public string Game { get; set; }
        // Null when the level block has no trailing data directives.
        public string[] Data { get; set; }
    }
}
{ "content_hash": "4df70dd41ce6488611ae1a8709230dff", "timestamp": "", "source": "github", "line_count": 157, "max_line_length": 86, "avg_line_length": 37.42675159235669, "alnum_prop": 0.5175289312457454, "repo_name": "bchavez/SharpFlame", "id": "4a0119cb8107c3d41b8ce28e0bd3d84c21dc504a", "size": "5878", "binary": false, "copies": "1", "ref": "refs/heads/eto", "path": "source/SharpFlame.Core/Parsers/Lev2/Lev2Grammar.cs", "mode": "33261", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "676" }, { "name": "C#", "bytes": "2225820" }, { "name": "Shell", "bytes": "377" } ], "symlink_target": "" }
#ifndef LALRGRAM_HPP
#define LALRGRAM_HPP

#include "gram.hpp"
#include "lalrparsingtable.hpp"

#include <deque>
#include <utility>

/// @todo: consider priorities for productions, additional to ignore

/// @todo: consider moving ignore flag from Information to ItemCore,
/// since ignore items and non-ignore items can be mixed together in same info.
/// An item is ignorable if production is an ignore production
/// or item was added to info during closure calculation.

/// Grammar specialization that builds an LALR parsing table.
/// @tparam T token type of the grammar.
/// @tparam N default token value (used e.g. for endToken initialization).
template<typename T, T N = T(0)>
class LalrGrammar: public Grammar<T>
{
public:
	typedef LalrParsingTable<T> Table;
	typedef typename Table::Action Action;
	typedef typename Grammar<T>::ProductionNo ProductionNo;
	typedef typename Grammar<T>::Set Set;
	typedef typename Grammar<T>::Token Token;
	typedef typename Grammar<T>::String String;
	typedef typename Grammar<T>::ProductionIndex ProductionIndex;

	/// A conflict between two parser actions for the same (state, token)
	/// pair, recorded while the parsing table is built.
	struct Error
	{
		Error(size_t state, T tok, Action a1, Action a2)
			: state(state), token(tok), actions(a1, a2) {}
		size_t state;                       ///< state in which the conflict occurred
		T token;                            ///< lookahead token of the conflict
		std::pair<Action, Action> actions;  ///< the two incompatible actions
	};

	/// Token value marking end of input.
	static const T endToken;

	/// Resets state accumulated by a previous table construction.
	void clear();
	/// Builds and returns the LALR parsing table for this grammar.
	/// Conflicts encountered are retrievable via getErrors().
	Table createParsingTable();
	/// @return conflicts recorded during table construction.
	const std::vector<Error>& getErrors() const;
	/// @return productions that participated in conflicts.
	const std::set<ProductionNo>& getAmbiguousProductions() const;
	bool isIgnoreProduction(ProductionNo prodNo) const;
	/// @return true if action shifts into ignore state or reduces with ignore production.
	bool isIgnorableAction(const Action& action) const;

	/// Production with position in body.
	struct ItemCore
	{
		ItemCore(const ProductionNo& p, size_t d, bool ignore)
			: productionNo(p), dot(d), ignore(ignore) {}
		bool operator<(const ItemCore& other) const;
		bool operator==(const ItemCore& other) const;
		const ProductionNo productionNo;  ///< production this item refers to
		const size_t dot;                 ///< dot position within the production body
		bool ignore;                      ///< item is ignorable (see file-level note above)
	};

	/// Item cores mapped to their lookahead (follow) token sets.
	typedef std::map<ItemCore, Set> InfoItems;
	/// A single (core, follow set) entry of an InfoItems map.
	typedef typename InfoItems::value_type Item;

	/// Information consists of item cores with set of tokens.
	/// The tokens are follows of body of production.
	/// An information corresponds to a state of the parser.
	struct Information
	{
		Information()/* : ignore(false)*/ {}
		InfoItems items;
		/// Flag of being info of ignore production.
		//bool ignore;

		/// @return true if all items are ignorable.
		bool isIgnorable() const;

		/// Adds follow tokens to each item.
		void addFollow(const Set&);
	};

	/// @return information corresponding to state in parsing table.
	/// @see createParsingTable()
	const Information& getInformation(size_t state) const;
	/// @return number of informations
	size_t getNumberOfStates() const;

protected:
	/// Information with id.
	struct TaggedInfo
	{
		TaggedInfo(const Information& info, size_t id) : info(info), id(id) {}
		Information info;
		size_t id;
	};

	/// Comparator for TaggedInfo; name suggests it orders by item cores only
	/// (lookaheads ignored), as needed for merging LALR states — confirm in
	/// the implementation.
	struct InfoCoreCompare
	{
		bool operator()(const TaggedInfo&, const TaggedInfo&) const;
	};

	typedef std::set<TaggedInfo,InfoCoreCompare> Collection;
	typedef std::vector<typename Collection::iterator> Informations;
	typedef std::vector<std::map<Token,size_t> > GotoInformations;

	/// Comparator for Items; name suggests core-only ordering — confirm in
	/// the implementation.
	struct CoreCompare
	{
		bool operator()(const Item&, const Item&) const;
	};

	/// @return true if both informations have equal item cores (name-based;
	/// verify against implementation).
	static bool core_equal(const Information&,const Information&);
	/// Merges the second information into the first; presumably returns
	/// whether the first changed — TODO confirm.
	static bool info_union_add(Information&, const Information&);

	/// Fills ignoreStartInfo with all start items of all ignore productions.
	/// Has to be called before startProduction is added.
	void calcIgnoreStartInfo();
	Information& calcClosure(Information&, Set& infoFirsts) const;
	/// Computes the closure of an information in place.
	Information& closure(Information&) const;
	/// Builds the canonical collection of item sets (the parser states).
	void calcCollection();
	/// Adds an information to the collection; presumably returns its state id
	/// and whether it was newly inserted — TODO confirm.
	std::pair<size_t, bool> addInfo(Information&);
	bool isStartProduction(ProductionNo prodNo) const;

	/// Handles conflicts between actions of different ignorable state.
	/// Handles problems with ignore productions:
	/// A->B C;
	/// A->d;
	/// B->@;
	/// @->e;
	/// Now parsing "ed" gets stuck at "e.d" in item [A->B.C], since item [B->., e] would be preferred over [@->e.],
	/// since [@->e.] is ignorable and [B->., e] not.
	/// So this function recognizes this special case and chooses [@->e.] for resolution of this conflict.
	/// @param a1 first conflicting action and at the same time destination to write more prior action to.
	/// @param a2 second conflicting action.
	/// @param a2Ingorable ignorable state of action a2 and implicitly the inverse of ignorable state of action a1.
	/// @return new ignorable state of final action a1.
	//bool handleConflict(Action& a1, const Action& a2, bool a2Ignorable) const;

	/// Canonical collection of item sets, keyed per InfoCoreCompare.
	Collection collection;
	/// Iterators into collection, addressable by state id.
	Informations informations;
	/// Per-state transition targets by token.
	GotoInformations gotos;

	/// All start items of all ignore productions.
	/// The follows just contain all firsts and have to be completed for each insert of ignore-start-items.
	Information ignoreStartInfo;
	Set ignoreFirsts;

	/// Conflicts recorded during table construction; see getErrors().
	std::vector<Error> errors;
	/// Productions involved in conflicts; see getAmbiguousProductions().
	std::set<ProductionNo> ambiguousProductions;
};

#endif
{ "content_hash": "69b79a9d8cffad4a507e8ac219fa30fd", "timestamp": "", "source": "github", "line_count": 153, "max_line_length": 113, "avg_line_length": 32.42483660130719, "alnum_prop": 0.7083249344890143, "repo_name": "jackscan/parlucid", "id": "6c9b83774e0d96538339fc28715315a76479e26f", "size": "6301", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lalrparser/src/lalrgram.hpp", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "C++", "bytes": "948601" }, { "name": "Lua", "bytes": "139798" }, { "name": "Shell", "bytes": "105" } ], "symlink_target": "" }
/*	$NetBSD: ppp-comp.h,v 1.4 1998/05/02 14:34:25 christos Exp $	*/

/*
 * ppp-comp.h - Definitions for doing PPP packet compression.
 *
 * Copyright (c) 1994 The Australian National University.
 * All rights reserved.
 *
 * Permission to use, copy, modify, and distribute this software and its
 * documentation is hereby granted, provided that the above copyright
 * notice appears in all copies.  This software is provided without any
 * warranty, express or implied. The Australian National University
 * makes no representations about the suitability of this software for
 * any purpose.
 *
 * IN NO EVENT SHALL THE AUSTRALIAN NATIONAL UNIVERSITY BE LIABLE TO ANY
 * PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF
 * THE AUSTRALIAN NATIONAL UNIVERSITY HAVE BEEN ADVISED OF THE POSSIBILITY
 * OF SUCH DAMAGE.
 *
 * THE AUSTRALIAN NATIONAL UNIVERSITY SPECIFICALLY DISCLAIMS ANY WARRANTIES,
 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
 * AND FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE AUSTRALIAN NATIONAL UNIVERSITY HAS NO
 * OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS,
 * OR MODIFICATIONS.
 *
 * Id: ppp-comp.h,v 1.10 1996/09/26 06:30:11 paulus Exp
 */

#ifndef _NET_PPP_COMP_H
#define _NET_PPP_COMP_H

/*
 * The following symbols control whether we include code for
 * various compression methods.
 */
#ifndef DO_BSD_COMPRESS
#define DO_BSD_COMPRESS	1	/* by default, include BSD-Compress */
#endif
#ifndef DO_DEFLATE
#define DO_DEFLATE	1	/* by default, include Deflate */
#endif
#define DO_PREDICTOR_1	0
#define DO_PREDICTOR_2	0

/*
 * Structure giving methods for compression/decompression.
 */
/*
 * NOTE: this method table is only declared when the including file has
 * defined PACKETPTR (the platform's packet pointer type, e.g. an mbuf
 * pointer) before including this header.  The __P((...)) wrapper is the
 * classic BSD macro for K&R/ANSI prototype compatibility.
 */
#ifdef PACKETPTR
struct compressor {
	int	compress_proto;	/* CCP compression protocol number */

	/* Allocate space for a compressor (transmit side) */
	void	*(*comp_alloc) __P((u_char *options, int opt_len));
	/* Free space used by a compressor */
	void	(*comp_free) __P((void *state));
	/* Initialize a compressor */
	int	(*comp_init) __P((void *state, u_char *options, int opt_len,
				  int unit, int hdrlen, int debug));
	/* Reset a compressor */
	void	(*comp_reset) __P((void *state));
	/* Compress a packet */
	int	(*compress) __P((void *state, PACKETPTR *mret,
				  PACKETPTR mp, int orig_len, int max_len));
	/* Return compression statistics */
	void	(*comp_stat) __P((void *state, struct compstat *stats));

	/* Allocate space for a decompressor (receive side) */
	void	*(*decomp_alloc) __P((u_char *options, int opt_len));
	/* Free space used by a decompressor */
	void	(*decomp_free) __P((void *state));
	/* Initialize a decompressor */
	int	(*decomp_init) __P((void *state, u_char *options, int opt_len,
				    int unit, int hdrlen, int mru, int debug));
	/* Reset a decompressor */
	void	(*decomp_reset) __P((void *state));
	/* Decompress a packet. */
	int	(*decompress) __P((void *state, PACKETPTR mp,
				   PACKETPTR *dmpp));
	/* Update state for an incompressible packet received */
	void	(*incomp) __P((void *state, PACKETPTR mp));
	/* Return decompression statistics */
	void	(*decomp_stat) __P((void *state, struct compstat *stats));
};
#endif /* PACKETPTR */

/*
 * Return values for decompress routine.
 * We need to make these distinctions so that we can disable certain
 * useful functionality, namely sending a CCP reset-request as a result
 * of an error detected after decompression.  This is to avoid infringing
 * a patent held by Motorola.
 * Don't you just lurve software patents.
 */
#define DECOMP_OK		0	/* everything went OK */
#define DECOMP_ERROR		1	/* error detected before decomp. */
#define DECOMP_FATALERROR	2	/* error detected after decomp. */

/*
 * CCP codes.
 */
/* (Subset of the PPP LCP-style code numbers used by CCP, per RFC 1962.) */
#define CCP_CONFREQ	1
#define CCP_CONFACK	2
#define CCP_TERMREQ	5
#define CCP_TERMACK	6
#define CCP_RESETREQ	14
#define CCP_RESETACK	15

/*
 * Max # bytes for a CCP option
 */
#define CCP_MAX_OPTION_LENGTH	32

/*
 * Parts of a CCP packet.
 */
/* Accessors over a raw CCP packet: code, id, 16-bit big-endian length. */
#define CCP_CODE(dp)		((dp)[0])
#define CCP_ID(dp)		((dp)[1])
#define CCP_LENGTH(dp)		(((dp)[2] << 8) + (dp)[3])
#define CCP_HDRLEN		4

#define CCP_OPT_CODE(dp)	((dp)[0])
#define CCP_OPT_LENGTH(dp)	((dp)[1])
#define CCP_OPT_MINLEN		2

/*
 * Definitions for BSD-Compress.
 */
#define CI_BSD_COMPRESS		21	/* config. option for BSD-Compress */
#define CILEN_BSD_COMPRESS	3	/* length of config. option */

/* Macros for handling the 3rd byte of the BSD-Compress config option. */
/* The byte packs a 3-bit version in the high bits and a 5-bit code size. */
#define BSD_NBITS(x)		((x) & 0x1F)	/* number of bits requested */
#define BSD_VERSION(x)		((x) >> 5)	/* version of option format */
#define BSD_CURRENT_VERSION	1		/* current version number */
#define BSD_MAKE_OPT(v, n)	(((v) << 5) | (n))

#define BSD_MIN_BITS		9	/* smallest code size supported */
#define BSD_MAX_BITS		15	/* largest code size supported */

/*
 * Definitions for Deflate.
 */
#define CI_DEFLATE		26	/* config option for Deflate */
#define CI_DEFLATE_DRAFT	24	/* value used in original draft RFC */
#define CILEN_DEFLATE		4	/* length of its config option */

#define DEFLATE_MIN_SIZE	8
#define DEFLATE_MAX_SIZE	15
#define DEFLATE_METHOD_VAL	8
/* First option byte packs window size (high nibble) and method (low nibble). */
#define DEFLATE_SIZE(x)		(((x) >> 4) + DEFLATE_MIN_SIZE)
#define DEFLATE_METHOD(x)	((x) & 0x0F)
#define DEFLATE_MAKE_OPT(w)	((((w) - DEFLATE_MIN_SIZE) << 4) \
				 + DEFLATE_METHOD_VAL)
#define DEFLATE_CHK_SEQUENCE	0

/*
 * Definitions for other, as yet unsupported, compression methods.
 */
#define CI_PREDICTOR_1		1	/* config option for Predictor-1 */
#define CILEN_PREDICTOR_1	2	/* length of its config option */
#define CI_PREDICTOR_2		2	/* config option for Predictor-2 */
#define CILEN_PREDICTOR_2	2	/* length of its config option */

#endif /* _NET_PPP_COMP_H */
{ "content_hash": "bf058d4a41d0f1654c63d3e0e4c2fd45", "timestamp": "", "source": "github", "line_count": 168, "max_line_length": 76, "avg_line_length": 34.357142857142854, "alnum_prop": 0.6961191961191961, "repo_name": "MarginC/kame", "id": "5adc3f0f19f72ef3eab217695830c7def1656edf", "size": "5772", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "netbsd/sys/net/ppp-comp.h", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Arc", "bytes": "7491" }, { "name": "Assembly", "bytes": "14375563" }, { "name": "Awk", "bytes": "313712" }, { "name": "Batchfile", "bytes": "6819" }, { "name": "C", "bytes": "356715789" }, { "name": "C++", "bytes": "4231647" }, { "name": "DIGITAL Command Language", "bytes": "11155" }, { "name": "Emacs Lisp", "bytes": "790" }, { "name": "Forth", "bytes": "253695" }, { "name": "GAP", "bytes": "9964" }, { "name": "Groff", "bytes": "2220485" }, { "name": "Lex", "bytes": "168376" }, { "name": "Logos", "bytes": "570213" }, { "name": "Makefile", "bytes": "1778847" }, { "name": "Mathematica", "bytes": "16549" }, { "name": "Objective-C", "bytes": "529629" }, { "name": "PHP", "bytes": "11283" }, { "name": "Perl", "bytes": "151251" }, { "name": "Perl6", "bytes": "2572" }, { "name": "Ruby", "bytes": "7283" }, { "name": "Scheme", "bytes": "76872" }, { "name": "Shell", "bytes": "583253" }, { "name": "Stata", "bytes": "408" }, { "name": "Yacc", "bytes": "606054" } ], "symlink_target": "" }
import { Fixture } from "../../utils/fixture";
import { normalizeEnvironment } from "../../utils/snapshot-serializer-utils";

// Normalize environment-specific details (paths, versions) in string
// snapshots so the inline snapshots below are stable across machines.
expect.addSnapshotSerializer({
  serialize(str: string) {
    return normalizeEnvironment(str);
  },
  test(val: string) {
    return val != null && typeof val === "string";
  },
});

describe("lerna-list", () => {
  let fixture: Fixture;

  // Workspace layout created once for all tests:
  //   packages/package-c (public), packages/package-b (private)
  //   modules/package-a (public), modules/package-e (public), modules/package-d (private)
  // Dependencies: package-a -> package-c, package-a -> package-d,
  //               package-b -> package-c.
  beforeAll(async () => {
    fixture = await Fixture.create({
      name: "lerna-list",
      packageManager: "npm",
      initializeGit: true,
      runLernaInit: true,
      installDependencies: true,
    });
    await fixture.lerna("create package-c -y");
    await fixture.lerna("create package-b --private -y");

    await fixture.addPackagesDirectory("modules");
    await fixture.lerna("create package-a modules -y");
    await fixture.lerna("create package-e modules -y");
    await fixture.lerna("create package-d modules --private -y");

    await fixture.addDependencyToPackage({
      packagePath: "modules/package-a",
      dependencyName: "package-c",
      version: "0.0.0",
    });
    await fixture.addDependencyToPackage({
      packagePath: "packages/package-b",
      dependencyName: "package-c",
      version: "0.0.0",
    });
    await fixture.addDependencyToPackage({
      packagePath: "modules/package-a",
      dependencyName: "package-d",
      version: "0.0.0",
    });
  });

  afterAll(() => fixture.destroy());

  // Default `lerna list` hides private packages.
  it("should list public packages in lexicographical order", async () => {
    const output = await fixture.lerna("list");

    expect(output.combinedOutput).toMatchInlineSnapshot(`
      lerna notice cli v999.9.9-e2e.0
      package-a
      package-e
      package-c
      lerna success found 3 packages

    `);
  });

  describe("--json", () => {
    it("should list packages json", async () => {
      const output = await fixture.lerna("list --json");

      expect(output.combinedOutput).toMatchInlineSnapshot(`
        lerna notice cli v999.9.9-e2e.0
        [
          {
            "name": "package-a",
            "version": "0.0.0",
            "private": false,
            "location": "/tmp/lerna-e2e/lerna-list/lerna-workspace/modules/package-a"
          },
          {
            "name": "package-e",
            "version": "0.0.0",
            "private": false,
            "location": "/tmp/lerna-e2e/lerna-list/lerna-workspace/modules/package-e"
          },
          {
            "name": "package-c",
            "version": "0.0.0",
            "private": false,
            "location": "/tmp/lerna-e2e/lerna-list/lerna-workspace/packages/package-c"
          }
        ]
        lerna success found 3 packages

      `);
    });
  });

  describe("--ndjson", () => {
    it("should list packages as newline-delimited json", async () => {
      const output = await fixture.lerna("list --ndjson");

      expect(output.combinedOutput).toMatchInlineSnapshot(`
        lerna notice cli v999.9.9-e2e.0
        {"name":"package-a","version":"0.0.0","private":false,"location":"/tmp/lerna-e2e/lerna-list/lerna-workspace/modules/package-a"}
        {"name":"package-e","version":"0.0.0","private":false,"location":"/tmp/lerna-e2e/lerna-list/lerna-workspace/modules/package-e"}
        {"name":"package-c","version":"0.0.0","private":false,"location":"/tmp/lerna-e2e/lerna-list/lerna-workspace/packages/package-c"}
        lerna success found 3 packages

      `);
    });
  });

  describe("--all", () => {
    it("should list all packages, including private ones that are hidden by default", async () => {
      const output = await fixture.lerna("list --all");

      expect(output.combinedOutput).toMatchInlineSnapshot(`
        lerna notice cli v999.9.9-e2e.0
        package-a
        package-d (PRIVATE)
        package-e
        package-b (PRIVATE)
        package-c
        lerna success found 5 packages

      `);
    });
  });

  // Short alias of --all.
  describe("-a", () => {
    it("should list all packages, including private ones that are hidden by default", async () => {
      const output = await fixture.lerna("list -a");

      expect(output.combinedOutput).toMatchInlineSnapshot(`
        lerna notice cli v999.9.9-e2e.0
        package-a
        package-d (PRIVATE)
        package-e
        package-b (PRIVATE)
        package-c
        lerna success found 5 packages

      `);
    });
  });

  describe("--long", () => {
    it("should list packages with version and path information", async () => {
      const output = await fixture.lerna("list --long");

      expect(output.combinedOutput).toMatchInlineSnapshot(`
        lerna notice cli v999.9.9-e2e.0
        package-a v0.0.0 modules/package-a
        package-e v0.0.0 modules/package-e
        package-c v0.0.0 packages/package-c
        lerna success found 3 packages

      `);
    });
  });

  // Short alias of --long.
  describe("-l", () => {
    it("should list packages with version and path information", async () => {
      const output = await fixture.lerna("list -l");

      expect(output.combinedOutput).toMatchInlineSnapshot(`
        lerna notice cli v999.9.9-e2e.0
        package-a v0.0.0 modules/package-a
        package-e v0.0.0 modules/package-e
        package-c v0.0.0 packages/package-c
        lerna success found 3 packages

      `);
    });
  });

  describe("--parseable", () => {
    it("should list packages with parseable output instead of columnified view", async () => {
      const output = await fixture.lerna("list --parseable");

      expect(output.combinedOutput).toMatchInlineSnapshot(`
        lerna notice cli v999.9.9-e2e.0
        /tmp/lerna-e2e/lerna-list/lerna-workspace/modules/package-a
        /tmp/lerna-e2e/lerna-list/lerna-workspace/modules/package-e
        /tmp/lerna-e2e/lerna-list/lerna-workspace/packages/package-c
        lerna success found 3 packages

      `);
    });
  });

  // Short alias of --parseable.
  describe("-p", () => {
    it("should list packages with parseable output instead of columnified view", async () => {
      const output = await fixture.lerna("list -p");

      expect(output.combinedOutput).toMatchInlineSnapshot(`
        lerna notice cli v999.9.9-e2e.0
        /tmp/lerna-e2e/lerna-list/lerna-workspace/modules/package-a
        /tmp/lerna-e2e/lerna-list/lerna-workspace/modules/package-e
        /tmp/lerna-e2e/lerna-list/lerna-workspace/packages/package-c
        lerna success found 3 packages

      `);
    });
  });

  // Combined short flags: parseable + long + all; entries become
  // path:name:version[:PRIVATE].
  describe("-pla", () => {
    it("should list all packages, with version and package info, in a parseable output", async () => {
      const output = await fixture.lerna("list -pla");

      expect(output.combinedOutput).toMatchInlineSnapshot(`
        lerna notice cli v999.9.9-e2e.0
        /tmp/lerna-e2e/lerna-list/lerna-workspace/modules/package-a:package-a:0.0.0
        /tmp/lerna-e2e/lerna-list/lerna-workspace/modules/package-d:package-d:0.0.0:PRIVATE
        /tmp/lerna-e2e/lerna-list/lerna-workspace/modules/package-e:package-e:0.0.0
        /tmp/lerna-e2e/lerna-list/lerna-workspace/packages/package-b:package-b:0.0.0:PRIVATE
        /tmp/lerna-e2e/lerna-list/lerna-workspace/packages/package-c:package-c:0.0.0
        lerna success found 5 packages

      `);
    });
  });

  describe("--toposort", () => {
    it("should list packages in topological order", async () => {
      const output = await fixture.lerna("list --toposort");

      expect(output.combinedOutput).toMatchInlineSnapshot(`
        lerna notice cli v999.9.9-e2e.0
        package-e
        package-c
        package-a
        lerna success found 3 packages

      `);
    });
  });

  describe("--graph", () => {
    // Note: package-d appears as a dependency of package-a even though it is
    // private and thus not listed as a node without --all.
    it("should list packages with their dependencies in a json list", async () => {
      const output = await fixture.lerna("list --graph");

      expect(output.combinedOutput).toMatchInlineSnapshot(`
        lerna notice cli v999.9.9-e2e.0
        {
          "package-a": [
            "package-c",
            "package-d"
          ],
          "package-e": [],
          "package-c": []
        }
        lerna success found 3 packages

      `);
    });

    describe("--all", () => {
      it("should list all packages with their dependencies in a json list", async () => {
        const output = await fixture.lerna("list --graph --all");

        expect(output.combinedOutput).toMatchInlineSnapshot(`
          lerna notice cli v999.9.9-e2e.0
          {
            "package-a": [
              "package-c",
              "package-d"
            ],
            "package-d": [],
            "package-e": [],
            "package-b": [
              "package-c"
            ],
            "package-c": []
          }
          lerna success found 5 packages

        `);
      });
    });
  });
});
{ "content_hash": "6db56c6a77ebc81fac7bfd0ff7b1b120", "timestamp": "", "source": "github", "line_count": 280, "max_line_length": 136, "avg_line_length": 30.889285714285716, "alnum_prop": 0.5892010637067869, "repo_name": "lerna/lerna", "id": "9e389b67601eddf520b0b866655a8b19edcf92f8", "size": "8649", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "e2e/tests/lerna-list/lerna-list.spec.ts", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "171" }, { "name": "CSS", "bytes": "11482" }, { "name": "JavaScript", "bytes": "1042756" }, { "name": "Shell", "bytes": "4056" }, { "name": "TypeScript", "bytes": "527250" } ], "symlink_target": "" }
<?php

return [

    /*
    |--------------------------------------------------------------------------
    | Mail Driver
    |--------------------------------------------------------------------------
    |
    | Laravel supports both SMTP and PHP's "mail" function as drivers for the
    | sending of e-mail. You may specify which one you're using throughout
    | your application here. By default, Laravel is setup for SMTP mail.
    |
    | Supported: "smtp", "mail", "sendmail", "mailgun", "mandrill", "log"
    |
    */

    'driver' => env('MAIL_DRIVER', 'smtp'),

    /*
    |--------------------------------------------------------------------------
    | SMTP Host Address
    |--------------------------------------------------------------------------
    |
    | Here you may provide the host address of the SMTP server used by your
    | applications. A default option is provided that is compatible with
    | the Mailgun mail service which will provide reliable deliveries.
    |
    */

    'host' => env('MAIL_HOST', 'smtp.mail.ru'),

    /*
    |--------------------------------------------------------------------------
    | SMTP Host Port
    |--------------------------------------------------------------------------
    |
    | This is the SMTP port used by your application to deliver e-mails to
    | users of the application. Like the host we have set this value to
    | stay compatible with the Mailgun e-mail application by default.
    |
    */

    'port' => env('MAIL_PORT', 2525),

    /*
    |--------------------------------------------------------------------------
    | Global "From" Address
    |--------------------------------------------------------------------------
    |
    | You may wish for all e-mails sent by your application to be sent from
    | the same address. Here, you may specify a name and address that is
    | used globally for all e-mails that are sent by your application.
    |
    | The previous hard-coded values are kept as defaults for backward
    | compatibility, but can now be overridden via environment variables.
    |
    */

    'from' => [
        'address' => env('MAIL_FROM_ADDRESS', 'info@genie.kz'),
        'name' => env('MAIL_FROM_NAME', 'robot'),
    ],

    /*
    |--------------------------------------------------------------------------
    | E-Mail Encryption Protocol
    |--------------------------------------------------------------------------
    |
    | Here you may specify the encryption protocol that should be used when
    | the application send e-mail messages. A sensible default using the
    | transport layer security protocol should provide great security.
    |
    */

    'encryption' => env('MAIL_ENCRYPTION', 'tls'),

    /*
    |--------------------------------------------------------------------------
    | SMTP Server Username
    |--------------------------------------------------------------------------
    |
    | If your SMTP server requires a username for authentication, you should
    | set it here. This will get used to authenticate with your server on
    | connection. You may also set the "password" value below this one.
    |
    | SECURITY: credentials must be supplied exclusively via environment
    | variables (.env) and must never be committed to version control. The
    | username/password literals that used to live here were removed; the
    | previously exposed SMTP password must be rotated.
    |
    */

    'username' => env('MAIL_USERNAME'),

    /*
    |--------------------------------------------------------------------------
    | SMTP Server Password
    |--------------------------------------------------------------------------
    |
    | Here you may set the password required by your SMTP server to send out
    | messages from your application. This will be given to the server on
    | connection so that the application will be able to send messages.
    |
    */

    'password' => env('MAIL_PASSWORD'),

    /*
    |--------------------------------------------------------------------------
    | Sendmail System Path
    |--------------------------------------------------------------------------
    |
    | When using the "sendmail" driver to send e-mails, we will need to know
    | the path to where Sendmail lives on this server. A default path has
    | been provided here, which will work well on most of your systems.
    |
    */

    'sendmail' => '/usr/sbin/sendmail -bs',

    /*
    |--------------------------------------------------------------------------
    | Mail "Pretend"
    |--------------------------------------------------------------------------
    |
    | When this option is enabled, e-mail will not actually be sent over the
    | web and will instead be written to your application's logs files so
    | you may inspect the message. This is great for local development.
    |
    */

    'pretend' => false,

];
{ "content_hash": "c7417efe75f50cb1b556a2b8153f8b0c", "timestamp": "", "source": "github", "line_count": 124, "max_line_length": 76, "avg_line_length": 33.33064516129032, "alnum_prop": 0.45947253810791194, "repo_name": "nurbulatuly/ideab1nk", "id": "7c0cae22bdd6c581fb72d46b1ec32a973118c9a5", "size": "4133", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "config/mail.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "480" }, { "name": "HTML", "bytes": "45711" }, { "name": "JavaScript", "bytes": "90097" }, { "name": "PHP", "bytes": "385207" } ], "symlink_target": "" }
var assert = require('assert'); var async = require('async'); var domain = require('domain'); var helper = require('../../test-helper'); var Client = require('../../../lib/client'); var Host = require('../../../lib/host').Host; var clientOptions = require('../../../lib/client-options'); var utils = require('../../../lib/utils'); var errors = require('../../../lib/errors'); var types = require('../../../lib/types'); var RoundRobinPolicy = require('../../../lib/policies/load-balancing.js').RoundRobinPolicy; describe('Client', function () { this.timeout(120000); describe('#connect()', function () { before(helper.ccmHelper.start(3)); after(helper.ccmHelper.remove); it('should discover all hosts in the ring and hosts object can be serializable', function (done) { var client = newInstance(); client.connect(function (err) { if (err) return done(err); assert.strictEqual(client.hosts.length, 3); assert.strictEqual(client.hosts.values().length, 3); assert.strictEqual(client.hosts.keys().length, 3); assert.doesNotThrow(function () { //It should be serializable JSON.stringify(client.hosts); }); client.shutdown(done); }); }); it('should retrieve the cassandra version of the hosts', function (done) { var client = newInstance(); client.connect(function (err) { if (err) return done(err); assert.strictEqual(client.hosts.length, 3); client.hosts.values().forEach(function (h) { assert.strictEqual(typeof h.cassandraVersion, 'string'); assert.strictEqual( h.cassandraVersion.split('.').slice(0, 2).join('.'), helper.getCassandraVersion().split('.').slice(0, 2).join('.')); }); client.shutdown(done); }); }); it('should fail if the contact points can not be resolved', function (done) { var client = newInstance({contactPoints: ['not-a-host']}); client.connect(function (err) { assert.ok(err); helper.assertInstanceOf(err, errors.NoHostAvailableError); client.shutdown(function (err) { assert.ifError(err); done(); }); }); }); it('should fail if the contact points can not be reached', function 
(done) { var client = newInstance({contactPoints: ['1.1.1.1']}); client.connect(function (err) { assert.ok(err); helper.assertInstanceOf(err, errors.NoHostAvailableError); done(); }); }); it('should fail if the keyspace does not exists', function (done) { var client = newInstance({ keyspace: 'ks_does_not_exists'}); client.connect(function (err) { helper.assertInstanceOf(err, Error); client.shutdown(function (err) { assert.ifError(err); done(); }); }); }); it('should select a tokenizer', function (done) { var client = newInstance(); client.connect(function (err) { if (err) return done(err); helper.assertInstanceOf(client.metadata.tokenizer, require('../../../lib/tokenizer.js').Murmur3Tokenizer); client.shutdown(done); }); }); it('should allow multiple parallel calls to connect', function (done) { var client = newInstance(); async.times(100, function (n, next) { client.connect(next); }, function (err) { assert.ifError(err); client.shutdown(done); }); }); it('should resolve host names', function (done) { var client = new Client(utils.extend({}, helper.baseOptions, {contactPoints: ['localhost']})); client.connect(function (err) { assert.ifError(err); assert.strictEqual(client.hosts.length, 3); client.hosts.forEach(function (h) { assert.notEqual(h.address, 'localhost'); }); client.shutdown(done); }); }); it('should fail if the keyspace does not exists', function (done) { var client = new Client(utils.extend({}, helper.baseOptions, {keyspace: 'not-existent-ks'})); async.times(10, function (n, next) { client.connect(function (err) { assert.ok(err); //Not very nice way to check but here it is //Does the message contains Keyspace assert.ok(err.message.toLowerCase().indexOf('keyspace') >= 0, 'Message mismatch, was: ' + err.message); next(); }); }, function (err) { assert.ifError(err); client.shutdown(done); }); }); it('should not use contactPoints that are not part of peers', function (done) { var contactPoints = helper.baseOptions.contactPoints.slice(0); 
contactPoints.push('host-not-existent-not-peer'); contactPoints.push('1.1.1.1'); var client = newInstance({contactPoints: contactPoints}); client.connect(function (err) { assert.ifError(err); //the 3 original hosts assert.strictEqual(client.hosts.length, 3); var hosts = client.hosts.keys(); assert.strictEqual(hosts[0], contactPoints[0] + ':9042'); assert.notEqual(hosts[1], contactPoints[1] + ':9042'); assert.notEqual(hosts[2], contactPoints[1] + ':9042'); assert.notEqual(hosts[1], contactPoints[2] + ':9042'); assert.notEqual(hosts[2], contactPoints[2] + ':9042'); client.shutdown(done); }); }); it('should use the default pooling options according to the protocol version', function (done) { var client = newInstance(); client.connect(function (err) { assert.ifError(err); assert.ok(client.options.pooling.coreConnectionsPerHost); if (client.controlConnection.protocolVersion < 3) { helper.assertValueEqual(client.options.pooling.coreConnectionsPerHost, clientOptions.coreConnectionsPerHostV2); } else { helper.assertValueEqual(client.options.pooling.coreConnectionsPerHost, clientOptions.coreConnectionsPerHostV3); } async.times(10, function (n, next) { client.execute('SELECT key FROM system.local', next); }, function (err) { if (err) return done(err); assert.strictEqual(client.hosts.values()[0].pool.connections.length, client.options.pooling.coreConnectionsPerHost[types.distance.local]); client.shutdown(done); }); }); }); it('should override default pooling options when specified', function (done) { var client = newInstance({ pooling: { coreConnectionsPerHost: { '0': 4 } }}); client.connect(function (err) { assert.ifError(err); assert.ok(client.options.pooling.coreConnectionsPerHost); var defaults = clientOptions.coreConnectionsPerHostV3; if (client.controlConnection.protocolVersion < 3) { defaults = clientOptions.coreConnectionsPerHostV2; } assert.ok(client.options.pooling.coreConnectionsPerHost[types.distance.local], 4); 
assert.ok(client.options.pooling.coreConnectionsPerHost[types.distance.remote], defaults[types.distance.remote]); async.times(50, function (n, next) { client.execute('SELECT key FROM system.local', next); }, function (err) { if (err) return done(err); assert.strictEqual(client.hosts.values()[0].pool.connections.length, client.options.pooling.coreConnectionsPerHost[types.distance.local]); client.shutdown(done); }); }); }); it('should not fail when switching keyspace and a contact point is not valid', function (done) { var client = new Client({ contactPoints: ['1.1.1.1', helper.baseOptions.contactPoints[0]], keyspace: 'system' }); client.connect(function (err) { assert.ifError(err); client.shutdown(done); }); }); it('should open connections to all hosts when warmup is set', function (done) { var connectionsPerHost = {}; connectionsPerHost[types.distance.local] = 3; connectionsPerHost[types.distance.remote] = 1; var client = newInstance({ pooling: { warmup: true, coreConnectionsPerHost: connectionsPerHost}}); client.connect(function (err) { assert.ifError(err); assert.strictEqual(client.hosts.length, 3); client.hosts.forEach(function (host) { assert.strictEqual(host.pool.connections.length, 3); }); client.shutdown(done); }); }); it('should only warmup connections for hosts with local distance', function (done) { var lbPolicy = new RoundRobinPolicy(); lbPolicy.getDistance = function (host) { //noinspection JSCheckFunctionSignatures var id = helper.lastOctetOf(host.address); if(id == '1') { return types.distance.local; } else if(id == '2') { return types.distance.remote; } return types.distance.ignored; }; var connectionsPerHost = {}; connectionsPerHost[types.distance.local] = 3; connectionsPerHost[types.distance.remote] = 1; var client = newInstance({ policies: { loadBalancing: lbPolicy }, pooling: { warmup: true, coreConnectionsPerHost: connectionsPerHost} }); client.connect(function (err) { assert.ifError(err); assert.strictEqual(client.hosts.length, 3); 
client.hosts.forEach(function (host) { var id = helper.lastOctetOf(host); if(id == '1') { assert.strictEqual(host.pool.connections.length, 3); } else { assert.strictEqual(host.pool.connections.length, 0); } }); client.shutdown(done); }); }); }); describe('#connect() with auth', function () { before(function (done) { async.series([ function (next) { //it wont hurt to remove helper.ccmHelper.exec(['remove'], function () { //ignore error next(); }); }, function (next) { helper.ccmHelper.exec(['create', 'test', '-v', helper.getCassandraVersion()], next); }, function (next) { helper.ccmHelper.exec(['updateconf', "authenticator: PasswordAuthenticator"], next); }, function (next) { helper.ccmHelper.exec(['populate', '-n', '2'], next); }, function (next) { helper.ccmHelper.exec(['start'], function () { //It takes a while for Cassandra to create the default user account setTimeout(function () {next();}, 25000); }); } ], done) }); after(helper.ccmHelper.remove); var PlainTextAuthProvider = require('../../../lib/auth/plain-text-auth-provider.js'); it('should connect using the plain text authenticator', function (done) { var options = {authProvider: new PlainTextAuthProvider('cassandra', 'cassandra')}; var client = newInstance(options); async.times(100, function (n, next) { client.connect(next); }, function (err) { done(err); }); }); it('should connect using the plain text authenticator when calling execute', function (done) { var options = {authProvider: new PlainTextAuthProvider('cassandra', 'cassandra'), keyspace: 'system'}; var client = newInstance(options); async.times(100, function (n, next) { client.execute('SELECT * FROM schema_keyspaces', next); }, function (err) { done(err); }); }); it('should return an AuthenticationError', function (done) { var options = {authProvider: new PlainTextAuthProvider('not___EXISTS', 'not___EXISTS'), keyspace: 'system'}; var client = newInstance(options); async.timesSeries(10, function (n, next) { client.connect(function (err) { 
assert.ok(err); helper.assertInstanceOf(err, errors.NoHostAvailableError); assert.ok(err.innerErrors); helper.assertInstanceOf(helper.values(err.innerErrors)[0], errors.AuthenticationError); next(); }); }, done); }); it('should return an AuthenticationError when calling execute', function (done) { var options = {authProvider: new PlainTextAuthProvider('not___EXISTS', 'not___EXISTS'), keyspace: 'system'}; var client = newInstance(options); async.times(10, function (n, next) { client.execute('SELECT * FROM schema_keyspaces', function (err) { assert.ok(err); helper.assertInstanceOf(err, errors.NoHostAvailableError); assert.ok(err.innerErrors); helper.assertInstanceOf(helper.values(err.innerErrors)[0], errors.AuthenticationError); next(); }); }, done); }); }); describe('#connect() with ssl', function () { before(helper.ccmHelper.start(1, {ssl: true})); after(helper.ccmHelper.remove); it('should connect to a ssl enabled cluster', function (done) { var client = newInstance({sslOptions: {}}); client.connect(function (err) { assert.ifError(err); assert.strictEqual(client.hosts.length, 1); done(); }); }); }); describe('#execute()', function () { before(helper.ccmHelper.start(3)); after(helper.ccmHelper.remove); it('should use the keyspace provided', function (done) { var client = new Client(utils.extend({}, helper.baseOptions, {keyspace: 'system'})); //on all hosts async.times(10, function (n, next) { assert.strictEqual(client.keyspace, 'system'); //A query in the system ks client.execute('SELECT * FROM schema_keyspaces', function (err, result) { assert.ifError(err); assert.ok(result.rows); assert.ok(result.rows.length > 0); next(); }); }, done); }); it('should fail to execute if the keyspace does not exists', function (done) { var client = new Client(utils.extend({}, helper.baseOptions, {keyspace: 'NOT____EXISTS'})); //on all hosts async.times(10, function (n, next) { //No matter what, the keyspace does not exists client.execute('SELECT * FROM system.schema_keyspaces', 
function (err) { helper.assertInstanceOf(err, Error); next(); }); }, done); }); it('should change the active keyspace after USE statement', function (done) { var client = newInstance(); client.execute('USE system', function (err) { if (err) return done(err); assert.strictEqual(client.keyspace, 'system'); //all next queries, the instance should still "be" in the system keyspace async.times(100, function (n, next) { client.execute('SELECT * FROM schema_keyspaces', [], next); }, done) }); }); it('should return ResponseError when executing USE with a wrong keyspace', function (done) { var client = newInstance(); client.execute('USE ks_not_exist', function (err) { assert.ok(err instanceof errors.ResponseError); assert.equal(client.keyspace, null); done(); }); }); it('should create the amount of connections determined by the options', function (done) { var options = { pooling: { coreConnectionsPerHost: { '0': 3, '1': 0, '2': 0 } } }; var client = new Client(utils.extend({}, helper.baseOptions, options)); //execute a couple of queries async.times(100, function (n, next) { setTimeout(function () { client.execute('SELECT * FROM system.schema_keyspaces', next); }, 100 + n * 2) }, function (err) { if (err) return done(err); assert.strictEqual(client.hosts.length, 3); var hosts = client.hosts.slice(0); assert.strictEqual(hosts[0].pool.coreConnectionsLength, 3); assert.strictEqual(hosts[1].pool.coreConnectionsLength, 3); assert.strictEqual(hosts[0].pool.connections.length, 3); assert.strictEqual(hosts[1].pool.connections.length, 3); done(err); }); }); it('should maintain the domain in the callbacks', function (done) { var unexpectedErrors = []; var errors = []; var domains = [ //2 domains because there are more than 2 hosts as an uncaught error //will blow up the host pool, by design //But we need to test prepared and unprepared domain.create(), domain.create() ]; var fatherDomain = domain.create(); var childDomain = domain.create(); var client1 = new 
Client(helper.baseOptions); var client2 = new Client(helper.baseOptions); async.series([ client1.connect.bind(client1), client2.connect.bind(client1), function executeABunchOfTimes1(next) { async.times(10, function (n, timesNext) { client1.execute('SELECT * FROM system.local', timesNext); }, next); }, function executeABunchOfTimes2(next) { async.times(10, function (n, timesNext) { client2.execute('SELECT * FROM system.local', timesNext); }, next); }, function blowUpSingleDomain(next) { var EventEmitter = require('events').EventEmitter; async.timesSeries(domains.length, function (n, timesNext) { var waiting = 1; var d = domains[n]; d.add(new EventEmitter()); d.on('error', function (err) { errors.push([err.toString(), n.toString()]); setImmediate(function () { //OK, this line might result in an output message (!?) d.dispose(); }); }); d.run(function() { client1.execute('SELECT * FROM system.local', [], {prepare: n % 2}, function (err) { waiting = 0; if (err) { unexpectedErrors.push(err); } throw new Error('From domain ' + n); }); }); function wait() { if (waiting > 0) { waiting++; if (waiting > 100) { return timesNext(new Error('Timed out')); } return setTimeout(wait, 50); } //Delay to allow throw setTimeout(function () { timesNext(); }, 100); } wait(); }, next); }, function nestedDomain(next) { var waiting = true; fatherDomain.on('error', function (err) { errors.push([err.toString(), 'father']); }); fatherDomain.run(function () { childDomain.on('error', function (err) { errors.push([err.toString(), 'child']); }); childDomain.run(function() { client2.execute('SELECT * FROM system.local', function (err) { waiting = false; if (err) { unexpectedErrors.push(err); } throw new Error('From domain child'); }); }); }); function wait() { if (waiting) { return setTimeout(wait, 50); } //Delay to allow throw setTimeout(next, 100); } wait(); }, function assertResults(next) { assert.strictEqual(unexpectedErrors.length, 0, 'Unexpected errors: ' + unexpectedErrors[0]); 
//assert.strictEqual(errors.length, domains.length + 1); errors.forEach(function (item) { assert.strictEqual(item[0], 'Error: From domain ' + item[1]); }); next(); } ], done); }); it('should wait for schema agreement before calling back', function (done) { var queries = [ "CREATE KEYSPACE ks1 WITH replication = {'class': 'SimpleStrategy', 'replication_factor' : 3};", "CREATE TABLE ks1.tbl1 (id uuid PRIMARY KEY, value text)", "SELECT * FROM ks1.tbl1", "SELECT * FROM ks1.tbl1 where id = d54cb06d-d168-45a0-b1b2-9f5c75435d3d", "CREATE KEYSPACE ks2 WITH replication = {'class': 'SimpleStrategy', 'replication_factor' : 3};", "CREATE TABLE ks2.tbl2 (id uuid PRIMARY KEY, value text)", "SELECT * FROM ks2.tbl2", "SELECT * FROM ks2.tbl2", "CREATE TABLE ks2.tbl3 (id uuid PRIMARY KEY, value text)", "SELECT * FROM ks2.tbl3", "SELECT * FROM ks2.tbl3", "CREATE TABLE ks2.tbl4 (id uuid PRIMARY KEY, value text)", "SELECT * FROM ks2.tbl4", "SELECT * FROM ks2.tbl4", "SELECT * FROM ks2.tbl4" ]; var client = newInstance(); //warmup first async.timesSeries(10, function (n, next) { client.execute('SELECT key FROM system.local', next); }, function (err) { assert.ifError(err); async.eachSeries(queries, function (query, next) { client.execute(query, next); }, done); }); }); }); describe('failover', function () { beforeEach(helper.ccmHelper.start(3)); afterEach(helper.ccmHelper.remove); it('should failover after a node goes down', function (done) { var client = newInstance(); var hosts = {}; var hostsDown = []; client.on('hostDown', function (h) { hostsDown.push(h); }); async.series([ function warmUpPool(seriesNext) { async.times(100, function (n, next) { client.execute('SELECT * FROM system.schema_keyspaces', function (err, result) { assert.ifError(err); hosts[result.info.queriedHost] = true; next(); }); }, seriesNext); }, function killNode(seriesNext) { setTimeout(function () { helper.ccmHelper.exec(['node1', 'stop', '--not-gently']); seriesNext(); }, 0); }, function testCase(seriesNext) { 
//3 hosts alive assert.strictEqual(Object.keys(hosts).length, 3); var counter = 0; async.times(1000, function (i, next) { client.execute('SELECT * FROM system.schema_keyspaces', function (err) { counter++; assert.ifError(err); next(); }); }, function (err) { assert.ifError(err); //Only 2 hosts alive at the end assert.strictEqual( client.hosts.slice(0).reduce(function (val, h) { return val + (h.isUp() ? 1 : 0); }, 0), 2); assert.ok(hostsDown.length >= 1, "Expected at least 1 host down" + " event."); //Ensure each down event is for the stopped host. We may get //multiple down events for the same host on a control connection. hostsDown.forEach(function (downHost) { assert.strictEqual(helper.lastOctetOf(downHost), '1'); }); seriesNext(); }); } ], done); }); it('should failover when a node goes down with some outstanding requests', function (done) { var options = utils.extend({}, helper.baseOptions); options.pooling = { coreConnectionsPerHost: { '0': 1, '1': 1, '2': 0 } }; var client = new Client(options); var hosts = {}; var query = 'SELECT * FROM system.schema_keyspaces'; async.series([ function warmUpPool(seriesNext) { async.times(10, function (n, next) { client.execute(query, function (err, result) { assert.ifError(err); hosts[result.info.queriedHost] = true; next(); }); }, seriesNext); }, function testCase(seriesNext) { //3 hosts alive assert.strictEqual(Object.keys(hosts).length, 3); var counter = 0; var issued = 0; var killed = false; async.times(500, function (n, next) { if (n === 10) { //kill a node when there are some outstanding requests helper.ccmHelper.exec(['node2', 'stop', '--not-gently'], function (err) { killed = true; assert.ifError(err); //do a couple of more queries async.times(10, function (n, next2) { client.execute(query, next2); }, next); }); return; } if (killed) { //Don't issue more requests return next(); } issued++; client.execute(query, function (err) { assert.ifError(err); counter++; next(); }); }, function (err) { assert.ifError(err); 
//Only 2 hosts alive at the end assert.strictEqual( client.hosts.slice(0).reduce(function (val, h) { return val + (h.isUp() ? 1 : 0); }, 0), 2); seriesNext(); }); } ], done); }); it('should warn but not fail when warmup is enable and a node is down', function (done) { async.series([ helper.toTask(helper.ccmHelper.exec, null, ['node2', 'stop']), function (next) { var warnings = []; var client = newInstance({ pooling: { warmup: true } }); client.on('log', function (level, className, message) { if (level !== 'warning' || className !== 'Client') return; warnings.push(message); }); client.connect(function (err) { assert.ifError(err); assert.strictEqual(warnings.length, 1); assert.ok(warnings[0].indexOf('pool') >= 0, 'warning does not contains the word pool: ' + warnings[0]); client.shutdown(next); }); } ], done); }); it('should connect when first contact point is down', function (done) { async.series([ helper.toTask(helper.ccmHelper.exec, null, ['node1', 'stop']), function (next) { var client = newInstance({ contactPoints: ['127.0.0.1', '127.0.0.2'], pooling: { warmup: true } }); client.connect(function (err) { assert.ifError(err); client.shutdown(next); }); } ], done); }); }); describe('events', function () { //noinspection JSPotentiallyInvalidUsageOfThis this.timeout(600000); var is1x = helper.getCassandraVersion().charAt(0) === '1'; beforeEach(helper.ccmHelper.start(2)); afterEach(helper.ccmHelper.remove); it('should emit hostUp hostDown', function (done) { var client = newInstance(); var hostsWentUp = []; var hostsWentDown = []; async.series([ client.connect.bind(client), function addListeners(next) { client.on('hostUp', hostsWentUp.push.bind(hostsWentUp)); client.on('hostDown', hostsWentDown.push.bind(hostsWentDown)); next(); }, helper.toTask(helper.ccmHelper.stopNode, null, 2), helper.toTask(helper.ccmHelper.startNode, null, 2), function wait1s(next) { // If C* 1.x, we wait slightly before checking the listener // because the node is marked UP just before the 
listeners are // called since CCM considers the node up as soon as other nodes // have seen it up, at which point they would send the // notification on the control connection so there is a very small // race here that is not evident at C* 2.x+. if(is1x) { setTimeout(next, 1000); } else { next(); } }, function checkResults(next) { assert.strictEqual(hostsWentUp.length, 1); helper.assertInstanceOf(hostsWentUp[0], Host); assert.strictEqual(helper.lastOctetOf(hostsWentUp[0]), '2'); // Special exception for C* 1.x, as it may send duplicate down events // for a single host. if(!is1x) { assert.strictEqual(hostsWentDown.length, 1); } hostsWentDown.forEach(function(downHost) { helper.assertInstanceOf(downHost, Host); assert.strictEqual(helper.lastOctetOf(downHost), '2'); }); next(); }, client.shutdown.bind(client) ], done); }); it('should emit hostAdd hostRemove', function (done) { var client = newInstance(); var hostsAdded = []; var hostsRemoved = []; function trace(message) { return (function (next) { helper.trace(message); next(); }); } async.series([ client.connect.bind(client), function addListeners(next) { client.on('hostAdd', hostsAdded.push.bind(hostsAdded)); client.on('hostRemove', hostsRemoved.push.bind(hostsRemoved)); next(); }, trace('Bootstrapping node 3'), helper.toTask(helper.ccmHelper.bootstrapNode, null, 3), trace('Starting newly bootstrapped node 3'), helper.toTask(helper.ccmHelper.startNode, null, 3), trace('Decommissioning node 2'), helper.toTask(helper.ccmHelper.decommissionNode, null, 2), trace('Stopping node 2'), helper.toTask(helper.ccmHelper.stopNode, null, 2), function checkResults(next) { helper.trace('Checking results'); assert.strictEqual(hostsAdded.length, 1); assert.strictEqual(hostsRemoved.length, 1); helper.assertInstanceOf(hostsAdded[0], Host); helper.assertInstanceOf(hostsRemoved[0], Host); assert.strictEqual(helper.lastOctetOf(hostsAdded[0]), '3'); assert.strictEqual(helper.lastOctetOf(hostsRemoved[0]), '2'); next(); }, 
client.shutdown.bind(client) ], done); }); }); describe('#shutdown()', function () { before(helper.ccmHelper.start(2)); after(helper.ccmHelper.remove); it('should close all connections to all hosts', function (done) { var client = newInstance(); async.series([ client.connect.bind(client), function makeSomeQueries(next) { //to ensure that the pool is all up! async.times(100, function (n, timesNext) { client.execute('SELECT * FROM system.schema_keyspaces', timesNext); }, next); }, function shutDown(next) { var hosts = client.hosts.slice(0); assert.strictEqual(hosts.length, 2); assert.ok(hosts[0].pool.connections.length > 0); assert.ok(hosts[1].pool.connections.length > 0); assert.ok(!hosts[0].pool.shuttingDown); assert.ok(!hosts[1].pool.shuttingDown); client.shutdown(next); }, function checkPool(next) { var hosts = client.hosts.slice(0); assert.strictEqual(hosts.length, 2); assert.strictEqual(hosts[0].pool.connections.length, 0); assert.strictEqual(hosts[1].pool.connections.length, 0); next(); } ], done); }); }); }); /** @returns {Client} */ function newInstance(options) { return new Client(utils.extend({}, helper.baseOptions, options)); }
{ "content_hash": "ffdbe3b8e6bd5428acec9bf20116ff3c", "timestamp": "", "source": "github", "line_count": 811, "max_line_length": 148, "avg_line_length": 38.99876695437731, "alnum_prop": 0.5679145061338055, "repo_name": "connor4312/nodejs-driver", "id": "a0e63811a8df2e1836f97c8022bc3250ccffbaa8", "size": "31628", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "test/integration/short/client-pool-tests.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "JavaScript", "bytes": "818744" } ], "symlink_target": "" }
// Bootstrap shim for the `precompile` command: install the CoffeeScript
// require hook, then delegate to the CoffeeScript implementation.
(function bootstrap() {
  // Registering the hook lets Node resolve and compile .coffee modules on the fly;
  // it must run before the .coffee file below is required.
  require('coffee-script/register');
  // The actual command logic lives in the CoffeeScript source file.
  require('./command-precompile.coffee');
}());
{ "content_hash": "bbb3b5bcd4f0e8ab0feaed0ac64b4a9e", "timestamp": "", "source": "github", "line_count": 2, "max_line_length": 39, "avg_line_length": 37.5, "alnum_prop": 0.76, "repo_name": "octoblu/node-pre-compile-to-s3", "id": "41f6b68f8beebd371e18d7125b2c548f121abc1a", "size": "96", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "command-precompile.js", "mode": "33261", "license": "mit", "language": [ { "name": "CoffeeScript", "bytes": "7705" }, { "name": "JavaScript", "bytes": "460" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_43) on Wed May 22 21:49:23 UTC 2013 --> <META http-equiv="Content-Type" content="text/html; charset=UTF-8"> <TITLE> org.apache.hadoop.hbase.io.hfile.slab Class Hierarchy (HBase 0.94.8 API) </TITLE> <META NAME="date" CONTENT="2013-05-22"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="org.apache.hadoop.hbase.io.hfile.slab Class Hierarchy (HBase 0.94.8 API)"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD 
BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../../../../org/apache/hadoop/hbase/io/hfile/package-tree.html"><B>PREV</B></A>&nbsp; &nbsp;<A HREF="../../../../../../../org/apache/hadoop/hbase/ipc/package-tree.html"><B>NEXT</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/slab/package-tree.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> <CENTER> <H2> Hierarchy For Package org.apache.hadoop.hbase.io.hfile.slab </H2> </CENTER> <DL> <DT><B>Package Hierarchies:</B><DD><A HREF="../../../../../../../overview-tree.html">All Packages</A></DL> <HR> <H2> Class Hierarchy </H2> <UL> <LI TYPE="circle">java.lang.<A HREF="http://java.sun.com/javase/6/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang"><B>Object</B></A><UL> <LI TYPE="circle">org.apache.hadoop.hbase.io.hfile.slab.<A HREF="../../../../../../../org/apache/hadoop/hbase/io/hfile/slab/SingleSizeCache.html" title="class in org.apache.hadoop.hbase.io.hfile.slab"><B>SingleSizeCache</B></A> (implements 
org.apache.hadoop.hbase.io.hfile.<A HREF="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</A>, org.apache.hadoop.hbase.io.<A HREF="../../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</A>) <LI TYPE="circle">org.apache.hadoop.hbase.io.hfile.slab.<A HREF="../../../../../../../org/apache/hadoop/hbase/io/hfile/slab/SlabCache.html" title="class in org.apache.hadoop.hbase.io.hfile.slab"><B>SlabCache</B></A> (implements org.apache.hadoop.hbase.io.hfile.<A HREF="../../../../../../../org/apache/hadoop/hbase/io/hfile/BlockCache.html" title="interface in org.apache.hadoop.hbase.io.hfile">BlockCache</A>, org.apache.hadoop.hbase.io.<A HREF="../../../../../../../org/apache/hadoop/hbase/io/HeapSize.html" title="interface in org.apache.hadoop.hbase.io">HeapSize</A>) </UL> </UL> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A 
HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;<A HREF="../../../../../../../org/apache/hadoop/hbase/io/hfile/package-tree.html"><B>PREV</B></A>&nbsp; &nbsp;<A HREF="../../../../../../../org/apache/hadoop/hbase/ipc/package-tree.html"><B>NEXT</B></A></FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../../index.html?org/apache/hadoop/hbase/io/hfile/slab/package-tree.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> Copyright &#169; 2013 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All Rights Reserved. </BODY> </HTML>
{ "content_hash": "a7e90e01123c5f3fd64d22f9940fe655", "timestamp": "", "source": "github", "line_count": 156, "max_line_length": 583, "avg_line_length": 49.23076923076923, "alnum_prop": 0.61640625, "repo_name": "algarecu/hbase-0.94.8-qod", "id": "693db635ef80dc174eec19b38978448c7ec60346", "size": "7680", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/apidocs/org/apache/hadoop/hbase/io/hfile/slab/package-tree.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "19836" }, { "name": "CSS", "bytes": "20794" }, { "name": "HTML", "bytes": "139288" }, { "name": "Java", "bytes": "24259991" }, { "name": "Makefile", "bytes": "2514" }, { "name": "PHP", "bytes": "14700" }, { "name": "Perl", "bytes": "17334" }, { "name": "Python", "bytes": "29070" }, { "name": "Ruby", "bytes": "779544" }, { "name": "Shell", "bytes": "175912" }, { "name": "Thrift", "bytes": "69092" }, { "name": "XSLT", "bytes": "8758" } ], "symlink_target": "" }
//CHECKSTYLE:FileLength:OFF package org.pentaho.di.core.row.value; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.EOFException; import java.io.IOException; import java.io.StringReader; import java.io.UnsupportedEncodingException; import java.math.BigDecimal; import java.net.InetAddress; import java.net.SocketTimeoutException; import java.nio.charset.Charset; import java.sql.Blob; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.sql.Timestamp; import java.sql.Types; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.text.NumberFormat; import java.text.ParseException; import java.text.ParsePosition; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import java.util.Locale; import java.util.TimeZone; import org.pentaho.di.compatibility.Value; import org.pentaho.di.core.Const; import org.pentaho.di.core.database.DatabaseInterface; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.database.GreenplumDatabaseMeta; import org.pentaho.di.core.database.MySQLDatabaseMeta; import org.pentaho.di.core.database.NetezzaDatabaseMeta; import org.pentaho.di.core.database.OracleDatabaseMeta; import org.pentaho.di.core.database.PostgreSQLDatabaseMeta; import org.pentaho.di.core.database.SQLiteDatabaseMeta; import org.pentaho.di.core.database.TeradataDatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleEOFException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.exception.KettleValueException; import org.pentaho.di.core.gui.PrimitiveGCInterface; import org.pentaho.di.core.logging.KettleLogStore; import org.pentaho.di.core.logging.LogChannelInterface; import org.pentaho.di.core.row.ValueDataUtil; import 
org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.util.EnvUtil; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.w3c.dom.Node; /** * @author jb */ public class ValueMetaBase implements ValueMetaInterface { protected static Class<?> PKG = Const.class; // for i18n purposes, needed by Translator2 public static final String DEFAULT_DATE_FORMAT_MASK = Const.NVL( EnvUtil .getSystemProperty( Const.KETTLE_DEFAULT_DATE_FORMAT ), "yyyy/MM/dd HH:mm:ss.SSS" ); public static final String DEFAULT_TIMESTAMP_FORMAT_MASK = Const.NVL( EnvUtil .getSystemProperty( Const.KETTLE_DEFAULT_TIMESTAMP_FORMAT ), "yyyy/MM/dd HH:mm:ss.SSSSSSSSS" ); public static final String XML_META_TAG = "value-meta"; public static final String XML_DATA_TAG = "value-data"; public static final boolean EMPTY_STRING_AND_NULL_ARE_DIFFERENT = convertStringToBoolean( Const.NVL( System .getProperty( Const.KETTLE_EMPTY_STRING_DIFFERS_FROM_NULL, "N" ), "N" ) ); protected String name; protected int length; protected int precision; protected int type; protected int trimType; protected int storageType; protected String origin; protected String comments; protected Object[] index; protected String conversionMask; protected String stringEncoding; protected String decimalSymbol; protected String groupingSymbol; protected String currencySymbol; protected boolean caseInsensitive; protected boolean sortedDescending; protected boolean outputPaddingEnabled; protected boolean largeTextField; protected Locale dateFormatLocale; protected TimeZone dateFormatTimeZone; protected boolean dateFormatLenient; protected boolean lenientStringToNumber; protected boolean ignoreTimezone; protected SimpleDateFormat dateFormat; protected boolean dateFormatChanged; protected DecimalFormat decimalFormat; protected boolean decimalFormatChanged; protected ValueMetaInterface storageMetadata; protected boolean identicalFormat; protected ValueMetaInterface conversionMetadata; boolean 
singleByteEncoding; protected long numberOfBinaryStringConversions; protected boolean bigNumberFormatting; // get & store original result set meta data for later use // @see java.sql.ResultSetMetaData // protected int originalColumnType; protected String originalColumnTypeName; protected int originalPrecision; protected int originalScale; protected boolean originalAutoIncrement; protected int originalNullable; protected boolean originalSigned; private static final LogChannelInterface log = KettleLogStore.getLogChannelInterfaceFactory().create( "ValueMetaBase" ); /** * The trim type codes */ public static final String[] trimTypeCode = { "none", "left", "right", "both" }; /** * The trim description */ public static final String[] trimTypeDesc = { BaseMessages.getString( PKG, "ValueMeta.TrimType.None" ), BaseMessages.getString( PKG, "ValueMeta.TrimType.Left" ), BaseMessages.getString( PKG, "ValueMeta.TrimType.Right" ), BaseMessages.getString( PKG, "ValueMeta.TrimType.Both" ) }; public ValueMetaBase() { this( null, ValueMetaInterface.TYPE_NONE, -1, -1 ); } public ValueMetaBase( String name ) { this( name, ValueMetaInterface.TYPE_NONE, -1, -1 ); } public ValueMetaBase( String name, int type ) { this( name, type, -1, -1 ); } public ValueMetaBase( String name, int type, int storageType ) { this( name, type, -1, -1 ); this.storageType = storageType; setDefaultConversionMask(); } public ValueMetaBase( String name, int type, int length, int precision ) { this.name = name; this.type = type; this.length = length; this.precision = precision; this.storageType = STORAGE_TYPE_NORMAL; this.sortedDescending = false; this.outputPaddingEnabled = false; this.decimalSymbol = "" + Const.DEFAULT_DECIMAL_SEPARATOR; this.groupingSymbol = "" + Const.DEFAULT_GROUPING_SEPARATOR; this.dateFormatLocale = Locale.getDefault(); this.dateFormatTimeZone = TimeZone.getDefault(); this.identicalFormat = true; this.bigNumberFormatting = true; this.lenientStringToNumber = convertStringToBoolean( 
Const.NVL( System.getProperty( Const.KETTLE_LENIENT_STRING_TO_NUMBER_CONVERSION, "N" ), "N" ) ); this.ignoreTimezone = convertStringToBoolean( Const.NVL( System.getProperty( Const.KETTLE_COMPATIBILITY_DB_IGNORE_TIMEZONE, "N" ), "N" ) ); determineSingleByteEncoding(); setDefaultConversionMask(); } public static final String[] SINGLE_BYTE_ENCODINGS = new String[] { "ISO8859_1", "Cp1252", "ASCII", "Cp037", "Cp273", "Cp277", "Cp278", "Cp280", "Cp284", "Cp285", "Cp297", "Cp420", "Cp424", "Cp437", "Cp500", "Cp737", "Cp775", "Cp850", "Cp852", "Cp855", "Cp856", "Cp857", "Cp858", "Cp860", "Cp861", "Cp862", "Cp863", "Cp865", "Cp866", "Cp869", "Cp870", "Cp871", "Cp875", "Cp918", "Cp921", "Cp922", "Cp1140", "Cp1141", "Cp1142", "Cp1143", "Cp1144", "Cp1145", "Cp1146", "Cp1147", "Cp1148", "Cp1149", "Cp1250", "Cp1251", "Cp1253", "Cp1254", "Cp1255", "Cp1257", "ISO8859_2", "ISO8859_3", "ISO8859_5", "ISO8859_5", "ISO8859_6", "ISO8859_7", "ISO8859_8", "ISO8859_9", "ISO8859_13", "ISO8859_15", "ISO8859_15_FDIS", "MacCentralEurope", "MacCroatian", "MacCyrillic", "MacDingbat", "MacGreek", "MacHebrew", "MacIceland", "MacRoman", "MacRomania", "MacSymbol", "MacTurkish", "MacUkraine", }; protected void setDefaultConversionMask() { // Set some sensible default mask on the numbers // switch ( type ) { case TYPE_INTEGER: String alternativeIntegerMask = EnvUtil.getSystemProperty( Const.KETTLE_DEFAULT_INTEGER_FORMAT ); if ( Const.isEmpty( alternativeIntegerMask ) ) { setConversionMask( "#;-#" ); } else { setConversionMask( alternativeIntegerMask ); } break; case TYPE_NUMBER: String alternativeNumberMask = EnvUtil.getSystemProperty( Const.KETTLE_DEFAULT_NUMBER_FORMAT ); if ( Const.isEmpty( alternativeNumberMask ) ) { setConversionMask( "#.#;-#.#" ); } else { setConversionMask( alternativeNumberMask ); } break; case TYPE_BIGNUMBER: String alternativeBigNumberMask = EnvUtil.getSystemProperty( Const.KETTLE_DEFAULT_BIGNUMBER_FORMAT ); if ( Const.isEmpty( alternativeBigNumberMask ) ) { 
setConversionMask( "#.###############################################;" + "-#.###############################################" ); } else { setConversionMask( alternativeBigNumberMask ); } setGroupingSymbol( null ); setDecimalSymbol( "." ); // For backward compatibility reasons! break; default: break; } } protected void determineSingleByteEncoding() { singleByteEncoding = false; Charset cs; if ( Const.isEmpty( stringEncoding ) ) { cs = Charset.defaultCharset(); } else { cs = Charset.forName( stringEncoding ); } // See if the default character set for input is single byte encoded. // for ( String charSetEncoding : SINGLE_BYTE_ENCODINGS ) { if ( cs.toString().equalsIgnoreCase( charSetEncoding ) ) { singleByteEncoding = true; } } } @Override public ValueMetaBase clone() { try { ValueMetaBase valueMeta = (ValueMetaBase) super.clone(); valueMeta.dateFormat = null; valueMeta.decimalFormat = null; if ( dateFormatLocale != null ) { valueMeta.dateFormatLocale = (Locale) dateFormatLocale.clone(); } if ( dateFormatTimeZone != null ) { valueMeta.dateFormatTimeZone = (TimeZone) dateFormatTimeZone.clone(); } if ( storageMetadata != null ) { valueMeta.storageMetadata = storageMetadata.clone(); } if ( conversionMetadata != null ) { valueMeta.conversionMetadata = conversionMetadata.clone(); } valueMeta.compareStorageAndActualFormat(); return valueMeta; } catch ( CloneNotSupportedException e ) { return null; } } /** * @return the comments */ @Override public String getComments() { return comments; } /** * @param comments * the comments to set */ @Override public void setComments( String comments ) { this.comments = comments; } /** * @return the index */ @Override public Object[] getIndex() { return index; } /** * @param index * the index to set */ @Override public void setIndex( Object[] index ) { this.index = index; } /** * @return the length */ @Override public int getLength() { return length; } /** * @param length * the length to set */ @Override public void setLength( int length 
  ) {
    this.length = length;
  }

  /**
   * Sets the length and the precision of this value in one call.
   *
   * @param length
   *          the length to set
   * @param precision
   *          the precision to set
   */
  @Override
  public void setLength( int length, int precision ) {
    this.length = length;
    this.precision = precision;
  }

  /**
   * @return the name
   */
  @Override
  public String getName() {
    return name;
  }

  /**
   * @param name
   *          the name to set
   */
  @Override
  public void setName( String name ) {
    this.name = name;
  }

  /**
   * @return the origin
   */
  @Override
  public String getOrigin() {
    return origin;
  }

  /**
   * @param origin
   *          the origin to set
   */
  @Override
  public void setOrigin( String origin ) {
    this.origin = origin;
  }

  /**
   * Returns the precision, tweaked for backward compatibility: Integer and Binary
   * values always report 0 and String and Boolean values always report -1
   * (undefined), regardless of the stored precision field.
   *
   * @return the precision
   */
  @Override
  public int getPrecision() {
    // For backward compatibility we need to tweak a bit...
    //
    if ( isInteger() || isBinary() ) {
      return 0;
    }
    if ( isString() || isBoolean() ) {
      return -1;
    }

    return precision;
  }

  /**
   * @param precision
   *          the precision to set
   */
  @Override
  public void setPrecision( int precision ) {
    this.precision = precision;
  }

  /**
   * @return the storageType
   */
  @Override
  public int getStorageType() {
    return storageType;
  }

  /**
   * @param storageType
   *          the storageType to set
   */
  @Override
  public void setStorageType( int storageType ) {
    this.storageType = storageType;
  }

  @Override
  public boolean isStorageNormal() {
    return storageType == STORAGE_TYPE_NORMAL;
  }

  @Override
  public boolean isStorageIndexed() {
    return storageType == STORAGE_TYPE_INDEXED;
  }

  @Override
  public boolean isStorageBinaryString() {
    return storageType == STORAGE_TYPE_BINARY_STRING;
  }

  /**
   * @return the type
   */
  @Override
  public int getType() {
    return type;
  }

  /**
   * @param type
   *          the type to set
   * @deprecated
   */
  @Override
  @Deprecated
  public void setType( int type ) {
    this.type = type;
  }

  /**
   * @return the conversionMask
   */
  @Override
  @Deprecated
  public String getConversionMask() {
    return conversionMask;
  }

  /**
   * Sets the conversion mask and invalidates the cached date and decimal formats
   * so that they are rebuilt with the new mask on next use.
   *
   * @param conversionMask
   *          the conversionMask to set
   */
  @Override
  public void setConversionMask( String conversionMask ) {
    this.conversionMask = conversionMask;
    dateFormatChanged = true;
    decimalFormatChanged = true;
    compareStorageAndActualFormat();
  }

  /**
   * @return the encoding
   */
  @Override
  public String getStringEncoding() {
    return stringEncoding;
  }

  /**
   * @param encoding
   *          the encoding to set
   */
  @Override
  public void setStringEncoding( String encoding ) {
    this.stringEncoding = encoding;
    determineSingleByteEncoding();
    compareStorageAndActualFormat();
  }

  /**
   * @return the decimalSymbol
   */
  @Override
  public String getDecimalSymbol() {
    return decimalSymbol;
  }

  /**
   * @param decimalSymbol
   *          the decimalSymbol to set (invalidates the cached decimal format)
   */
  @Override
  public void setDecimalSymbol( String decimalSymbol ) {
    this.decimalSymbol = decimalSymbol;
    decimalFormatChanged = true;
    compareStorageAndActualFormat();
  }

  /**
   * @return the groupingSymbol
   */
  @Override
  public String getGroupingSymbol() {
    return groupingSymbol;
  }

  /**
   * @param groupingSymbol
   *          the groupingSymbol to set (invalidates the cached decimal format)
   */
  @Override
  public void setGroupingSymbol( String groupingSymbol ) {
    this.groupingSymbol = groupingSymbol;
    decimalFormatChanged = true;
    compareStorageAndActualFormat();
  }

  /**
   * @return the currencySymbol
   */
  @Override
  public String getCurrencySymbol() {
    return currencySymbol;
  }

  /**
   * @param currencySymbol
   *          the currencySymbol to set (invalidates the cached decimal format)
   */
  @Override
  public void setCurrencySymbol( String currencySymbol ) {
    this.currencySymbol = currencySymbol;
    decimalFormatChanged = true;
  }

  /**
   * @return the caseInsensitive
   */
  @Override
  public boolean isCaseInsensitive() {
    return caseInsensitive;
  }

  /**
   * @param caseInsensitive
   *          the caseInsensitive to set
   */
  @Override
  public void setCaseInsensitive( boolean caseInsensitive ) {
    this.caseInsensitive = caseInsensitive;
  }

  /**
   * @return the sortedDescending
   */
  @Override
  public boolean isSortedDescending() {
    return sortedDescending;
  }

  /**
   * @param sortedDescending
   *          the sortedDescending to set
   */
  @Override
  public void setSortedDescending( boolean sortedDescending ) {
    this.sortedDescending = sortedDescending;
  }

  /**
   * @return true if output padding is enabled (padding to specified length)
   */
  @Override
public boolean isOutputPaddingEnabled() { return outputPaddingEnabled; } /** * @param outputPaddingEnabled * Set to true if output padding is to be enabled (padding to specified length) */ @Override public void setOutputPaddingEnabled( boolean outputPaddingEnabled ) { this.outputPaddingEnabled = outputPaddingEnabled; } /** * @return true if this is a large text field (CLOB, TEXT) with arbitrary length. */ @Override public boolean isLargeTextField() { return largeTextField; } /** * @param largeTextField * Set to true if this is to be a large text field (CLOB, TEXT) with arbitrary length. */ @Override public void setLargeTextField( boolean largeTextField ) { this.largeTextField = largeTextField; } /** * @return the dateFormatLenient */ @Override public boolean isDateFormatLenient() { return dateFormatLenient; } /** * @param dateFormatLenient * the dateFormatLenient to set */ @Override public void setDateFormatLenient( boolean dateFormatLenient ) { this.dateFormatLenient = dateFormatLenient; dateFormatChanged = true; } /** * @return the dateFormatLocale */ @Override public Locale getDateFormatLocale() { return dateFormatLocale; } /** * @param dateFormatLocale * the dateFormatLocale to set */ @Override public void setDateFormatLocale( Locale dateFormatLocale ) { this.dateFormatLocale = dateFormatLocale; dateFormatChanged = true; } // DATE + STRING protected synchronized String convertDateToString( Date date ) { if ( date == null ) { return null; } return getDateFormat().format( date ); } protected static SimpleDateFormat compatibleDateFormat = new SimpleDateFormat( "yyyy/MM/dd HH:mm:ss.SSS" ); protected synchronized String convertDateToCompatibleString( Date date ) { if ( date == null ) { return null; } return compatibleDateFormat.format( date ); } protected synchronized Date convertStringToDate( String string ) throws KettleValueException { string = Const.trimToType( string, getTrimType() ); // see if trimming needs // to be performed before // conversion if ( 
Const.isEmpty( string ) ) { return null; } try { ParsePosition pp = new ParsePosition( 0 ); Date result = getDateFormat().parse( string, pp ); if ( pp.getErrorIndex() >= 0 ) { // error happen throw new ParseException( string, pp.getErrorIndex() ); } // some chars can be after pp.getIndex(). That means, not full value was parsed. For example, for value // "25-03-1918 11:54" and format "dd-MM-yyyy", value will be "25-03-1918 00:00" without any exception. // If there are only spaces after pp.getIndex() - that means full values was parsed return result; } catch ( ParseException e ) { String dateFormat = ( getDateFormat() != null ) ? getDateFormat().toPattern() : "null"; throw new KettleValueException( toString() + " : couldn't convert string [" + string + "] to a date using format [" + dateFormat + "] on offset location " + e.getErrorOffset(), e ); } } // DATE + NUMBER protected Double convertDateToNumber( Date date ) { return new Double( date.getTime() ); } protected Date convertNumberToDate( Double number ) { return new Date( number.longValue() ); } // DATE + INTEGER protected Long convertDateToInteger( Date date ) { return new Long( date.getTime() ); } protected Date convertIntegerToDate( Long number ) { return new Date( number.longValue() ); } // DATE + BIGNUMBER protected BigDecimal convertDateToBigNumber( Date date ) { return new BigDecimal( date.getTime() ); } protected Date convertBigNumberToDate( BigDecimal number ) { return new Date( number.longValue() ); } protected synchronized String convertNumberToString( Double number ) throws KettleValueException { if ( number == null ) { if ( !outputPaddingEnabled || length < 1 ) { return null; } else { // Return strings padded to the specified length... // This is done for backward compatibility with 2.5.x // We just optimized this a bit... 
// String[] emptyPaddedStrings = Const.getEmptyPaddedStrings(); if ( length < emptyPaddedStrings.length ) { return emptyPaddedStrings[length]; } else { return Const.rightPad( "", length ); } } } try { return getDecimalFormat( false ).format( number ); } catch ( Exception e ) { throw new KettleValueException( toString() + " : couldn't convert Number to String ", e ); } } protected synchronized String convertNumberToCompatibleString( Double number ) throws KettleValueException { if ( number == null ) { return null; } return Double.toString( number ); } protected synchronized Double convertStringToNumber( String string ) throws KettleValueException { string = Const.trimToType( string, getTrimType() ); // see if trimming needs // to be performed before // conversion if ( Const.isEmpty( string ) ) { return null; } try { Number number; if ( lenientStringToNumber ) { number = getDecimalFormat( false ).parse( string ); } else { ParsePosition parsePosition = new ParsePosition( 0 ); number = getDecimalFormat( false ).parse( string, parsePosition ); if ( parsePosition.getIndex() < string.length() ) { throw new KettleValueException( toString() + " : couldn't convert String to number : non-numeric character found at position " + ( parsePosition.getIndex() + 1 ) + " for value [" + string + "]" ); } } return new Double( number.doubleValue() ); } catch ( Exception e ) { throw new KettleValueException( toString() + " : couldn't convert String to number ", e ); } } @Override public synchronized SimpleDateFormat getDateFormat() { // If we have a Date that is represented as a String // In that case we can set the format of the original Date on the String // value metadata in the form of a conversion metadata object. // That way, we can always convert from Date to String and back without a // problem, no matter how complex the format was. // As such, we should return the date SimpleDateFormat of the conversion // metadata. 
// if ( conversionMetadata != null ) { return conversionMetadata.getDateFormat(); } if ( dateFormat == null || dateFormatChanged ) { // This may not become static as the class is not thread-safe! dateFormat = new SimpleDateFormat(); String mask; if ( Const.isEmpty( conversionMask ) ) { mask = DEFAULT_DATE_FORMAT_MASK; } else { mask = conversionMask; } // Do we have a locale? // if ( dateFormatLocale == null || dateFormatLocale.equals( Locale.getDefault() ) ) { dateFormat = new SimpleDateFormat( mask ); } else { dateFormat = new SimpleDateFormat( mask, dateFormatLocale ); } // Do we have a time zone? // if ( dateFormatTimeZone != null ) { dateFormat.setTimeZone( dateFormatTimeZone ); } // Set the conversion leniency as well // dateFormat.setLenient( dateFormatLenient ); dateFormatChanged = false; } return dateFormat; } @Override public synchronized DecimalFormat getDecimalFormat() { return getDecimalFormat( false ); } @Override public synchronized DecimalFormat getDecimalFormat( boolean useBigDecimal ) { // If we have an Integer that is represented as a String // In that case we can set the format of the original Integer on the String // value metadata in the form of a conversion metadata object. // That way, we can always convert from Integer to String and back without a // problem, no matter how complex the format was. // As such, we should return the decimal format of the conversion metadata. // if ( conversionMetadata != null ) { return conversionMetadata.getDecimalFormat( useBigDecimal ); } // Calculate the decimal format as few times as possible. // That is because creating or changing a DecimalFormat object is very CPU // hungry. 
    //
    if ( decimalFormat == null || decimalFormatChanged ) {
      decimalFormat = (DecimalFormat) NumberFormat.getInstance();
      decimalFormat.setParseBigDecimal( useBigDecimal );
      DecimalFormatSymbols decimalFormatSymbols = decimalFormat.getDecimalFormatSymbols();

      // Override the locale defaults with any explicitly configured symbols.
      if ( !Const.isEmpty( currencySymbol ) ) {
        decimalFormatSymbols.setCurrencySymbol( currencySymbol );
      }
      if ( !Const.isEmpty( groupingSymbol ) ) {
        decimalFormatSymbols.setGroupingSeparator( groupingSymbol.charAt( 0 ) );
      }
      if ( !Const.isEmpty( decimalSymbol ) ) {
        decimalFormatSymbols.setDecimalSeparator( decimalSymbol.charAt( 0 ) );
      }
      decimalFormat.setDecimalFormatSymbols( decimalFormatSymbols );

      // Apply the conversion mask if we have one...
      if ( !Const.isEmpty( conversionMask ) ) {
        decimalFormat.applyPattern( conversionMask );
      } else {
        // No mask: derive a default pattern from the type, length and precision.
        switch ( type ) {
          case TYPE_INTEGER:
            if ( length < 1 ) {
              decimalFormat.applyPattern( " ###############0;-###############0" ); // Same
              // as
              // before
              // version
              // 3.0
            } else {
              StringBuilder integerPattern = new StringBuilder();

              // First the format for positive integers...
              //
              integerPattern.append( " " );
              for ( int i = 0; i < getLength(); i++ ) {
                integerPattern.append( '0' ); // all zeroes.
              }
              integerPattern.append( ";" );

              // Then the format for the negative numbers...
              //
              integerPattern.append( "-" );
              for ( int i = 0; i < getLength(); i++ ) {
                integerPattern.append( '0' ); // all zeroes.
              }
              decimalFormat.applyPattern( integerPattern.toString() );
            }
            break;
          case TYPE_BIGNUMBER:
          case TYPE_NUMBER:
            if ( length < 1 ) {
              decimalFormat.applyPattern( " ##########0.0########;-#########0.0########" );
            } else {
              StringBuilder numberPattern = new StringBuilder();

              // First do the format for positive numbers...
              //
              numberPattern.append( ' ' ); // to compensate for minus sign.
              if ( precision < 0 ) { // Default: two decimals
                for ( int i = 0; i < length; i++ ) {
                  numberPattern.append( '0' );
                }
                numberPattern.append( ".00" ); // for the .00
              } else { // Floating point format 00001234,56 --> (12,2)
                for ( int i = 0; i <= length; i++ ) {
                  numberPattern.append( '0' ); // all zeroes.
                }
                int pos = length - precision + 1;
                if ( pos >= 0 && pos < numberPattern.length() ) {
                  numberPattern.setCharAt( length - precision + 1, '.' ); // one
                  // 'comma'
                }
              }

              // Now do the format for negative numbers...
              //
              StringBuilder negativePattern = new StringBuilder( numberPattern );
              negativePattern.setCharAt( 0, '-' );

              numberPattern.append( ";" );
              numberPattern.append( negativePattern );

              // Apply the pattern...
              //
              decimalFormat.applyPattern( numberPattern.toString() );
            }
            break;
          default:
            break;
        }
      }

      decimalFormatChanged = false;
    }

    return decimalFormat;
  }

  /**
   * Formats a Long using this value's decimal format. A null input returns null unless
   * output padding is enabled and a length is set, in which case a space-padded string
   * of the specified length is returned (2.5.x backward compatibility).
   */
  protected synchronized String convertIntegerToString( Long integer ) throws KettleValueException {
    if ( integer == null ) {
      if ( !outputPaddingEnabled || length < 1 ) {
        return null;
      } else {
        // Return strings padded to the specified length...
        // This is done for backward compatibility with 2.5.x
        // We just optimized this a bit...
        //
        String[] emptyPaddedStrings = Const.getEmptyPaddedStrings();
        if ( length < emptyPaddedStrings.length ) {
          return emptyPaddedStrings[length];
        } else {
          return Const.rightPad( "", length );
        }
      }
    }

    try {
      return getDecimalFormat( false ).format( integer );
    } catch ( Exception e ) {
      throw new KettleValueException( toString() + " : couldn't convert Long to String ", e );
    }
  }

  /**
   * Renders a Long using the fixed Long.toString() representation, ignoring the
   * conversion mask (pre-3.0 compatible output).
   */
  protected synchronized String convertIntegerToCompatibleString( Long integer ) throws KettleValueException {
    if ( integer == null ) {
      return null;
    }
    return Long.toString( integer );
  }

  /**
   * Parses a String to a Long using the decimal format, after applying the trim type.
   * When lenientStringToNumber is false, any unparsed trailing character raises an error.
   */
  protected synchronized Long convertStringToInteger( String string ) throws KettleValueException {
    string = Const.trimToType( string, getTrimType() ); // see if trimming needs
    // to be performed before
    // conversion

    if ( Const.isEmpty( string ) ) {
      return null;
    }

    try {
      Number number;
      if ( lenientStringToNumber ) {
        number = new Long( getDecimalFormat( false ).parse( string ).longValue() );
      } else {
        ParsePosition parsePosition = new ParsePosition( 0 );
        number = getDecimalFormat( false ).parse( string, parsePosition );

        if ( parsePosition.getIndex() < string.length() ) {
          throw new KettleValueException( toString()
            + " : couldn't convert String to number : non-numeric character found at position "
            + ( parsePosition.getIndex() + 1 ) + " for value [" + string + "]" );
        }
      }
      return new Long( number.longValue() );
    } catch ( Exception e ) {
      throw new KettleValueException( toString() + " : couldn't convert String to Integer", e );
    }
  }

  /**
   * Formats a BigDecimal using this value's decimal format (BigDecimal-aware when
   * bigNumberFormatting is enabled).
   */
  protected synchronized String convertBigNumberToString( BigDecimal number ) throws KettleValueException {
    if ( number == null ) {
      return null;
    }

    try {
      return getDecimalFormat( bigNumberFormatting ).format( number );
    } catch ( Exception e ) {
      throw new KettleValueException( toString() + " : couldn't convert BigNumber to String ", e );
    }
  }

  /**
   * Parses a String to a BigDecimal using the decimal format, after applying the trim
   * type. On any parse failure, falls back to the plain BigDecimal(String) constructor
   * (workaround for PDI-1824).
   */
  protected synchronized BigDecimal convertStringToBigNumber( String string ) throws KettleValueException {
    string = Const.trimToType( string, getTrimType() ); // see if trimming needs
    // to be performed before
    // conversion

    if ( Const.isEmpty( string ) ) {
      return null;
    }

    try {
      Number number;
      if ( lenientStringToNumber ) {
        number = getDecimalFormat( bigNumberFormatting ).parse( string );
      } else {
        ParsePosition parsePosition = new ParsePosition( 0 );
        number = getDecimalFormat( bigNumberFormatting ).parse( string, parsePosition );

        if ( parsePosition.getIndex() < string.length() ) {
          throw new KettleValueException( toString()
            + " : couldn't convert String to number : non-numeric character found at position "
            + ( parsePosition.getIndex() + 1 ) + " for value [" + string + "]" );
        }
      }
      return (BigDecimal) number;
    } catch ( Exception e ) {
      // We added this workaround for PDI-1824
      //
      try {
        return new BigDecimal( string );
      } catch ( NumberFormatException ex ) {
        throw new KettleValueException( toString() + " : couldn't convert string value '" + string
          + "' to a big number.", ex );
      }
    }
  }

  // BOOLEAN + STRING

  // Renders a Boolean as "true"/"false" when the length allows it (>= 3), otherwise "Y"/"N".
  protected String convertBooleanToString( Boolean bool ) {
    if ( bool == null ) {
      return null;
    }
    if ( length >= 3 ) {
      return bool.booleanValue() ? "true" : "false";
    } else {
      return bool.booleanValue() ? "Y" : "N";
    }
  }

  // Accepts Y/TRUE/YES (case-insensitive) and "1" as true; anything else is false; empty is null.
  public static Boolean convertStringToBoolean( String string ) {
    if ( Const.isEmpty( string ) ) {
      return null;
    }
    return Boolean.valueOf( "Y".equalsIgnoreCase( string ) || "TRUE".equalsIgnoreCase( string )
      || "YES".equalsIgnoreCase( string ) || "1".equals( string ) );
  }

  // BOOLEAN + NUMBER

  protected Double convertBooleanToNumber( Boolean bool ) {
    if ( bool == null ) {
      return null;
    }
    return new Double( bool.booleanValue() ? 1.0 : 0.0 );
  }

  protected Boolean convertNumberToBoolean( Double number ) {
    if ( number == null ) {
      return null;
    }
    return Boolean.valueOf( number.intValue() != 0 );
  }

  // BOOLEAN + INTEGER

  protected Long convertBooleanToInteger( Boolean bool ) {
    if ( bool == null ) {
      return null;
    }
    return Long.valueOf( bool.booleanValue() ?
      1L : 0L );
  }

  protected Boolean convertIntegerToBoolean( Long number ) {
    if ( number == null ) {
      return null;
    }
    return Boolean.valueOf( number.longValue() != 0 );
  }

  // BOOLEAN + BIGNUMBER

  protected BigDecimal convertBooleanToBigNumber( Boolean bool ) {
    if ( bool == null ) {
      return null;
    }
    return bool.booleanValue() ? BigDecimal.ONE : BigDecimal.ZERO;
  }

  protected Boolean convertBigNumberToBoolean( BigDecimal number ) {
    if ( number == null ) {
      return null;
    }
    return Boolean.valueOf( number.intValue() != 0 );
  }

  /**
   * Converts a byte[] stored in a binary string storage type into a String;
   *
   * @param binary
   *          the binary string
   * @return the String in the correct encoding.
   * @throws KettleValueException
   */
  protected String convertBinaryStringToString( byte[] binary ) throws KettleValueException {
    //noinspection deprecation
    return convertBinaryStringToString( binary, EMPTY_STRING_AND_NULL_ARE_DIFFERENT );
  }

  /*
   * Do not use this method directly! It is for tests!
   */
  @Deprecated
  String convertBinaryStringToString( byte[] binary, boolean emptyStringDiffersFromNull ) throws KettleValueException {
    // OK, so we have an internal representation of the original object, read
    // from file.
    // Before we release it back, we have to see if we don't have to do a
    // String-<type>-String
    // conversion with different masks.
    // This obviously only applies to numeric data and dates.
    // We verify if this is true or false in advance for performance reasons
    //
    // if (binary==null || binary.length==0) return null;
    if ( binary == null || binary.length == 0 ) {
      // An empty (but non-null) byte[] maps to "" only when empty string and null differ.
      return ( emptyStringDiffersFromNull && binary != null ) ?
        "" : null;
    }

    // Decode with the storage metadata's encoding unless both formats are identical.
    String encoding;
    if ( identicalFormat ) {
      encoding = getStringEncoding();
    } else {
      encoding = storageMetadata.getStringEncoding();
    }

    if ( Const.isEmpty( encoding ) ) {
      return new String( binary );
    } else {
      try {
        return new String( binary, encoding );
      } catch ( UnsupportedEncodingException e ) {
        throw new KettleValueException( toString()
          + " : couldn't convert binary value to String with specified string encoding ["
          + stringEncoding + "]", e );
      }
    }
  }

  /**
   * Converts the specified data object to the normal storage type.
   *
   * @param object
   *          the data object to convert
   * @return the data in a normal storage type
   * @throws KettleValueException
   *           In case there is a data conversion error.
   */
  @Override
  public Object convertToNormalStorageType( Object object ) throws KettleValueException {
    if ( object == null ) {
      return null;
    }

    switch ( storageType ) {
      case STORAGE_TYPE_NORMAL:
        return object;
      case STORAGE_TYPE_BINARY_STRING:
        return convertBinaryStringToNativeType( (byte[]) object );
      case STORAGE_TYPE_INDEXED:
        return index[(Integer) object];
      default:
        throw new KettleValueException( toStringMeta() + " : Unknown storage type [" + storageType
          + "] while converting to normal storage type" );
    }
  }

  /**
   * Converts the specified data object to the binary string storage type.
   *
   * @param object
   *          the data object to convert
   * @return the data in a binary string storage type
   * @throws KettleValueException
   *           In case there is a data conversion error.
   */
  @Override
  public Object convertToBinaryStringStorageType( Object object ) throws KettleValueException {
    if ( object == null ) {
      return null;
    }

    switch ( storageType ) {
      case STORAGE_TYPE_NORMAL:
        return convertNormalStorageTypeToBinaryString( object );
      case STORAGE_TYPE_BINARY_STRING:
        return object;
      case STORAGE_TYPE_INDEXED:
        return convertNormalStorageTypeToBinaryString( index[(Integer) object] );
      default:
        throw new KettleValueException( toStringMeta() + " : Unknown storage type [" + storageType
          + "] while converting to normal storage type" );
    }
  }

  /**
   * Convert the binary data to the actual data type.<br>
   * - byte[] --> Long (Integer) - byte[] --> Double (Number) - byte[] --> BigDecimal (BigNumber) - byte[] --> Date
   * (Date) - byte[] --> Boolean (Boolean) - byte[] --> byte[] (Binary)
   *
   * @param binary the raw binary-string representation, may be null
   * @return the native-type value, or null for a null input
   * @throws KettleValueException in case the decoded string cannot be converted
   */
  @Override
  public Object convertBinaryStringToNativeType( byte[] binary ) throws KettleValueException {
    if ( binary == null ) {
      return null;
    }

    numberOfBinaryStringConversions++;

    // OK, so we have an internal representation of the original object, read
    // from file.
    // First we decode it in the correct encoding
    //
    String string = convertBinaryStringToString( binary );

    // In this method we always must convert the data.
    // We use the storageMetadata object to convert the binary string object.
    //
    // --> Convert from the String format to the current data type...
    //
    return convertData( storageMetadata, string );
  }

  @Override
  public Object convertNormalStorageTypeToBinaryString( Object object ) throws KettleValueException {
    if ( object == null ) {
      return null;
    }

    String string = getString( object );

    return convertStringToBinaryString( string );
  }

  // Encodes a String to byte[] using the configured string encoding (platform default if empty).
  protected byte[] convertStringToBinaryString( String string ) throws KettleValueException {
    if ( string == null ) {
      return null;
    }

    if ( Const.isEmpty( stringEncoding ) ) {
      return string.getBytes();
    } else {
      try {
        return string.getBytes( stringEncoding );
      } catch ( UnsupportedEncodingException e ) {
        throw new KettleValueException( toString()
          + " : couldn't convert String to Binary with specified string encoding [" + stringEncoding + "]", e );
      }
    }
  }

  /**
   * Clones the data. Normally, we don't have to do anything here, but just for arguments and safety, we do a little
   * extra work in case of binary blobs and Date objects. We should write a programmers manual later on to specify in
   * all clarity that "we always overwrite/replace values in the Object[] data rows, we never modify them" .
   *
   * @return a cloned data object if needed
   */
  @Override
  public Object cloneValueData( Object object ) throws KettleValueException {
    if ( object == null ) {
      return null;
    }

    if ( storageType == STORAGE_TYPE_NORMAL ) {
      switch ( getType() ) {
        case ValueMetaInterface.TYPE_STRING:
        case ValueMetaInterface.TYPE_NUMBER:
        case ValueMetaInterface.TYPE_INTEGER:
        case ValueMetaInterface.TYPE_BOOLEAN:
        case ValueMetaInterface.TYPE_BIGNUMBER: // primitive data types: we can only
          // overwrite these, not change them
          return object;

        case ValueMetaInterface.TYPE_DATE:
          return new Date( ( (Date) object ).getTime() ); // just to make sure: very
          // inexpensive too.
        case ValueMetaInterface.TYPE_BINARY:
          // Deep-copy the byte array so callers can't mutate the original row data.
          byte[] origin = (byte[]) object;
          byte[] target = new byte[origin.length];
          System.arraycopy( origin, 0, target, 0, origin.length );
          return target;

        case ValueMetaInterface.TYPE_SERIALIZABLE:
          // Let's not create a copy but simply return the same value.
          //
          return object;

        default:
          throw new KettleValueException( toString() + ": unable to make copy of value type: " + getType() );
      }
    } else {

      return object;

    }
  }

  /**
   * Renders the value as a String in the pre-3.0 compatible way for Date, Number and
   * Integer types (fixed masks); all other types fall back to getString().
   */
  @Override
  public String getCompatibleString( Object object ) throws KettleValueException {
    try {
      String string;

      switch ( type ) {
        case TYPE_DATE:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              string = convertDateToCompatibleString( (Date) object );
              break;
            case STORAGE_TYPE_BINARY_STRING:
              string = convertDateToCompatibleString( (Date) convertBinaryStringToNativeType( (byte[]) object ) );
              break;
            case STORAGE_TYPE_INDEXED:
              if ( object == null ) {
                string = null;
              } else {
                string = convertDateToCompatibleString( (Date) index[( (Integer) object ).intValue()] );
              }
              break;
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
          break;
        case TYPE_NUMBER:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              string = convertNumberToCompatibleString( (Double) object );
              break;
            case STORAGE_TYPE_BINARY_STRING:
              string = convertNumberToCompatibleString( (Double) convertBinaryStringToNativeType( (byte[]) object ) );
              break;
            case STORAGE_TYPE_INDEXED:
              string = object == null ? null : convertNumberToCompatibleString( (Double) index[( (Integer) object )
                .intValue()] );
              break;
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified."
                );
          }
          break;
        case TYPE_INTEGER:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              string = convertIntegerToCompatibleString( (Long) object );
              break;
            case STORAGE_TYPE_BINARY_STRING:
              string = convertIntegerToCompatibleString( (Long) convertBinaryStringToNativeType( (byte[]) object ) );
              break;
            case STORAGE_TYPE_INDEXED:
              string = object == null ? null : convertIntegerToCompatibleString( (Long) index[( (Integer) object )
                .intValue()] );
              break;
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
          break;
        default:
          return getString( object );
      }

      return string;
    } catch ( ClassCastException e ) {
      throw new KettleValueException( toString() + " : There was a data type error: the data type of "
        + object.getClass().getName() + " object [" + object + "] does not correspond to value meta ["
        + toStringMeta() + "]" );
    }
  }

  /**
   * Renders the value as a String according to this value's type, storage type, masks
   * and trim/padding settings.
   */
  @Override
  public String getString( Object object ) throws KettleValueException {
    try {
      String string;

      switch ( type ) {
        case TYPE_STRING:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              string = object == null ? null : object.toString();
              break;
            case STORAGE_TYPE_BINARY_STRING:
              string = (String) convertBinaryStringToNativeType( (byte[]) object );
              break;
            case STORAGE_TYPE_INDEXED:
              string = object == null ? null : (String) index[( (Integer) object ).intValue()];
              break;
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
          if ( string != null ) {
            string = trim( string );
          }
          break;

        case TYPE_DATE:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              string = convertDateToString( (Date) object );
              break;
            case STORAGE_TYPE_BINARY_STRING:
              string = convertDateToString( (Date) convertBinaryStringToNativeType( (byte[]) object ) );
              break;
            case STORAGE_TYPE_INDEXED:
              string = object == null ?
                null : convertDateToString( (Date) index[( (Integer) object ).intValue()] );
              break;
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
          break;

        case TYPE_NUMBER:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              string = convertNumberToString( (Double) object );
              break;
            case STORAGE_TYPE_BINARY_STRING:
              string = convertNumberToString( (Double) convertBinaryStringToNativeType( (byte[]) object ) );
              break;
            case STORAGE_TYPE_INDEXED:
              string = object == null ? null : convertNumberToString( (Double) index[( (Integer) object ).intValue()] );
              break;
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
          break;

        case TYPE_INTEGER:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              string = convertIntegerToString( (Long) object );
              break;
            case STORAGE_TYPE_BINARY_STRING:
              string = convertIntegerToString( (Long) convertBinaryStringToNativeType( (byte[]) object ) );
              break;
            case STORAGE_TYPE_INDEXED:
              string = object == null ? null : convertIntegerToString( (Long) index[( (Integer) object ).intValue()] );
              break;
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
          break;

        case TYPE_BIGNUMBER:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              string = convertBigNumberToString( (BigDecimal) object );
              break;
            case STORAGE_TYPE_BINARY_STRING:
              string = convertBigNumberToString( (BigDecimal) convertBinaryStringToNativeType( (byte[]) object ) );
              break;
            case STORAGE_TYPE_INDEXED:
              string = object == null ? null : convertBigNumberToString( (BigDecimal) index[( (Integer) object ).intValue()] );
              break;
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified."
                );
          }
          break;

        case TYPE_BOOLEAN:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              string = convertBooleanToString( (Boolean) object );
              break;
            case STORAGE_TYPE_BINARY_STRING:
              string = convertBooleanToString( (Boolean) convertBinaryStringToNativeType( (byte[]) object ) );
              break;
            case STORAGE_TYPE_INDEXED:
              string = object == null ? null : convertBooleanToString( (Boolean) index[( (Integer) object ).intValue()] );
              break;
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
          break;

        case TYPE_BINARY:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              string = convertBinaryStringToString( (byte[]) object );
              break;
            case STORAGE_TYPE_BINARY_STRING:
              string = convertBinaryStringToString( (byte[]) object );
              break;
            case STORAGE_TYPE_INDEXED:
              string = object == null ? null : convertBinaryStringToString( (byte[]) index[( (Integer) object ).intValue()] );
              break;
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
          break;

        case TYPE_SERIALIZABLE:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              string = object == null ? null : object.toString();
              break; // just go for the default toString()
            case STORAGE_TYPE_BINARY_STRING:
              string = convertBinaryStringToString( (byte[]) object );
              break;
            case STORAGE_TYPE_INDEXED:
              string = object == null ? null : index[( (Integer) object ).intValue()].toString();
              break; // just go for the default toString()
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
          break;

        default:
          throw new KettleValueException( toString() + " : Unknown type " + type + " specified."
            );
      }

      // Optionally pad the rendered string to the configured length.
      if ( isOutputPaddingEnabled() && getLength() > 0 ) {
        string = ValueDataUtil.rightPad( string, getLength() );
      }

      return string;
    } catch ( ClassCastException e ) {
      throw new KettleValueException( toString() + " : There was a data type error: the data type of "
        + object.getClass().getName() + " object [" + object + "] does not correspond to value meta ["
        + toStringMeta() + "]" );
    }
  }

  // Applies the configured trim type (none/left/right/both) to a non-null string.
  protected String trim( String string ) {
    switch ( getTrimType() ) {
      case TRIM_TYPE_NONE:
        break;
      case TRIM_TYPE_RIGHT:
        string = Const.rtrim( string );
        break;
      case TRIM_TYPE_LEFT:
        string = Const.ltrim( string );
        break;
      case TRIM_TYPE_BOTH:
        string = Const.trim( string );
        break;
      default:
        break;
    }
    return string;
  }

  /**
   * Converts the value to a Double according to this value's type and storage type.
   */
  @Override
  public Double getNumber( Object object ) throws KettleValueException {
    try {
      if ( object == null ) {
        return null;
      }
      switch ( type ) {
        case TYPE_NUMBER:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              return (Double) object;
            case STORAGE_TYPE_BINARY_STRING:
              return (Double) convertBinaryStringToNativeType( (byte[]) object );
            case STORAGE_TYPE_INDEXED:
              return (Double) index[( (Integer) object ).intValue()];
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
        case TYPE_STRING:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              return convertStringToNumber( (String) object );
            case STORAGE_TYPE_BINARY_STRING:
              return convertStringToNumber( (String) convertBinaryStringToNativeType( (byte[]) object ) );
            case STORAGE_TYPE_INDEXED:
              return convertStringToNumber( (String) index[( (Integer) object ).intValue()] );
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified."
                );
          }
        case TYPE_DATE:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              return convertDateToNumber( (Date) object );
            case STORAGE_TYPE_BINARY_STRING:
              return convertDateToNumber( (Date) convertBinaryStringToNativeType( (byte[]) object ) );
            case STORAGE_TYPE_INDEXED:
              return new Double( ( (Date) index[( (Integer) object ).intValue()] ).getTime() );
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
        case TYPE_INTEGER:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              return new Double( ( (Long) object ).doubleValue() );
            case STORAGE_TYPE_BINARY_STRING:
              return new Double( ( (Long) convertBinaryStringToNativeType( (byte[]) object ) ).doubleValue() );
            case STORAGE_TYPE_INDEXED:
              return new Double( ( (Long) index[( (Integer) object ).intValue()] ).doubleValue() );
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
        case TYPE_BIGNUMBER:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              return new Double( ( (BigDecimal) object ).doubleValue() );
            case STORAGE_TYPE_BINARY_STRING:
              return new Double( ( (BigDecimal) convertBinaryStringToNativeType( (byte[]) object ) ).doubleValue() );
            case STORAGE_TYPE_INDEXED:
              return new Double( ( (BigDecimal) index[( (Integer) object ).intValue()] ).doubleValue() );
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." );
          }
        case TYPE_BOOLEAN:
          switch ( storageType ) {
            case STORAGE_TYPE_NORMAL:
              return convertBooleanToNumber( (Boolean) object );
            case STORAGE_TYPE_BINARY_STRING:
              return convertBooleanToNumber( (Boolean) convertBinaryStringToNativeType( (byte[]) object ) );
            case STORAGE_TYPE_INDEXED:
              return convertBooleanToNumber( (Boolean) index[( (Integer) object ).intValue()] );
            default:
              throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified."
); } case TYPE_BINARY: throw new KettleValueException( toString() + " : I don't know how to convert binary values to numbers." ); case TYPE_SERIALIZABLE: throw new KettleValueException( toString() + " : I don't know how to convert serializable values to numbers." ); default: throw new KettleValueException( toString() + " : Unknown type " + type + " specified." ); } } catch ( Exception e ) { throw new KettleValueException( "Unexpected conversion error while converting value [" + toString() + "] to a Number", e ); } } @Override public Long getInteger( Object object ) throws KettleValueException { try { if ( object == null ) { return null; } switch ( type ) { case TYPE_INTEGER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return (Long) object; case STORAGE_TYPE_BINARY_STRING: return (Long) convertBinaryStringToNativeType( (byte[]) object ); case STORAGE_TYPE_INDEXED: return (Long) index[( (Integer) object ).intValue()]; default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_STRING: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertStringToInteger( (String) object ); case STORAGE_TYPE_BINARY_STRING: return convertStringToInteger( (String) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertStringToInteger( (String) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." 
); } case TYPE_NUMBER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return new Long( Math.round( ( (Double) object ).doubleValue() ) ); case STORAGE_TYPE_BINARY_STRING: return new Long( Math.round( ( (Double) convertBinaryStringToNativeType( (byte[]) object ) ) .doubleValue() ) ); case STORAGE_TYPE_INDEXED: return new Long( Math.round( ( (Double) index[( (Integer) object ).intValue()] ).doubleValue() ) ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_DATE: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertDateToInteger( (Date) object ); case STORAGE_TYPE_BINARY_STRING: return new Long( ( (Date) convertBinaryStringToNativeType( (byte[]) object ) ).getTime() ); case STORAGE_TYPE_INDEXED: return convertDateToInteger( (Date) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_BIGNUMBER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return new Long( ( (BigDecimal) object ).longValue() ); case STORAGE_TYPE_BINARY_STRING: return new Long( ( (BigDecimal) convertBinaryStringToNativeType( (byte[]) object ) ).longValue() ); case STORAGE_TYPE_INDEXED: return new Long( ( (BigDecimal) index[( (Integer) object ).intValue()] ).longValue() ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_BOOLEAN: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertBooleanToInteger( (Boolean) object ); case STORAGE_TYPE_BINARY_STRING: return convertBooleanToInteger( (Boolean) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertBooleanToInteger( (Boolean) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." 
); } case TYPE_BINARY: throw new KettleValueException( toString() + " : I don't know how to convert binary values to integers." ); case TYPE_SERIALIZABLE: throw new KettleValueException( toString() + " : I don't know how to convert serializable values to integers." ); default: throw new KettleValueException( toString() + " : Unknown type " + type + " specified." ); } } catch ( Exception e ) { throw new KettleValueException( "Unexpected conversion error while converting value [" + toString() + "] to an Integer", e ); } } @Override public BigDecimal getBigNumber( Object object ) throws KettleValueException { if ( object == null ) { return null; } switch ( type ) { case TYPE_BIGNUMBER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return (BigDecimal) object; case STORAGE_TYPE_BINARY_STRING: return (BigDecimal) convertBinaryStringToNativeType( (byte[]) object ); case STORAGE_TYPE_INDEXED: return (BigDecimal) index[( (Integer) object ).intValue()]; default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_STRING: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertStringToBigNumber( (String) object ); case STORAGE_TYPE_BINARY_STRING: return convertStringToBigNumber( (String) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertStringToBigNumber( (String) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." 
); } case TYPE_INTEGER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return BigDecimal.valueOf( ( (Long) object ).longValue() ); case STORAGE_TYPE_BINARY_STRING: return BigDecimal.valueOf( ( (Long) convertBinaryStringToNativeType( (byte[]) object ) ).longValue() ); case STORAGE_TYPE_INDEXED: return BigDecimal.valueOf( ( (Long) index[( (Integer) object ).intValue()] ).longValue() ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_NUMBER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return BigDecimal.valueOf( ( (Double) object ).doubleValue() ); case STORAGE_TYPE_BINARY_STRING: return BigDecimal.valueOf( ( (Double) convertBinaryStringToNativeType( (byte[]) object ) ).doubleValue() ); case STORAGE_TYPE_INDEXED: return BigDecimal.valueOf( ( (Double) index[( (Integer) object ).intValue()] ).doubleValue() ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_DATE: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertDateToBigNumber( (Date) object ); case STORAGE_TYPE_BINARY_STRING: return convertDateToBigNumber( (Date) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertDateToBigNumber( (Date) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_BOOLEAN: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertBooleanToBigNumber( (Boolean) object ); case STORAGE_TYPE_BINARY_STRING: return convertBooleanToBigNumber( (Boolean) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertBooleanToBigNumber( (Boolean) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." 
); } case TYPE_BINARY: throw new KettleValueException( toString() + " : I don't know how to convert binary values to integers." ); case TYPE_SERIALIZABLE: throw new KettleValueException( toString() + " : I don't know how to convert serializable values to integers." ); default: throw new KettleValueException( toString() + " : Unknown type " + type + " specified." ); } } @Override public Boolean getBoolean( Object object ) throws KettleValueException { if ( object == null ) { return null; } switch ( type ) { case TYPE_BOOLEAN: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return (Boolean) object; case STORAGE_TYPE_BINARY_STRING: return (Boolean) convertBinaryStringToNativeType( (byte[]) object ); case STORAGE_TYPE_INDEXED: return (Boolean) index[( (Integer) object ).intValue()]; default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_STRING: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertStringToBoolean( trim( (String) object ) ); case STORAGE_TYPE_BINARY_STRING: return convertStringToBoolean( trim( (String) convertBinaryStringToNativeType( (byte[]) object ) ) ); case STORAGE_TYPE_INDEXED: return convertStringToBoolean( trim( (String) index[( (Integer) object ).intValue()] ) ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_INTEGER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertIntegerToBoolean( (Long) object ); case STORAGE_TYPE_BINARY_STRING: return convertIntegerToBoolean( (Long) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertIntegerToBoolean( (Long) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." 
); } case TYPE_NUMBER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertNumberToBoolean( (Double) object ); case STORAGE_TYPE_BINARY_STRING: return convertNumberToBoolean( (Double) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertNumberToBoolean( (Double) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_BIGNUMBER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertBigNumberToBoolean( (BigDecimal) object ); case STORAGE_TYPE_BINARY_STRING: return convertBigNumberToBoolean( (BigDecimal) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertBigNumberToBoolean( (BigDecimal) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_DATE: throw new KettleValueException( toString() + " : I don't know how to convert date values to booleans." ); case TYPE_BINARY: throw new KettleValueException( toString() + " : I don't know how to convert binary values to booleans." ); case TYPE_SERIALIZABLE: throw new KettleValueException( toString() + " : I don't know how to convert serializable values to booleans." ); default: throw new KettleValueException( toString() + " : Unknown type " + type + " specified." ); } } @Override public Date getDate( Object object ) throws KettleValueException { if ( object == null ) { return null; } switch ( type ) { case TYPE_DATE: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return (Date) object; case STORAGE_TYPE_BINARY_STRING: return (Date) convertBinaryStringToNativeType( (byte[]) object ); case STORAGE_TYPE_INDEXED: return (Date) index[( (Integer) object ).intValue()]; default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." 
); } case TYPE_STRING: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertStringToDate( (String) object ); case STORAGE_TYPE_BINARY_STRING: return convertStringToDate( (String) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertStringToDate( (String) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_NUMBER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertNumberToDate( (Double) object ); case STORAGE_TYPE_BINARY_STRING: return convertNumberToDate( (Double) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertNumberToDate( (Double) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_INTEGER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertIntegerToDate( (Long) object ); case STORAGE_TYPE_BINARY_STRING: return convertIntegerToDate( (Long) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertIntegerToDate( (Long) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_BIGNUMBER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertBigNumberToDate( (BigDecimal) object ); case STORAGE_TYPE_BINARY_STRING: return convertBigNumberToDate( (BigDecimal) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertBigNumberToDate( (BigDecimal) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_BOOLEAN: throw new KettleValueException( toString() + " : I don't know how to convert a boolean to a date." 
); case TYPE_BINARY: throw new KettleValueException( toString() + " : I don't know how to convert a binary value to date." ); case TYPE_SERIALIZABLE: throw new KettleValueException( toString() + " : I don't know how to convert a serializable value to date." ); default: throw new KettleValueException( toString() + " : Unknown type " + type + " specified." ); } } @Override public byte[] getBinary( Object object ) throws KettleValueException { if ( object == null ) { return null; } switch ( type ) { case TYPE_BINARY: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return (byte[]) object; case STORAGE_TYPE_BINARY_STRING: return (byte[]) object; case STORAGE_TYPE_INDEXED: return (byte[]) index[( (Integer) object ).intValue()]; default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_DATE: throw new KettleValueException( toString() + " : I don't know how to convert a date to binary." ); case TYPE_STRING: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString( (String) object ); case STORAGE_TYPE_BINARY_STRING: return (byte[]) object; case STORAGE_TYPE_INDEXED: return convertStringToBinaryString( (String) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_NUMBER: throw new KettleValueException( toString() + " : I don't know how to convert a number to binary." ); case TYPE_INTEGER: throw new KettleValueException( toString() + " : I don't know how to convert an integer to binary." ); case TYPE_BIGNUMBER: throw new KettleValueException( toString() + " : I don't know how to convert a bignumber to binary." ); case TYPE_BOOLEAN: throw new KettleValueException( toString() + " : I don't know how to convert a boolean to binary." ); case TYPE_SERIALIZABLE: throw new KettleValueException( toString() + " : I don't know how to convert a serializable to binary." 
); default: throw new KettleValueException( toString() + " : Unknown type " + type + " specified." ); } } @Override public byte[] getBinaryString( Object object ) throws KettleValueException { // If the input is a binary string, we should return the exact same binary // object IF // and only IF the formatting options for the storage metadata and this // object are the same. // if ( isStorageBinaryString() && identicalFormat ) { return (byte[]) object; // shortcut it directly for better performance. } try { if ( object == null ) { return null; } switch ( type ) { case TYPE_STRING: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString( (String) object ); case STORAGE_TYPE_BINARY_STRING: return convertStringToBinaryString( (String) convertBinaryStringToNativeType( (byte[]) object ) ); case STORAGE_TYPE_INDEXED: return convertStringToBinaryString( (String) index[( (Integer) object ).intValue()] ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_DATE: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString( convertDateToString( (Date) object ) ); case STORAGE_TYPE_BINARY_STRING: String string = convertDateToString( (Date) convertBinaryStringToNativeType( (byte[]) object ) ); return convertStringToBinaryString( string ); case STORAGE_TYPE_INDEXED: return convertStringToBinaryString( convertDateToString( (Date) index[( (Integer) object ).intValue()] ) ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." 
); } case TYPE_NUMBER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString( convertNumberToString( (Double) object ) ); case STORAGE_TYPE_BINARY_STRING: String string = convertNumberToString( (Double) convertBinaryStringToNativeType( (byte[]) object ) ); return convertStringToBinaryString( string ); case STORAGE_TYPE_INDEXED: return convertStringToBinaryString( convertNumberToString( (Double) index[( (Integer) object ).intValue()] ) ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_INTEGER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString( convertIntegerToString( (Long) object ) ); case STORAGE_TYPE_BINARY_STRING: String string = convertIntegerToString( (Long) convertBinaryStringToNativeType( (byte[]) object ) ); return convertStringToBinaryString( string ); case STORAGE_TYPE_INDEXED: return convertStringToBinaryString( convertIntegerToString( (Long) index[( (Integer) object ).intValue()] ) ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_BIGNUMBER: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString( convertBigNumberToString( (BigDecimal) object ) ); case STORAGE_TYPE_BINARY_STRING: String string = convertBigNumberToString( (BigDecimal) convertBinaryStringToNativeType( (byte[]) object ) ); return convertStringToBinaryString( string ); case STORAGE_TYPE_INDEXED: return convertStringToBinaryString( convertBigNumberToString( (BigDecimal) index[( (Integer) object ) .intValue()] ) ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." 
); } case TYPE_BOOLEAN: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString( convertBooleanToString( (Boolean) object ) ); case STORAGE_TYPE_BINARY_STRING: String string = convertBooleanToString( (Boolean) convertBinaryStringToNativeType( (byte[]) object ) ); return convertStringToBinaryString( string ); case STORAGE_TYPE_INDEXED: return convertStringToBinaryString( convertBooleanToString( (Boolean) index[( (Integer) object ) .intValue()] ) ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_BINARY: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return (byte[]) object; case STORAGE_TYPE_BINARY_STRING: return (byte[]) object; case STORAGE_TYPE_INDEXED: return (byte[]) index[( (Integer) object ).intValue()]; default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } case TYPE_SERIALIZABLE: switch ( storageType ) { case STORAGE_TYPE_NORMAL: return convertStringToBinaryString( object.toString() ); case STORAGE_TYPE_BINARY_STRING: return (byte[]) object; case STORAGE_TYPE_INDEXED: return convertStringToBinaryString( index[( (Integer) object ).intValue()].toString() ); default: throw new KettleValueException( toString() + " : Unknown storage type " + storageType + " specified." ); } default: throw new KettleValueException( toString() + " : Unknown type " + type + " specified." ); } } catch ( ClassCastException e ) { throw new KettleValueException( toString() + " : There was a data type error: the data type of " + object.getClass().getName() + " object [" + object + "] does not correspond to value meta [" + toStringMeta() + "]" ); } } /** * Checks whether or not the value is a String. * * @return true if the value is a String. 
*/
  @Override
  public boolean isString() {
    return type == TYPE_STRING;
  }

  /**
   * Checks whether or not this value is a Date (a Timestamp also counts as a Date here)
   *
   * @return true if the value is a Date
   */
  @Override
  public boolean isDate() {
    return type == TYPE_DATE || type == TYPE_TIMESTAMP;
  }

  /**
   * Checks whether or not the value is a Big Number
   *
   * @return true if this value is a big number
   */
  @Override
  public boolean isBigNumber() {
    return type == TYPE_BIGNUMBER;
  }

  /**
   * Checks whether or not the value is a Number
   *
   * @return true if this value is a number
   */
  @Override
  public boolean isNumber() {
    return type == TYPE_NUMBER;
  }

  /**
   * Checks whether or not this value is a boolean
   *
   * @return true if this value has type boolean.
   */
  @Override
  public boolean isBoolean() {
    return type == TYPE_BOOLEAN;
  }

  /**
   * Checks whether or not this value is of type Serializable
   *
   * @return true if this value has type Serializable
   */
  @Override
  public boolean isSerializableType() {
    return type == TYPE_SERIALIZABLE;
  }

  /**
   * Checks whether or not this value is of type Binary
   *
   * @return true if this value has type Binary
   */
  @Override
  public boolean isBinary() {
    return type == TYPE_BINARY;
  }

  /**
   * Checks whether or not this value is an Integer
   *
   * @return true if this value is an integer
   */
  @Override
  public boolean isInteger() {
    return type == TYPE_INTEGER;
  }

  /**
   * Checks whether or not this Value is Numeric. A Value is numeric if it is of type Number, Integer or BigNumber.
   *
   * @return true if the value is of type Number, Integer or BigNumber
   */
  @Override
  public boolean isNumeric() {
    return isInteger() || isNumber() || isBigNumber();
  }

  /**
   * Checks whether or not the specified type is Integer, Number or BigNumber
   *
   * @param t
   *          the type to check
   * @return true if the type is Integer, Number or BigNumber
   */
  public static final boolean isNumeric( int t ) {
    return t == TYPE_INTEGER || t == TYPE_NUMBER || t == TYPE_BIGNUMBER;
  }

  /**
   * @return true when this value meta is not sorted descending (the inverse of isSortedDescending())
   */
  public boolean isSortedAscending() {
    return !isSortedDescending();
  }

  /**
   * Return the type of a value in a textual form: "String",
"Number", "Integer", "Boolean", "Date", ... * * @return A String describing the type of value. */ @Override public String getTypeDesc() { return getTypeDesc( type ); } /** * Return the storage type of a value in a textual form: "normal", "binary-string", "indexes" * * @return A String describing the storage type of the value metadata */ public String getStorageTypeDesc() { return storageTypeCodes[storageType]; } @Override public String toString() { return name + " " + toStringMeta(); } /** * a String text representation of this Value, optionally padded to the specified length * * @return a String text representation of this Value, optionally padded to the specified length */ @Override public String toStringMeta() { // We (Sven Boden) did explicit performance testing for this // part. The original version used Strings instead of StringBuilders, // performance between the 2 does not differ that much. A few milliseconds // on 100000 iterations in the advantage of StringBuilders. The // lessened creation of objects may be worth it in the long run. StringBuilder retval = new StringBuilder( getTypeDesc() ); switch ( getType() ) { case TYPE_STRING: if ( getLength() > 0 ) { retval.append( '(' ).append( getLength() ).append( ')' ); } break; case TYPE_NUMBER: case TYPE_BIGNUMBER: if ( getLength() > 0 ) { retval.append( '(' ).append( getLength() ); if ( getPrecision() > 0 ) { retval.append( ", " ).append( getPrecision() ); } retval.append( ')' ); } break; case TYPE_INTEGER: if ( getLength() > 0 ) { retval.append( '(' ).append( getLength() ).append( ')' ); } break; default: break; } if ( !isStorageNormal() ) { retval.append( "<" ).append( getStorageTypeDesc() ).append( ">" ); } return retval.toString(); } @Override public void writeData( DataOutputStream outputStream, Object object ) throws KettleFileException { try { // Is the value NULL? 
outputStream.writeBoolean( object == null ); if ( object != null ) { // otherwise there is no point switch ( storageType ) { case STORAGE_TYPE_NORMAL: // Handle Content -- only when not NULL switch ( getType() ) { case TYPE_STRING: writeString( outputStream, (String) object ); break; case TYPE_NUMBER: writeNumber( outputStream, (Double) object ); break; case TYPE_INTEGER: writeInteger( outputStream, (Long) object ); break; case TYPE_DATE: writeDate( outputStream, (Date) object ); break; case TYPE_BIGNUMBER: writeBigNumber( outputStream, (BigDecimal) object ); break; case TYPE_BOOLEAN: writeBoolean( outputStream, (Boolean) object ); break; case TYPE_BINARY: writeBinary( outputStream, (byte[]) object ); break; default: throw new KettleFileException( toString() + " : Unable to serialize data type " + getType() ); } break; case STORAGE_TYPE_BINARY_STRING: // Handle binary string content -- only when not NULL // In this case, we opt not to convert anything at all for speed. // That way, we can save on CPU power. // Since the streams can be compressed, volume shouldn't be an issue // at all. // writeBinaryString( outputStream, (byte[]) object ); break; case STORAGE_TYPE_INDEXED: writeInteger( outputStream, (Integer) object ); // just an index break; default: throw new KettleFileException( toString() + " : Unknown storage type " + getStorageType() ); } } } catch ( ClassCastException e ) { throw new RuntimeException( toString() + " : There was a data type error: the data type of " + object.getClass().getName() + " object [" + object + "] does not correspond to value meta [" + toStringMeta() + "]" ); } catch ( IOException e ) { throw new KettleFileException( toString() + " : Unable to write value data to output stream", e ); } } @Override public Object readData( DataInputStream inputStream ) throws KettleFileException, KettleEOFException, SocketTimeoutException { try { // Is the value NULL? 
if ( inputStream.readBoolean() ) { return null; // done } switch ( storageType ) { case STORAGE_TYPE_NORMAL: // Handle Content -- only when not NULL switch ( getType() ) { case TYPE_STRING: return readString( inputStream ); case TYPE_NUMBER: return readNumber( inputStream ); case TYPE_INTEGER: return readInteger( inputStream ); case TYPE_DATE: return readDate( inputStream ); case TYPE_BIGNUMBER: return readBigNumber( inputStream ); case TYPE_BOOLEAN: return readBoolean( inputStream ); case TYPE_BINARY: return readBinary( inputStream ); default: throw new KettleFileException( toString() + " : Unable to de-serialize data of type " + getType() ); } case STORAGE_TYPE_BINARY_STRING: return readBinaryString( inputStream ); case STORAGE_TYPE_INDEXED: return readSmallInteger( inputStream ); // just an index: 4-bytes should // be enough. default: throw new KettleFileException( toString() + " : Unknown storage type " + getStorageType() ); } } catch ( EOFException e ) { throw new KettleEOFException( e ); } catch ( SocketTimeoutException e ) { throw e; } catch ( IOException e ) { throw new KettleFileException( toString() + " : Unable to read value data from input stream", e ); } } protected void writeString( DataOutputStream outputStream, String string ) throws IOException { // Write the length and then the bytes if ( string == null ) { outputStream.writeInt( -1 ); } else { byte[] chars = string.getBytes( Const.XML_ENCODING ); outputStream.writeInt( chars.length ); outputStream.write( chars ); } } protected void writeBinaryString( DataOutputStream outputStream, byte[] binaryString ) throws IOException { // Write the length and then the bytes if ( binaryString == null ) { outputStream.writeInt( -1 ); } else { outputStream.writeInt( binaryString.length ); outputStream.write( binaryString ); } } protected String readString( DataInputStream inputStream ) throws IOException { // Read the length and then the bytes int length = inputStream.readInt(); if ( length < 0 ) { return null; 
} byte[] chars = new byte[length]; inputStream.readFully( chars ); String string = new String( chars, Const.XML_ENCODING ); // System.out.println("Read string("+getName()+"), length "+length+": "+string); return string; } protected byte[] readBinaryString( DataInputStream inputStream ) throws IOException { // Read the length and then the bytes int length = inputStream.readInt(); if ( length < 0 ) { return null; } byte[] chars = new byte[length]; inputStream.readFully( chars ); return chars; } protected void writeBigNumber( DataOutputStream outputStream, BigDecimal number ) throws IOException { String string = number.toString(); writeString( outputStream, string ); } protected BigDecimal readBigNumber( DataInputStream inputStream ) throws IOException { String string = readString( inputStream ); // System.out.println("Read big number("+getName()+") ["+string+"]"); return new BigDecimal( string ); } protected void writeDate( DataOutputStream outputStream, Date date ) throws IOException { outputStream.writeLong( date.getTime() ); } protected Date readDate( DataInputStream inputStream ) throws IOException { long time = inputStream.readLong(); // System.out.println("Read Date("+getName()+") ["+new Date(time)+"]"); return new Date( time ); } protected void writeBoolean( DataOutputStream outputStream, Boolean bool ) throws IOException { outputStream.writeBoolean( bool.booleanValue() ); } protected Boolean readBoolean( DataInputStream inputStream ) throws IOException { Boolean bool = Boolean.valueOf( inputStream.readBoolean() ); // System.out.println("Read boolean("+getName()+") ["+bool+"]"); return bool; } protected void writeNumber( DataOutputStream outputStream, Double number ) throws IOException { outputStream.writeDouble( number.doubleValue() ); } protected Double readNumber( DataInputStream inputStream ) throws IOException { Double d = new Double( inputStream.readDouble() ); // System.out.println("Read number("+getName()+") ["+d+"]"); return d; } protected void 
writeInteger( DataOutputStream outputStream, Long number ) throws IOException {
    // Kettle Integers (Long) are serialized as 8 bytes via writeLong().
    outputStream.writeLong( number.longValue() );
  }

  /**
   * Reads an 8-byte Kettle Integer (a Long) previously written by writeInteger(DataOutputStream, Long).
   *
   * @param inputStream
   *          the stream to read from
   * @return the Long value read
   * @throws IOException
   *           in case of an I/O problem
   */
  protected Long readInteger( DataInputStream inputStream ) throws IOException {
    Long l = new Long( inputStream.readLong() );
    // System.out.println("Read integer("+getName()+") ["+l+"]");
    return l;
  }

  /**
   * Writes a 4-byte int. This overload is used for the position of a value in the dictionary of an indexed-storage
   * value meta (see writeData()), not for Kettle Integer data (which uses the Long overload above).
   *
   * @param outputStream
   *          the stream to write to
   * @param number
   *          the index position to write
   * @throws IOException
   *           in case of an I/O problem
   */
  protected void writeInteger( DataOutputStream outputStream, Integer number ) throws IOException {
    outputStream.writeInt( number.intValue() );
  }

  /**
   * Reads the 4-byte index position written by writeInteger(DataOutputStream, Integer) (see readData()).
   *
   * @param inputStream
   *          the stream to read from
   * @return the index position read
   * @throws IOException
   *           in case of an I/O problem
   */
  protected Integer readSmallInteger( DataInputStream inputStream ) throws IOException {
    Integer i = Integer.valueOf( inputStream.readInt() );
    // System.out.println("Read index integer("+getName()+") ["+i+"]");
    return i;
  }

  /**
   * Writes a binary value as a 4-byte length followed by the raw bytes.
   *
   * @param outputStream
   *          the stream to write to
   * @param binary
   *          the bytes to write (must not be null)
   * @throws IOException
   *           in case of an I/O problem
   */
  protected void writeBinary( DataOutputStream outputStream, byte[] binary ) throws IOException {
    outputStream.writeInt( binary.length );
    outputStream.write( binary );
  }

  /**
   * Reads a binary value written by writeBinary(): a 4-byte length followed by that many bytes.
   *
   * @param inputStream
   *          the stream to read from
   * @return the bytes read
   * @throws IOException
   *           in case of an I/O problem
   */
  protected byte[] readBinary( DataInputStream inputStream ) throws IOException {
    int size = inputStream.readInt();
    byte[] buffer = new byte[size];
    inputStream.readFully( buffer );
    // System.out.println("Read binary("+getName()+") with size="+size);
    return buffer;
  }

  /**
   * Serializes this value's metadata: the type id, the storage type id, and then storage-type-specific data
   * (the value dictionary for indexed storage, the storage metadata for binary-string storage), followed by the
   * remaining attributes.
   */
  @Override
  public void writeMeta( DataOutputStream outputStream ) throws KettleFileException {
    try {
      // NOTE: this local shadows the 'type' field for the remainder of the method.
      int type = getType();

      // Handle type
      outputStream.writeInt( type );

      // Handle storage type
      outputStream.writeInt( storageType );

      switch ( storageType ) {
        case STORAGE_TYPE_INDEXED:
          // Save the indexed strings...
if ( index == null ) {
            // -1 flags a null dictionary.
            outputStream.writeInt( -1 ); // null
          } else {
            // Write the element count, then each dictionary entry in its native serialized form.
            outputStream.writeInt( index.length );
            for ( int i = 0; i < index.length; i++ ) {
              try {
                switch ( type ) {
                  case TYPE_STRING:
                    writeString( outputStream, (String) index[i] );
                    break;
                  case TYPE_NUMBER:
                    writeNumber( outputStream, (Double) index[i] );
                    break;
                  case TYPE_INTEGER:
                    writeInteger( outputStream, (Long) index[i] );
                    break;
                  case TYPE_DATE:
                    writeDate( outputStream, (Date) index[i] );
                    break;
                  case TYPE_BIGNUMBER:
                    writeBigNumber( outputStream, (BigDecimal) index[i] );
                    break;
                  case TYPE_BOOLEAN:
                    writeBoolean( outputStream, (Boolean) index[i] );
                    break;
                  case TYPE_BINARY:
                    writeBinary( outputStream, (byte[]) index[i] );
                    break;
                  default:
                    // FIX: message previously read "indexe storage type"; spelled "indexed" to
                    // match the corresponding readMetaData() error message.
                    throw new KettleFileException( toString()
                      + " : Unable to serialize indexed storage type for data type " + getType() );
                }
              } catch ( ClassCastException e ) {
                throw new RuntimeException( toString() + " : There was a data type error: the data type of "
                  + index[i].getClass().getName() + " object [" + index[i] + "] does not correspond to value meta ["
                  + toStringMeta() + "]" );
              }
            }
          }
          break;

        case STORAGE_TYPE_BINARY_STRING:
          // Save the storage meta data...
// outputStream.writeBoolean( storageMetadata != null ); if ( storageMetadata != null ) { storageMetadata.writeMeta( outputStream ); } break; default: break; } // Handle name-length writeString( outputStream, name ); // length & precision outputStream.writeInt( getLength() ); outputStream.writeInt( getPrecision() ); // Origin writeString( outputStream, origin ); // Comments writeString( outputStream, comments ); // formatting Mask, decimal, grouping, currency writeString( outputStream, conversionMask ); writeString( outputStream, decimalSymbol ); writeString( outputStream, groupingSymbol ); writeString( outputStream, currencySymbol ); outputStream.writeInt( trimType ); // Case sensitivity of compare outputStream.writeBoolean( caseInsensitive ); // Sorting information outputStream.writeBoolean( sortedDescending ); // Padding information outputStream.writeBoolean( outputPaddingEnabled ); // date format lenient? outputStream.writeBoolean( dateFormatLenient ); // date format locale? writeString( outputStream, dateFormatLocale != null ? dateFormatLocale.toString() : null ); // date time zone? writeString( outputStream, dateFormatTimeZone != null ? dateFormatTimeZone.getID() : null ); // string to number conversion lenient? outputStream.writeBoolean( lenientStringToNumber ); } catch ( IOException e ) { throw new KettleFileException( toString() + " : Unable to write value metadata to output stream", e ); } } /** * Create a new Value meta object. * * @param inputStream * @throws KettleFileException * @throws KettleEOFException * @deprecated in favor of a combination of {@link ValueMetaFactory}.createValueMeta() and the loadMetaData() method. 
*/
  @Deprecated
  public ValueMetaBase( DataInputStream inputStream ) throws KettleFileException, KettleEOFException {
    this();

    // The type is read here; all remaining attributes are delegated to
    // readMetaData(), which deliberately does NOT read the type itself.
    try {
      type = inputStream.readInt();
    } catch ( EOFException e ) {
      throw new KettleEOFException( e );
    } catch ( IOException e ) {
      throw new KettleFileException( toString() + " : Unable to read value metadata from input stream", e );
    }

    readMetaData( inputStream );
  }

  /**
   * Load the attributes of this particular value meta object from the input stream. Loading the type is not handled
   * here, this should be read from the stream previously!
   * <p>
   * The read order is the wire format and must stay in lock-step with writeMeta().
   *
   * @param inputStream
   *          the input stream to read from
   * @throws KettleFileException
   *           In case there was an IO problem
   * @throws KettleEOFException
   *           If we reached the end of the stream
   */
  @Override
  public void readMetaData( DataInputStream inputStream ) throws KettleFileException, KettleEOFException {

    // Loading the type is not handled here, this should be read from the stream previously!
    //
    try {

      // Handle storage type
      storageType = inputStream.readInt();

      // Read the data in the index
      switch ( storageType ) {
        case STORAGE_TYPE_INDEXED:
          // A size of -1 means writeMeta() serialized a null index array.
          int indexSize = inputStream.readInt();
          if ( indexSize < 0 ) {
            index = null;
          } else {
            index = new Object[indexSize];
            for ( int i = 0; i < indexSize; i++ ) {
              switch ( type ) {
                case TYPE_STRING:
                  index[i] = readString( inputStream );
                  break;
                case TYPE_NUMBER:
                  index[i] = readNumber( inputStream );
                  break;
                case TYPE_INTEGER:
                  index[i] = readInteger( inputStream );
                  break;
                case TYPE_DATE:
                  index[i] = readDate( inputStream );
                  break;
                case TYPE_BIGNUMBER:
                  index[i] = readBigNumber( inputStream );
                  break;
                case TYPE_BOOLEAN:
                  index[i] = readBoolean( inputStream );
                  break;
                case TYPE_BINARY:
                  index[i] = readBinary( inputStream );
                  break;
                default:
                  throw new KettleFileException( toString()
                    + " : Unable to de-serialize indexed storage type for data type " + getType() );
              }
            }
          }
          break;

        case STORAGE_TYPE_BINARY_STRING:
          // In case we do have storage metadata defined, we read that back in as
          // well..
          if ( inputStream.readBoolean() ) {
            storageMetadata = new ValueMetaBase( inputStream );
          }
          break;

        default:
          break;
      }

      // name
      name = readString( inputStream );

      // length & precision
      length = inputStream.readInt();
      precision = inputStream.readInt();

      // Origin
      origin = readString( inputStream );

      // Comments
      comments = readString( inputStream );

      // formatting Mask, decimal, grouping, currency
      conversionMask = readString( inputStream );
      decimalSymbol = readString( inputStream );
      groupingSymbol = readString( inputStream );
      currencySymbol = readString( inputStream );
      trimType = inputStream.readInt();

      // Case sensitivity
      caseInsensitive = inputStream.readBoolean();

      // Sorting type
      sortedDescending = inputStream.readBoolean();

      // Output padding?
      outputPaddingEnabled = inputStream.readBoolean();

      // is date parsing lenient?
      //
      dateFormatLenient = inputStream.readBoolean();

      // What is the date format locale?
      //
      String strDateFormatLocale = readString( inputStream );
      if ( Const.isEmpty( strDateFormatLocale ) ) {
        dateFormatLocale = null;
      } else {
        dateFormatLocale = EnvUtil.createLocale( strDateFormatLocale );
      }

      // What is the time zone to use for date parsing?
      // Note the asymmetry with the locale above: a missing time zone falls
      // back to the JVM default instead of null.
      String strTimeZone = readString( inputStream );
      if ( Const.isEmpty( strTimeZone ) ) {
        dateFormatTimeZone = TimeZone.getDefault();
      } else {
        dateFormatTimeZone = EnvUtil.createTimeZone( strTimeZone );
      }

      // is string to number parsing lenient?
      lenientStringToNumber = inputStream.readBoolean();
    } catch ( EOFException e ) {
      throw new KettleEOFException( e );
    } catch ( IOException e ) {
      throw new KettleFileException( toString() + " : Unable to read value metadata from input stream", e );
    }
  }

  /**
   * Serializes this value metadata (not the data itself) to an XML fragment, the
   * mirror image of the ValueMetaBase(Node) constructor.
   *
   * @return the metadata as an XML string, wrapped in XML_META_TAG
   * @throws IOException
   *           in case an indexed value cannot be serialized
   */
  @Override
  public String getMetaXML() throws IOException {
    StringBuilder xml = new StringBuilder();

    xml.append( XMLHandler.openTag( XML_META_TAG ) );

    xml.append( XMLHandler.addTagValue( "type", getTypeDesc() ) );
    xml.append( XMLHandler.addTagValue( "storagetype", getStorageTypeCode( getStorageType() ) ) );

    switch ( storageType ) {
      case STORAGE_TYPE_INDEXED:
        xml.append( XMLHandler.openTag( "index" ) );

        // Save the indexed strings...
        //
        if ( index != null ) {
          for ( int i = 0; i < index.length; i++ ) {
            try {
              switch ( type ) {
                case TYPE_STRING:
                  xml.append( XMLHandler.addTagValue( "value", (String) index[i] ) );
                  break;
                case TYPE_NUMBER:
                  xml.append( XMLHandler.addTagValue( "value", (Double) index[i] ) );
                  break;
                case TYPE_INTEGER:
                  xml.append( XMLHandler.addTagValue( "value", (Long) index[i] ) );
                  break;
                case TYPE_DATE:
                  xml.append( XMLHandler.addTagValue( "value", (Date) index[i] ) );
                  break;
                case TYPE_BIGNUMBER:
                  xml.append( XMLHandler.addTagValue( "value", (BigDecimal) index[i] ) );
                  break;
                case TYPE_BOOLEAN:
                  xml.append( XMLHandler.addTagValue( "value", (Boolean) index[i] ) );
                  break;
                case TYPE_BINARY:
                  xml.append( XMLHandler.addTagValue( "value", (byte[]) index[i] ) );
                  break;
                default:
                  throw new IOException( toString() + " : Unable to serialize index storage type to XML for data type "
                    + getType() );
              }
            } catch ( ClassCastException e ) {
              throw new RuntimeException( toString() + " : There was a data type error: the data type of "
                + index[i].getClass().getName() + " object [" + index[i] + "] does not correspond to value meta ["
                + toStringMeta() + "]" );
            }
          }
        }
        xml.append( XMLHandler.closeTag( "index" ) );
        break;

      case STORAGE_TYPE_BINARY_STRING:
        // Save the storage meta data...
//
        if ( storageMetadata != null ) {
          xml.append( XMLHandler.openTag( "storage-meta" ) );
          xml.append( storageMetadata.getMetaXML() );
          xml.append( XMLHandler.closeTag( "storage-meta" ) );
        }
        break;

      default:
        break;
    }

    xml.append( XMLHandler.addTagValue( "name", name ) );
    xml.append( XMLHandler.addTagValue( "length", length ) );
    xml.append( XMLHandler.addTagValue( "precision", precision ) );
    xml.append( XMLHandler.addTagValue( "origin", origin ) );
    xml.append( XMLHandler.addTagValue( "comments", comments ) );
    xml.append( XMLHandler.addTagValue( "conversion_Mask", conversionMask ) );
    xml.append( XMLHandler.addTagValue( "decimal_symbol", decimalSymbol ) );
    xml.append( XMLHandler.addTagValue( "grouping_symbol", groupingSymbol ) );
    xml.append( XMLHandler.addTagValue( "currency_symbol", currencySymbol ) );
    xml.append( XMLHandler.addTagValue( "trim_type", getTrimTypeCode( trimType ) ) );
    xml.append( XMLHandler.addTagValue( "case_insensitive", caseInsensitive ) );
    xml.append( XMLHandler.addTagValue( "sort_descending", sortedDescending ) );
    xml.append( XMLHandler.addTagValue( "output_padding", outputPaddingEnabled ) );
    xml.append( XMLHandler.addTagValue( "date_format_lenient", dateFormatLenient ) );
    xml.append( XMLHandler.addTagValue( "date_format_locale", dateFormatLocale != null ? dateFormatLocale.toString() : null ) );
    xml.append( XMLHandler.addTagValue( "date_format_timezone", dateFormatTimeZone != null ?
      dateFormatTimeZone.getID() : null ) );
    xml.append( XMLHandler.addTagValue( "lenient_string_to_number", lenientStringToNumber ) );

    xml.append( XMLHandler.closeTag( XML_META_TAG ) );

    return xml.toString();
  }

  /**
   * Reconstructs a value metadata object from the XML produced by getMetaXML().
   * The tag names read here must match the ones written there.
   *
   * @param node
   *          the XML_META_TAG node to read from
   * @throws KettleException
   *           in case an indexed value cannot be de-serialized
   */
  public ValueMetaBase( Node node ) throws KettleException {
    this();

    type = getType( XMLHandler.getTagValue( node, "type" ) );
    storageType = getStorageType( XMLHandler.getTagValue( node, "storagetype" ) );

    switch ( storageType ) {
      case STORAGE_TYPE_INDEXED:
        Node indexNode = XMLHandler.getSubNode( node, "index" );
        int nrIndexes = XMLHandler.countNodes( indexNode, "value" );
        index = new Object[nrIndexes];

        for ( int i = 0; i < index.length; i++ ) {
          Node valueNode = XMLHandler.getSubNodeByNr( indexNode, "value", i );
          String valueString = XMLHandler.getNodeValue( valueNode );
          // An empty tag round-trips back to a null index entry.
          if ( Const.isEmpty( valueString ) ) {
            index[i] = null;
          } else {
            switch ( type ) {
              case TYPE_STRING:
                index[i] = valueString;
                break;
              case TYPE_NUMBER:
                index[i] = Double.parseDouble( valueString );
                break;
              case TYPE_INTEGER:
                index[i] = Long.parseLong( valueString );
                break;
              case TYPE_DATE:
                index[i] = XMLHandler.stringToDate( valueString );
                break;
              case TYPE_BIGNUMBER:
                index[i] = new BigDecimal( valueString );
                break;
              case TYPE_BOOLEAN:
                index[i] = Boolean.valueOf( "Y".equalsIgnoreCase( valueString ) );
                break;
              case TYPE_BINARY:
                index[i] = XMLHandler.stringToBinary( valueString );
                break;
              default:
                throw new KettleException( toString()
                  + " : Unable to de-serialize indexe storage type from XML for data type " + getType() );
            }
          }
        }
        break;

      case STORAGE_TYPE_BINARY_STRING:
        // Load the storage meta data...
        //
        Node storageMetaNode = XMLHandler.getSubNode( node, "storage-meta" );
        Node storageValueMetaNode = XMLHandler.getSubNode( storageMetaNode, XML_META_TAG );
        if ( storageValueMetaNode != null ) {
          storageMetadata = new ValueMetaBase( storageValueMetaNode );
        }
        break;

      default:
        break;
    }

    name = XMLHandler.getTagValue( node, "name" );
    length = Integer.parseInt( XMLHandler.getTagValue( node, "length" ) );
    precision = Integer.parseInt( XMLHandler.getTagValue( node, "precision" ) );
    origin = XMLHandler.getTagValue( node, "origin" );
    comments = XMLHandler.getTagValue( node, "comments" );
    conversionMask = XMLHandler.getTagValue( node, "conversion_Mask" );
    decimalSymbol = XMLHandler.getTagValue( node, "decimal_symbol" );
    groupingSymbol = XMLHandler.getTagValue( node, "grouping_symbol" );
    currencySymbol = XMLHandler.getTagValue( node, "currency_symbol" );
    trimType = getTrimTypeByCode( XMLHandler.getTagValue( node, "trim_type" ) );
    caseInsensitive = "Y".equalsIgnoreCase( XMLHandler.getTagValue( node, "case_insensitive" ) );
    sortedDescending = "Y".equalsIgnoreCase( XMLHandler.getTagValue( node, "sort_descending" ) );
    outputPaddingEnabled = "Y".equalsIgnoreCase( XMLHandler.getTagValue( node, "output_padding" ) );
    dateFormatLenient = "Y".equalsIgnoreCase( XMLHandler.getTagValue( node, "date_format_lenient" ) );

    String dateFormatLocaleString = XMLHandler.getTagValue( node, "date_format_locale" );
    if ( !Const.isEmpty( dateFormatLocaleString ) ) {
      dateFormatLocale = EnvUtil.createLocale( dateFormatLocaleString );
    }
    // Missing time zone falls back to the JVM default (mirrors readMetaData()).
    String dateTimeZoneString = XMLHandler.getTagValue( node, "date_format_timezone" );
    if ( !Const.isEmpty( dateTimeZoneString ) ) {
      dateFormatTimeZone = EnvUtil.createTimeZone( dateTimeZoneString );
    } else {
      dateFormatTimeZone = TimeZone.getDefault();
    }
    lenientStringToNumber = "Y".equalsIgnoreCase( XMLHandler.getTagValue( node, "lenient_string_to_number" ) );
  }

  /**
   * Serializes a single data value (not the metadata) to an XML fragment,
   * wrapped in XML_DATA_TAG. A null value is written as an empty string.
   *
   * @param object
   *          the data value to serialize; interpreted according to this
   *          metadata's type and storage type
   * @return the XML fragment as a string
   * @throws IOException
   *           in case the data type or storage type cannot be serialized
   */
  @Override
  public String getDataXML( Object object ) throws IOException {
    StringBuilder xml = new StringBuilder();
String string;

    if ( object != null ) {
      try {
        switch ( storageType ) {
          case STORAGE_TYPE_NORMAL:
            // Handle Content -- only when not NULL
            //
            switch ( getType() ) {
              case TYPE_STRING:
                string = (String) object;
                break;
              case TYPE_NUMBER:
                string = Double.toString( (Double) object );
                break;
              case TYPE_INTEGER:
                string = Long.toString( (Long) object );
                break;
              case TYPE_DATE:
                string = XMLHandler.date2string( (Date) object );
                break;
              case TYPE_BIGNUMBER:
                string = ( (BigDecimal) object ).toString();
                break;
              case TYPE_BOOLEAN:
                string = Boolean.toString( (Boolean) object );
                break;
              case TYPE_BINARY:
                string = XMLHandler.encodeBinaryData( (byte[]) object );
                break;
              case TYPE_TIMESTAMP:
                string = XMLHandler.timestamp2string( (Timestamp) object );
                break;
              case TYPE_INET:
                string = ( (InetAddress) object ).toString();
                break;
              default:
                throw new IOException( toString() + " : Unable to serialize data type to XML " + getType() );
            }
            break;

          case STORAGE_TYPE_BINARY_STRING:
            // Handle binary string content -- only when not NULL
            // In this case, we opt not to convert anything at all for speed.
            // That way, we can save on CPU power.
            // Since the streams can be compressed, volume shouldn't be an issue
            // at all.
            //
            string = XMLHandler.addTagValue( "binary-string", (byte[]) object );
            break;

          case STORAGE_TYPE_INDEXED:
            // Just an index
            string = XMLHandler.addTagValue( "index-value", (Integer) object );
            break;

          default:
            throw new IOException( toString() + " : Unknown storage type " + getStorageType() );
        }
      } catch ( ClassCastException e ) {
        throw new RuntimeException( toString() + " : There was a data type error: the data type of "
          + object.getClass().getName() + " object [" + object + "] does not correspond to value meta ["
          + toStringMeta() + "]", e );
      } catch ( Exception e ) {
        throw new RuntimeException( toString() + " : there was a value XML encoding error", e );
      }
    } else {
      // If the object is null: give an empty string
      //
      string = "";
    }
    xml.append( XMLHandler.addTagValue( XML_DATA_TAG, string ) );

    return xml.toString();
  }

  /**
   * Convert a data XML node to an Object that corresponds to the metadata. This is basically String to Object
   * conversion that is being done.
   *
   * @param node
   *          the node to retrieve the data value from
   * @return the converted data value
   * @throws KettleException
   *           thrown in case there is a problem with the XML to object conversion
   */
  @Override
  public Object getValue( Node node ) throws KettleException {

    switch ( storageType ) {
      case STORAGE_TYPE_NORMAL:
        String valueString = XMLHandler.getNodeValue( node );
        if ( Const.isEmpty( valueString ) ) {
          return null;
        }

        // Handle Content -- only when not NULL
        //
        switch ( getType() ) {
          case TYPE_STRING:
            return valueString;
          case TYPE_NUMBER:
            return Double.parseDouble( valueString );
          case TYPE_INTEGER:
            return Long.parseLong( valueString );
          case TYPE_DATE:
            return XMLHandler.stringToDate( valueString );
          case TYPE_BIGNUMBER:
            return new BigDecimal( valueString );
          case TYPE_BOOLEAN:
            return Boolean.valueOf( "Y".equalsIgnoreCase( valueString ) );
          case TYPE_BINARY:
            // NOTE(review): reads a nested "binary-value" tag while getDataXML()
            // writes base64 straight into the data tag via encodeBinaryData() --
            // looks asymmetric; confirm against the actual producers of this XML.
            return XMLHandler.stringToBinary( XMLHandler.getTagValue( node, "binary-value" ) );
          default:
            throw new KettleException( toString() + " : Unable to de-serialize '"
              + valueString + "' from XML for data type " + getType() );
        }

      case STORAGE_TYPE_BINARY_STRING:
        // Handle binary string content -- only when not NULL
        // In this case, we opt not to convert anything at all for speed.
        // That way, we can save on CPU power.
        // Since the streams can be compressed, volume shouldn't be an issue at
        // all.
        //
        String binaryString = XMLHandler.getTagValue( node, "binary-string" );
        if ( Const.isEmpty( binaryString ) ) {
          return null;
        }
        return XMLHandler.stringToBinary( binaryString );

      case STORAGE_TYPE_INDEXED:
        String indexString = XMLHandler.getTagValue( node, "index-value" );
        if ( Const.isEmpty( indexString ) ) {
          return null;
        }
        return Integer.parseInt( indexString );

      default:
        throw new KettleException( toString() + " : Unknown storage type " + getStorageType() );
    }
  }

  /**
   * get an array of String describing the possible types a Value can have.
   *
   * @return an array of String describing the possible types a Value can have.
   */
  public static final String[] getTypes() {
    return ValueMetaFactory.getValueMetaNames();
    /*
     * String retval[] = new String[typeCodes.length - 1]; System.arraycopy(typeCodes, 1, retval, 0, typeCodes.length -
     * 1); return retval;
     */
  }

  /**
   * Get an array of String describing the possible types a Value can have.
   *
   * @return an array of String describing the possible types a Value can have.
   */
  public static final String[] getAllTypes() {
    return ValueMetaFactory.getAllValueMetaNames();
    /*
     * String retval[] = new String[typeCodes.length]; System.arraycopy(typeCodes, 0, retval, 0, typeCodes.length);
     * return retval;
     */
  }

  /**
   * TODO: change Desc to Code all over the place. Make sure we can localise this stuff later on.
   *
   * @param type
   *          the type
   * @return the description (code) of the type
   */
  public static final String getTypeDesc( int type ) {
    return ValueMetaFactory.getValueMetaName( type );
    // return typeCodes[type];
  }

  /**
   * Convert the String description of a type to an integer type.
*
   * @param desc
   *          The description of the type to convert
   * @return The integer type of the given String. (ValueMetaInterface.TYPE_...)
   */
  public static final int getType( String desc ) {
    return ValueMetaFactory.getIdForValueMeta( desc );

    /*
     * for (int i = 1; i < typeCodes.length; i++) { if (typeCodes[i].equalsIgnoreCase(desc)) { return i; } }
     *
     * return TYPE_NONE;
     */
  }

  /**
   * Convert the String description of a storage type to an integer type.
   *
   * @param desc
   *          The description of the storage type to convert
   * @return The integer storage type of the given String. (ValueMetaInterface.STORAGE_TYPE_...) or -1 if the storage
   *         type code could not be found.
   */
  public static final int getStorageType( String desc ) {
    for ( int i = 0; i < storageTypeCodes.length; i++ ) {
      if ( storageTypeCodes[i].equalsIgnoreCase( desc ) ) {
        return i;
      }
    }
    return -1;
  }

  /**
   * Converts a storage type constant to its String code, or null when the
   * constant is out of the known STORAGE_TYPE_NORMAL..STORAGE_TYPE_INDEXED range.
   */
  public static final String getStorageTypeCode( int storageType ) {
    if ( storageType >= STORAGE_TYPE_NORMAL && storageType <= STORAGE_TYPE_INDEXED ) {
      return storageTypeCodes[storageType];
    }
    return null;
  }

  /**
   * Determine if an object is null. This is the case if data==null or if it's an empty string.
   *
   * @param data
   *          the object to test
   * @return true if the object is considered null.
   * @throws KettleValueException
   *           in case there is a conversion error (only thrown in case of lazy conversion)
   */
  @Override
  public boolean isNull( Object data ) throws KettleValueException {
    //noinspection deprecation
    return isNull( data, EMPTY_STRING_AND_NULL_ARE_DIFFERENT );
  }

  /*
   * Do not use this method directly! It is for tests!
   */
  @Deprecated
  boolean isNull( Object data, boolean emptyStringDiffersFromNull ) throws KettleValueException {
    try {
      Object value = data;
      if ( isStorageBinaryString() ) {
        // Lazy-conversion shortcut: a null or (when empty string equals null)
        // zero-length raw byte array is null without doing the conversion.
        if ( value == null || !emptyStringDiffersFromNull && ( (byte[]) value ).length == 0 ) {
          return true; // shortcut
        }
        value = convertBinaryStringToNativeType( (byte[]) data );
      }

      // Re-check for null, even for lazy conversion.
      // A value (5 spaces for example) can be null after trim and conversion
      //
      if ( value == null ) {
        return true;
      }

      if ( emptyStringDiffersFromNull ) {
        return false;
      }

      // If it's a string and the string is empty, it's a null value as well
      //
      if ( isString() ) {
        if ( value.toString().length() == 0 ) {
          return true;
        }
      }

      // We tried everything else so we assume this value is not null.
      //
      return false;
    } catch ( ClassCastException e ) {
      throw new RuntimeException( "Unable to verify if [" + toString() + "] is null or not because of an error:"
        + e.toString(), e );
    }
  }

  /*
   * Compare 2 binary strings, one byte at a time.<br> This algorithm is very fast but most likely wrong as well.<br>
   *
   * @param one The first binary string to compare with
   *
   * @param two the second binary string to compare to
   *
   * @return -1 if <i>one</i> is smaller than <i>two</i>, 0 is both byte arrays are identical and 1 if <i>one</i> is
   * larger than <i>two</i> protected int compareBinaryStrings(byte[] one, byte[] two) {
   *
   * for (int i=0;i<one.length;i++) { if (i>=two.length) return 1; // larger if (one[i]>two[i]) return 1; // larger if
   * (one[i]<two[i]) return -1; // smaller } if (one.length>two.length) return 1; // larger if (one.length>two.length)
   * return -11; // smaller return 0; }
   */

  /**
   * Compare 2 values of the same data type
   *
   * @param data1
   *          the first value
   * @param data2
   *          the second value
   * @return 0 if the values are equal, -1 if data1 is smaller than data2 and +1 if it's larger.
   * @throws KettleValueException
   *           In case we get conversion errors
   */
  @Override
  public int compare( Object data1, Object data2 ) throws KettleValueException {
    boolean n1 = isNull( data1 );
    boolean n2 = isNull( data2 );

    // null is always smaller!
if ( n1 && !n2 ) { return -1; } if ( !n1 && n2 ) { return 1; } if ( n1 && n2 ) { return 0; } int cmp = 0; switch ( getType() ) { case TYPE_STRING: // if (isStorageBinaryString() && identicalFormat && // storageMetadata.isSingleByteEncoding()) return // compareBinaryStrings((byte[])data1, (byte[])data2); TODO String one = getString( data1 ); String two = getString( data2 ); if ( caseInsensitive ) { cmp = one.compareToIgnoreCase( two ); } else { cmp = one.compareTo( two ); } break; case TYPE_INTEGER: // if (isStorageBinaryString() && identicalFormat) return // compareBinaryStrings((byte[])data1, (byte[])data2); TODO cmp = getInteger( data1 ).compareTo( getInteger( data2 ) ); break; case TYPE_NUMBER: cmp = Double.compare( getNumber( data1 ).doubleValue(), getNumber( data2 ).doubleValue() ); break; case TYPE_DATE: cmp = Long.valueOf( getDate( data1 ).getTime() ).compareTo( Long.valueOf( getDate( data2 ).getTime() ) ); break; case TYPE_BIGNUMBER: cmp = getBigNumber( data1 ).compareTo( getBigNumber( data2 ) ); break; case TYPE_BOOLEAN: if ( getBoolean( data1 ).booleanValue() == getBoolean( data2 ).booleanValue() ) { cmp = 0; // true == true, false == false } else if ( getBoolean( data1 ).booleanValue() && !getBoolean( data2 ).booleanValue() ) { cmp = 1; // true > false } else { cmp = -1; // false < true } break; case TYPE_BINARY: byte[] b1 = (byte[]) data1; byte[] b2 = (byte[]) data2; int length = b1.length < b2.length ? b1.length : b2.length; for ( int i = 0; i < length; i++ ) { cmp = b1[i] - b2[i]; if ( cmp != 0 ) { cmp = cmp < 0 ? 
-1 : 1; break; } } cmp = b1.length - b2.length; break; default: throw new KettleValueException( toString() + " : Comparing values can not be done with data type : " + getType() ); } if ( isSortedDescending() ) { return -cmp; } else { return cmp; } } /** * Compare 2 values of the same data type * * @param data1 * the first value * @param meta2 * the second value's metadata * @param data2 * the second value * @return 0 if the values are equal, -1 if data1 is smaller than data2 and +1 if it's larger. * @throws KettleValueException * In case we get conversion errors */ @Override public int compare( Object data1, ValueMetaInterface meta2, Object data2 ) throws KettleValueException { if ( meta2 == null ) { throw new KettleValueException( toStringMeta() + " : Second meta data (meta2) is null, please check one of the previous steps." ); } try { // Before we can compare data1 to data2 we need to make sure they have the // same data type etc. // if ( getType() == meta2.getType() ) { if ( getStorageType() == meta2.getStorageType() ) { return compare( data1, data2 ); } // Convert the storage type to compare the data. // switch ( getStorageType() ) { case STORAGE_TYPE_NORMAL: return compare( data1, meta2.convertToNormalStorageType( data2 ) ); case STORAGE_TYPE_BINARY_STRING: return compare( data1, meta2.convertToBinaryStringStorageType( data2 ) ); case STORAGE_TYPE_INDEXED: switch ( meta2.getStorageType() ) { case STORAGE_TYPE_INDEXED: return compare( data1, data2 ); // not accessible, just to make sure. 
case STORAGE_TYPE_NORMAL: return -meta2.compare( data2, convertToNormalStorageType( data1 ) ); case STORAGE_TYPE_BINARY_STRING: return -meta2.compare( data2, convertToBinaryStringStorageType( data1 ) ); default: throw new KettleValueException( meta2.toStringMeta() + " : Unknown storage type : " + meta2.getStorageType() ); } default: throw new KettleValueException( toStringMeta() + " : Unknown storage type : " + getStorageType() ); } } // If the data types are not the same, the first one is the driver... // The second data type is converted to the first one. // return compare( data1, convertData( meta2, data2 ) ); } catch ( Exception e ) { throw new KettleValueException( toStringMeta() + " : Unable to compare with value [" + meta2.toStringMeta() + "]", e ); } } /** * Convert the specified data to the data type specified in this object. * * @param meta2 * the metadata of the object to be converted * @param data2 * the data of the object to be converted * @return the object in the data type of this value metadata object * @throws KettleValueException * in case there is a data conversion error */ @Override public Object convertData( ValueMetaInterface meta2, Object data2 ) throws KettleValueException { switch ( getType() ) { case TYPE_STRING: return meta2.getString( data2 ); case TYPE_NUMBER: return meta2.getNumber( data2 ); case TYPE_INTEGER: return meta2.getInteger( data2 ); case TYPE_DATE: return meta2.getDate( data2 ); case TYPE_BIGNUMBER: return meta2.getBigNumber( data2 ); case TYPE_BOOLEAN: return meta2.getBoolean( data2 ); case TYPE_BINARY: return meta2.getBinary( data2 ); default: throw new KettleValueException( toString() + " : I can't convert the specified value to data type : " + getType() ); } } /** * Convert the specified data to the data type specified in this object. For String conversion, be compatible with * version 2.5.2. 
* * @param meta2 * the metadata of the object to be converted * @param data2 * the data of the object to be converted * @return the object in the data type of this value metadata object * @throws KettleValueException * in case there is a data conversion error */ @Override public Object convertDataCompatible( ValueMetaInterface meta2, Object data2 ) throws KettleValueException { switch ( getType() ) { case TYPE_STRING: return meta2.getCompatibleString( data2 ); case TYPE_NUMBER: return meta2.getNumber( data2 ); case TYPE_INTEGER: return meta2.getInteger( data2 ); case TYPE_DATE: return meta2.getDate( data2 ); case TYPE_BIGNUMBER: return meta2.getBigNumber( data2 ); case TYPE_BOOLEAN: return meta2.getBoolean( data2 ); case TYPE_BINARY: return meta2.getBinary( data2 ); default: throw new KettleValueException( toString() + " : I can't convert the specified value to data type : " + getType() ); } } /** * Convert an object to the data type specified in the conversion metadata * * @param data * The data * @return The data converted to the storage data type * @throws KettleValueException * in case there is a conversion error. */ @Override public Object convertDataUsingConversionMetaData( Object data ) throws KettleValueException { if ( conversionMetadata == null ) { throw new KettleValueException( "API coding error: please specify the conversion metadata before attempting to convert value " + name ); } // Suppose we have an Integer 123, length 5 // The string variation of this is " 00123" // To convert this back to an Integer we use the storage metadata // Specifically, in method convertStringToInteger() we consult the // storageMetaData to get the correct conversion mask // That way we're always sure that a conversion works both ways. 
// NOTE(review): this chunk begins inside a data-conversion method whose declaration is above this
// excerpt; the switch below is that method's tail. It converts 'data' to the type requested by
// conversionMetadata using this value-meta's typed getters.
    switch ( conversionMetadata.getType() ) {
      case TYPE_STRING:
        return getString( data );
      case TYPE_INTEGER:
        return getInteger( data );
      case TYPE_NUMBER:
        return getNumber( data );
      case TYPE_DATE:
        return getDate( data );
      case TYPE_BIGNUMBER:
        return getBigNumber( data );
      case TYPE_BOOLEAN:
        return getBoolean( data );
      case TYPE_BINARY:
        return getBinary( data );
      default:
        // NOTE(review): the message reports storageMetadata's type, not conversionMetadata's —
        // looks intentional for diagnostics, but worth confirming.
        throw new KettleValueException( toString()
          + " : I can't convert the specified value to data type : " + storageMetadata.getType() );
    }
  }

  /**
   * Convert the specified string to the data type specified in this object.
   *
   * @param pol
   *          the string to be converted
   * @param convertMeta
   *          the metadata of the object (only string type) to be converted
   * @param nullIf
   *          set the object to null if pol equals nullIf (case-insensitive, spaces-right-padded match)
   * @param ifNull
   *          set the object to ifNull when pol is empty or null
   * @param trim_type
   *          the trim type to be used (ValueMetaInterface.TRIM_TYPE_XXX)
   * @return the object in the data type of this value metadata object
   * @throws KettleValueException
   *           in case there is a data conversion error
   */
  @Override
  public Object convertDataFromString( String pol, ValueMetaInterface convertMeta, String nullIf, String ifNull,
    int trim_type ) throws KettleValueException {
    if ( convertMeta == null ) {
      throw new KettleValueException( "API coding error: convertMeta input parameter should not be equals to null" );
    }

    // null handling and conversion of value to null
    //
    String null_value = nullIf;
    int inValueType = convertMeta.getType();
    int outValueType = getType();

    // No explicit null-marker given: fall back to the per-type default null representation.
    if ( null_value == null ) {
      switch ( inValueType ) {
        case ValueMetaInterface.TYPE_BOOLEAN:
          null_value = Const.NULL_BOOLEAN;
          break;
        case ValueMetaInterface.TYPE_STRING:
          null_value = Const.NULL_STRING;
          break;
        case ValueMetaInterface.TYPE_BIGNUMBER:
          null_value = Const.NULL_BIGNUMBER;
          break;
        case ValueMetaInterface.TYPE_NUMBER:
          null_value = Const.NULL_NUMBER;
          break;
        case ValueMetaInterface.TYPE_INTEGER:
          null_value = Const.NULL_INTEGER;
          break;
        case ValueMetaInterface.TYPE_DATE:
          null_value = Const.NULL_DATE;
          break;
        case ValueMetaInterface.TYPE_BINARY:
          null_value = Const.NULL_BINARY;
          break;
        default:
          null_value = Const.NULL_NONE;
          break;
      }
    }

    // See if we need to convert a null value into a String
    // For example, we might want to convert null into "Empty".
    //
    if ( !Const.isEmpty( ifNull ) ) {
      // Note that you can't pull the pad method up here as a nullComp variable
      // because you could get an NPE since you haven't checked isEmpty(pol)
      // yet!
      if ( Const.isEmpty( pol )
        || pol.equalsIgnoreCase( Const.rightPad( new StringBuilder( null_value ), pol.length() ) ) ) {
        pol = ifNull;
      }
    }

    // See if the polled value is empty
    // In that case, we have a null value on our hands...
    //
    Object emptyValue = ( outValueType == Value.VALUE_TYPE_STRING ) ? Const.NULL_STRING : null;
    if ( pol == null ) {
      return null;
    } else if ( Const.isEmpty( pol ) && outValueType != Value.VALUE_TYPE_STRING ) {
      return null;
    } else {
      // if the null_value is specified, we try to match with that.
      //
      if ( !Const.isEmpty( null_value ) ) {
        if ( null_value.length() <= pol.length() ) {
          // If the polled value is equal to the spaces right-padded null_value,
          // we have a match
          //
          if ( pol.equalsIgnoreCase( Const.rightPad( new StringBuilder( null_value ), pol.length() ) ) ) {
            return emptyValue;
          }
        }
      } else {
        // Verify if there are only spaces in the polled value...
        // We consider that empty as well...
        //
        if ( Const.onlySpaces( pol ) ) {
          return emptyValue;
        }
      }
    }

    // Trimming — manual StringBuilder trim so that ONLY spaces (not all whitespace) are removed.
    StringBuilder strpol;
    switch ( trim_type ) {
      case ValueMetaInterface.TRIM_TYPE_LEFT:
        strpol = new StringBuilder( pol );
        while ( strpol.length() > 0 && strpol.charAt( 0 ) == ' ' ) {
          strpol.deleteCharAt( 0 );
        }
        pol = strpol.toString();
        break;
      case ValueMetaInterface.TRIM_TYPE_RIGHT:
        strpol = new StringBuilder( pol );
        while ( strpol.length() > 0 && strpol.charAt( strpol.length() - 1 ) == ' ' ) {
          strpol.deleteCharAt( strpol.length() - 1 );
        }
        pol = strpol.toString();
        break;
      case ValueMetaInterface.TRIM_TYPE_BOTH:
        strpol = new StringBuilder( pol );
        while ( strpol.length() > 0 && strpol.charAt( 0 ) == ' ' ) {
          strpol.deleteCharAt( 0 );
        }
        while ( strpol.length() > 0 && strpol.charAt( strpol.length() - 1 ) == ' ' ) {
          strpol.deleteCharAt( strpol.length() - 1 );
        }
        pol = strpol.toString();
        break;
      default:
        break;
    }

    // On with the regular program...
    // Simply call the ValueMeta routines to do the conversion
    // We need to do some effort here: copy all
    //
    return convertData( convertMeta, pol );
  }

  /**
   * Calculate the hashcode of the specified data object
   *
   * @param object
   *          the data value to calculate a hashcode for
   * @return the calculated hashcode
   * @throws KettleValueException
   *           in case there is a data conversion error
   */
  @Override
  public int hashCode( Object object ) throws KettleValueException {
    int hash = 0;

    if ( isNull( object ) ) {
      // Nulls hash to a small per-type constant so a null of one type differs from another.
      switch ( getType() ) {
        case TYPE_BOOLEAN:
          hash ^= 1;
          break;
        case TYPE_DATE:
          hash ^= 2;
          break;
        case TYPE_NUMBER:
          hash ^= 4;
          break;
        case TYPE_STRING:
          hash ^= 8;
          break;
        case TYPE_INTEGER:
          hash ^= 16;
          break;
        case TYPE_BIGNUMBER:
          hash ^= 32;
          break;
        case TYPE_NONE:
          break;
        default:
          break;
      }
    } else {
      // Non-null: delegate to the native type's own hashCode.
      switch ( getType() ) {
        case TYPE_BOOLEAN:
          hash ^= getBoolean( object ).hashCode();
          break;
        case TYPE_DATE:
          hash ^= getDate( object ).hashCode();
          break;
        case TYPE_INTEGER:
          hash ^= getInteger( object ).hashCode();
          break;
        case TYPE_NUMBER:
          hash ^= getNumber( object ).hashCode();
          break;
        case TYPE_STRING:
          hash ^= getString( object ).hashCode();
          break;
        case TYPE_BIGNUMBER:
          hash ^= getBigNumber( object ).hashCode();
          break;
        case TYPE_NONE:
          break;
        default:
          break;
      }
    }

    return hash;
  }

  /**
   * Create an old-style value for backward compatibility reasons
   *
   * @param data
   *          the data to store in the value
   * @return a newly created Value object
   * @throws KettleValueException
   *           case there is a data conversion problem
   */
  @Override
  public Value createOriginalValue( Object data ) throws KettleValueException {
    Value value = new Value( name, type );
    value.setLength( length, precision );

    if ( isNull( data ) ) {
      value.setNull();
    } else {
      switch ( value.getType() ) {
        case TYPE_STRING:
          value.setValue( getString( data ) );
          break;
        case TYPE_NUMBER:
          value.setValue( getNumber( data ).doubleValue() );
          break;
        case TYPE_INTEGER:
          value.setValue( getInteger( data ).longValue() );
          break;
        case TYPE_DATE:
          value.setValue( getDate( data ) );
          break;
        case TYPE_BOOLEAN:
          value.setValue( getBoolean( data ).booleanValue() );
          break;
        case TYPE_BIGNUMBER:
          value.setValue( getBigNumber( data ) );
          break;
        case TYPE_BINARY:
          value.setValue( getBinary( data ) );
          break;
        default:
          throw new KettleValueException( toString() + " : We can't convert data type " + getTypeDesc()
            + " to an original (V2) Value" );
      }
    }
    return value;
  }

  /**
   * Extracts the primitive data from an old style Value object
   *
   * @param value
   *          the old style Value object
   * @return the value's data, NOT the meta data.
   * @throws KettleValueException
   *           case there is a data conversion problem
   */
  @Override
  public Object getValueData( Value value ) throws KettleValueException {
    if ( value == null || value.isNull() ) {
      return null;
    }

    // So far the old types and the new types map to the same thing.
    // For compatibility we just ask the old-style value to convert to the new
    // one.
    // In the old transformation this would happen sooner or later anyway.
    // It doesn't throw exceptions or complain either (unfortunately).
    //
    switch ( getType() ) {
      case ValueMetaInterface.TYPE_STRING:
        return value.getString();
      case ValueMetaInterface.TYPE_NUMBER:
        return value.getNumber();
      case ValueMetaInterface.TYPE_INTEGER:
        return value.getInteger();
      case ValueMetaInterface.TYPE_DATE:
        return value.getDate();
      case ValueMetaInterface.TYPE_BOOLEAN:
        return value.getBoolean();
      case ValueMetaInterface.TYPE_BIGNUMBER:
        return value.getBigNumber();
      case ValueMetaInterface.TYPE_BINARY:
        return value.getBytes();
      default:
        throw new KettleValueException( toString() + " : We can't convert original data type " + value.getTypeDesc()
          + " to a primitive data type" );
    }
  }

  /**
   * @return the storageMetadata
   */
  @Override
  public ValueMetaInterface getStorageMetadata() {
    return storageMetadata;
  }

  /**
   * @param storageMetadata
   *          the storageMetadata to set
   */
  @Override
  public void setStorageMetadata( ValueMetaInterface storageMetadata ) {
    this.storageMetadata = storageMetadata;
    // Keep the identicalFormat cache in sync with the new storage metadata.
    compareStorageAndActualFormat();
  }

  // Recomputes the identicalFormat flag: true when a binary-stored string can be passed through
  // unchanged because storage and actual metadata agree on encoding, masks and number formatting.
  protected void compareStorageAndActualFormat() {

    if ( storageMetadata == null ) {
      identicalFormat = true;
    } else {

      // If a trim type is set, we need to at least try to trim the strings.
      // In that case, we have to set the identical format off.
      //
      if ( trimType != TRIM_TYPE_NONE ) {
        identicalFormat = false;
      } else {

        // If there is a string encoding set and it's the same encoding in the
        // binary string, then we don't have to convert
        // If there are no encodings set, then we're certain we don't have to
        // convert as well.
        //
        if ( getStringEncoding() != null && getStringEncoding().equals( storageMetadata.getStringEncoding() )
          || getStringEncoding() == null && storageMetadata.getStringEncoding() == null ) {

          // However, perhaps the conversion mask changed since we read the
          // binary string?
          // The output can be different from the input. If the mask is
          // different, we need to do conversions.
          // Otherwise, we can just ignore it...
          //
          if ( isDate() ) {
            if ( ( getConversionMask() != null && getConversionMask().equals( storageMetadata.getConversionMask() ) )
              || ( getConversionMask() == null && storageMetadata.getConversionMask() == null ) ) {
              identicalFormat = true;
            } else {
              identicalFormat = false;
            }
          } else if ( isNumeric() ) {
            // Check the lengths first
            //
            if ( getLength() != storageMetadata.getLength() ) {
              identicalFormat = false;
            } else if ( getPrecision() != storageMetadata.getPrecision() ) {
              identicalFormat = false;
            } else if ( ( getConversionMask() != null && getConversionMask().equals(
              storageMetadata.getConversionMask() ) || ( getConversionMask() == null && storageMetadata
              .getConversionMask() == null ) ) ) {
              // For the same reasons as above, if the conversion mask, the
              // decimal or the grouping symbol changes
              // we need to convert from the binary strings to the target data
              // type and then back to a string in the required format.
              //
              if ( ( getGroupingSymbol() != null && getGroupingSymbol().equals( storageMetadata.getGroupingSymbol() ) )
                || ( getConversionMask() == null && storageMetadata.getConversionMask() == null ) ) {
                if ( ( getDecimalFormat( false ) != null && getDecimalFormat( false ).equals(
                  storageMetadata.getDecimalFormat( false ) ) )
                  || ( getDecimalFormat( false ) == null && storageMetadata.getDecimalFormat( false ) == null ) ) {
                  identicalFormat = true;
                } else {
                  identicalFormat = false;
                }
              } else {
                identicalFormat = false;
              }
            } else {
              identicalFormat = false;
            }
          }
        }
      }
    }
  }

  /**
   * @return the trimType
   */
  @Override
  public int getTrimType() {
    return trimType;
  }

  /**
   * @param trimType
   *          the trimType to set
   */
  @Override
  public void setTrimType( int trimType ) {
    this.trimType = trimType;
  }

  // Looks up a trim type by its persisted code; unknown or null codes map to 0 (TRIM_TYPE_NONE).
  public static final int getTrimTypeByCode( String tt ) {
    if ( tt == null ) {
      return 0;
    }
    for ( int i = 0; i < trimTypeCode.length; i++ ) {
      if ( trimTypeCode[i].equalsIgnoreCase( tt ) ) {
        return i;
      }
    }
    return 0;
  }

  // Looks up a trim type by its (localized) description, falling back to the code lookup.
  public static final int getTrimTypeByDesc( String tt ) {
    if ( tt == null ) {
      return 0;
    }
    for ( int i = 0; i < trimTypeDesc.length; i++ ) {
      if ( trimTypeDesc[i].equalsIgnoreCase( tt ) ) {
        return i;
      }
    }

    // If this fails, try to match using the code.
    return getTrimTypeByCode( tt );
  }

  // Returns the persisted code for a trim type index; out-of-range indices map to entry 0.
  public static final String getTrimTypeCode( int i ) {
    if ( i < 0 || i >= trimTypeCode.length ) {
      return trimTypeCode[0];
    }
    return trimTypeCode[i];
  }

  // Returns the localized description for a trim type index; out-of-range indices map to entry 0.
  public static final String getTrimTypeDesc( int i ) {
    if ( i < 0 || i >= trimTypeDesc.length ) {
      return trimTypeDesc[0];
    }
    return trimTypeDesc[i];
  }

  /**
   * @return the conversionMetadata
   */
  @Override
  public ValueMetaInterface getConversionMetadata() {
    return conversionMetadata;
  }

  /**
   * @param conversionMetadata
   *          the conversionMetadata to set
   */
  @Override
  public void setConversionMetadata( ValueMetaInterface conversionMetadata ) {
    this.conversionMetadata = conversionMetadata;
  }

  /**
   * @return true if the String encoding used (storage) is single byte encoded.
   */
  @Override
  public boolean isSingleByteEncoding() {
    return singleByteEncoding;
  }

  /**
   * @return the number of binary-string to native data type conversions done with this object
   */
  @Override
  public long getNumberOfBinaryStringConversions() {
    return numberOfBinaryStringConversions;
  }

  /**
   * @param numberOfBinaryStringConversions
   *          the number of binary-string to native data type conversions done with this object to set
   */
  @Override
  public void setNumberOfBinaryStringConversions( long numberOfBinaryStringConversions ) {
    this.numberOfBinaryStringConversions = numberOfBinaryStringConversions;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#isAutoIncrement()
   */
  @Override
  public boolean isOriginalAutoIncrement() {
    return originalAutoIncrement;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#setAutoIncrement(boolean)
   */
  @Override
  public void setOriginalAutoIncrement( boolean originalAutoIncrement ) {
    this.originalAutoIncrement = originalAutoIncrement;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#getColumnType()
   */
  @Override
  public int getOriginalColumnType() {
    return originalColumnType;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#setColumnType(int)
   */
  @Override
  public void setOriginalColumnType( int originalColumnType ) {
    this.originalColumnType = originalColumnType;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#getColumnTypeName()
   */
  @Override
  public String getOriginalColumnTypeName() {
    return originalColumnTypeName;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#setColumnTypeName(java.lang.String)
   */
  @Override
  public void setOriginalColumnTypeName( String originalColumnTypeName ) {
    this.originalColumnTypeName = originalColumnTypeName;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#isNullable()
   */
  @Override
  public int isOriginalNullable() {
    return originalNullable;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#setNullable(int)
   */
  @Override
  public void setOriginalNullable( int originalNullable ) {
    this.originalNullable = originalNullable;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#getPrecision()
   */
  @Override
  public int getOriginalPrecision() {
    return originalPrecision;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#setPrecision(int)
   */
  @Override
  public void setOriginalPrecision( int originalPrecision ) {
    this.originalPrecision = originalPrecision;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#getScale()
   */
  @Override
  public int getOriginalScale() {
    return originalScale;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#setScale(int)
   */
  @Override
  public void setOriginalScale( int originalScale ) {
    this.originalScale = originalScale;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#isSigned()
   */
  @Override
  public boolean isOriginalSigned() {
    return originalSigned;
  }

  /*
   * Original JDBC RecordSetMetaData
   *
   * @see java.sql.ResultSetMetaData#setOriginalSigned(boolean)
   */
  @Override
  public void setOriginalSigned( boolean originalSigned ) {
    this.originalSigned = originalSigned;
  }

  /**
   * @return the bigNumberFormatting flag : true if BigNumbers are formatted as well
   */
  public boolean isBigNumberFormatting() {
    return bigNumberFormatting;
  }

  /**
   * @param bigNumberFormatting
   *          the bigNumberFormatting flag to set : true if BigNumbers are formatted as well
   */
  public void setBigNumberFormatting( boolean bigNumberFormatting ) {
    this.bigNumberFormatting = bigNumberFormatting;
  }

  /**
   * @return The available trim type codes (NOT localized, use for persistence)
   */
  public static String[] getTrimTypeCodes() {
    return trimTypeCode;
  }

  /**
   * @return The available trim type descriptions (localized)
   */
  public static String[] getTrimTypeDescriptions() {
    return trimTypeDesc;
  }

  // Binary and serializable payloads share mutable backing data, so cloning a row must deep-copy them.
  @Override
  public boolean requiresRealClone() {
    return type == TYPE_BINARY || type == TYPE_SERIALIZABLE;
  }

  /**
   * @return the lenientStringToNumber
   */
  @Override
  public boolean isLenientStringToNumber() {
    return lenientStringToNumber;
  }

  /**
   * @param lenientStringToNumber
   *          the lenientStringToNumber to set
   */
  @Override
  public void setLenientStringToNumber( boolean lenientStringToNumber ) {
    this.lenientStringToNumber = lenientStringToNumber;
  }

  /**
   * @return the date format time zone
   */
  @Override
  public TimeZone getDateFormatTimeZone() {
    return dateFormatTimeZone;
  }

  /**
   * @param dateFormatTimeZone
   *          the date format time zone to set
   */
  @Override
  public void setDateFormatTimeZone( TimeZone dateFormatTimeZone ) {
    this.dateFormatTimeZone = dateFormatTimeZone;
    // Invalidate any cached date formatter so the new time zone takes effect.
    dateFormatChanged = true;
  }

  @Override
  public void drawValue( PrimitiveGCInterface gc, Object value ) throws KettleValueException {
    // Just draw the string by default.
    //
    gc.drawText( getString( value ), 0, 0 );
  }

  // Builds a Kettle ValueMetaInterface from a JDBC result-set column, applying per-database quirks.
  // The DATE case deliberately falls through into TIME, hence the fallthrough suppression.
  @SuppressWarnings( "fallthrough" )
  @Override
  public ValueMetaInterface getValueFromSQLType( DatabaseMeta databaseMeta, String name,
    java.sql.ResultSetMetaData rm, int index, boolean ignoreLength, boolean lazyConversion )
    throws KettleDatabaseException {
    try {
      int length = -1;
      int precision = -1;
      int valtype = ValueMetaInterface.TYPE_NONE;
      boolean isClob = false;

      int type = rm.getColumnType( index );
      boolean signed = rm.isSigned( index );
      switch ( type ) {
        case java.sql.Types.CHAR:
        case java.sql.Types.VARCHAR:
        case java.sql.Types.NVARCHAR:
        case java.sql.Types.LONGVARCHAR: // Character Large Object
          valtype = ValueMetaInterface.TYPE_STRING;
          if ( !ignoreLength ) {
            length = rm.getColumnDisplaySize( index );
          }
          break;

        case java.sql.Types.CLOB:
        case java.sql.Types.NCLOB:
          valtype = ValueMetaInterface.TYPE_STRING;
          length = DatabaseMeta.CLOB_LENGTH;
          isClob = true;
          break;

        case java.sql.Types.BIGINT:
          // verify Unsigned BIGINT overflow!
          //
          if ( signed ) {
            valtype = ValueMetaInterface.TYPE_INTEGER;
            precision = 0; // Max 9.223.372.036.854.775.807
            length = 15;
          } else {
            valtype = ValueMetaInterface.TYPE_BIGNUMBER;
            precision = 0; // Max 18.446.744.073.709.551.615
            length = 16;
          }
          break;

        case java.sql.Types.INTEGER:
          valtype = ValueMetaInterface.TYPE_INTEGER;
          precision = 0; // Max 2.147.483.647
          length = 9;
          break;

        case java.sql.Types.SMALLINT:
          valtype = ValueMetaInterface.TYPE_INTEGER;
          precision = 0; // Max 32.767
          length = 4;
          break;

        case java.sql.Types.TINYINT:
          valtype = ValueMetaInterface.TYPE_INTEGER;
          precision = 0; // Max 127
          length = 2;
          break;

        case java.sql.Types.DECIMAL:
        case java.sql.Types.DOUBLE:
        case java.sql.Types.FLOAT:
        case java.sql.Types.REAL:
        case java.sql.Types.NUMERIC:
          valtype = ValueMetaInterface.TYPE_NUMBER;
          length = rm.getPrecision( index );
          precision = rm.getScale( index );
          if ( length >= 126 ) {
            length = -1;
          }
          if ( precision >= 126 ) {
            precision = -1;
          }

          if ( type == java.sql.Types.DOUBLE || type == java.sql.Types.FLOAT || type == java.sql.Types.REAL ) {
            if ( precision == 0 ) {
              precision = -1; // precision is obviously incorrect if the type if
                              // Double/Float/Real
            }

            // If we're dealing with PostgreSQL and double precision types
            if ( databaseMeta.getDatabaseInterface() instanceof PostgreSQLDatabaseMeta
              && type == java.sql.Types.DOUBLE && precision >= 16 && length >= 16 ) {
              precision = -1;
              length = -1;
            }

            // MySQL: max resolution is double precision floating point (double)
            // The (12,31) that is given back is not correct
            if ( databaseMeta.getDatabaseInterface() instanceof MySQLDatabaseMeta ) {
              if ( precision >= length ) {
                precision = -1;
                length = -1;
              }
            }

            // if the length or precision needs a BIGNUMBER
            if ( length > 15 || precision > 15 ) {
              valtype = ValueMetaInterface.TYPE_BIGNUMBER;
            }
          } else {
            if ( precision == 0 ) {
              if ( length <= 18 && length > 0 ) { // Among others Oracle is affected
                                                  // here.
                valtype = ValueMetaInterface.TYPE_INTEGER; // Long can hold up to 18
                                                           // significant digits
              } else if ( length > 18 ) {
                valtype = ValueMetaInterface.TYPE_BIGNUMBER;
              }
            } else {
              // we have a precision: keep NUMBER or change to BIGNUMBER?
              if ( length > 15 || precision > 15 ) {
                valtype = ValueMetaInterface.TYPE_BIGNUMBER;
              }
            }
          }

          if ( databaseMeta.getDatabaseInterface() instanceof PostgreSQLDatabaseMeta
            || databaseMeta.getDatabaseInterface() instanceof GreenplumDatabaseMeta ) {
            // undefined size => arbitrary precision
            if ( type == java.sql.Types.NUMERIC && length == 0 && precision == 0 ) {
              valtype = ValueMetaInterface.TYPE_BIGNUMBER;
              length = -1;
              precision = -1;
            }
          }

          if ( databaseMeta.getDatabaseInterface() instanceof OracleDatabaseMeta ) {
            if ( precision == 0 && length == 38 ) {
              valtype = ValueMetaInterface.TYPE_INTEGER;
            }
            if ( precision <= 0 && length <= 0 ) { // undefined size: BIGNUMBER,
                                                   // precision on Oracle can be 38, too
                                                   // big for a Number type
              valtype = ValueMetaInterface.TYPE_BIGNUMBER;
              length = -1;
              precision = -1;
            }
          }
          break;

        case java.sql.Types.TIMESTAMP:
          if ( databaseMeta.supportsTimestampDataType() ) {
            valtype = ValueMetaInterface.TYPE_TIMESTAMP;
            length = rm.getScale( index );
          }
          break;

        case java.sql.Types.DATE:
          if ( databaseMeta.getDatabaseInterface() instanceof TeradataDatabaseMeta ) {
            precision = 1;
          }
          // intentional fall through into the TIME handling below
        case java.sql.Types.TIME:
          valtype = ValueMetaInterface.TYPE_DATE;
          //
          if ( databaseMeta.getDatabaseInterface() instanceof MySQLDatabaseMeta ) {
            String property = databaseMeta.getConnectionProperties().getProperty( "yearIsDateType" );
            if ( property != null && property.equalsIgnoreCase( "false" )
              && rm.getColumnTypeName( index ).equalsIgnoreCase( "YEAR" ) ) {
              valtype = ValueMetaInterface.TYPE_INTEGER;
              precision = 0;
              length = 4;
              break;
            }
          }
          break;

        case java.sql.Types.BOOLEAN:
        case java.sql.Types.BIT:
          valtype = ValueMetaInterface.TYPE_BOOLEAN;
          break;

        case java.sql.Types.BINARY:
        case java.sql.Types.BLOB:
        case java.sql.Types.VARBINARY:
        case java.sql.Types.LONGVARBINARY:
          valtype = ValueMetaInterface.TYPE_BINARY;

          if ( databaseMeta.isDisplaySizeTwiceThePrecision()
            && ( 2 * rm.getPrecision( index ) ) == rm.getColumnDisplaySize( index ) ) {
            // set the length for "CHAR(X) FOR BIT DATA"
            length = rm.getPrecision( index );
          } else if ( ( databaseMeta.getDatabaseInterface() instanceof OracleDatabaseMeta )
            && ( type == java.sql.Types.VARBINARY || type == java.sql.Types.LONGVARBINARY ) ) {
            // set the length for Oracle "RAW" or "LONGRAW" data types
            valtype = ValueMetaInterface.TYPE_STRING;
            length = rm.getColumnDisplaySize( index );
          } else if ( databaseMeta.isMySQLVariant()
            && ( type == java.sql.Types.VARBINARY || type == java.sql.Types.LONGVARBINARY ) ) {
            // set the data type to String, see PDI-4812
            valtype = ValueMetaInterface.TYPE_STRING;
            // PDI-6677 - don't call 'length = rm.getColumnDisplaySize(index);'
            length = -1; // keep the length to -1, e.g. for string functions (e.g.
                         // CONCAT see PDI-4812)
          } else if ( databaseMeta.getDatabaseInterface() instanceof SQLiteDatabaseMeta ) {
            valtype = ValueMetaInterface.TYPE_STRING;
          } else {
            length = -1;
          }
          precision = -1;
          break;

        default:
          valtype = ValueMetaInterface.TYPE_STRING;
          precision = rm.getScale( index );
          break;
      }

      ValueMetaInterface v = ValueMetaFactory.createValueMeta( name, valtype );
      v.setLength( length );
      v.setPrecision( precision );
      v.setLargeTextField( isClob );

      getOriginalColumnMetadata( v, rm, index, ignoreLength );

      // See if we need to enable lazy conversion...
      //
      if ( lazyConversion && valtype == ValueMetaInterface.TYPE_STRING ) {
        v.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
        // TODO set some encoding to go with this.

        // Also set the storage metadata. a copy of the parent, set to String too.
        //
        try {
          ValueMetaInterface storageMetaData = ValueMetaFactory.cloneValueMeta( v, ValueMetaInterface.TYPE_STRING );
          storageMetaData.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
          v.setStorageMetadata( storageMetaData );
        } catch ( Exception e ) {
          throw new SQLException( e );
        }
      }

      ValueMetaInterface newV = null;
      try {
        // Give the database dialect a chance to override the inferred metadata.
        newV = databaseMeta.getDatabaseInterface().customizeValueFromSQLType( v, rm, index );
      } catch ( SQLException e ) {
        throw new SQLException( e );
      }
      return newV == null ? v : newV;
    } catch ( Exception e ) {
      throw new KettleDatabaseException( "Error determining value metadata from SQL resultset metadata", e );
    }
  }

  // Copies selected JDBC ResultSetMetaData attributes of the column onto the value meta object.
  protected void getOriginalColumnMetadata( ValueMetaInterface v, ResultSetMetaData rm, int index,
    boolean ignoreLength ) throws SQLException {
    // Grab the comment as a description to the field as well.
    String comments = rm.getColumnLabel( index );
    v.setComments( comments );

    // get & store more result set meta data for later use
    int originalColumnType = rm.getColumnType( index );
    v.setOriginalColumnType( originalColumnType );

    String originalColumnTypeName = rm.getColumnTypeName( index );
    v.setOriginalColumnTypeName( originalColumnTypeName );

    int originalPrecision = -1;
    if ( !ignoreLength ) {
      // Throws exception on MySQL
      originalPrecision = rm.getPrecision( index );
    }
    v.setOriginalPrecision( originalPrecision );

    int originalScale = rm.getScale( index );
    v.setOriginalScale( originalScale );

    // DISABLED FOR PERFORMANCE REASONS : PDI-1788
    //
    // boolean originalAutoIncrement=rm.isAutoIncrement(index); DISABLED FOR
    // PERFORMANCE REASONS : PDI-1788
    // v.setOriginalAutoIncrement(originalAutoIncrement);

    // int originalNullable=rm.isNullable(index); DISABLED FOR PERFORMANCE
    // REASONS : PDI-1788
    // v.setOriginalNullable(originalNullable);
    //

    boolean originalSigned = rm.isSigned( index );
    v.setOriginalSigned( originalSigned );
  }

  /**
   * Get a value from a result set column based on the current value metadata
   *
   * @param databaseInterface
   *          the database metadata to use
   * @param resultSet
   *          The JDBC result set to read from
   * @param index
   *          The column index (1-based)
   * @return The Kettle native data type based on the value metadata
   * @throws KettleDatabaseException
   *           in case something goes wrong.
   */
  @Override
  public Object getValueFromResultSet( DatabaseInterface databaseInterface, ResultSet resultSet, int index )
    throws KettleDatabaseException {
    try {
      Object data = null;

      // Note: 'index' is 0-based here; JDBC columns are 1-based, hence index + 1 throughout.
      switch ( getType() ) {
        case ValueMetaInterface.TYPE_BOOLEAN:
          data = Boolean.valueOf( resultSet.getBoolean( index + 1 ) );
          break;
        case ValueMetaInterface.TYPE_NUMBER:
          data = new Double( resultSet.getDouble( index + 1 ) );
          break;
        case ValueMetaInterface.TYPE_BIGNUMBER:
          data = resultSet.getBigDecimal( index + 1 );
          break;
        case ValueMetaInterface.TYPE_INTEGER:
          data = Long.valueOf( resultSet.getLong( index + 1 ) );
          break;
        case ValueMetaInterface.TYPE_STRING:
          if ( isStorageBinaryString() ) {
            data = resultSet.getBytes( index + 1 );
          } else {
            data = resultSet.getString( index + 1 );
          }
          break;
        case ValueMetaInterface.TYPE_BINARY:
          if ( databaseInterface.supportsGetBlob() ) {
            Blob blob = resultSet.getBlob( index + 1 );
            if ( blob != null ) {
              data = blob.getBytes( 1L, (int) blob.length() );
            } else {
              data = null;
            }
          } else {
            data = resultSet.getBytes( index + 1 );
          }
          break;
        case ValueMetaInterface.TYPE_DATE:
          if ( getPrecision() != 1 && databaseInterface.supportsTimeStampToDateConversion() ) {
            data = resultSet.getTimestamp( index + 1 );
            break; // Timestamp extends java.util.Date
          } else if ( databaseInterface instanceof NetezzaDatabaseMeta ) {
            // PDI-10877 workaround for IBM netezza jdbc 'special' implementation
            data = getNetezzaDateValueWorkaround( databaseInterface, resultSet, index + 1 );
            break;
          } else {
            data = resultSet.getDate( index + 1 );
            break;
          }
        default:
          break;
      }
      if ( resultSet.wasNull() ) {
        data = null;
      }
      return data;
    } catch ( SQLException e ) {
      throw new KettleDatabaseException( "Unable to get value '" + toStringMeta()
        + "' from database resultset, index " + index, e );
    }
  }

  // PDI-10877: Netezza's JDBC driver needs TIME columns read via getTime(), others via getDate().
  private Object getNetezzaDateValueWorkaround( DatabaseInterface databaseInterface, ResultSet resultSet, int index )
    throws SQLException, KettleDatabaseException {
    Object data = null;
    int type = resultSet.getMetaData().getColumnType( index );
    switch ( type ) {
      case Types.TIME: {
        data = resultSet.getTime( index );
        break;
      }
      default: {
        data = resultSet.getDate( index );
      }
    }
    return data;
  }

  // Binds 'data' (in this value meta's type) onto a JDBC PreparedStatement parameter,
  // honoring per-database capabilities (setLong, boolean support, character streams, time zones).
  @Override
  public void setPreparedStatementValue( DatabaseMeta databaseMeta, PreparedStatement preparedStatement, int index,
    Object data ) throws KettleDatabaseException {
    try {
      switch ( getType() ) {
        case ValueMetaInterface.TYPE_NUMBER:
          if ( !isNull( data ) ) {
            double num = getNumber( data ).doubleValue();
            if ( databaseMeta.supportsFloatRoundingOnUpdate() && getPrecision() >= 0 ) {
              num = Const.round( num, getPrecision() );
            }
            preparedStatement.setDouble( index, num );
          } else {
            preparedStatement.setNull( index, java.sql.Types.DOUBLE );
          }
          break;
        case ValueMetaInterface.TYPE_INTEGER:
          if ( !isNull( data ) ) {
            if ( databaseMeta.supportsSetLong() ) {
              preparedStatement.setLong( index, getInteger( data ).longValue() );
            } else {
              double d = getNumber( data ).doubleValue();
              // NOTE(review): this rounds when rounding-on-update is NOT supported, which is the
              // opposite sense of the TYPE_NUMBER branch above — confirm against the intended
              // semantics of DatabaseMeta.supportsFloatRoundingOnUpdate() before changing.
              if ( databaseMeta.supportsFloatRoundingOnUpdate() && getPrecision() >= 0 ) {
                preparedStatement.setDouble( index, d );
              } else {
                preparedStatement.setDouble( index, Const.round( d, getPrecision() ) );
              }
            }
          } else {
            preparedStatement.setNull( index, java.sql.Types.INTEGER );
          }
          break;
        case ValueMetaInterface.TYPE_STRING:
          if ( getLength() < databaseMeta.getMaxTextFieldLength() ) {
            if ( !isNull( data ) ) {
              preparedStatement.setString( index, getString( data ) );
            } else {
              preparedStatement.setNull( index, java.sql.Types.VARCHAR );
            }
          } else {
            if ( !isNull( data ) ) {
              String string = getString( data );

              int maxlen = databaseMeta.getMaxTextFieldLength();
              int len = string.length();

              // Take the last maxlen characters of the string...
              int begin = Math.max( len - maxlen, 0 );
              if ( begin > 0 ) {
                // Truncate if logging result if it exceeds database maximum string field length
                log.logMinimal( String.format(
                  "Truncating %d symbols of original message in '%s' field", begin, getName() ) );
                string = string.substring( begin );
              }

              if ( databaseMeta.supportsSetCharacterStream() ) {
                StringReader sr = new StringReader( string );
                preparedStatement.setCharacterStream( index, sr, string.length() );
              } else {
                preparedStatement.setString( index, string );
              }
            } else {
              preparedStatement.setNull( index, java.sql.Types.VARCHAR );
            }
          }
          break;
        case ValueMetaInterface.TYPE_DATE:
          if ( !isNull( data ) ) {
            // Environment variable to disable timezone setting for the database updates
            // When it is set, timezone will not be taken into account and the value will be converted
            // into the local java timezone
            if ( getPrecision() == 1 || !databaseMeta.supportsTimeStampToDateConversion() ) {
              // Convert to DATE!
              long dat = getInteger( data ).longValue(); // converts using Date.getTime()
              java.sql.Date ddate = new java.sql.Date( dat );
              if ( ignoreTimezone || this.getDateFormatTimeZone() == null ) {
                preparedStatement.setDate( index, ddate );
              } else {
                preparedStatement.setDate( index, ddate, Calendar.getInstance( this.getDateFormatTimeZone() ) );
              }
            } else {
              if ( data instanceof java.sql.Timestamp ) {
                // Preserve ns precision!
                //
                if ( ignoreTimezone || this.getDateFormatTimeZone() == null ) {
                  preparedStatement.setTimestamp( index, (java.sql.Timestamp) data );
                } else {
                  preparedStatement.setTimestamp( index, (java.sql.Timestamp) data, Calendar.getInstance( this
                    .getDateFormatTimeZone() ) );
                }
              } else {
                long dat = getInteger( data ).longValue(); // converts using Date.getTime()
                java.sql.Timestamp sdate = new java.sql.Timestamp( dat );
                if ( ignoreTimezone || this.getDateFormatTimeZone() == null ) {
                  preparedStatement.setTimestamp( index, sdate );
                } else {
                  preparedStatement.setTimestamp( index, sdate,
                    Calendar.getInstance( this.getDateFormatTimeZone() ) );
                }
              }
            }
          } else {
            if ( getPrecision() == 1 || !databaseMeta.supportsTimeStampToDateConversion() ) {
              preparedStatement.setNull( index, java.sql.Types.DATE );
            } else {
              preparedStatement.setNull( index, java.sql.Types.TIMESTAMP );
            }
          }
          break;
        case ValueMetaInterface.TYPE_BOOLEAN:
          if ( databaseMeta.supportsBooleanDataType() ) {
            if ( !isNull( data ) ) {
              preparedStatement.setBoolean( index, getBoolean( data ).booleanValue() );
            } else {
              preparedStatement.setNull( index, java.sql.Types.BOOLEAN );
            }
          } else {
            // No native boolean: encode as a single-character "Y"/"N" string.
            if ( !isNull( data ) ) {
              preparedStatement.setString( index, getBoolean( data ).booleanValue() ? "Y" : "N" );
            } else {
              preparedStatement.setNull( index, java.sql.Types.CHAR );
            }
          }
          break;
        case ValueMetaInterface.TYPE_BIGNUMBER:
          if ( !isNull( data ) ) {
            preparedStatement.setBigDecimal( index, getBigNumber( data ) );
          } else {
            preparedStatement.setNull( index, java.sql.Types.DECIMAL );
          }
          break;
        case ValueMetaInterface.TYPE_BINARY:
          if ( !isNull( data ) ) {
            preparedStatement.setBytes( index, getBinary( data ) );
          } else {
            preparedStatement.setNull( index, java.sql.Types.BINARY );
          }
          break;
        default:
          // placeholder
          preparedStatement.setNull( index, java.sql.Types.VARCHAR );
          break;
      }
    } catch ( Exception e ) {
      throw new KettleDatabaseException( "Error setting value #" + index + " [" + toStringMeta()
        + "] on prepared statement", e );
    }
  }

  // Unwraps a stored representation (binary string or indexed) back to the native data object.
  @Override
  public Object getNativeDataType( Object object ) throws KettleValueException {
    switch ( getStorageType() ) {
      case STORAGE_TYPE_BINARY_STRING:
        return convertBinaryStringToNativeType( (byte[]) object );
      case STORAGE_TYPE_INDEXED:
        return index[(Integer) object];
      case STORAGE_TYPE_NORMAL:
      default:
        return object;
    }
  }

  @Override
  public String getDatabaseColumnTypeDefinition( DatabaseInterface databaseInterface, String tk, String pk,
    boolean use_autoinc, boolean add_fieldname, boolean add_cr ) {
    return null; // No default suggestions...
  }

  // Counts single-quote characters that occur before the first occurrence of 'symbols' in the
  // format string 'df' (used to decide whether a symbol is inside a quoted literal section).
  protected int getQuotesBeforeSymbol( String df, String symbols ) {
    int quotes = 0;
    int stopPos = df.indexOf( symbols );
    if ( stopPos > 0 ) {
      int curPos = -1;
      do {
        curPos = df.indexOf( "'", curPos + 1 );
        if ( curPos >= 0 && curPos < stopPos ) {
          quotes++;
        }
      } while ( curPos >= 0 && curPos < stopPos );
    }
    return quotes;
  }

  public Class<?> getNativeDataTypeClass() throws KettleValueException {
    // Not implemented for base class
    throw new KettleValueException( getTypeDesc() + " does not implement this method" );
  }
}
{ "content_hash": "f3ce786c75bb4ae3f632bc29e8dca946", "timestamp": "", "source": "github", "line_count": 4911, "max_line_length": 136, "avg_line_length": 34.80370596619833, "alnum_prop": 0.5988614623129984, "repo_name": "IvanNikolaychuk/pentaho-kettle", "id": "6ee1f535fc56b7bf504bc7c3ab7df490d403e4f6", "size": "171825", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "core/src/org/pentaho/di/core/row/value/ValueMetaBase.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "14028" }, { "name": "CSS", "bytes": "30172" }, { "name": "GAP", "bytes": "4005" }, { "name": "HTML", "bytes": "86007" }, { "name": "Java", "bytes": "38287417" }, { "name": "JavaScript", "bytes": "41517" }, { "name": "Shell", "bytes": "18925" }, { "name": "XSLT", "bytes": "5600" } ], "symlink_target": "" }
module.exports = { /** * Find the first element (partially) matches text from an array */ findFirstElement: function(arr, text, caseSensitive) { if (!arr || arr.length == 0 || !text || text === '') { return -1; } var compareLength = text.length; for (var arrIter = 0; arrIter < arr.length; arrIter++) { // If current item's first letter matches with target's first letter var firstLetterFromItem = caseSensitive ? arr[arrIter][0] : arr[arrIter][0].toLowerCase(); var firstLetterFromTarget = caseSensitive ? text[0] : text[0].toLowerCase(); if (firstLetterFromItem === firstLetterFromTarget) { // We got one match (first letter) var matchedCount = 1; // Checking following letters are match or not for ( var textIter = 1; // It only need to iterate "Math.min(text.length, word.length)" times textIter < Math.min(compareLength, arr[arrIter].length); textIter++ ) { var currentLetterFromItem = caseSensitive ? arr[arrIter][textIter] : arr[arrIter][textIter].toLowerCase(); var currentLetterFromTarget = caseSensitive ? text[textIter] : text[textIter].toLowerCase(); if (currentLetterFromItem === currentLetterFromTarget) matchedCount++; } // If current item matches all given letters, returns current index if (matchedCount === compareLength) return arrIter; } } return -1; }, /** * Get N items from an array, start from startIndex */ getItemsFromArray: function(arr, startIndex, length) { var resultArray = []; for (var c = 0; c < length; c++) { resultArray.push(arr[startIndex + c]); } return resultArray; } };
{ "content_hash": "99859cac4b4822363e96e2b07a64d600", "timestamp": "", "source": "github", "line_count": 52, "max_line_length": 80, "avg_line_length": 35.19230769230769, "alnum_prop": 0.5972677595628415, "repo_name": "ajhsu/autocomplete", "id": "71e1678ecf528f41223154e0d3b21caba7b53d16", "size": "1830", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/utils.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1100" }, { "name": "HTML", "bytes": "864" }, { "name": "JavaScript", "bytes": "5738166" } ], "symlink_target": "" }
module.exports = require('./lib/rpc')
{ "content_hash": "7490f0812e23ce94a6bf5e02dd91d167", "timestamp": "", "source": "github", "line_count": 1, "max_line_length": 37, "avg_line_length": 38, "alnum_prop": 0.6842105263157895, "repo_name": "xmpp-ftw/xmpp-ftw-rpc", "id": "57780a6485c4cd7ad23d4ecc5cf59da8f95e495a", "size": "38", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "index.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "JavaScript", "bytes": "56716" } ], "symlink_target": "" }
require 'set' class String LETTERS = ('a'..'z').to_a # Finds and returns an array that links the current word to _dest_word_, # where each link in the chain is a word that differs from the previous # only by a single character. # # The _visitation_map_ parameter is a hash containing all legal words as # keys, and that should be initialized with the values mapping to the # deepest depth allowable. def chain_to( dest_word, visitation_map, depth=1 ) return nil if depth > $max_length # Find variations on myself which haven't been reached by a shorter path # and update the visitation map at the same time links = Set.new 0.upto( self.length-1 ){ |i| old_char = self[ i ] LETTERS.each{ |new_char| if new_char != old_char test_word = self.dup test_word[ i ] = new_char #Following returns nil if the word isn't in the dictionary shortest_path = visitation_map[ test_word ] if shortest_path && shortest_path > depth #I've gotten to this word faster than anyone else #Put my score in the high score board, and use this word again visitation_map[ test_word ] = depth links << test_word end end } } path_from_me = nil if links.include?( dest_word ) #Sweet, I have a direct route! 
path_from_me = [ self ] else links.each{ |test_word| path = test_word.chain_to( dest_word, visitation_map, depth + 1 ) if path total_length = depth + path.length + 1 #Only use the found path if it's shorter than one found already if total_length <= $max_length warn "Found a chain of length #{total_length}" if $DEBUG path_from_me = path $max_length = total_length end end } if path_from_me path_from_me.unshift( self ) end end path_from_me end end start_word = ARGV[0] || 'crave' end_word = ARGV[1] || 'primp' $max_length = Integer( ARGV[2] || start_word.length * 3 ) dict = ARGV[3] || '/usr/share/dict/words' #dict = ARGV[3] || '2of12inf.txt' desired_length = start_word.length unless end_word.length == desired_length msg = "Error: '#{start_word}' and '#{end_word}' are not the same length" msg << "(#{start_word.length} vs. #{end_word.length})" raise msg end # Load words of the right length avail_words = {} File.open( dict, 'r' ){ |f| w = f.read.split(/[\r\n]+/) # No capital words, or words ending with % (12dicts) w.reject!{ |word| word.length != desired_length or /[^a-z]/ =~ word } w.each{ |word| avail_words[ word ] = $max_length } } avail_words[ start_word ] = 1 puts "Searching in #{avail_words.length} words with #{desired_length} letters" unless avail_words.include?( end_word ) raise "Error: '#{end_word}' is not included in #{dict}" end print "Chain between '#{start_word}' and '#{end_word}', " puts "no longer than #{$max_length} links:" start_time = Time.new if path = start_word.chain_to( end_word, avail_words ) puts path.join( "\n" ) puts end_word else puts "(no such chain exists)" end end_time = Time.new puts "--> %.2fs (after loading dictionary)\n " % [ end_time-start_time ]
{ "content_hash": "62c4b431a4da26619a90f7a9f358fe1a", "timestamp": "", "source": "github", "line_count": 103, "max_line_length": 86, "avg_line_length": 35.28155339805825, "alnum_prop": 0.5528343423225096, "repo_name": "J-Y/RubyQuiz", "id": "380e250c810dd6993646862eaa64ae7cf2d10b5a", "size": "3656", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ruby_quiz/quiz44_sols/solutions/Gavin Kistner/word_chain.rb", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "2281" }, { "name": "Bison", "bytes": "261" }, { "name": "C", "bytes": "23577" }, { "name": "CSS", "bytes": "10128" }, { "name": "GAP", "bytes": "4684" }, { "name": "HTML", "bytes": "186387" }, { "name": "JavaScript", "bytes": "148" }, { "name": "Makefile", "bytes": "6175" }, { "name": "OCaml", "bytes": "7884" }, { "name": "Ruby", "bytes": "8196401" }, { "name": "Shell", "bytes": "1092" }, { "name": "TeX", "bytes": "4601" } ], "symlink_target": "" }
Transmission ============ This part of the tutorial will demonstrate how to install and configure an existing web application. Using Ansible Roles ------------------- Unlike with the webserver from the previous example we *will* create a custom configuration, so instead of littering our top level directory with yet more playbooks and templates we will configure this instance using a role. Let's first create the required structure:: mkdir -p roles/transmission/tasks mkdir -p roles/transmission/templates mkdir -p roles/transmission/handlers Populate them with a settings template in ``roles/transmission/templates/settings.json``: .. code-block:: json { "alt-speed-up": 50, "alt-speed-down": 200, "speed-limit-down": 5000, "speed-limit-down-enabled": true, "speed-limit-up": 100, "speed-limit-up-enabled": true, "start-added-torrents": true, "trash-original-torrent-files": true, "watch-dir": "{{download_dir}}", "watch-dir-enabled": true, "rpc-whitelist": "127.0.0.*,10.0.*.*", "ratio-limit": 1.25, "ratio-limit-enabled": true } And in ``roles/transmission/handlers/main.yml``: .. code-block:: yaml --- - name: restart transmission service: name=transmission state=restarted And finally in ``roles/transmission/tasks/main.yml``: .. code-block:: yaml - name: Ensure helper packages are installed pkgng: name={{ item }} state=present with_items: - transmission-daemon - transmission-web - name: Setup transmission to start on boot service: name=transmission enabled=yes - name: Configure transmission template: src=settings.json dest=/usr/local/etc/transmission/home/settings.json backup=yes owner=transmission notify: - restart transmission The above tasks should look pretty familiar by now: - install the required packages (this time it's more than one and we demonstrate the ``with_items`` method) - enable it in ``rc.conf`` - Finally, as a new technique we upload a settings file as a template and... - ... 
use ansible's *handlers* to make sure that the service is reloaded every time we change its settings. Exercise One ------------ Publish the transmission daemon's web UI at ``http://192.168.56.100/transmission``. .. note:: Proxying to transmission can be a bit finicky as it requires certain CRSF protection headers, so here's a small spoiler/hint. This is the required nginx configuration to proxy to transmission:: location /transmission { proxy_http_version 1.1; proxy_set_header Connection ""; proxy_pass_header X-Transmission-Session-Id; proxy_pass http://transmissionweb; proxy_redirect off; proxy_buffering off; proxy_set_header Host $host; proxy_set_header X-Real-IP $remote_addr; } Exercise Two ------------ Publish the downloads directory via nginx so users can download finished torrents from ``http://192.168.56.100/downloads``. Do this by configuring an additional jail that has read-only access to the download directory and publishes using its own nginx which is then targetted by the webserver jail.
{ "content_hash": "843e944541f2540e85768317c1f3b9a8", "timestamp": "", "source": "github", "line_count": 96, "max_line_length": 224, "avg_line_length": 34.135416666666664, "alnum_prop": 0.6765334147085749, "repo_name": "ployground/bsdploy", "id": "a48d1d8d86bb6bb147e21a7d50b6d25b07f3a774", "size": "3277", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "docs/tutorial/transmission.rst", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Makefile", "bytes": "436" }, { "name": "Nix", "bytes": "212" }, { "name": "Python", "bytes": "113675" }, { "name": "Shell", "bytes": "489" } ], "symlink_target": "" }
function GameManager(size, InputManager, Actuator, ScoreManager) { this.size = size; // Size of the grid this.inputManager = new InputManager; this.scoreManager = new ScoreManager; this.actuator = new Actuator; this.startTiles = 1; this.inputManager.on("move", this.move.bind(this)); this.inputManager.on("restart", this.restart.bind(this)); this.inputManager.on("keepPlaying", this.keepPlaying.bind(this)); this.setup(); } // Restart the game GameManager.prototype.restart = function () { this.actuator.continue(); this.setup(); }; // Keep playing after winning GameManager.prototype.keepPlaying = function () { this.keepPlaying = true; this.actuator.continue(); }; GameManager.prototype.isGameTerminated = function () { if (this.over || (this.won && !this.keepPlaying)) { return true; } else { return false; } }; // Set up the game GameManager.prototype.setup = function () { this.grid = new Grid(this.size); this.score = 0; this.over = false; this.won = false; this.keepPlaying = false; // Add the initial tiles this.addStartTiles(); // Update the actuator this.actuate(); }; // Set up the initial tiles to start the game with GameManager.prototype.addStartTiles = function () { for (var i = 0; i < this.startTiles; i++) { this.addRandomTile(); } }; // Adds a tile in a random position GameManager.prototype.addRandomTile = function () { if (this.grid.cellsAvailable()) { var value = 2; var tile = new Tile(this.grid.randomAvailableCell(), value); tile.x = 0; tile.y = 0; this.grid.insertTile(tile); } }; // Sends the updated grid to the actuator GameManager.prototype.actuate = function () { if (this.scoreManager.get() < this.score) { this.scoreManager.set(this.score); } this.actuator.actuate(this.grid, { score: this.score, over: this.over, won: this.won, bestScore: this.scoreManager.get(), terminated: this.isGameTerminated() }); }; // Save all tile positions and remove merger info GameManager.prototype.prepareTiles = function () { this.grid.eachCell(function (x, y, tile) { if (tile) { 
tile.mergedFrom = null; tile.savePosition(); } }); }; // Move a tile and its representation GameManager.prototype.moveTile = function (tile, cell) { this.grid.cells[tile.x][tile.y] = null; this.grid.cells[cell.x][cell.y] = tile; tile.updatePosition(cell); }; // Move tiles on the grid in the specified direction GameManager.prototype.move = function (direction) { // 0: up, 1: right, 2:down, 3: left var self = this; if (this.isGameTerminated()) return; // Don't do anything if the game's over var cell, tile; var vector = this.getVector(direction); var traversals = this.buildTraversals(vector); var moved = false; // Save the current tile positions and remove merger information this.prepareTiles(); // Traverse the grid in the right direction and move tiles traversals.x.forEach(function (x) { traversals.y.forEach(function (y) { cell = { x: x, y: y }; tile = self.grid.cellContent(cell); if (tile) { var positions = self.findFarthestPosition(cell, vector); var next = self.grid.cellContent(positions.next); // Only one merger per row traversal? if (next && next.value === tile.value && !next.mergedFrom) { var multiply = Math.random() < 0.986 ? 2 : 4; var merged = new Tile(positions.next, tile.value * multiply); merged.mergedFrom = [tile, next]; self.grid.insertTile(merged); self.grid.removeTile(tile); // Converge the two tiles' positions tile.updatePosition(positions.next); // Update the score self.score += merged.value; // The mighty 16384 tile if (merged.value === 16384) self.won = true; } else { self.moveTile(tile, positions.farthest); } if (!self.positionsEqual(cell, tile)) { moved = true; // The tile moved from its original cell! } } }); }); if (moved) { /* this.addRandomTile(); if (!this.movesAvailable()) { this.over = true; // Game over! 
} */ this.actuate(); } }; // Get the vector representing the chosen direction GameManager.prototype.getVector = function (direction) { // Vectors representing tile movement var map = { 0: { x: 0, y: -1 }, // up 1: { x: 1, y: 0 }, // right 2: { x: 0, y: 1 }, // down 3: { x: -1, y: 0 } // left }; return map[direction]; }; // Build a list of positions to traverse in the right order GameManager.prototype.buildTraversals = function (vector) { var traversals = { x: [], y: [] }; for (var pos = 0; pos < this.size; pos++) { traversals.x.push(pos); traversals.y.push(pos); } // Always traverse from the farthest cell in the chosen direction if (vector.x === 1) traversals.x = traversals.x.reverse(); if (vector.y === 1) traversals.y = traversals.y.reverse(); return traversals; }; GameManager.prototype.findFarthestPosition = function (cell, vector) { var previous; // Progress to the next cell if it is in bounds previous = cell; cell = { x: previous.x + vector.x, y: previous.y + vector.y }; if (this.grid.withinBounds(cell)) { previous = cell; } return { farthest: previous, next: cell } /* do { previous = cell; cell = { x: previous.x + vector.x, y: previous.y + vector.y }; } while (this.grid.withinBounds(cell) && this.grid.cellAvailable(cell)); return { farthest: previous, next: cell // Used to check if a merge is required }; */ }; GameManager.prototype.movesAvailable = function () { return this.grid.cellsAvailable() || this.tileMatchesAvailable(); }; // Check for available matches between tiles (more expensive check) GameManager.prototype.tileMatchesAvailable = function () { var self = this; var tile; for (var x = 0; x < this.size; x++) { for (var y = 0; y < this.size; y++) { tile = this.grid.cellContent({ x: x, y: y }); if (tile) { for (var direction = 0; direction < 4; direction++) { var vector = self.getVector(direction); var cell = { x: x + vector.x, y: y + vector.y }; var other = self.grid.cellContent(cell); if (other && other.value === tile.value) { return true; // These two 
tiles can be merged } } } } } return false; }; GameManager.prototype.positionsEqual = function (first, second) { return first.x === second.x && first.y === second.y; };
{ "content_hash": "da72cb07397499fbfa459dc4c136d1ed", "timestamp": "", "source": "github", "line_count": 267, "max_line_length": 78, "avg_line_length": 25.228464419475657, "alnum_prop": 0.6177256532066508, "repo_name": "JeffreyWest/exploring_ninja", "id": "7ffbdedc83ae1614a9b6f05e94fa63be957b5778", "size": "6736", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "js/game_manager.js", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "31904" }, { "name": "JavaScript", "bytes": "17866" } ], "symlink_target": "" }
var OverlayBackground = function(cell) { this.init(cell); }; var OverlayBackgroundPrototype = function() { this.type = "overlay-background"; this.height = 0; this.width = 0; this.top = 0; this.left = 0; this.is_dirty = true; this.setDimensions = function(force_redraw) { if (!this.horizontal_border_padding) this.setCSSProperties(); var dim = this.cell.top - 1; if (dim != this.top) { this.is_dirty = true; this.top = dim; } dim = this.cell.left; if (dim != this.left) { this.is_dirty = true; this.left = dim; } dim = this.cell.width - this.horizontal_border_padding; if (dim != this.width) { this.is_dirty = true; this.width = dim; } dim = this.cell.height - this.vertical_border_padding + 1; if (dim != this.height) { this.is_dirty = true; this.height = dim; } this.update(force_redraw); }; this.setup = function(view_id) { if (!document.getElementById(this.type + '-to-' + this.cell.id)) this.update(); }; this.update = function(force_redraw) { if (force_redraw) this.is_dirty = true; var id = this.type + '-to-' + this.cell.id; var ele = document.getElementById(id); if (!ele) { ele = document.render(["div", "class", "background-overlay", "id", id]); viewport.appendChild(ele); } if (this.is_dirty) { this.is_dirty = false; this.update_style(ele.style); this.update_sub_class(); } return ele; } this.init = function(cell) { this.cell = cell; this.initBase(); }; }; OverlayBackgroundPrototype.prototype = UIBase; OverlayBackground.prototype = new OverlayBackgroundPrototype();
{ "content_hash": "248f755fb37728c0cd2998daeaeea460", "timestamp": "", "source": "github", "line_count": 81, "max_line_length": 78, "avg_line_length": 21.567901234567902, "alnum_prop": 0.5867200915855753, "repo_name": "operasoftware/dragonfly", "id": "084e275a382f4f8eb574e32e2a86ec9f3daa28e9", "size": "1749", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/ui-scripts/overlaybackground.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "JavaScript", "bytes": "3848248" }, { "name": "PHP", "bytes": "4120" }, { "name": "Python", "bytes": "31116" }, { "name": "Shell", "bytes": "39" } ], "symlink_target": "" }
@interface NSArray (PIXCategory) - (NSArray *)PIXArrayByReplacingNullsWithBlanks; @end
{ "content_hash": "6f983fba8ec1ff7101f0921ee60b997c", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 48, "avg_line_length": 29, "alnum_prop": 0.8160919540229885, "repo_name": "pixnet/pixnet-ios-sdk", "id": "e3a9a42a659d612bb570d464488692d0c8aabd0d", "size": "271", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "PIXNET-iOS-SDK/Classes/NSArray+PIXCategory.h", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "HTML", "bytes": "64210" }, { "name": "Objective-C", "bytes": "647793" }, { "name": "Ruby", "bytes": "6510" } ], "symlink_target": "" }
package net.bytebuddy.implementation.attribute; import net.bytebuddy.build.HashCodeAndEqualsPlugin; import net.bytebuddy.description.annotation.AnnotationDescription; import net.bytebuddy.description.enumeration.EnumerationDescription; import net.bytebuddy.description.method.MethodDescription; import net.bytebuddy.description.type.TypeDescription; import net.bytebuddy.description.type.TypeList; import org.objectweb.asm.*; import java.lang.reflect.Array; import java.util.List; /** * Annotation appenders are capable of writing annotations to a specified target. */ public interface AnnotationAppender { /** * A constant for informing ASM over ignoring a given name. */ String NO_NAME = null; /** * Writes the given annotation to the target that this appender represents. * * @param annotationDescription The annotation to be written. * @param annotationValueFilter The annotation value filter to use. * @return Usually {@code this} or any other annotation appender capable of writing another annotation to the specified target. */ AnnotationAppender append(AnnotationDescription annotationDescription, AnnotationValueFilter annotationValueFilter); /** * Writes the given type annotation to the target that this appender represents. * * @param annotationDescription The annotation to be written. * @param annotationValueFilter The annotation value filter to use. * @param typeReference The type variable's type reference. * @param typePath The type variable's type path. * @return Usually {@code this} or any other annotation appender capable of writing another annotation to the specified target. */ AnnotationAppender append(AnnotationDescription annotationDescription, AnnotationValueFilter annotationValueFilter, int typeReference, String typePath); /** * Represents a target for an annotation writing process. */ interface Target { /** * Creates an annotation visitor for writing the specified annotation. 
* * @param annotationTypeDescriptor The type descriptor for the annotation to be written. * @param visible {@code true} if the annotation is to be visible at runtime. * @return An annotation visitor for consuming the specified annotation. */ AnnotationVisitor visit(String annotationTypeDescriptor, boolean visible); /** * Creates an annotation visitor for writing the specified type annotation. * * @param annotationTypeDescriptor The type descriptor for the annotation to be written. * @param visible {@code true} if the annotation is to be visible at runtime. * @param typeReference The type annotation's type reference. * @param typePath The type annotation's type path. * @return An annotation visitor for consuming the specified annotation. */ AnnotationVisitor visit(String annotationTypeDescriptor, boolean visible, int typeReference, String typePath); /** * Target for an annotation that is written to a Java type. */ @HashCodeAndEqualsPlugin.Enhance class OnType implements Target { /** * The class visitor to write the annotation to. */ private final ClassVisitor classVisitor; /** * Creates a new wrapper for a Java type. * * @param classVisitor The ASM class visitor to which the annotations are appended to. */ public OnType(ClassVisitor classVisitor) { this.classVisitor = classVisitor; } @Override public AnnotationVisitor visit(String annotationTypeDescriptor, boolean visible) { return classVisitor.visitAnnotation(annotationTypeDescriptor, visible); } @Override public AnnotationVisitor visit(String annotationTypeDescriptor, boolean visible, int typeReference, String typePath) { return classVisitor.visitTypeAnnotation(typeReference, TypePath.fromString(typePath), annotationTypeDescriptor, visible); } } /** * Target for an annotation that is written to a Java method or constructor. */ @HashCodeAndEqualsPlugin.Enhance class OnMethod implements Target { /** * The method visitor to write the annotation to. 
*/ private final MethodVisitor methodVisitor; /** * Creates a new wrapper for a Java method or constructor. * * @param methodVisitor The ASM method visitor to which the annotations are appended to. */ public OnMethod(MethodVisitor methodVisitor) { this.methodVisitor = methodVisitor; } @Override public AnnotationVisitor visit(String annotationTypeDescriptor, boolean visible) { return methodVisitor.visitAnnotation(annotationTypeDescriptor, visible); } @Override public AnnotationVisitor visit(String annotationTypeDescriptor, boolean visible, int typeReference, String typePath) { return methodVisitor.visitTypeAnnotation(typeReference, TypePath.fromString(typePath), annotationTypeDescriptor, visible); } } /** * Target for an annotation that is written to a Java method or constructor parameter. */ @HashCodeAndEqualsPlugin.Enhance class OnMethodParameter implements Target { /** * The method visitor to write the annotation to. */ private final MethodVisitor methodVisitor; /** * The method parameter index to write the annotation to. */ private final int parameterIndex; /** * Creates a new wrapper for a Java method or constructor. * * @param methodVisitor The ASM method visitor to which the annotations are appended to. * @param parameterIndex The index of the method parameter. */ public OnMethodParameter(MethodVisitor methodVisitor, int parameterIndex) { this.methodVisitor = methodVisitor; this.parameterIndex = parameterIndex; } @Override public AnnotationVisitor visit(String annotationTypeDescriptor, boolean visible) { return methodVisitor.visitParameterAnnotation(parameterIndex, annotationTypeDescriptor, visible); } @Override public AnnotationVisitor visit(String annotationTypeDescriptor, boolean visible, int typeReference, String typePath) { return methodVisitor.visitTypeAnnotation(typeReference, TypePath.fromString(typePath), annotationTypeDescriptor, visible); } } /** * Target for an annotation that is written to a Java field. 
*/ @HashCodeAndEqualsPlugin.Enhance class OnField implements Target { /** * The field visitor to write the annotation to. */ private final FieldVisitor fieldVisitor; /** * Creates a new wrapper for a Java field. * * @param fieldVisitor The ASM field visitor to which the annotations are appended to. */ public OnField(FieldVisitor fieldVisitor) { this.fieldVisitor = fieldVisitor; } @Override public AnnotationVisitor visit(String annotationTypeDescriptor, boolean visible) { return fieldVisitor.visitAnnotation(annotationTypeDescriptor, visible); } @Override public AnnotationVisitor visit(String annotationTypeDescriptor, boolean visible, int typeReference, String typePath) { return fieldVisitor.visitTypeAnnotation(typeReference, TypePath.fromString(typePath), annotationTypeDescriptor, visible); } } } /** * A default implementation for an annotation appender that writes annotations to a given byte consumer * represented by an ASM {@link org.objectweb.asm.AnnotationVisitor}. */ @HashCodeAndEqualsPlugin.Enhance class Default implements AnnotationAppender { /** * The target onto which an annotation write process is to be applied. */ private final Target target; /** * Creates a default annotation appender. * * @param target The target to which annotations are written to. */ public Default(Target target) { this.target = target; } /** * Handles the writing of a single annotation to an annotation visitor. * * @param annotationVisitor The annotation visitor the write process is to be applied on. * @param annotation The annotation to be written. * @param annotationValueFilter The value filter to apply for discovering which values of an annotation should be written. 
*/ private static void handle(AnnotationVisitor annotationVisitor, AnnotationDescription annotation, AnnotationValueFilter annotationValueFilter) { for (MethodDescription.InDefinedShape methodDescription : annotation.getAnnotationType().getDeclaredMethods()) { if (annotationValueFilter.isRelevant(annotation, methodDescription)) { apply(annotationVisitor, methodDescription.getReturnType().asErasure(), methodDescription.getName(), annotation.getValue(methodDescription).resolve()); } } annotationVisitor.visitEnd(); } /** * Performs the writing of a given annotation value to an annotation visitor. * * @param annotationVisitor The annotation visitor the write process is to be applied on. * @param valueType The type of the annotation value. * @param name The name of the annotation type. * @param value The annotation's value. */ public static void apply(AnnotationVisitor annotationVisitor, TypeDescription valueType, String name, Object value) { if (valueType.isArray()) { // The Android emulator reads annotation arrays as annotation types. Therefore, this check needs to come first. 
AnnotationVisitor arrayVisitor = annotationVisitor.visitArray(name); int length = Array.getLength(value); TypeDescription componentType = valueType.getComponentType(); for (int index = 0; index < length; index++) { apply(arrayVisitor, componentType, NO_NAME, Array.get(value, index)); } arrayVisitor.visitEnd(); } else if (valueType.isAnnotation()) { handle(annotationVisitor.visitAnnotation(name, valueType.getDescriptor()), (AnnotationDescription) value, AnnotationValueFilter.Default.APPEND_DEFAULTS); } else if (valueType.isEnum()) { annotationVisitor.visitEnum(name, valueType.getDescriptor(), ((EnumerationDescription) value).getValue()); } else if (valueType.represents(Class.class)) { annotationVisitor.visit(name, Type.getType(((TypeDescription) value).getDescriptor())); } else { annotationVisitor.visit(name, value); } } @Override public AnnotationAppender append(AnnotationDescription annotationDescription, AnnotationValueFilter annotationValueFilter) { switch (annotationDescription.getRetention()) { case RUNTIME: doAppend(annotationDescription, true, annotationValueFilter); break; case CLASS: doAppend(annotationDescription, false, annotationValueFilter); break; case SOURCE: break; default: throw new IllegalStateException("Unexpected retention policy: " + annotationDescription.getRetention()); } return this; } /** * Tries to append a given annotation by reflectively reading an annotation. * * @param annotation The annotation to be written. * @param visible {@code true} if this annotation should be treated as visible at runtime. * @param annotationValueFilter The annotation value filter to apply. 
*/ private void doAppend(AnnotationDescription annotation, boolean visible, AnnotationValueFilter annotationValueFilter) { handle(target.visit(annotation.getAnnotationType().getDescriptor(), visible), annotation, annotationValueFilter); } @Override public AnnotationAppender append(AnnotationDescription annotationDescription, AnnotationValueFilter annotationValueFilter, int typeReference, String typePath) { switch (annotationDescription.getRetention()) { case RUNTIME: doAppend(annotationDescription, true, annotationValueFilter, typeReference, typePath); break; case CLASS: doAppend(annotationDescription, false, annotationValueFilter, typeReference, typePath); break; case SOURCE: break; default: throw new IllegalStateException("Unexpected retention policy: " + annotationDescription.getRetention()); } return this; } /** * Tries to append a given annotation by reflectively reading an annotation. * * @param annotation The annotation to be written. * @param visible {@code true} if this annotation should be treated as visible at runtime. * @param annotationValueFilter The annotation value filter to apply. * @param typeReference The type annotation's type reference. * @param typePath The type annotation's type path. */ private void doAppend(AnnotationDescription annotation, boolean visible, AnnotationValueFilter annotationValueFilter, int typeReference, String typePath) { handle(target.visit(annotation.getAnnotationType().getDescriptor(), visible, typeReference, typePath), annotation, annotationValueFilter); } } /** * A type visitor that visits all type annotations of a generic type and writes any discovered annotation to a * supplied {@link AnnotationAppender}. */ @HashCodeAndEqualsPlugin.Enhance class ForTypeAnnotations implements TypeDescription.Generic.Visitor<AnnotationAppender> { /** * Indicates that type variables type annotations are written on a Java type. 
*/ public static final boolean VARIABLE_ON_TYPE = true; /** * Indicates that type variables type annotations are written on a Java method or constructor. */ public static final boolean VARIABLE_ON_INVOKEABLE = false; /** * Represents an empty type path. */ private static final String EMPTY_TYPE_PATH = ""; /** * Represents a step to a component type within a type path. */ private static final char COMPONENT_TYPE_PATH = '['; /** * Represents a wildcard type step within a type path. */ private static final char WILDCARD_TYPE_PATH = '*'; /** * Represents a (reversed) type step to an inner class within a type path. */ private static final char INNER_CLASS_PATH = '.'; /** * Represents an index type delimiter within a type path. */ private static final char INDEXED_TYPE_DELIMITER = ';'; /** * The index that indicates that super type type annotations are written onto a super class. */ private static final int SUPER_CLASS_INDEX = -1; /** * The annotation appender to use. */ private final AnnotationAppender annotationAppender; /** * The annotation value filter to use. */ private final AnnotationValueFilter annotationValueFilter; /** * The type reference to use. */ private final int typeReference; /** * The type path to use. */ private final String typePath; /** * Creates a new type annotation appending visitor for an empty type path. * * @param annotationAppender The annotation appender to use. * @param annotationValueFilter The annotation value filter to use. * @param typeReference The type reference to use. */ protected ForTypeAnnotations(AnnotationAppender annotationAppender, AnnotationValueFilter annotationValueFilter, TypeReference typeReference) { this(annotationAppender, annotationValueFilter, typeReference.getValue(), EMPTY_TYPE_PATH); } /** * Creates a new type annotation appending visitor. * * @param annotationAppender The annotation appender to use. * @param annotationValueFilter The annotation value filter to use. * @param typeReference The type reference to use. 
* @param typePath The type path to use. */ protected ForTypeAnnotations(AnnotationAppender annotationAppender, AnnotationValueFilter annotationValueFilter, int typeReference, String typePath) { this.annotationAppender = annotationAppender; this.annotationValueFilter = annotationValueFilter; this.typeReference = typeReference; this.typePath = typePath; } /** * Creates a type annotation appender for a type annotations of a super class type. * * @param annotationAppender The annotation appender to write any type annotation to. * @param annotationValueFilter The annotation value filter to apply. * @return A visitor for appending type annotations of a super class. */ public static TypeDescription.Generic.Visitor<AnnotationAppender> ofSuperClass(AnnotationAppender annotationAppender, AnnotationValueFilter annotationValueFilter) { return new ForTypeAnnotations(annotationAppender, annotationValueFilter, TypeReference.newSuperTypeReference(SUPER_CLASS_INDEX)); } /** * Creates a type annotation appender for type annotations of an interface type. * * @param annotationAppender The annotation appender to write any type annotation to. * @param annotationValueFilter The annotation value filter to apply. * @param index The index of the interface type. * @return A visitor for appending type annotations of an interface type. */ public static TypeDescription.Generic.Visitor<AnnotationAppender> ofInterfaceType(AnnotationAppender annotationAppender, AnnotationValueFilter annotationValueFilter, int index) { return new ForTypeAnnotations(annotationAppender, annotationValueFilter, TypeReference.newSuperTypeReference(index)); } /** * Creates a type annotation appender for type annotations of a field's type. * * @param annotationAppender The annotation appender to write any type annotation to. * @param annotationValueFilter The annotation value filter to apply. * @return A visitor for appending type annotations of a field's type. 
*/ public static TypeDescription.Generic.Visitor<AnnotationAppender> ofFieldType(AnnotationAppender annotationAppender, AnnotationValueFilter annotationValueFilter) { return new ForTypeAnnotations(annotationAppender, annotationValueFilter, TypeReference.newTypeReference(TypeReference.FIELD)); } /** * Creates a type annotation appender for type annotations of a method's return type. * * @param annotationAppender The annotation appender to write any type annotation to. * @param annotationValueFilter The annotation value filter to apply. * @return A visitor for appending type annotations of a method's return type. */ public static TypeDescription.Generic.Visitor<AnnotationAppender> ofMethodReturnType(AnnotationAppender annotationAppender, AnnotationValueFilter annotationValueFilter) { return new ForTypeAnnotations(annotationAppender, annotationValueFilter, TypeReference.newTypeReference(TypeReference.METHOD_RETURN)); } /** * Creates a type annotation appender for type annotations of a method's parameter type. * * @param annotationAppender The annotation appender to write any type annotation to. * @param annotationValueFilter The annotation value filter to apply. * @param index The parameter index. * @return A visitor for appending type annotations of a method's parameter type. */ public static TypeDescription.Generic.Visitor<AnnotationAppender> ofMethodParameterType(AnnotationAppender annotationAppender, AnnotationValueFilter annotationValueFilter, int index) { return new ForTypeAnnotations(annotationAppender, annotationValueFilter, TypeReference.newFormalParameterReference(index)); } /** * Creates a type annotation appender for type annotations of a method's exception type. * * @param annotationAppender The annotation appender to write any type annotation to. * @param annotationValueFilter The annotation value filter to apply. * @param index The exception type's index. * @return A visitor for appending type annotations of a method's exception type. 
*/ public static TypeDescription.Generic.Visitor<AnnotationAppender> ofExceptionType(AnnotationAppender annotationAppender, AnnotationValueFilter annotationValueFilter, int index) { return new ForTypeAnnotations(annotationAppender, annotationValueFilter, TypeReference.newExceptionReference(index)); } /** * Creates a type annotation appender for type annotations of a method's receiver type. * * @param annotationAppender The annotation appender to write any type annotation to. * @param annotationValueFilter The annotation value filter to apply. * @return A visitor for appending type annotations of a method's receiver type. */ public static TypeDescription.Generic.Visitor<AnnotationAppender> ofReceiverType(AnnotationAppender annotationAppender, AnnotationValueFilter annotationValueFilter) { return new ForTypeAnnotations(annotationAppender, annotationValueFilter, TypeReference.newTypeReference(TypeReference.METHOD_RECEIVER)); } /** * Appends all supplied type variables to the supplied method appender. * * @param annotationAppender The annotation appender to write any type annotation to. * @param annotationValueFilter The annotation value filter to apply. * @param variableOnType {@code true} if the type variables are declared by a type, {@code false} if they are declared by a method. * @param typeVariables The type variables to append. * @return The resulting annotation appender. */ public static AnnotationAppender ofTypeVariable(AnnotationAppender annotationAppender, AnnotationValueFilter annotationValueFilter, boolean variableOnType, List<? extends TypeDescription.Generic> typeVariables) { return ofTypeVariable(annotationAppender, annotationValueFilter, variableOnType, 0, typeVariables); } /** * Appends all supplied type variables to the supplied method appender. * * @param annotationAppender The annotation appender to write any type annotation to. * @param annotationValueFilter The annotation value filter to apply. 
* @param variableOnType {@code true} if the type variables are declared by a type, {@code false} if they are declared by a method. * @param subListIndex The index of the first type variable to append. All previous type variables are ignored. * @param typeVariables The type variables to append. * @return The resulting annotation appender. */ public static AnnotationAppender ofTypeVariable(AnnotationAppender annotationAppender, AnnotationValueFilter annotationValueFilter, boolean variableOnType, int subListIndex, List<? extends TypeDescription.Generic> typeVariables) { int typeVariableIndex = subListIndex, variableBaseReference, variableBoundBaseBase; if (variableOnType) { variableBaseReference = TypeReference.CLASS_TYPE_PARAMETER; variableBoundBaseBase = TypeReference.CLASS_TYPE_PARAMETER_BOUND; } else { variableBaseReference = TypeReference.METHOD_TYPE_PARAMETER; variableBoundBaseBase = TypeReference.METHOD_TYPE_PARAMETER_BOUND; } for (TypeDescription.Generic typeVariable : typeVariables.subList(subListIndex, typeVariables.size())) { int typeReference = TypeReference.newTypeParameterReference(variableBaseReference, typeVariableIndex).getValue(); for (AnnotationDescription annotationDescription : typeVariable.getDeclaredAnnotations()) { annotationAppender = annotationAppender.append(annotationDescription, annotationValueFilter, typeReference, EMPTY_TYPE_PATH); } int boundIndex = !typeVariable.getUpperBounds().get(0).getSort().isTypeVariable() && typeVariable.getUpperBounds().get(0).isInterface() ? 
1 : 0; for (TypeDescription.Generic typeBound : typeVariable.getUpperBounds()) { annotationAppender = typeBound.accept(new ForTypeAnnotations(annotationAppender, annotationValueFilter, TypeReference.newTypeParameterBoundReference(variableBoundBaseBase, typeVariableIndex, boundIndex++))); } typeVariableIndex++; } return annotationAppender; } @Override public AnnotationAppender onGenericArray(TypeDescription.Generic genericArray) { return genericArray.getComponentType().accept(new ForTypeAnnotations(apply(genericArray, typePath), annotationValueFilter, typeReference, typePath + COMPONENT_TYPE_PATH)); } @Override public AnnotationAppender onWildcard(TypeDescription.Generic wildcard) { TypeList.Generic lowerBounds = wildcard.getLowerBounds(); return (lowerBounds.isEmpty() ? wildcard.getUpperBounds().getOnly() : lowerBounds.getOnly()).accept(new ForTypeAnnotations(apply(wildcard, typePath), annotationValueFilter, typeReference, typePath + WILDCARD_TYPE_PATH)); } @Override public AnnotationAppender onParameterizedType(TypeDescription.Generic parameterizedType) { StringBuilder typePath = new StringBuilder(this.typePath); for (int index = 0; index < parameterizedType.asErasure().getInnerClassCount(); index++) { typePath = typePath.append(INNER_CLASS_PATH); } AnnotationAppender annotationAppender = apply(parameterizedType, typePath.toString()); TypeDescription.Generic ownerType = parameterizedType.getOwnerType(); if (ownerType != null) { annotationAppender = ownerType.accept(new ForTypeAnnotations(annotationAppender, annotationValueFilter, typeReference, this.typePath)); } int index = 0; for (TypeDescription.Generic typeArgument : parameterizedType.getTypeArguments()) { annotationAppender = typeArgument.accept(new ForTypeAnnotations(annotationAppender, annotationValueFilter, typeReference, typePath.toString() + index++ + INDEXED_TYPE_DELIMITER)); } return annotationAppender; } @Override public AnnotationAppender onTypeVariable(TypeDescription.Generic typeVariable) { return 
apply(typeVariable, typePath); } @Override public AnnotationAppender onNonGenericType(TypeDescription.Generic typeDescription) { StringBuilder typePath = new StringBuilder(this.typePath); for (int index = 0; index < typeDescription.asErasure().getInnerClassCount(); index++) { typePath = typePath.append(INNER_CLASS_PATH); } AnnotationAppender annotationAppender = apply(typeDescription, typePath.toString()); if (typeDescription.isArray()) { annotationAppender = typeDescription.getComponentType().accept(new ForTypeAnnotations(annotationAppender, annotationValueFilter, typeReference, this.typePath + COMPONENT_TYPE_PATH)); // Impossible to be inner class } return annotationAppender; } /** * Writes all annotations of the supplied type to this instance's annotation appender. * * @param typeDescription The type of what all annotations should be written of. * @param typePath The type path to use. * @return The resulting annotation appender. */ private AnnotationAppender apply(TypeDescription.Generic typeDescription, String typePath) { AnnotationAppender annotationAppender = this.annotationAppender; for (AnnotationDescription annotationDescription : typeDescription.getDeclaredAnnotations()) { annotationAppender = annotationAppender.append(annotationDescription, annotationValueFilter, typeReference, typePath); } return annotationAppender; } } }
{ "content_hash": "d4595ea0af28a444927844557082b966", "timestamp": "", "source": "github", "line_count": 645, "max_line_length": 172, "avg_line_length": 50.31782945736434, "alnum_prop": 0.6227391773224464, "repo_name": "CodingFabian/byte-buddy", "id": "00edf5baf5fcdba11a5a3a92b9c375d2cd161bda", "size": "32455", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "byte-buddy-dep/src/main/java/net/bytebuddy/implementation/attribute/AnnotationAppender.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "9640446" } ], "symlink_target": "" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xml:lang="en" lang="en"> <head> <meta http-equiv="content-type" content="text/html; charset=UTF-8" /> <title>Play! 2.x Provider for Play! 2.1.x 1.0.0-alpha8 Reference</title> <link rel="stylesheet" type="text/css" href="stylesheet.css" title="style" /> </head> <body> <h3> <a href="allclasses-frame.html" target="packageFrame">All Classes</a> </h3> <h3>Packages</h3> <ul> <li> <a href="com/google/code/play2/provider/play21/package-frame.html" target="packageFrame">com.google.code.play2.provider.play21</a> </li> </ul> </body> </html>
{ "content_hash": "3a31c885f787f55af6a748a20bf1ba96", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 134, "avg_line_length": 29.5, "alnum_prop": 0.6567796610169492, "repo_name": "play2-maven-plugin/play2-maven-plugin.github.io", "id": "8b214141af5db01352041945429fbe2e78b0fcb9", "size": "709", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "play2-maven-plugin/1.0.0-alpha8/play2-providers/play2-provider-play21/xref/overview-frame.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "2793124" }, { "name": "HTML", "bytes": "178221432" }, { "name": "JavaScript", "bytes": "120742" } ], "symlink_target": "" }
Service Catalog maintains metrics through the Prometheus client library and exposes them through the Prometheus http adapter at the Controller's /metrics endpoint. These metrics can be accessed directly via HTTP GET or more commonly scraped by the Prometheus monitoring application which persists metrics and facilitates analysis through a Web UI and powerful query language. Many metrics are not created and exposed until Service Catalog performs operations which would impact the metrics. If you have no Service Brokers defined, there will be no metrics for class or plan count and likely no OSB Client operation metrics. This just means the metric names will not show up, it may look like Prometheus isn't collecting metrics from Service Catalog. So before proceeding, it's recommended you have a Broker defined and created a Service Instance. To view the raw metrics: ``` # setup a port forward so we can curl against Controller Manager $ kubectl get pods -l app=catalog-catalog-controller-manager -n catalog -o name | \ sed 's/^.*\///' | xargs -I{} kubectl port-forward {} -n catalog 8089:8080 & $ curl -s http://localhost:8089/metrics | grep servicecatalog Handling connection for 8089 # HELP servicecatalog_broker_service_class_count Number of services classes by Broker. # TYPE servicecatalog_broker_service_class_count gauge servicecatalog_broker_service_class_count{broker="ups-broker"} 1 # HELP servicecatalog_broker_service_plan_count Number of services classes by Broker. # TYPE servicecatalog_broker_service_plan_count gauge servicecatalog_broker_service_plan_count{broker="ups-broker"} 2 # HELP servicecatalog_osb_request_count Cumulative number of HTTP requests from the OSB Client to the specified Service Broker grouped by broker name, broker method, and response status. 
# TYPE servicecatalog_osb_request_count counter servicecatalog_osb_request_count{broker="ups-broker",method="Bind",status="2xx"} 41 servicecatalog_osb_request_count{broker="ups-broker",method="GetCatalog",status="2xx"} 1 servicecatalog_osb_request_count{broker="ups-broker",method="ProvisionInstance",status="2xx"} 2 ``` Alternatively, and the more common approach to utlizing metrics, deploy Prometheus. [This YAML](prometheus.yml) creates a Prometheus instance preconfigured to gather Kubernetes platform and node metrics. If you deploy the Service Catalog Controller Manager via Helm with the optional `enablePrometheusScrape` parameter set to true (either edit the parameter in [charts/catalog/values.yaml](../../../charts/catalog/values.yaml) or specify "--set enablePrometheusScrape=true" when installing Catalog with helm), this configuration will direct Prometheus to automatically scrape custom metrics exposed from Service Catalog as well. Most any Prometheus configuration for Kubernetes (ie [Prometheus Operator](https://github.com/coreos/prometheus-operator)) will pick up the Service Catalog metrics as long as it's looking for pods with the `prometheus.io/scrape` annotation. To deploy Prometheus, run: ``` $ kubectl create -f contrib/examples/prometheus/prometheus.yml ``` To access the Promentheus application, you must either expose it as a service or provide port forwarding to the Prometheus app: ``` $ kubectl get pods -l app=prometheus -o name | \ sed 's/^.*\///' | \ xargs -I{} kubectl port-forward {} 9090:9090 ``` Now you can view Prometheus at http://localhost:9090. If you navigate to "Status" -> "Targets" you will see the endpoints that Prometheus is scraping. It should include the "catalog-controller-manager" pod if you deployed Catalog with enablePrometheusScrape. If you navigate back to "Graph" and type "catalog" into the expression filter you should see metrics from Service Catlog. 
**The present set of Catalog metics needs to be greatly expanded upon** -- it's really simple to add additional metrics, or drop me (jboyd01) a note if you have ideas but not the time to implement. If you want to add metrics, briefly review [pkg/metrics/metrics.go](../../../pkg/metrics/metrics.go) and [pkg/controller/controller_clusterservicebroker.go](../../../pkg/controller/controller_clusterservicebroker.go) for reference. ## Useful metrics queries tbd ## Helpful Prometheus Links Getting started with Prometheus: https://prometheus.io/docs/prometheus/latest/getting_started/ Basics for Querying Prometheus: https://prometheus.io/docs/prometheus/latest/querying/basics/ Instrumenting your App: https://godoc.org/github.com/prometheus/client_golang/prometheus
{ "content_hash": "d5e8bea692223665d6970c4a11cbed4a", "timestamp": "", "source": "github", "line_count": 87, "max_line_length": 186, "avg_line_length": 51.88505747126437, "alnum_prop": 0.7893221089942402, "repo_name": "staebler/service-catalog", "id": "07b1e51f5cbb14a1777fbb3d18e714cfc366be55", "size": "4536", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "contrib/examples/prometheus/README.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "98489" }, { "name": "Go", "bytes": "3767591" }, { "name": "HTML", "bytes": "30822" }, { "name": "JavaScript", "bytes": "34298" }, { "name": "Makefile", "bytes": "16652" }, { "name": "Python", "bytes": "2031" }, { "name": "Ruby", "bytes": "10517" }, { "name": "Shell", "bytes": "223279" }, { "name": "Smarty", "bytes": "1707" } ], "symlink_target": "" }
package org.amv.access.client.android.model; public class AccessCertificateDto { public String id; public String device_access_certificate; public String vehicle_access_certificate; public String name; }
{ "content_hash": "5a5e675997d78213560caf7cbfce38e1", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 45, "avg_line_length": 27.625, "alnum_prop": 0.7647058823529411, "repo_name": "amvnetworks/amv-access-api-poc", "id": "b459fb59a2b80c7a0e2bc8a3996bc3f1ee50d151", "size": "221", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "client-model-android/src/main/java/org/amv/access/client/android/model/AccessCertificateDto.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "48955" } ], "symlink_target": "" }
Filter ====== [![Build Status](https://travis-ci.org/SugiPHP/Filter.png)](https://travis-ci.org/SugiPHP/Filter) [![Scrutinizer Code Quality](https://scrutinizer-ci.com/g/SugiPHP/Filter/badges/quality-score.png?b=master)](https://scrutinizer-ci.com/g/SugiPHP/Filter/?branch=master) Simple filter helper functions. Integers -------- Validates integer value in a range within $min and $max if they are not false. Returns default value if the given is not an integer or it's out of the range. ```php <?php Filter::int($value, $min = false, $max = false, $default = false); // Examples: Filter::int(0); // 0 Filter::int(""); // false Filter::int(1.0); // 1 Filter::int(1.1); // false Filter::int(1, 2); // false - outside the minimum range Filter::int(5, 2, 4); // false - outside maximum allowed value Filter::int("1"); // 1 Filter::int("1.0"); // false Filter::int("1a"); // false Filter::int("hi", false, false, 77); // 77 - Returns the default value ?> ``` Usually a developer needs to validate something that was provided by a user, so above example are rarely used. Instead several other filters for integers are more often used: ```php <?php // Validate integer from GET parameter - $_GET["key"]. // Default value is returned if the key is not found, or cannot be converted to an integer, // or the value is outside the min / max range. Filter::intGet($key, $min_range = false, $max_range = false, $default = false); // for example if the URL is http://example.com?page=12 Filter::intGet("page", 1, false, 1); // returns 12 Filter::intGet("foo"); // returns FALSE // Validate integer from POST parameter - $_POST["key"]. 
// Works like intGet() Filter::intPost($key, $min_range = false, $max_range = false, $default = false); // Validate integer from COOKIES - $_COOKIE[$key] Filter::intCookie($key, $min_range = false, $max_range = false, $default = false); // and from SESSION Filter::intSession($key, $min_range = false, $max_range = false, $default = false); ?> ``` Strings ------- Validates string values. You can set size restrictions - minimum and maximum string lengths. Returns default value if the given string is outside the boundaries or not a string. ```php <?php Filter::str($value, $minLength = 0, $maxLength = false, $default = false); // Examples: Filter::str("a"); // "a" Filter::str(1); // "1" Filter::str(" a "); // "a" Filter::str(""); // "" Filter::str("", 1); // false Filter::str(" a ", 1); // "a" Filter::str("ab", 1, 1); // false Filter::str("abc", 1, 2, "error"); // "error" Filter::str("abc", 1, false, "error"); // "abc" // Slightly different version of the Filter::str() method is Filter::plain() // This will firstly strip all tags and then it will act exactly like Filter::str() method. 
Filter::plain($value, $minLength = 0, $maxLength = false, $default = false); ?> ``` Similar to integer filters there are some for validating string from $_GET, $_POST and $_COOKIE arrays ```php <?php Filter::strGet($key, $minLength = 0, $maxLength = false, $default = false); Filter::strPost($key, $minLength = 0, $maxLength = false, $default = false); Filter::strCookie($key, $minLength = 0, $maxLength = false, $default = false); Filter::strSession($key, $minLength = 0, $maxLength = false, $default = false); // Validates plain text from $_GET, $_POST, $_COOKIE and $_SESSION parameters Filter::plainGet($key, $minLength = 0, $maxLength = false, $default = false); Filter::plainPost($key, $minLength = 0, $maxLength = false, $default = false); Filter::plainCookie($key, $minLength = 0, $maxLength = false, $default = false); Filter::plainSession($key, $minLength = 0, $maxLength = false, $default = false); ?> ``` URL's ----- Validates URL, accepting only http or https protocols ```php <?php Filter::url($value, $default = false); // Examples: Filter::url("http://igrivi.com"); // true Filter::url("igrivi.com"); // false Filter::url("http://localhost"); // false - The filter is mainly used for user inputs, so when we need URL, we intentionally don't want localhost Filter::url("8.8.8.8"); // false Filter::url("http://somedomain.com:81"); // true Filter::url("http://somedomain.com:6"); // false ?> ``` Emails ------ Validates email addresses. If third parameter is set to true it will check for MX record(s) for mail's domain. If the email is not valid or the MX record is not present the default value will be returned. ```php <?php Filter::email($value, $default = false, $checkMxRecord = false); ?> ``` Arrays ------ ```php <?php // Checks the existence of the key in a given array, returning default value if $key is not present. 
Filter::key($key, $array, $default = null); // Example: Filter::key("foo", array("one", "foo" => "bar", "foobar" => 2)); // "bar" // Validates $_GET[$key] value. Filter::get($key, $default = null); // Validates $_POST[$key] value. Filter::post($key, $default = null); // Validates $_COOKIE[$key] value. Filter::cookie($key, $default = null); // Validates $_SESSION[$key] value. Filter::session($key, $deafult = null); ?> ```
{ "content_hash": "fbd6999678c3633a9f357f043b9e6916", "timestamp": "", "source": "github", "line_count": 152, "max_line_length": 168, "avg_line_length": 33.46052631578947, "alnum_prop": 0.6600471883602045, "repo_name": "SugiPHP/Filter", "id": "468e84260bff703e2316834c0af30e1022977631", "size": "5086", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "18076" } ], "symlink_target": "" }
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.1-257 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2016.04.07 at 07:27:56 AM EDT // package com.cisco.dvbu.ps.deploytool.modules; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; /** * * The set option can be repeated to set different attributes. * * Multiple class paths can be set with a single statement. * * • The Composite resource name is given by "resourcePath". * * • The "resourceType" is equal to “DATA_SOURCE” when attribute is classpath, host, port, database, * user, or password. * * • The "attribute" can be: classpath, host, port, database, user, or password: * • user "login" or "username" or error depending on source type * • password "password" or error depending on source type * • user2 "appUserName" or error if not Oracle EBS * • password2 "appPassword" or error if not Oracle EBS * • host "urlIP" or "dsn" or "server" or "appServer" or "url" or "root" or error * depending on the source type * • port "urlPort" or "port" or error depending on source type * • database "urlDatabaseName" or "enterprise" or "appServer" or error depending on the source type * • path "root" or "url" or error depending on source type * • annotation * * • Set "value" to a valid entry for the selected attribute. * String values can be enclosed with double quotes to allow for spaces. * * For Windows systems, use the semicolon delimiter: * C:\DevZone\ATeam\Jars\my.jar;D:\Current\Ref\classes * For UNIX systems, use colons as the delimiter: * /lib/ext/classes:/lib/src/jars * * * <p>Java class for ArchiveResourceModificationType complex type. 
* * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="ArchiveResourceModificationType"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="resourceAttribute" type="{http://www.dvbu.cisco.com/ps/deploytool/modules}ArchiveResourceAttributeModificationType" maxOccurs="unbounded"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "ArchiveResourceModificationType", propOrder = { "resourceAttribute" }) public class ArchiveResourceModificationType { @XmlElement(required = true) protected List<ArchiveResourceAttributeModificationType> resourceAttribute; /** * Gets the value of the resourceAttribute property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the resourceAttribute property. * * <p> * For example, to add a new item, do as follows: * <pre> * getResourceAttribute().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link ArchiveResourceAttributeModificationType } * * */ public List<ArchiveResourceAttributeModificationType> getResourceAttribute() { if (resourceAttribute == null) { resourceAttribute = new ArrayList<ArchiveResourceAttributeModificationType>(); } return this.resourceAttribute; } }
{ "content_hash": "146883e2e2dfd23e60c8e675ef2a2e45", "timestamp": "", "source": "github", "line_count": 107, "max_line_length": 168, "avg_line_length": 37.532710280373834, "alnum_prop": 0.6755478087649402, "repo_name": "dvbu-test/PDTool", "id": "6e569e16dbda46a7b86e7e2637722b1b64a8f317", "size": "4016", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "PDToolModules/src/com/cisco/dvbu/ps/deploytool/modules/ArchiveResourceModificationType.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "1004538" }, { "name": "HTML", "bytes": "852584" }, { "name": "Java", "bytes": "7945910" }, { "name": "Perl", "bytes": "9840" }, { "name": "Python", "bytes": "3299" }, { "name": "Shell", "bytes": "117180" }, { "name": "XSLT", "bytes": "242648" } ], "symlink_target": "" }
module ElectricSheep module Metadata class Job < Base include Pipe include Monitor option :id, required: true option :description option :private_key attr_reader :schedule, :starts_with def start_with!(resource) @starts_with = resource end def notifier(metadata) notifiers << metadata end def validate(config) queue.each do |step| unless step.validate(config) errors.add(:base, "Invalid step #{step.to_s}", step.errors) end end super end def schedule!(schedule) @schedule = schedule end def on_schedule(&block) if @schedule && @schedule.expired? @schedule.update! yield self end end def name description.nil? ? "#{id}" : "#{description} (#{id})" end def notifiers @notifiers ||= [] end end end end
{ "content_hash": "e40497c24e6e76e7e37977b59c45da25", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 71, "avg_line_length": 19.058823529411764, "alnum_prop": 0.5390946502057613, "repo_name": "benitoDeLaCasita/electric_sheep", "id": "981edd55eece1eb53cb0847cb79df0dcac9280a4", "size": "972", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/electric_sheep/metadata/job.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Makefile", "bytes": "1679" }, { "name": "Ruby", "bytes": "205424" }, { "name": "Shell", "bytes": "3040" } ], "symlink_target": "" }
ACCEPTED #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "aaecda85e27a2437ba07679fac32bc91", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.307692307692308, "alnum_prop": 0.6940298507462687, "repo_name": "mdoering/backbone", "id": "3fe645011889ac030d94696a11b5dea75f4209ef", "size": "185", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Cyclolobium/Cyclolobium louveira/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package com.vmware.vim25; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for VmUuidConflictEvent complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="VmUuidConflictEvent"> * &lt;complexContent> * &lt;extension base="{urn:vim25}VmEvent"> * &lt;sequence> * &lt;element name="conflictedVm" type="{urn:vim25}VmEventArgument"/> * &lt;element name="uuid" type="{http://www.w3.org/2001/XMLSchema}string"/> * &lt;/sequence> * &lt;/extension> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "VmUuidConflictEvent", propOrder = { "conflictedVm", "uuid" }) public class VmUuidConflictEvent extends VmEvent { @XmlElement(required = true) protected VmEventArgument conflictedVm; @XmlElement(required = true) protected String uuid; /** * Gets the value of the conflictedVm property. * * @return * possible object is * {@link VmEventArgument } * */ public VmEventArgument getConflictedVm() { return conflictedVm; } /** * Sets the value of the conflictedVm property. * * @param value * allowed object is * {@link VmEventArgument } * */ public void setConflictedVm(VmEventArgument value) { this.conflictedVm = value; } /** * Gets the value of the uuid property. * * @return * possible object is * {@link String } * */ public String getUuid() { return uuid; } /** * Sets the value of the uuid property. * * @param value * allowed object is * {@link String } * */ public void setUuid(String value) { this.uuid = value; } }
{ "content_hash": "40b9d9356ae31f1a9bbbdd95b154011a", "timestamp": "", "source": "github", "line_count": 92, "max_line_length": 95, "avg_line_length": 23.77173913043478, "alnum_prop": 0.5743026977594878, "repo_name": "jdgwartney/vsphere-ws", "id": "cacb6fb8946330ae21718bcc109951687bc4c642", "size": "2187", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "java/JAXWS/samples/com/vmware/vim25/VmUuidConflictEvent.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "1349" }, { "name": "C#", "bytes": "775222" }, { "name": "C++", "bytes": "14040" }, { "name": "CSS", "bytes": "48826" }, { "name": "Java", "bytes": "13417097" }, { "name": "JavaScript", "bytes": "24681" }, { "name": "Shell", "bytes": "9982" }, { "name": "Smalltalk", "bytes": "14906" } ], "symlink_target": "" }
layout: page title: Vision Global Award Ceremony date: 2016-05-24 author: Dorothy Chang tags: weekly links, java status: published summary: Praesent ex arcu, ornare et quam. banner: images/banner/leisure-02.jpg booking: startDate: 02/18/2016 endDate: 02/19/2016 ctyhocn: PIEFLHX groupCode: VGAC published: true --- Duis lobortis sapien leo, ac efficitur nisi vehicula eu. Mauris vehicula faucibus volutpat. Nunc pretium non velit ut rhoncus. Sed imperdiet ex non orci porttitor porttitor. Donec lobortis massa eget eros commodo venenatis. Integer ac lacinia tellus. In rhoncus dolor felis. Pellentesque placerat neque id facilisis euismod. Nullam porttitor ac magna tempus finibus. Curabitur eget ante vitae nulla viverra ultricies. Suspendisse eget nisl neque. Vivamus euismod felis justo, sed fringilla ligula volutpat vitae. Mauris id mattis odio. * Nullam pellentesque lorem sit amet ligula semper luctus. Cras varius ut eros non iaculis. Morbi dapibus magna dolor, eget vestibulum felis congue id. Curabitur neque lacus, fermentum sed facilisis ac, eleifend id mi. Donec malesuada aliquam dolor, sit amet interdum sem. Donec at ligula lobortis, finibus nisl non, scelerisque nunc. Proin auctor justo mattis, luctus eros a, eleifend erat. Etiam at turpis egestas, aliquet velit id, ullamcorper diam. Suspendisse potenti.
{ "content_hash": "96a4f8b05817af1bfdb83760f0ae50d1", "timestamp": "", "source": "github", "line_count": 20, "max_line_length": 535, "avg_line_length": 66.75, "alnum_prop": 0.80374531835206, "repo_name": "KlishGroup/prose-pogs", "id": "61cba53add4dc74671152cfa9247ab0a3a870a45", "size": "1339", "binary": false, "copies": "1", "ref": "refs/heads/gh-pages", "path": "pogs/P/PIEFLHX/VGAC/index.md", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
<?php namespace common\models; use Yii; use yii\behaviors\TimestampBehavior; use yii\db\Expression; /** * This is the model class for table "codes". * * @property integer $id * @property string $code * @property integer $used * @property string $created_at * @property string $updated_at */ class Codes extends \yii\db\ActiveRecord { public function behaviors() { return [ "timestamp" => [ 'class' => TimestampBehavior::className(), 'createdAtAttribute' => 'created_at', 'updatedAtAttribute' => 'updated_at', 'value' => new Expression('NOW()'), ], 'codegen' => [ 'class' => 'common\behaviors\Codegen', "out_attribute" => 'code' ] ]; } /** * @inheritdoc */ public static function tableName() { return 'codes'; } /** * @inheritdoc */ public function rules() { return [ [['used', 'user_id'], 'integer'], [['created_at', 'updated_at'], 'safe'], [['code'], 'string', 'max' => 255], ]; } /** * @inheritdoc */ public function attributeLabels() { return [ 'id' => Yii::t('app', 'ID'), 'code' => Yii::t('app', 'Code'), 'used' => Yii::t('app', 'Used'), 'user_id' => Yii::t('app', 'User ID'), 'created_at' => Yii::t('app', 'Created At'), 'updated_at' => Yii::t('app', 'Updated At'), ]; } }
{ "content_hash": "8076fda14abfb5fc3f8d45dd5d203792", "timestamp": "", "source": "github", "line_count": 70, "max_line_length": 58, "avg_line_length": 22.685714285714287, "alnum_prop": 0.4628463476070529, "repo_name": "djrosl/lkpro_back", "id": "d3d12484581fb0fc8fef21df77919416d0a30050", "size": "1588", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "common/models/Codes.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Batchfile", "bytes": "1541" }, { "name": "CSS", "bytes": "3736" }, { "name": "JavaScript", "bytes": "4336" }, { "name": "PHP", "bytes": "433274" } ], "symlink_target": "" }
package org.spongepowered.server.launch.transformer.deobf; public interface SrgRemapper { SrgRemapper NONE = new SrgRemapper(){}; default String mapSrgField(String name) { return name; } default String mapSrgMethod(String name) { return name; } // Method name + descriptor default String mapSrgMethodIdentifier(String identifier) { int pos = identifier.indexOf('('); return mapSrgMethod(identifier.substring(0, pos)).concat(identifier.substring(pos)); } }
{ "content_hash": "1cc8780ac6f45869b16c18bdde28c939", "timestamp": "", "source": "github", "line_count": 22, "max_line_length": 92, "avg_line_length": 23.954545454545453, "alnum_prop": 0.6793168880455408, "repo_name": "SpongePowered/SpongeVanilla", "id": "b6a4ed4231c2964b053aaba4f980e6298bae8b61", "size": "1774", "binary": false, "copies": "1", "ref": "refs/heads/stable-7", "path": "src/main/java/org/spongepowered/server/launch/transformer/deobf/SrgRemapper.java", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "461438" }, { "name": "Shell", "bytes": "1072" } ], "symlink_target": "" }
<cs-parameters-pair [name]="'VM_PAGE.NETWORK_DETAILS.ipAddress'" [value]="ipWithSuffix | translate" ></cs-parameters-pair> <cs-parameters-pair *ngIf="nic.macaddress" [name]="'VM_PAGE.NETWORK_DETAILS.macAddress'" [value]="nic.macaddress" ></cs-parameters-pair> <cs-parameters-pair *ngIf="nic.gateway" [name]="'VM_PAGE.NETWORK_DETAILS.gateway'" [value]="nic.gateway" ></cs-parameters-pair> <cs-parameters-pair *ngIf="nic.type" [name]="'VM_PAGE.NETWORK_DETAILS.type'" [value]="nic.type" ></cs-parameters-pair> <cs-parameters-pair *ngIf="nic.ip6address" [name]="'VM_PAGE.NETWORK_DETAILS.ip6Address'" [value]="nic.ip6address" ></cs-parameters-pair> <cs-parameters-pair *ngIf="nic.ip6cidr" [name]="'VM_PAGE.NETWORK_DETAILS.ip6Cidr'" [value]="nic.ip6cidr" ></cs-parameters-pair> <cs-parameters-pair *ngIf="nic.ip6gateway" [name]="'VM_PAGE.NETWORK_DETAILS.ip6Gateway'" [value]="nic.ip6gateway" ></cs-parameters-pair>
{ "content_hash": "d1800a7934a9a81172e814224915881d", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 47, "avg_line_length": 27.142857142857142, "alnum_prop": 0.6957894736842105, "repo_name": "bwsw/cloudstack-ui", "id": "56356b18a0d9ec1863f807ea444ea601e9ef6c66", "size": "950", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/app/vm/vm-sidebar/network-detail/nics/nic/nic-fields/nic-fields.component.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "68760" }, { "name": "Dockerfile", "bytes": "1348" }, { "name": "HTML", "bytes": "299264" }, { "name": "JavaScript", "bytes": "9660" }, { "name": "Shell", "bytes": "9443" }, { "name": "TypeScript", "bytes": "2030848" } ], "symlink_target": "" }
<?php defined('BASEPATH') OR exit('No direct script access allowed'); /* |-------------------------------------------------------------------------- | Base Site URL |-------------------------------------------------------------------------- | | URL to your CodeIgniter root. Typically this will be your base URL, | WITH a trailing slash: | | http://example.com/ | | WARNING: You MUST set this value! | | If it is not set, then CodeIgniter will try guess the protocol and path | your installation, but due to security concerns the hostname will be set | to $_SERVER['SERVER_ADDR'] if available, or localhost otherwise. | The auto-detection mechanism exists only for convenience during | development and MUST NOT be used in production! | | If you need to allow multiple domains, remember that this file is still | a PHP script and you can easily do that on your own. | */ $config['base_url'] = 'http://www.vakhuis.dvc-icta.nl/'; /* |-------------------------------------------------------------------------- | Index File |-------------------------------------------------------------------------- | | Typically this will be your index.php file, unless you've renamed it to | something else. If you are using mod_rewrite to remove the page set this | variable so that it is blank. | */ $config['index_page'] = ''; /* |-------------------------------------------------------------------------- | URI PROTOCOL |-------------------------------------------------------------------------- | | This item determines which server global should be used to retrieve the | URI string. The default setting of 'REQUEST_URI' works for most servers. | If your links do not seem to work, try one of the other delicious flavors: | | 'REQUEST_URI' Uses $_SERVER['REQUEST_URI'] | 'QUERY_STRING' Uses $_SERVER['QUERY_STRING'] | 'PATH_INFO' Uses $_SERVER['PATH_INFO'] | | WARNING: If you set this to 'PATH_INFO', URIs will always be URL-decoded! 
*/ $config['uri_protocol'] = 'REQUEST_URI'; /* |-------------------------------------------------------------------------- | URL suffix |-------------------------------------------------------------------------- | | This option allows you to add a suffix to all URLs generated by CodeIgniter. | For more information please see the user guide: | | https://codeigniter.com/user_guide/general/urls.html */ $config['url_suffix'] = ''; /* |-------------------------------------------------------------------------- | Default Language |-------------------------------------------------------------------------- | | This determines which set of language files should be used. Make sure | there is an available translation if you intend to use something other | than english. | */ $config['language'] = 'english'; /* |-------------------------------------------------------------------------- | Default Character Set |-------------------------------------------------------------------------- | | This determines which character set is used by default in various methods | that require a character set to be provided. | | See http://php.net/htmlspecialchars for a list of supported charsets. | */ $config['charset'] = 'UTF-8'; /* |-------------------------------------------------------------------------- | Enable/Disable System Hooks |-------------------------------------------------------------------------- | | If you would like to use the 'hooks' feature you must enable it by | setting this variable to TRUE (boolean). See the user guide for details. | */ $config['enable_hooks'] = FALSE; /* |-------------------------------------------------------------------------- | Class Extension Prefix |-------------------------------------------------------------------------- | | This item allows you to set the filename/classname prefix when extending | native libraries. 
For more information please see the user guide: | | https://codeigniter.com/user_guide/general/core_classes.html | https://codeigniter.com/user_guide/general/creating_libraries.html | */ $config['subclass_prefix'] = 'MY_'; /* |-------------------------------------------------------------------------- | Composer auto-loading |-------------------------------------------------------------------------- | | Enabling this setting will tell CodeIgniter to look for a Composer | package auto-loader script in application/vendor/autoload.php. | | $config['composer_autoload'] = TRUE; | | Or if you have your vendor/ directory located somewhere else, you | can opt to set a specific path as well: | | $config['composer_autoload'] = '/path/to/vendor/autoload.php'; | | For more information about Composer, please visit http://getcomposer.org/ | | Note: This will NOT disable or override the CodeIgniter-specific | autoloading (application/config/autoload.php) */ $config['composer_autoload'] = FALSE; /* |-------------------------------------------------------------------------- | Allowed URL Characters |-------------------------------------------------------------------------- | | This lets you specify which characters are permitted within your URLs. | When someone tries to submit a URL with disallowed characters they will | get a warning message. | | As a security measure you are STRONGLY encouraged to restrict URLs to | as few characters as possible. By default only these are allowed: a-z 0-9~%.:_- | | Leave blank to allow all characters -- but only if you are insane. | | The configured value is actually a regular expression character group | and it will be executed as: ! preg_match('/^[<permitted_uri_chars>]+$/i | | DO NOT CHANGE THIS UNLESS YOU FULLY UNDERSTAND THE REPERCUSSIONS!! 
| */ $config['permitted_uri_chars'] = 'a-z 0-9~%.:_\-'; /* |-------------------------------------------------------------------------- | Enable Query Strings |-------------------------------------------------------------------------- | | By default CodeIgniter uses search-engine friendly segment based URLs: | example.com/who/what/where/ | | You can optionally enable standard query string based URLs: | example.com?who=me&what=something&where=here | | Options are: TRUE or FALSE (boolean) | | The other items let you set the query string 'words' that will | invoke your controllers and its functions: | example.com/index.php?c=controller&m=function | | Please note that some of the helpers won't work as expected when | this feature is enabled, since CodeIgniter is designed primarily to | use segment based URLs. | */ $config['enable_query_strings'] = FALSE; $config['controller_trigger'] = 'c'; $config['function_trigger'] = 'm'; $config['directory_trigger'] = 'd'; /* |-------------------------------------------------------------------------- | Allow $_GET array |-------------------------------------------------------------------------- | | By default CodeIgniter enables access to the $_GET array. If for some | reason you would like to disable it, set 'allow_get_array' to FALSE. | | WARNING: This feature is DEPRECATED and currently available only | for backwards compatibility purposes! | */ $config['allow_get_array'] = TRUE; /* |-------------------------------------------------------------------------- | Error Logging Threshold |-------------------------------------------------------------------------- | | You can enable error logging by setting a threshold over zero. The | threshold determines what gets logged. 
Threshold options are: | | 0 = Disables logging, Error logging TURNED OFF | 1 = Error Messages (including PHP errors) | 2 = Debug Messages | 3 = Informational Messages | 4 = All Messages | | You can also pass an array with threshold levels to show individual error types | | array(2) = Debug Messages, without Error Messages | | For a live site you'll usually only enable Errors (1) to be logged otherwise | your log files will fill up very fast. | */ $config['log_threshold'] = 0; /* |-------------------------------------------------------------------------- | Error Logging Directory Path |-------------------------------------------------------------------------- | | Leave this BLANK unless you would like to set something other than the default | application/logs/ directory. Use a full server path with trailing slash. | */ $config['log_path'] = ''; /* |-------------------------------------------------------------------------- | Log File Extension |-------------------------------------------------------------------------- | | The default filename extension for log files. The default 'php' allows for | protecting the log files via basic scripting, when they are to be stored | under a publicly accessible directory. | | Note: Leaving it blank will default to 'php'. | */ $config['log_file_extension'] = ''; /* |-------------------------------------------------------------------------- | Log File Permissions |-------------------------------------------------------------------------- | | The file system permissions to be applied on newly created log files. | | IMPORTANT: This MUST be an integer (no quotes) and you MUST use octal | integer notation (i.e. 0700, 0644, etc.) */ $config['log_file_permissions'] = 0644; /* |-------------------------------------------------------------------------- | Date Format for Logs |-------------------------------------------------------------------------- | | Each item that is logged has an associated date. 
You can use PHP date | codes to set your own date formatting | */ $config['log_date_format'] = 'Y-m-d H:i:s'; /* |-------------------------------------------------------------------------- | Error Views Directory Path |-------------------------------------------------------------------------- | | Leave this BLANK unless you would like to set something other than the default | application/views/errors/ directory. Use a full server path with trailing slash. | */ $config['error_views_path'] = ''; /* |-------------------------------------------------------------------------- | Cache Directory Path |-------------------------------------------------------------------------- | | Leave this BLANK unless you would like to set something other than the default | application/cache/ directory. Use a full server path with trailing slash. | */ $config['cache_path'] = ''; /* |-------------------------------------------------------------------------- | Cache Include Query String |-------------------------------------------------------------------------- | | Whether to take the URL query string into consideration when generating | output cache files. Valid options are: | | FALSE = Disabled | TRUE = Enabled, take all query parameters into account. | Please be aware that this may result in numerous cache | files generated for the same page over and over again. | array('q') = Enabled, but only take into account the specified list | of query parameters. | */ $config['cache_query_string'] = FALSE; /* |-------------------------------------------------------------------------- | Encryption Key |-------------------------------------------------------------------------- | | If you use the Encryption class, you must set an encryption key. | See the user guide for more info. 
| | https://codeigniter.com/user_guide/libraries/encryption.html | */ $config['encryption_key'] = ''; /* |-------------------------------------------------------------------------- | Session Variables |-------------------------------------------------------------------------- | | 'sess_driver' | | The storage driver to use: files, database, redis, memcached | | 'sess_cookie_name' | | The session cookie name, must contain only [0-9a-z_-] characters | | 'sess_expiration' | | The number of SECONDS you want the session to last. | Setting to 0 (zero) means expire when the browser is closed. | | 'sess_save_path' | | The location to save sessions to, driver dependent. | | For the 'files' driver, it's a path to a writable directory. | WARNING: Only absolute paths are supported! | | For the 'database' driver, it's a table name. | Please read up the manual for the format with other session drivers. | | IMPORTANT: You are REQUIRED to set a valid save path! | | 'sess_match_ip' | | Whether to match the user's IP address when reading the session data. | | WARNING: If you're using the database driver, don't forget to update | your session table's PRIMARY KEY when changing this setting. | | 'sess_time_to_update' | | How many seconds between CI regenerating the session ID. | | 'sess_regenerate_destroy' | | Whether to destroy session data associated with the old session ID | when auto-regenerating the session ID. When set to FALSE, the data | will be later deleted by the garbage collector. | | Other session cookie settings are shared with the rest of the application, | except for 'cookie_prefix' and 'cookie_httponly', which are ignored here. 
| */ $config['sess_driver'] = 'database'; $config['sess_cookie_name'] = 'ci_session'; $config['sess_expiration'] = 7200; $config['sess_save_path'] = 'ci_sessions'; $config['sess_match_ip'] = FALSE; $config['sess_time_to_update'] = 300; $config['sess_regenerate_destroy'] = FALSE; /* |-------------------------------------------------------------------------- | Cookie Related Variables |-------------------------------------------------------------------------- | | 'cookie_prefix' = Set a cookie name prefix if you need to avoid collisions | 'cookie_domain' = Set to .your-domain.com for site-wide cookies | 'cookie_path' = Typically will be a forward slash | 'cookie_secure' = Cookie will only be set if a secure HTTPS connection exists. | 'cookie_httponly' = Cookie will only be accessible via HTTP(S) (no javascript) | | Note: These settings (with the exception of 'cookie_prefix' and | 'cookie_httponly') will also affect sessions. | */ $config['cookie_prefix'] = ''; $config['cookie_domain'] = ''; $config['cookie_path'] = '/'; $config['cookie_secure'] = FALSE; $config['cookie_httponly'] = FALSE; /* |-------------------------------------------------------------------------- | Standardize newlines |-------------------------------------------------------------------------- | | Determines whether to standardize newline characters in input data, | meaning to replace \r\n, \r, \n occurrences with the PHP_EOL value. | | WARNING: This feature is DEPRECATED and currently available only | for backwards compatibility purposes! | */ $config['standardize_newlines'] = FALSE; /* |-------------------------------------------------------------------------- | Global XSS Filtering |-------------------------------------------------------------------------- | | Determines whether the XSS filter is always active when GET, POST or | COOKIE data is encountered | | WARNING: This feature is DEPRECATED and currently available only | for backwards compatibility purposes! 
| */ $config['global_xss_filtering'] = FALSE; /* |-------------------------------------------------------------------------- | Cross Site Request Forgery |-------------------------------------------------------------------------- | Enables a CSRF cookie token to be set. When set to TRUE, token will be | checked on a submitted form. If you are accepting user data, it is strongly | recommended CSRF protection be enabled. | | 'csrf_token_name' = The token name | 'csrf_cookie_name' = The cookie name | 'csrf_expire' = The number in seconds the token should expire. | 'csrf_regenerate' = Regenerate token on every submission | 'csrf_exclude_uris' = Array of URIs which ignore CSRF checks */ $config['csrf_protection'] = FALSE; $config['csrf_token_name'] = 'csrf_test_name'; $config['csrf_cookie_name'] = 'csrf_cookie_name'; $config['csrf_expire'] = 7200; $config['csrf_regenerate'] = TRUE; $config['csrf_exclude_uris'] = array(); /* |-------------------------------------------------------------------------- | Output Compression |-------------------------------------------------------------------------- | | Enables Gzip output compression for faster page loads. When enabled, | the output class will test whether your server supports Gzip. | Even if it does, however, not all browsers support compression | so enable only if you are reasonably sure your visitors can handle it. | | Only used if zlib.output_compression is turned off in your php.ini. | Please do not use it together with httpd-level output compression. | | VERY IMPORTANT: If you are getting a blank page when compression is enabled it | means you are prematurely outputting something to your browser. It could | even be a line of whitespace at the end of one of your scripts. For | compression to work, nothing can be sent before the output buffer is called | by the output class. Do not 'echo' any values with compression enabled. 
| */ $config['compress_output'] = FALSE; /* |-------------------------------------------------------------------------- | Master Time Reference |-------------------------------------------------------------------------- | | Options are 'local' or any PHP supported timezone. This preference tells | the system whether to use your server's local time as the master 'now' | reference, or convert it to the configured one timezone. See the 'date | helper' page of the user guide for information regarding date handling. | */ $config['time_reference'] = 'local'; /* |-------------------------------------------------------------------------- | Rewrite PHP Short Tags |-------------------------------------------------------------------------- | | If your PHP installation does not have short tag support enabled CI | can rewrite the tags on-the-fly, enabling you to utilize that syntax | in your view files. Options are TRUE or FALSE (boolean) | | Note: You need to have eval() enabled for this to work. | */ $config['rewrite_short_tags'] = FALSE; /* |-------------------------------------------------------------------------- | Reverse Proxy IPs |-------------------------------------------------------------------------- | | If your server is behind a reverse proxy, you must whitelist the proxy | IP addresses from which CodeIgniter should trust headers such as | HTTP_X_FORWARDED_FOR and HTTP_CLIENT_IP in order to properly identify | the visitor's IP address. | | You can use both an array or a comma-separated list of proxy addresses, | as well as specifying whole subnets. Here are a few examples: | | Comma-separated: '10.0.1.200,192.168.5.0/24' | Array: array('10.0.1.200', '192.168.5.0/24') */ $config['proxy_ips'] = '';
{ "content_hash": "748ff7c4bf7e8a6da6745800f09b5170", "timestamp": "", "source": "github", "line_count": 523, "max_line_length": 83, "avg_line_length": 35.31548757170172, "alnum_prop": 0.542609637249594, "repo_name": "RedNova007/CodeIgniter-Vakantiehuis", "id": "89f1c605e59aa6ad95b9e46bd4fca6d6596f5525", "size": "18470", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "config.php", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "131970" }, { "name": "HTML", "bytes": "8515642" }, { "name": "JavaScript", "bytes": "56182" }, { "name": "PHP", "bytes": "1849288" } ], "symlink_target": "" }
import { handleActions } from 'redux-actions' import { ITEM_DELETE, ITEM_COMMIT, ITEM_BEGIN_LOADING, ITEM_END_LOADING, ITEM_BEGIN_SAVING, ITEM_END_SAVING, ITEM_META_SET, ITEM_META_TOGGLE, ITEM_META_DELETE } from '../../constants/item' import { ITEM_ATTRIBUTES_SET_OBJECT, ITEM_ATTRIBUTE_SET, ITEM_ATTRIBUTE_RESET, ITEM_ATTRIBUTE_TOGGLE, ITEM_ATTRIBUTE_DELETE } from '../../constants/attributes' import { selectData, selectKey, selectValue } from '../../selectors/action' import get from 'lodash.get' import changedAttributesReducer from './changedAttributesReducer' import deletedAttributesReducer from './deletedAttributesReducer' const itemMetaReducer = handleActions({ [ITEM_DELETE]: (state, action) => { return { ...state, isDeleted: true } }, [ITEM_COMMIT]: (state, action) => { const newState = { ...state, isSaved: false } delete newState.changedAttributes delete newState.deletedAttributes return newState }, [ITEM_ATTRIBUTES_SET_OBJECT]: (state, action) => { const data = selectData(action) const { changedAttributes } = state return { ...state, changedAttributes: { ...changedAttributes, ...data }, isSaved: false } }, [ITEM_ATTRIBUTE_SET]: (state, action) => { const { changedAttributes } = state return { ...state, changedAttributes: changedAttributesReducer(changedAttributes, action), isSaved: false } }, [ITEM_ATTRIBUTE_RESET]: (state, action) => { const changedAttributes = get(state, 'changedAttributes') const deletedAttributes = get(state, 'deletedAttributes') const newChangedAttributes = changedAttributesReducer(changedAttributes, action) const newDeletedAttributes = deletedAttributesReducer(deletedAttributes, action) const hasChangedAttributes = !!Object.keys(newChangedAttributes).length const hasDeletedAttributes = newDeletedAttributes.length const newState = { ...state } if (hasChangedAttributes) { newState.changedAttributes = newChangedAttributes } else { delete newState.changedAttributes } if (hasDeletedAttributes) { newState.deletedAttributes = 
newDeletedAttributes } else { delete newState.deletedAttributes } newState.isSaved = !hasChangedAttributes && !hasDeletedAttributes return newState }, [ITEM_ATTRIBUTE_TOGGLE]: (state, action) => { const changedAttributes = get(state, 'changedAttributes') const deletedAttributes = get(state, 'deletedAttributes') const newState = { ...state } const newChangedAttributes = changedAttributesReducer(changedAttributes, action) const hasChangedAttributes = !!Object.keys(newChangedAttributes).length if (hasChangedAttributes) { newState.changedAttributes = newChangedAttributes newState.isSaved = false } else if (!hasChangedAttributes && changedAttributes) { delete newState.changedAttributes newState.isSaved = !deletedAttributes } return newState }, [ITEM_ATTRIBUTE_DELETE]: (state, action) => { const changedAttributes = get(state, 'changedAttributes') const deletedAttributes = get(state, 'deletedAttributes') const newState = { ...state, deletedAttributes: deletedAttributesReducer(deletedAttributes, action), isSaved: false } if (changedAttributes) { const newChangedAttributes = changedAttributesReducer(changedAttributes, action) const hasChangedAttributes = !!Object.keys(newChangedAttributes).length if (hasChangedAttributes) { newState.changedAttributes = newChangedAttributes } else if (!hasChangedAttributes && changedAttributes) { delete newState.changedAttributes } } return newState }, [ITEM_META_SET]: (state, action) => { const key = selectKey(action) const value = selectValue(action) if (!key) { return state } return { ...state, [key]: value } }, [ITEM_META_TOGGLE]: (state, action) => { const key = selectKey(action) if (!key) { return state } return { ...state, [key]: !state[key] } }, [ITEM_META_DELETE]: (state, action) => { const key = selectKey(action) if (!key) { return state } const newState = { ...state } delete newState[key] return newState }, [ITEM_BEGIN_LOADING]: (state, action) => { return { ...state, isSaving: true } }, [ITEM_END_LOADING]: (state, action) => { 
return { ...state, isSaving: false } }, [ITEM_BEGIN_SAVING]: (state, action) => { return { ...state, isSaving: true } }, [ITEM_END_SAVING]: (state, action) => { const newState = { ...state, isSaving: false, isSaved: true } if (newState.hasOwnProperty('isNew')) { delete newState.isNew } return newState } }, {}) export default itemMetaReducer
{ "content_hash": "447a04496352dd6b7889a0518a8146b3", "timestamp": "", "source": "github", "line_count": 152, "max_line_length": 86, "avg_line_length": 31.769736842105264, "alnum_prop": 0.6775729964796025, "repo_name": "heygrady/redux-data-collections", "id": "ec265a0c818a0d7fb872dc3b335558e9438532e2", "size": "4829", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/reducers/item/metaReducer.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "160" }, { "name": "JavaScript", "bytes": "118566" } ], "symlink_target": "" }
using Smart.Core.Configuration; using Smart.Core.Dependency; using Smart.Core.Infrastructure; using System.Runtime.CompilerServices; namespace Smart.Core { /// <summary> /// 上下文访问对象(IOC容器,日志工具等) /// </summary> public class SmartContext { #region 属性 /// <summary> /// 默认的缓存键 /// </summary> public const string DEFAULT_CACHE_KEY = "smart.httpCache"; /// <summary> /// 获取配置信息 /// </summary> public static SmartConfig Config { get { return Current.Resolve<SmartConfig>(); } } /// <summary> /// 获取当前容器实例 /// </summary> public static IContainerManager Current { get { if (Singleton<IContainerManager>.Instance == null) { Initialize(null, false); } return Singleton<IContainerManager>.Instance; } } #endregion /// <summary> /// 初始化一个静态实例工厂。 /// </summary> /// <param name="config">配置信息</param> /// <param name="forceRecreate">创建一个新工厂实例,即使工厂已被初始化。</param> /// <returns></returns> [MethodImpl(MethodImplOptions.Synchronized)] public static IContainerManager Initialize(SmartConfig config = null, bool forceRecreate = false) { if (Singleton<IContainerManager>.Instance == null || forceRecreate) { var container = new ContainerManager(); container.Initialize(config); Singleton<IContainerManager>.Instance = container; } return Singleton<IContainerManager>.Instance; } /// <summary> /// 替换容器,实现自定义容器 /// </summary> /// <param name="containerManager">容器</param> /// <remarks></remarks> public static void Replace(IContainerManager containerManager) { Singleton<IContainerManager>.Instance = containerManager; } } }
{ "content_hash": "b1075b33c09dda87e1788971781d8e14", "timestamp": "", "source": "github", "line_count": 72, "max_line_length": 105, "avg_line_length": 28.63888888888889, "alnum_prop": 0.537827352085354, "repo_name": "SmallAnts/Smart", "id": "3ef27cee0a06264107a00cae6fb2464e669aab8f", "size": "2242", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Src/Framework/Smart.Core.Shared/SmartContext.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "920736" }, { "name": "HTML", "bytes": "1787" } ], "symlink_target": "" }
<?php namespace Matisse\Properties\Base; use Matisse\Exceptions\ComponentException; use Matisse\Properties\TypeSystem\type; /** * Properties of a Metadata component. */ class MetadataProperties extends AbstractProperties { /** * Dynamic set of attributes, as specified on the source markup. * * @var array */ protected $props = []; function __get ($name) { return $this->get ($name); } function __set ($name, $value) { $this->set ($name, $value); } function __isset ($name) { return $this->defines ($name) && !is_null ($this->get ($name)); } function __unset ($name) { unset ($this->props[$name]); } function defines ($name, $asSubtag = false) { return true; } function get ($name, $default = null) { if (property_exists ($this, $name)) return $this->$name; if (array_key_exists ($name, $this->props)) return $this->props [$name]; return $default; } function getAll () { return array_merge (object_publicProps ($this), $this->props); } function getDefaultValue ($name) { if (property_exists ($this, $name)) { $c = new \ReflectionClass($this); $props = $c->getDefaultProperties (); return isset($props[$name]) ? $props[$name] : null; } return null; } /** * Gets a map of the dynamic (non-predefined) properties of the component. * <p>Properties declared on the class are excluded. * * @return array A map of property names to property values. */ function getDynamic () { return $this->props; } function getEnumOf ($name) { return []; } function getPropertyNames () { return array_merge (object_propNames ($this), array_keys ($this->props)); } function getRelatedTypeOf ($name) { return type::content; } function getTypeOf ($name) { return null; } function isEnum ($name) { return false; } function isRequired ($propName) { return false; } function isScalar ($name) { return isset($this->$name) ? is_scalar ($this->$name) : true; } function set ($name, $value) { // This is relevant only to subclasses. 
if (!$this->defines ($name)) throw new ComponentException(null, "Undefined parameter <kbd>$name</kbd>."); $value = $this->typecastPropertyValue ($name, $value); if (property_exists ($this, $name)) $this->$name = $value; else $this->props[$name] = $value; if ($this->isModified ($name)) $this->onPropertyChange ($name); } }
{ "content_hash": "8867ab770bae852eac5ca4b0cabae8a0", "timestamp": "", "source": "github", "line_count": 131, "max_line_length": 82, "avg_line_length": 19.396946564885496, "alnum_prop": 0.6025186934277843, "repo_name": "electro-modules/matisse", "id": "156968d1f052b88daf4ec3dac5936373878196bb", "size": "2541", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Properties/Base/MetadataProperties.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "263602" } ], "symlink_target": "" }
"""List package items.""" # :license: MIT, see LICENSE for more details. import click from SoftLayer.CLI import environment from SoftLayer.CLI import formatting from SoftLayer.managers import ordering from SoftLayer.utils import lookup COLUMNS = ['category', 'keyName', 'description'] @click.command() @click.argument('package_keyname') @click.option('--keyword', help="A word (or string) used to filter item names.") @click.option('--category', help="Category code to filter items by") @environment.pass_env def cli(env, package_keyname, keyword, category): """List package items used for ordering. The items listed can be used with `slcli order place` to specify the items that are being ordered in the package. Package keynames can be retrieved using `slcli order package-list` \b Note: Items with a numbered category, like disk0 or gpu0, can be included multiple times in an order to match how many of the item you want to order. \b Example: # List all items in the VSI package slcli order item-list CLOUD_SERVER The --keyword option is used to filter items by name. The --category option is used to filter items by category. Both --keyword and --category can be used together. 
\b Example: # List Ubuntu OSes from the os category of the Bare Metal package slcli order item-list BARE_METAL_SERVER --category os --keyword ubuntu """ table = formatting.Table(COLUMNS) manager = ordering.OrderingManager(env.client) _filter = {'items': {}} if keyword: _filter['items']['description'] = {'operation': '*= %s' % keyword} if category: _filter['items']['categories'] = {'categoryCode': {'operation': '_= %s' % category}} items = manager.list_items(package_keyname, filter=_filter) sorted_items = sort_items(items) categories = sorted_items.keys() for catname in sorted(categories): for item in sorted_items[catname]: table.add_row([catname, item['keyName'], item['description']]) env.fout(table) def sort_items(items): """sorts the items into a dictionary of categories, with a list of items""" sorted_items = {} for item in items: category = lookup(item, 'itemCategory', 'categoryCode') if sorted_items.get(category) is None: sorted_items[category] = [] sorted_items[category].append(item) return sorted_items
{ "content_hash": "c8382306977fb8ab17969c5182868588", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 92, "avg_line_length": 31.935064935064936, "alnum_prop": 0.6726311508743391, "repo_name": "nanjj/softlayer-python", "id": "74f8fd4e74a828f084a48bdec383e06cb76a7d53", "size": "2459", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "SoftLayer/CLI/order/item_list.py", "mode": "33188", "license": "mit", "language": [ { "name": "DIGITAL Command Language", "bytes": "854" }, { "name": "Python", "bytes": "1584710" } ], "symlink_target": "" }
'use strict'; angular.module('app').controller('TestController', TestController); TestController.$inject = []; function TestController() { var vm = this; vm.getText = getText; function getText() { return 'some text'; } }
{ "content_hash": "8f5525d56c1ff13d6283da97a917e3be", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 67, "avg_line_length": 16.6, "alnum_prop": 0.6385542168674698, "repo_name": "TheOneYouDontLike/AngularSandbox", "id": "baaa8417e8d74886a84f9d9f8e19e4957d5fbb04", "size": "249", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "testController.js", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "2393" } ], "symlink_target": "" }
package dcraft.io; import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; public class ByteBufferInputStream extends InputStream { protected ByteBuffer buf = null; /** * be aware this resets the position to 0 * * @param buf source for the stream */ public ByteBufferInputStream(ByteBuffer buf) { buf.position(0); this.buf = buf; } public synchronized int read() throws IOException { if (!buf.hasRemaining()) return -1; return (int)(buf.get() & 0xFF); } public synchronized int read(byte[] bytes, int off, int len) throws IOException { if (!buf.hasRemaining()) return -1; len = Math.min(len, buf.remaining()); buf.get(bytes, off, len); return len; } }
{ "content_hash": "7f371db97c7222623f5d74ea4363dd34", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 82, "avg_line_length": 21.36111111111111, "alnum_prop": 0.6527958387516255, "repo_name": "Gadreel/dcraft", "id": "f6f7b1120eccfd5f719177758f6e0c8d77a9132e", "size": "1184", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "dcraft.core/src/main/java/dcraft/io/ByteBufferInputStream.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "658969" }, { "name": "Groovy", "bytes": "2020" }, { "name": "HTML", "bytes": "409561" }, { "name": "Java", "bytes": "4571019" }, { "name": "JavaScript", "bytes": "2084581" } ], "symlink_target": "" }
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.compute.model; /** * A path-matching rule for a URL. If matched, will use the specified BackendService to handle the * traffic arriving at this URL. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class PathRule extends com.google.api.client.json.GenericJson { /** * The list of path patterns to match. Each must start with / and the only place a * is allowed is * at the end following a /. The string fed to the path matcher does not include any text after * the first ? or #, and those chars are not allowed here. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<java.lang.String> paths; /** * In response to a matching path, the load balancer performs advanced routing actions, such as * URL rewrites and header transformations, before forwarding the request to the selected backend. 
* If routeAction specifies any weightedBackendServices, service must not be set. Conversely if * service is set, routeAction cannot contain any weightedBackendServices. Only one of routeAction * or urlRedirect must be set. URL maps for external HTTP(S) load balancers support only the * urlRewrite action within a path rule's routeAction. * The value may be {@code null}. */ @com.google.api.client.util.Key private HttpRouteAction routeAction; /** * The full or partial URL of the backend service resource to which traffic is directed if this * rule is matched. If routeAction is also specified, advanced routing actions, such as URL * rewrites, take effect before sending the request to the backend. However, if service is * specified, routeAction cannot contain any weightedBackendServices. Conversely, if routeAction * specifies any weightedBackendServices, service must not be specified. Only one of urlRedirect, * service or routeAction.weightedBackendService must be set. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String service; /** * When a path pattern is matched, the request is redirected to a URL specified by urlRedirect. If * urlRedirect is specified, service or routeAction must not be set. Not supported when the URL * map is bound to a target gRPC proxy. * The value may be {@code null}. */ @com.google.api.client.util.Key private HttpRedirectAction urlRedirect; /** * The list of path patterns to match. Each must start with / and the only place a * is allowed is * at the end following a /. The string fed to the path matcher does not include any text after * the first ? or #, and those chars are not allowed here. * @return value or {@code null} for none */ public java.util.List<java.lang.String> getPaths() { return paths; } /** * The list of path patterns to match. Each must start with / and the only place a * is allowed is * at the end following a /. 
The string fed to the path matcher does not include any text after * the first ? or #, and those chars are not allowed here. * @param paths paths or {@code null} for none */ public PathRule setPaths(java.util.List<java.lang.String> paths) { this.paths = paths; return this; } /** * In response to a matching path, the load balancer performs advanced routing actions, such as * URL rewrites and header transformations, before forwarding the request to the selected backend. * If routeAction specifies any weightedBackendServices, service must not be set. Conversely if * service is set, routeAction cannot contain any weightedBackendServices. Only one of routeAction * or urlRedirect must be set. URL maps for external HTTP(S) load balancers support only the * urlRewrite action within a path rule's routeAction. * @return value or {@code null} for none */ public HttpRouteAction getRouteAction() { return routeAction; } /** * In response to a matching path, the load balancer performs advanced routing actions, such as * URL rewrites and header transformations, before forwarding the request to the selected backend. * If routeAction specifies any weightedBackendServices, service must not be set. Conversely if * service is set, routeAction cannot contain any weightedBackendServices. Only one of routeAction * or urlRedirect must be set. URL maps for external HTTP(S) load balancers support only the * urlRewrite action within a path rule's routeAction. * @param routeAction routeAction or {@code null} for none */ public PathRule setRouteAction(HttpRouteAction routeAction) { this.routeAction = routeAction; return this; } /** * The full or partial URL of the backend service resource to which traffic is directed if this * rule is matched. If routeAction is also specified, advanced routing actions, such as URL * rewrites, take effect before sending the request to the backend. However, if service is * specified, routeAction cannot contain any weightedBackendServices. 
Conversely, if routeAction * specifies any weightedBackendServices, service must not be specified. Only one of urlRedirect, * service or routeAction.weightedBackendService must be set. * @return value or {@code null} for none */ public java.lang.String getService() { return service; } /** * The full or partial URL of the backend service resource to which traffic is directed if this * rule is matched. If routeAction is also specified, advanced routing actions, such as URL * rewrites, take effect before sending the request to the backend. However, if service is * specified, routeAction cannot contain any weightedBackendServices. Conversely, if routeAction * specifies any weightedBackendServices, service must not be specified. Only one of urlRedirect, * service or routeAction.weightedBackendService must be set. * @param service service or {@code null} for none */ public PathRule setService(java.lang.String service) { this.service = service; return this; } /** * When a path pattern is matched, the request is redirected to a URL specified by urlRedirect. If * urlRedirect is specified, service or routeAction must not be set. Not supported when the URL * map is bound to a target gRPC proxy. * @return value or {@code null} for none */ public HttpRedirectAction getUrlRedirect() { return urlRedirect; } /** * When a path pattern is matched, the request is redirected to a URL specified by urlRedirect. If * urlRedirect is specified, service or routeAction must not be set. Not supported when the URL * map is bound to a target gRPC proxy. * @param urlRedirect urlRedirect or {@code null} for none */ public PathRule setUrlRedirect(HttpRedirectAction urlRedirect) { this.urlRedirect = urlRedirect; return this; } @Override public PathRule set(String fieldName, Object value) { return (PathRule) super.set(fieldName, value); } @Override public PathRule clone() { return (PathRule) super.clone(); } }
{ "content_hash": "d58e56ee73bbee0ae290a030d71ba437", "timestamp": "", "source": "github", "line_count": 181, "max_line_length": 182, "avg_line_length": 45.07734806629834, "alnum_prop": 0.7351391101850717, "repo_name": "googleapis/google-api-java-client-services", "id": "d44df3890ee540652598252fdf4f10fa1e461532", "size": "8159", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "clients/google-api-services-compute/beta/1.31.0/com/google/api/services/compute/model/PathRule.java", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<!DOCTYPE html> <html> <head> <meta name="viewport" content="width=device-width" /> <meta charset="utf-8"> <base href="https://wordbit.com"> <title> Welcome to Wordpspace </title> <link rel="canonical" href="https://wordbit.com/posts/welcome-to-wordpspace/"> <link rel="stylesheet" href="/css/poole.css"> <link rel="stylesheet" href="/css/lanyon.css"> <link rel="stylesheet" href="/css/custom.css"> </head> <body class="theme-base-08"> <input type="checkbox" class="sidebar-checkbox" id="sidebar-checkbox" xmlns="http://www.w3.org/1999/html" xmlns="http://www.w3.org/1999/html"> <div class="sidebar" id="sidebar"> <nav class="sidebar-nav"> <a class="sidebar-nav-item" href="/">Home</a> <a class="sidebar-nav-item" href="/posts">All Blog Posts</a> <a class="sidebar-nav-item" href="/pages/status">Status Updates</a> <a class="sidebar-nav-item" href="/pages/about">About</a> </nav> <div class="sidebar-item"> <p> &copy; 2021. All rights reserved. </p> </div> </div> <div class="wrap"> <div class="masthead"> <div class="container"> <h3 class="masthead-title"> <a href="/" title="Home">wordbit</a> </h3> </div> </div> <div class="container content"> <h1 class="post-title">Welcome to Wordpspace</h1> <section id="main"> <h5 id="date"> Fri Oct 27, 2006 </h5> <p>Welcome to wordspace. A place for my words to breathe. I’ve spent a fair bit of time building this website using PHP, MYSQL and the Wordpress engine. The template was designed by a webmaster called Nik. I got the ideas from Derek Miller - writer, editor and web guy. This is an organic project, and it’s just for fun really, but I plan on gradually making improvements to it as time permits. The neat features available at the moment are customizable wallpapers - you can choose what wallpaper you want to see by clicking on the squares above. 
Also, you can subscribe to my RRS feed, if you don’t know what that is, <a href="http://www.mezzoblue.com/subscribe/about/">click here</a>.</p> <p>You can search for blog entries by category in the Wordpress engine too, which is way cooler than blogger. I plan on uploading some of the writing I’m doing in the Print Futures Professional Writing program, and maybe getting some of my fellow wordsmiths up there too. With their permission of course.</p> </section> </div> </div> <label for="sidebar-checkbox" class="sidebar-toggle"></label> <div class="container"> <hr /> <span class="left"> &nbsp; <em>&laquo; Previous:</em> <a class="next" href="https://wordbit.com/posts/halloween/">Halloween</a> </span> <span class="right"> <em>Next: </em><a class="next" href="https://wordbit.com/posts/wordspace/"> &nbsp; wordspace</a> &raquo; </span> <br> </div> <br /> <div class="container content"> <footer> <div> </div> </footer> </div> </body> </html>
{ "content_hash": "5225103a0770367fc0094c6dacbd7c21", "timestamp": "", "source": "github", "line_count": 114, "max_line_length": 700, "avg_line_length": 26.42105263157895, "alnum_prop": 0.649070385126162, "repo_name": "wordbit/wordbit", "id": "5d99956ee568e415f901b5947edbfae2c7068f6b", "size": "3025", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "public/posts/welcome-to-wordpspace/index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "45272" }, { "name": "HTML", "bytes": "2729347" }, { "name": "JavaScript", "bytes": "19908" }, { "name": "Shell", "bytes": "410" }, { "name": "Vim Snippet", "bytes": "55" } ], "symlink_target": "" }
<!DOCTYPE html> <html> <head> <title>Web Builder</title> <link rel="stylesheet" href="css/style.css" /> <link rel="stylesheet" href="css/dragula.min.css" /> <script src="js/jquery.js"></script> <script src="js/dragula.min.js"></script> </head> <body> </body> <script src="js/fileHolder.js"></script> <script src="js/main.js"></script> </html>
{ "content_hash": "d5a53a369a902efa28f2fcf1af6f365f", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 60, "avg_line_length": 27, "alnum_prop": 0.5679012345679012, "repo_name": "celie56/webBuilder", "id": "60e8d282026855842c74d445b0d36c3c0869259e", "size": "405", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "resources/app/index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1978" }, { "name": "HTML", "bytes": "405" }, { "name": "JavaScript", "bytes": "17121" }, { "name": "TypeScript", "bytes": "1674" } ], "symlink_target": "" }
package httpclient import ( "bytes" "context" "crypto/rand" "crypto/tls" "encoding/base64" "encoding/json" "fmt" "io" "io/ioutil" "net/http" "net/http/httputil" "net/url" "os" "strings" "time" "github.com/rightscale/rsc/log" "github.com/rightscale/rsc/recording" ) const ( // NoDump is the default value for DumpFormat. NoDump Format = 1 << iota // Debug formats the dumps in human readable format, the use of this flag is exclusive with // JSON. Debug // JSON formats the dumps in JSON, the use of this flag is exclusive with Debug. JSON // Verbose enables the dumps for all requests and auth headers. Verbose // Record causes the dumps to be written to the recorder file descriptor (used by tests). Record ) const ( noRedirectError = "no redirect" requestIdHeader = "X-Request-Id" ) var ( // defaults defaultHiddenHeaders = map[string]bool{"Authorization": true, "Cookie": true} defaultResponseHeaderTimeout = 300 * time.Second // DumpFormat dictates how HTTP requests and responses are logged: NoDump prevents logging // altogether, Debug generates logs in human readable format and JSON in JSON format. // Verbose causes all headers to be logged - including sensitive ones. DumpFormat Format // Insecure dictates whether HTTP (true) or HTTPS (false) should be used to connect to the // API endpoints. Insecure bool // NoCertCheck dictates whether the SSL handshakes should bypass X509 certificate // validation (true) or not (false). NoCertCheck bool // ResponseHeaderTimeout if non-zero, specifies the amount of // time to wait in seconds for a server's response headers after fully // writing the request (including its body, if any). This // time does not include the time to read the response body. ResponseHeaderTimeout = defaultResponseHeaderTimeout // HiddenHeaders lists headers that should not be logged unless DumpFormat is Verbose. 
HiddenHeaders map[string]bool ) // For tests var ( OsStderr io.Writer = os.Stderr ) type ( // HTTPClient makes it easier to stub HTTP clients for testing. HTTPClient interface { // Do makes a regular http request and returns the response/error. Do(req *http.Request) (*http.Response, error) // DoWithContext performs a request and is context-aware. DoWithContext(ctx context.Context, req *http.Request) (*http.Response, error) // DoHidden prevents logging, useful for requests made during authorization. DoHidden(req *http.Request) (*http.Response, error) // DoHiddenWithContext prevents logging and performs a context-aware request. DoHiddenWithContext(ctx context.Context, req *http.Request) (*http.Response, error) } // Format is the request/response dump format. Format int // ParamBlock is used to create a new client without using the package variables, // which are not go-routine safe. ParamBlock struct { // DumpFormat dictates how HTTP requests and responses are logged: NoDump prevents logging // altogether, Debug generates logs in human readable format and JSON in JSON format. // Verbose causes all headers to be logged - including sensitive ones. DumpFormat Format // HiddenHeaders lists headers that should not be logged unless DumpFormat is Verbose. HiddenHeaders map[string]bool // Insecure dictates whether HTTP (true) or HTTPS (false) should be used to connect to the // API endpoints. Insecure bool // NoCertCheck dictates whether the SSL handshakes should bypass X509 certificate // validation (true) or not (false). NoCertCheck bool // NoRedirect as true to not follow redirects. false follows redirects. NoRedirect bool // ResponseHeaderTimeout if non-zero, specifies the amount of // time to wait in seconds for a server's response headers after fully // writing the request (including its body, if any). This // time does not include the time to read the response body. ResponseHeaderTimeout time.Duration } // HTTP client that optionally dumps requests and responses. 
// This client also disables the default http client redirect handling. dumpClient struct { Client *http.Client isInsecure func() bool dumpFormat func() Format hiddenHeaders func() map[string]bool } ) // Default DumpFormat to NoDump func init() { DumpFormat = NoDump // copy default hidden headers to avoid modifying original. HiddenHeaders = copyHiddenHeaders(defaultHiddenHeaders) } // New returns an HTTP client using the settings specified by this package variables. func New() HTTPClient { return newVariableDumpClient(newRawClient(false, NoCertCheck, ResponseHeaderTimeout)) } // NewNoRedirect returns an HTTP client that does not follow redirects. func NewNoRedirect() HTTPClient { return newVariableDumpClient(newRawClient(true, NoCertCheck, ResponseHeaderTimeout)) } // NewPB returns an HTTP client using only the parameter block and ignoring // the current values of the package variables, which are not go-routine safe. func NewPB(pb *ParamBlock) HTTPClient { responseHeaderTimeout := pb.ResponseHeaderTimeout if responseHeaderTimeout == 0 { responseHeaderTimeout = defaultResponseHeaderTimeout } dumpFormat := pb.DumpFormat if dumpFormat == 0 { dumpFormat = NoDump } hiddenHeaders := pb.HiddenHeaders if hiddenHeaders == nil { hiddenHeaders = defaultHiddenHeaders // immutable } else { hiddenHeaders = copyHiddenHeaders(hiddenHeaders) // copy to avoid side-effects } dc := &dumpClient{Client: newRawClient(pb.NoRedirect, pb.NoCertCheck, responseHeaderTimeout)} dc.isInsecure = func() bool { return pb.Insecure } dc.dumpFormat = func() Format { return dumpFormat } dc.hiddenHeaders = func() map[string]bool { return hiddenHeaders } return dc } // newVariableDumpClient defines accessors for package variables, which are not // go-routine safe so can theoretically change value while the client is in use. // this emulates the legacy behavior. 
func newVariableDumpClient(c *http.Client) HTTPClient {
	dc := &dumpClient{Client: c}
	// The accessors read the package globals on every call, so later changes
	// to Insecure/DumpFormat/HiddenHeaders are picked up by an existing
	// client (and, as noted above, this is not go-routine safe).
	dc.isInsecure = func() bool { return Insecure }
	dc.dumpFormat = func() Format { return DumpFormat }
	dc.hiddenHeaders = func() map[string]bool { return HiddenHeaders }
	return dc
}

// ShortToken creates a 6 bytes unique string.
// Not meant to be cryptographically unique but good enough for logs.
func ShortToken() string {
	b := make([]byte, 6)
	// Best-effort: a read error is deliberately ignored and would simply
	// yield an all-zero token.
	io.ReadFull(rand.Reader, b)
	return base64.StdEncoding.EncodeToString(b)
}

// newRawClient creates an http package Client taking into account both the parameters and package
// variables.
func newRawClient(
	noRedirect, noCertCheck bool,
	responseHeaderTimeout time.Duration) *http.Client {
	tr := http.Transport{ResponseHeaderTimeout: responseHeaderTimeout,
		Proxy: http.ProxyFromEnvironment}
	tr.TLSClientConfig = &tls.Config{InsecureSkipVerify: noCertCheck}
	c := http.Client{Transport: &tr}
	if noRedirect {
		// Returning an error from CheckRedirect aborts the redirect; doImp
		// later recognizes noRedirectError in the url.Error and suppresses it
		// so callers see the redirect response instead of an error.
		c.CheckRedirect = func(*http.Request, []*http.Request) error {
			return fmt.Errorf(noRedirectError)
		}
	}
	return &c
}

// IsDebug is a convenience wrapper that returns true if the Debug bit is set on the flag.
func (f Format) IsDebug() bool {
	return f&Debug != 0
}

// IsJSON is a convenience wrapper that returns true if the JSON bit is set on the flag.
func (f Format) IsJSON() bool {
	return f&JSON != 0
}

// IsVerbose is a convenience wrapper that returns true if the Verbose bit is set on the flag.
func (f Format) IsVerbose() bool {
	return f&Verbose != 0
}

// IsRecord is a convenience wrapper that returns true if the Record bit is set on the flag.
func (f Format) IsRecord() bool {
	return f&Record != 0
}

// DoHidden is equivalent to Do with the exception that nothing gets logged unless DumpFormat is
// set to Verbose.
func (d *dumpClient) DoHidden(req *http.Request) (*http.Response, error) {
	return d.doImp(req, true, nil)
}

// Do dumps the request, makes the request and dumps the response as specified by DumpFormat.
func (d *dumpClient) Do(req *http.Request) (*http.Response, error) { return d.doImp(req, false, nil) } func (d *dumpClient) DoWithContext(ctx context.Context, req *http.Request) (*http.Response, error) { return d.doImp(req, true, ctx) } func (d *dumpClient) DoHiddenWithContext(ctx context.Context, req *http.Request) (*http.Response, error) { return d.doImp(req, false, ctx) } // doImp actually performs the HTTP request logging according to the various settings. func (d *dumpClient) doImp(req *http.Request, hidden bool, ctx context.Context) (*http.Response, error) { if req.URL.Scheme == "" { if d.isInsecure() { req.URL.Scheme = "http" } else { req.URL.Scheme = "https" } } req.Header.Set("User-Agent", UA) var reqBody []byte startedAt := time.Now() // prefer the X-Request-Id header as request token for logging, if present. id := req.Header.Get(requestIdHeader) if id == "" { id = ShortToken() } log.Info("started", "id", id, req.Method, req.URL.String()) df := d.dumpFormat() hide := (df == NoDump) || (hidden && !df.IsVerbose()) if !hide { startedAt = time.Now() reqBody = d.dumpRequest(req) } var resp *http.Response var err error if ctx == nil { resp, err = d.Client.Do(req) } else { resp, err = ctxhttpDo(ctx, d.getClientWithoutTimeout(), req) } if urlError, ok := err.(*url.Error); ok { if urlError.Err.Error() == noRedirectError { err = nil } } if err != nil { return nil, err } if !hide { d.dumpResponse(resp, req, reqBody) } log.Info("completed", "id", id, "status", resp.Status, "time", time.Since(startedAt).String()) return resp, nil } // getClientWithoutTimeout returns a modified client that doesn't have the ResponseHeaderTimeout field set // in its Transport. func (d *dumpClient) getClientWithoutTimeout() *http.Client { // Get a copy of the client and modify as multiple concurrent go routines can be using this client. 
client := *d.Client tr, ok := client.Transport.(*http.Transport) if ok { // note that the http.Transport struct has internal mutex fields that are // not safe to copy. we have to be selective in copying fields. trCopy := &http.Transport{ Proxy: tr.Proxy, DialContext: tr.DialContext, Dial: tr.Dial, DialTLS: tr.DialTLS, TLSClientConfig: tr.TLSClientConfig, TLSHandshakeTimeout: tr.TLSHandshakeTimeout, DisableKeepAlives: tr.DisableKeepAlives, DisableCompression: tr.DisableCompression, MaxIdleConns: tr.MaxIdleConns, MaxIdleConnsPerHost: tr.MaxIdleConnsPerHost, IdleConnTimeout: tr.IdleConnTimeout, ResponseHeaderTimeout: 0, // explicitly zeroed-out ExpectContinueTimeout: tr.ExpectContinueTimeout, TLSNextProto: tr.TLSNextProto, MaxResponseHeaderBytes: tr.MaxResponseHeaderBytes, } tr = trCopy } else { // note that the following code has a known issue in that it depends on the // current value of the NoCertCheck global. if that global changes after // creation of this client then the behavior is undefined. tr = &http.Transport{Proxy: http.ProxyFromEnvironment} tr.TLSClientConfig = &tls.Config{InsecureSkipVerify: NoCertCheck} } client.Transport = tr return &client } // Dump request if needed. // Return request serialized as JSON if DumpFormat is JSON, nil otherwise. func (d *dumpClient) dumpRequest(req *http.Request) []byte { df := d.dumpFormat() if df == NoDump { return nil } reqBody, err := dumpReqBody(req) if err != nil { log.Error("Failed to load request body for dump", "error", err.Error()) } if df.IsDebug() { var buffer bytes.Buffer buffer.WriteString(req.Method + " " + req.URL.String() + "\n") d.writeHeaders(&buffer, req.Header) if reqBody != nil { buffer.WriteString("\n") buffer.Write(reqBody) buffer.WriteString("\n") } fmt.Fprint(OsStderr, buffer.String()) } else if df.IsJSON() { return reqBody } return nil } // dumpResponse dumps the response and optionally the request (in case of JSON format) according to // DumpFormat. 
// It also checks whether the special recorder pipe is opened and if so writes the dump to it.
func (d *dumpClient) dumpResponse(resp *http.Response, req *http.Request, reqBody []byte) {
	df := d.dumpFormat()
	if df == NoDump {
		return
	}
	// dumpRespBody drains and restores resp.Body so the caller can still
	// read it; a dump failure is silently ignored (best-effort logging).
	respBody, _ := dumpRespBody(resp)
	if df.IsDebug() {
		// Human-readable dump to OsStderr (stubbed in tests).
		var buffer bytes.Buffer
		buffer.WriteString("==> " + resp.Proto + " " + resp.Status + "\n")
		d.writeHeaders(&buffer, resp.Header)
		if respBody != nil {
			buffer.WriteString("\n")
			buffer.Write(respBody)
			buffer.WriteString("\n")
		}
		fmt.Fprint(OsStderr, buffer.String())
	} else if df.IsJSON() {
		// JSON dump: both request and response, with hidden headers removed
		// unless the format is Verbose.
		reqHeaders := make(http.Header)
		hh := d.hiddenHeaders()
		filterHeaders(df, hh, req.Header, func(name string, value []string) {
			reqHeaders[name] = value
		})
		respHeaders := make(http.Header)
		filterHeaders(df, hh, resp.Header, func(name string, value []string) {
			respHeaders[name] = value
		})
		dumped := recording.RequestResponse{
			Verb:       req.Method,
			URI:        req.URL.String(),
			ReqHeader:  reqHeaders,
			ReqBody:    string(reqBody),
			Status:     resp.StatusCode,
			RespHeader: respHeaders,
			RespBody:   string(respBody),
		}
		b, err := json.MarshalIndent(dumped, "", " ")
		if err != nil {
			log.Error("Failed to dump request content", "error", err.Error())
			return
		}
		if df.IsRecord() {
			// The recorder tool opens file descriptor 10 as a side channel;
			// probe it with Stat before writing.
			f := os.NewFile(10, "fd10")
			_, err = f.Stat()
			if err == nil {
				// fd 10 is open, dump to it (used by recorder)
				fmt.Fprintf(f, "%s\n", string(b))
			}
		}
		fmt.Fprint(OsStderr, string(b))
	}
}

// writeHeaders is a helper function that writes the given HTTP headers to the given buffer as
// human readable strings. If DumpFormat is not Verbose then writeHeaders filters out headers whose
// names are keys of HiddenHeaders.
func (d *dumpClient) writeHeaders(buffer *bytes.Buffer, headers http.Header) { filterHeaders( d.dumpFormat(), d.hiddenHeaders(), headers, func(name string, value []string) { buffer.WriteString(name) buffer.WriteString(": ") buffer.WriteString(strings.Join(value, ", ")) buffer.WriteString("\n") }) } // Dump request body, strongly inspired from httputil.DumpRequest func dumpReqBody(req *http.Request) ([]byte, error) { if req.Body == nil { return nil, nil } var save io.ReadCloser var err error save, req.Body, err = drainBody(req.Body) if err != nil { return nil, err } var b bytes.Buffer var dest io.Writer = &b chunked := len(req.TransferEncoding) > 0 && req.TransferEncoding[0] == "chunked" if chunked { dest = httputil.NewChunkedWriter(dest) } _, err = io.Copy(dest, req.Body) if chunked { dest.(io.Closer).Close() io.WriteString(&b, "\r\n") } req.Body = save return b.Bytes(), err } // Dump response body, strongly inspired from httputil.DumpResponse func dumpRespBody(resp *http.Response) ([]byte, error) { if resp.Body == nil { return nil, nil } var b bytes.Buffer savecl := resp.ContentLength var save io.ReadCloser var err error save, resp.Body, err = drainBody(resp.Body) if err != nil { return nil, err } _, err = io.Copy(&b, resp.Body) if err != nil { return nil, err } resp.Body = save resp.ContentLength = savecl if err != nil { return nil, err } return b.Bytes(), nil } // One of the copies, say from b to r2, could be avoided by using a more // elaborate trick where the other copy is made during Request/Response.Write. // This would complicate things too much, given that these functions are for // debugging only. func drainBody(b io.ReadCloser) (r1, r2 io.ReadCloser, err error) { var buf bytes.Buffer if _, err = buf.ReadFrom(b); err != nil { return nil, nil, err } if err = b.Close(); err != nil { return nil, nil, err } return ioutil.NopCloser(&buf), ioutil.NopCloser(bytes.NewReader(buf.Bytes())), nil } // headerIterator is a HTTP header iterator. 
type headerIterator func(name string, value []string) // filterHeaders iterates through the headers skipping hidden headers unless DumpFormat is Verbose. // It calls the given iterator for each header name/value pair. The values are serialized as // strings. func filterHeaders( dumpFormat Format, hiddenHeaders map[string]bool, headers http.Header, iterator headerIterator) { for k, v := range headers { if !dumpFormat.IsVerbose() { if _, ok := hiddenHeaders[k]; ok { continue } } iterator(k, v) } } // copyHiddenHeaders copies the given map func copyHiddenHeaders(from map[string]bool) (to map[string]bool) { to = make(map[string]bool) for k, v := range from { to[k] = v } return } // Do sends an HTTP request with the provided http.Client and returns // an HTTP response. // // If the client is nil, http.DefaultClient is used. // // The provided ctx must be non-nil. If it is canceled or times out, // ctx.Err() will be returned. // // Borrowed originally from "https://github.com/golang/net/blob/master/context/ctxhttp/ctxhttp.go" func ctxhttpDo(ctx context.Context, client *http.Client, req *http.Request) (*http.Response, error) { if client == nil { client = http.DefaultClient } resp, err := client.Do(req.WithContext(ctx)) // If we got an error, and the context has been canceled, // the context's error is probably more useful. if err != nil { select { case <-ctx.Done(): err = ctx.Err() default: } } return resp, err }
{ "content_hash": "fde10c007def1e08c8e3b7257a733944", "timestamp": "", "source": "github", "line_count": 570, "max_line_length": 106, "avg_line_length": 30.391228070175437, "alnum_prop": 0.7126941061017145, "repo_name": "rgeyer/rsc", "id": "295251c6fba64f37384e5f94c0e5746bf6f75dfc", "size": "17323", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "httpclient/http.go", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "560" }, { "name": "Go", "bytes": "2599665" }, { "name": "HTML", "bytes": "6146" }, { "name": "JavaScript", "bytes": "52" }, { "name": "Makefile", "bytes": "9715" }, { "name": "Ruby", "bytes": "10296" }, { "name": "Shell", "bytes": "5739" } ], "symlink_target": "" }
<?php

namespace esperanto\NewsletterBundle\Form\Type;

use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\OptionsResolver\OptionsResolverInterface;

/**
 * Form type for creating/editing a Newsletter entity.
 */
class NewsletterType extends AbstractType
{
    /**
     * Builds the newsletter form: title, subject, WYSIWYG body and a
     * read-only "sent" checkbox.
     *
     * @param FormBuilderInterface $builder
     * @param array                $options
     */
    public function buildForm(FormBuilderInterface $builder, array $options)
    {
        $builder
            ->add('title', 'text')
            ->add('subject', 'text')
            ->add('text', 'wysiwyg')
            ->add('sent', 'checkbox', array(
                'read_only' => true
            ));
    }

    /**
     * Binds the form to the Newsletter entity class.
     *
     * @param OptionsResolverInterface $resolver
     */
    public function setDefaultOptions(OptionsResolverInterface $resolver)
    {
        $resolver->setDefaults(array(
            'data_class' => 'esperanto\NewsletterBundle\Entity\Newsletter'
        ));
    }

    /**
     * @return string the unique name of this form type
     */
    public function getName()
    {
        return 'esperanto_newsletter_newsletter';
    }
}
{ "content_hash": "4b2cdbe00e681c80c495311be576cad4", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 76, "avg_line_length": 24.194444444444443, "alnum_prop": 0.6590126291618829, "repo_name": "jennyhelbing/esperanto-cms", "id": "df6cf9a05a44ae998b5683d88e587fa44ac5f233", "size": "871", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/esperanto/NewsletterBundle/Form/Type/NewsletterType.php", "mode": "33188", "license": "mit", "language": [ { "name": "ApacheConf", "bytes": "1239" }, { "name": "CSS", "bytes": "41829" }, { "name": "HTML", "bytes": "49985" }, { "name": "JavaScript", "bytes": "472718" }, { "name": "PHP", "bytes": "445461" } ], "symlink_target": "" }
/** @file CompareDump.cpp * @brief Implementation of the 'assimp cmpdmp', which compares * two model dumps for equality. It plays an important role * in the regression test suite. */ #include "Main.h" const char* AICMD_MSG_CMPDUMP_HELP = "assimp cmpdump <actual> <expected>\n" "\tCompare two short dumps produced with \'assimp dump <..> -s\' for equality.\n" ; #include "../../code/assbin_chunks.h" //////////////////////////////////////////////////////////////////////////////////////////////////// #include "generic_inserter.hpp" #include <map> #include <deque> #include <stack> #include <sstream> #include <iostream> #include "../../include/assimp/ai_assert.h" // get << for aiString template <typename char_t, typename traits_t> void mysprint(std::basic_ostream<char_t, traits_t>& os, const aiString& vec) { os << "[length: \'" << std::dec << vec.length << "\' content: \'" << vec.data << "\']"; } template <typename char_t, typename traits_t> std::basic_ostream<char_t, traits_t>& operator<< (std::basic_ostream<char_t, traits_t>& os, const aiString& vec) { return generic_inserter(mysprint<char_t,traits_t>, os, vec); } class sliced_chunk_iterator; //////////////////////////////////////////////////////////////////////////////////////////////////// /// @class compare_fails_exception /// /// @brief Sentinel exception to return quickly from deeply nested control paths //////////////////////////////////////////////////////////////////////////////////////////////////// class compare_fails_exception : public virtual std::exception { public: enum {MAX_ERR_LEN = 4096}; /* public c'tors */ compare_fails_exception(const char* msg) { strncpy(mywhat,msg,MAX_ERR_LEN-1); strcat(mywhat,"\n"); } /* public member functions */ const char* what() const throw() { return mywhat; } private: char mywhat[MAX_ERR_LEN+1]; }; #define MY_FLT_EPSILON 1e-1f #define MY_DBL_EPSILON 1e-1 //////////////////////////////////////////////////////////////////////////////////////////////////// /// @class 
comparer_context /// /// @brief Record our way through the files to be compared and dump useful information if we fail. //////////////////////////////////////////////////////////////////////////////////////////////////// class comparer_context { friend class sliced_chunk_iterator; public: /* construct given two file handles to compare */ comparer_context(FILE* actual,FILE* expect) : actual(actual) , expect(expect) , cnt_chunks(0) { ai_assert(actual); ai_assert(expect); fseek(actual,0,SEEK_END); lengths.push(std::make_pair(static_cast<uint32_t>(ftell(actual)),0)); fseek(actual,0,SEEK_SET); history.push_back(HistoryEntry("---",PerChunkCounter())); } public: /* set new scope */ void push_elem(const char* msg) { const std::string s = msg; PerChunkCounter::const_iterator it = history.back().second.find(s); if(it != history.back().second.end()) { ++history.back().second[s]; } else history.back().second[s] = 0; history.push_back(HistoryEntry(s,PerChunkCounter())); debug_trace.push_back("PUSH " + s); } /* leave current scope */ void pop_elem() { ai_assert(history.size()); debug_trace.push_back("POP "+ history.back().first); history.pop_back(); } /* push current chunk length and start offset on top of stack */ void push_length(uint32_t nl, uint32_t start) { lengths.push(std::make_pair(nl,start)); ++cnt_chunks; } /* pop the chunk length stack */ void pop_length() { ai_assert(lengths.size()); lengths.pop(); } /* access the current chunk length */ uint32_t get_latest_chunk_length() { ai_assert(lengths.size()); return lengths.top().first; } /* access the current chunk start offset */ uint32_t get_latest_chunk_start() { ai_assert(lengths.size()); return lengths.top().second; } /* total number of chunk headers passed so far*/ uint32_t get_num_chunks() { return cnt_chunks; } /* get ACTUAL file desc. != NULL */ FILE* get_actual() const { return actual; } /* get EXPECT file desc. 
!= NULL */ FILE* get_expect() const { return expect; } /* compare next T from both streams, name occurs in error messages */ template<typename T> T cmp(const std::string& name) { T a,e; read(a,e); if(a != e) { std::stringstream ss; failure((ss<< "Expected " << e << ", but actual is " << a, ss.str()),name); } // std::cout << name << " " << std::hex << a << std::endl; return a; } /* compare next num T's from both streams, name occurs in error messages */ template<typename T> void cmp(size_t num,const std::string& name) { for(size_t n = 0; n < num; ++n) { std::stringstream ss; cmp<T>((ss<<name<<"["<<n<<"]",ss.str())); // std::cout << name << " " << std::hex << a << std::endl; } } /* Bounds of an aiVector3D array (separate function * because partial specializations of member functions are illegal--)*/ template<typename T> void cmp_bounds(const std::string& name) { cmp<T> (name+".<minimum-value>"); cmp<T> (name+".<maximum-value>"); } private: /* Report failure */ AI_WONT_RETURN void failure(const std::string& err, const std::string& name) AI_WONT_RETURN_SUFFIX { std::stringstream ss; throw compare_fails_exception((ss << "Files are different at " << history.back().first << "." 
<< name << ".\nError is: " << err << ".\nCurrent position in scene hierarchy is " << print_hierarchy(),ss.str().c_str() )); } /** print our 'stack' */ std::string print_hierarchy() { std::stringstream ss; ss << std::endl; const char* last = history.back().first.c_str(); std::string pad; for(ChunkHistory::reverse_iterator rev = history.rbegin(), end = history.rend(); rev != end; ++rev, pad += " ") { ss << pad << (*rev).first << "(Index: " << (*rev).second[last] << ")" << std::endl; last = (*rev).first.c_str(); } ss << std::endl << "Debug trace: "<< std::endl; for (std::vector<std::string>::const_iterator it = debug_trace.begin(); it != debug_trace.end(); ++it) { ss << *it << std::endl; } return ss.str(); } /* read from both streams at the same time */ template <typename T> void read(T& filla,T& fille) { if(1 != fread(&filla,sizeof(T),1,actual)) { EOFActual(); } if(1 != fread(&fille,sizeof(T),1,expect)) { EOFExpect(); } } private: void EOFActual() { std::stringstream ss; throw compare_fails_exception((ss << "Unexpected EOF reading ACTUAL.\nCurrent position in scene hierarchy is " << print_hierarchy(),ss.str().c_str() )); } void EOFExpect() { std::stringstream ss; throw compare_fails_exception((ss << "Unexpected EOF reading EXPECT.\nCurrent position in scene hierarchy is " << print_hierarchy(),ss.str().c_str() )); } FILE *const actual, *const expect; typedef std::map<std::string,unsigned int> PerChunkCounter; typedef std::pair<std::string,PerChunkCounter> HistoryEntry; typedef std::deque<HistoryEntry> ChunkHistory; ChunkHistory history; std::vector<std::string> debug_trace; typedef std::stack<std::pair<uint32_t,uint32_t> > LengthStack; LengthStack lengths; uint32_t cnt_chunks; }; //////////////////////////////////////////////////////////////////////////////////////////////////// /* specialization for aiString (it needs separate handling because its on-disk representation * differs from its binary representation in memory and can't be treated as an array of n T's.*/ 
template <> void comparer_context :: read<aiString>(aiString& filla,aiString& fille) { uint32_t lena,lene; read(lena,lene); if(lena && 1 != fread(&filla.data,lena,1,actual)) { EOFActual(); } if(lene && 1 != fread(&fille.data,lene,1,expect)) { EOFExpect(); } fille.data[fille.length=static_cast<unsigned int>(lene)] = '\0'; filla.data[filla.length=static_cast<unsigned int>(lena)] = '\0'; } //////////////////////////////////////////////////////////////////////////////////////////////////// /* Specialization for float, uses epsilon for comparisons*/ template<> float comparer_context :: cmp<float>(const std::string& name) { float a,e,t; read(a,e); if((t=fabs(a-e)) > MY_FLT_EPSILON) { std::stringstream ss; failure((ss<< "Expected " << e << ", but actual is " << a << " (delta is " << t << ")", ss.str()),name); } return a; } //////////////////////////////////////////////////////////////////////////////////////////////////// /* Specialization for double, uses epsilon for comparisons*/ template<> double comparer_context :: cmp<double>(const std::string& name) { double a,e,t; read(a,e); if((t=fabs(a-e)) > MY_DBL_EPSILON) { std::stringstream ss; failure((ss<< "Expected " << e << ", but actual is " << a << " (delta is " << t << ")", ss.str()),name); } return a; } //////////////////////////////////////////////////////////////////////////////////////////////////// /* Specialization for aiVector3D */ template<> aiVector3D comparer_context :: cmp<aiVector3D >(const std::string& name) { const float x = cmp<float>(name+".x"); const float y = cmp<float>(name+".y"); const float z = cmp<float>(name+".z"); return aiVector3D(x,y,z); } //////////////////////////////////////////////////////////////////////////////////////////////////// /* Specialization for aiColor4D */ template<> aiColor4D comparer_context :: cmp<aiColor4D >(const std::string& name) { const float r = cmp<float>(name+".r"); const float g = cmp<float>(name+".g"); const float b = cmp<float>(name+".b"); const float a = 
cmp<float>(name+".a"); return aiColor4D(r,g,b,a); } //////////////////////////////////////////////////////////////////////////////////////////////////// /* Specialization for aiQuaternion */ template<> aiQuaternion comparer_context :: cmp<aiQuaternion >(const std::string& name) { const float w = cmp<float>(name+".w"); const float x = cmp<float>(name+".x"); const float y = cmp<float>(name+".y"); const float z = cmp<float>(name+".z"); return aiQuaternion(w,x,y,z); } //////////////////////////////////////////////////////////////////////////////////////////////////// /* Specialization for aiQuatKey */ template<> aiQuatKey comparer_context :: cmp<aiQuatKey >(const std::string& name) { const double mTime = cmp<double>(name+".mTime"); const aiQuaternion mValue = cmp<aiQuaternion>(name+".mValue"); return aiQuatKey(mTime,mValue); } //////////////////////////////////////////////////////////////////////////////////////////////////// /* Specialization for aiVectorKey */ template<> aiVectorKey comparer_context :: cmp<aiVectorKey >(const std::string& name) { const double mTime = cmp<double>(name+".mTime"); const aiVector3D mValue = cmp<aiVector3D>(name+".mValue"); return aiVectorKey(mTime,mValue); } //////////////////////////////////////////////////////////////////////////////////////////////////// /* Specialization for aiMatrix4x4 */ template<> aiMatrix4x4 comparer_context :: cmp<aiMatrix4x4 >(const std::string& name) { aiMatrix4x4 res; for(unsigned int i = 0; i < 4; ++i) { for(unsigned int j = 0; j < 4; ++j) { std::stringstream ss; res[i][j] = cmp<float>(name+(ss<<".m"<<i<<j,ss.str())); } } return res; } //////////////////////////////////////////////////////////////////////////////////////////////////// /* Specialization for aiVertexWeight */ template<> aiVertexWeight comparer_context :: cmp<aiVertexWeight >(const std::string& name) { const unsigned int mVertexId = cmp<unsigned int>(name+".mVertexId"); const float mWeight = cmp<float>(name+".mWeight"); return 
aiVertexWeight(mVertexId,mWeight); } //////////////////////////////////////////////////////////////////////////////////////////////////// /// @class sliced_chunk_iterator /// /// @brief Helper to iterate easily through corresponding chunks of two dumps simultaneously. /// /// Not a *real* iterator, doesn't fully conform to the isocpp iterator spec //////////////////////////////////////////////////////////////////////////////////////////////////// class sliced_chunk_iterator { friend class sliced_chunk_reader; sliced_chunk_iterator(comparer_context& ctx, long end) : ctx(ctx) , endit(false) , next(std::numeric_limits<long>::max()) , end(end) { load_next(); } public: ~sliced_chunk_iterator() { fseek(ctx.get_actual(),end,SEEK_SET); fseek(ctx.get_expect(),end,SEEK_SET); } public: /* get current chunk head */ typedef std::pair<uint32_t,uint32_t> Chunk; const Chunk& operator*() { return current; } /* get to next chunk head */ const sliced_chunk_iterator& operator++() { cleanup(); load_next(); return *this; } /* */ bool is_end() const { return endit; } private: /* get to the end of *this* chunk */ void cleanup() { if(next != std::numeric_limits<long>::max()) { fseek(ctx.get_actual(),next,SEEK_SET); fseek(ctx.get_expect(),next,SEEK_SET); ctx.pop_length(); } } /* advance to the next chunk */ void load_next() { Chunk actual; size_t res=0; const long cur = ftell(ctx.get_expect()); if(end-cur<8) { current = std::make_pair(0u,0u); endit = true; return; } res|=fread(&current.first,4,1,ctx.get_expect()); res|=fread(&current.second,4,1,ctx.get_expect()) <<1u; res|=fread(&actual.first,4,1,ctx.get_actual()) <<2u; res|=fread(&actual.second,4,1,ctx.get_actual()) <<3u; if(res!=0xf) { ctx.failure("IO Error reading chunk head, dumps are malformed","<ChunkHead>"); } if (current.first != actual.first) { std::stringstream ss; ctx.failure((ss <<"Chunk headers do not match. 
EXPECT: " << std::hex << current.first <<" ACTUAL: " << /*std::hex */actual.first, ss.str()), "<ChunkHead>"); } if (current.first != actual.first) { std::stringstream ss; ctx.failure((ss <<"Chunk lengths do not match. EXPECT: " <<current.second <<" ACTUAL: " << actual.second, ss.str()), "<ChunkHead>"); } next = cur+current.second+8; ctx.push_length(current.second,cur+8); } comparer_context& ctx; Chunk current; bool endit; long next,end; }; //////////////////////////////////////////////////////////////////////////////////////////////////// /// @class sliced_chunk_reader /// /// @brief Helper to iterate easily through corresponding chunks of two dumps simultaneously. //////////////////////////////////////////////////////////////////////////////////////////////////// class sliced_chunk_reader { public: // sliced_chunk_reader(comparer_context& ctx) : ctx(ctx) {} // ~sliced_chunk_reader() { } public: sliced_chunk_iterator begin() const { return sliced_chunk_iterator(ctx,ctx.get_latest_chunk_length()+ ctx.get_latest_chunk_start()); } private: comparer_context& ctx; }; //////////////////////////////////////////////////////////////////////////////////////////////////// /// @class scoped_chunk /// /// @brief Utility to simplify usage of comparer_context.push_elem/pop_elem //////////////////////////////////////////////////////////////////////////////////////////////////// class scoped_chunk { public: // scoped_chunk(comparer_context& ctx,const char* msg) : ctx(ctx) { ctx.push_elem(msg); } // ~scoped_chunk() { ctx.pop_elem(); } private: comparer_context& ctx; }; //////////////////////////////////////////////////////////////////////////////////////////////////// void CompareOnTheFlyMaterialProperty(comparer_context& comp) { scoped_chunk chunk(comp,"aiMaterialProperty"); comp.cmp<aiString>("mKey"); comp.cmp<uint32_t>("mSemantic"); comp.cmp<uint32_t>("mIndex"); const uint32_t length = comp.cmp<uint32_t>("mDataLength"); const aiPropertyTypeInfo type = 
static_cast<aiPropertyTypeInfo>( comp.cmp<uint32_t>("mType")); switch (type) { case aiPTI_Float: comp.cmp<float>(length/4,"mData"); break; case aiPTI_String: comp.cmp<aiString>("mData"); break; case aiPTI_Integer: comp.cmp<uint32_t>(length/4,"mData"); break; case aiPTI_Buffer: comp.cmp<uint8_t>(length,"mData"); break; default: break; }; } //////////////////////////////////////////////////////////////////////////////////////////////////// void CompareOnTheFlyMaterial(comparer_context& comp) { scoped_chunk chunk(comp,"aiMaterial"); comp.cmp<uint32_t>("aiMaterial::mNumProperties"); sliced_chunk_reader reader(comp); for(sliced_chunk_iterator it = reader.begin(); !it.is_end(); ++it) { if ((*it).first == ASSBIN_CHUNK_AIMATERIALPROPERTY) { CompareOnTheFlyMaterialProperty(comp); } } } //////////////////////////////////////////////////////////////////////////////////////////////////// void CompareOnTheFlyBone(comparer_context& comp) { scoped_chunk chunk(comp,"aiBone"); comp.cmp<aiString>("mName"); comp.cmp<uint32_t>("mNumWeights"); comp.cmp<aiMatrix4x4>("mOffsetMatrix"); comp.cmp_bounds<aiVertexWeight>("mWeights"); } //////////////////////////////////////////////////////////////////////////////////////////////////// void CompareOnTheFlyNodeAnim(comparer_context& comp) { scoped_chunk chunk(comp,"aiNodeAnim"); comp.cmp<aiString>("mNodeName"); comp.cmp<uint32_t>("mNumPositionKeys"); comp.cmp<uint32_t>("mNumRotationKeys"); comp.cmp<uint32_t>("mNumScalingKeys"); comp.cmp<uint32_t>("mPreState"); comp.cmp<uint32_t>("mPostState"); comp.cmp_bounds<aiVectorKey>("mPositionKeys"); comp.cmp_bounds<aiQuatKey>("mRotationKeys"); comp.cmp_bounds<aiVectorKey>("mScalingKeys"); } //////////////////////////////////////////////////////////////////////////////////////////////////// void CompareOnTheFlyMesh(comparer_context& comp) { scoped_chunk chunk(comp,"aiMesh"); comp.cmp<uint32_t>("mPrimitiveTypes"); comp.cmp<uint32_t>("mNumVertices"); const uint32_t nf = comp.cmp<uint32_t>("mNumFaces"); 
comp.cmp<uint32_t>("mNumBones"); comp.cmp<uint32_t>("mMaterialIndex"); const uint32_t present = comp.cmp<uint32_t>("<vertex-components-present>"); if(present & ASSBIN_MESH_HAS_POSITIONS) { comp.cmp_bounds<aiVector3D>("mVertices"); } if(present & ASSBIN_MESH_HAS_NORMALS) { comp.cmp_bounds<aiVector3D>("mNormals"); } if(present & ASSBIN_MESH_HAS_TANGENTS_AND_BITANGENTS) { comp.cmp_bounds<aiVector3D>("mTangents"); comp.cmp_bounds<aiVector3D>("mBitangents"); } for(unsigned int i = 0; present & ASSBIN_MESH_HAS_COLOR(i); ++i) { std::stringstream ss; comp.cmp_bounds<aiColor4D>((ss<<"mColors["<<i<<"]",ss.str())); } for(unsigned int i = 0; present & ASSBIN_MESH_HAS_TEXCOORD(i); ++i) { std::stringstream ss; comp.cmp<uint32_t>((ss<<"mNumUVComponents["<<i<<"]",ss.str())); comp.cmp_bounds<aiVector3D>((ss.clear(),ss<<"mTextureCoords["<<i<<"]",ss.str())); } for(unsigned int i = 0; i< ((nf+511)/512); ++i) { std::stringstream ss; comp.cmp<uint32_t>((ss<<"mFaces["<<i*512<<"-"<<std::min(static_cast< uint32_t>((i+1)*512),nf)<<"]",ss.str())); } sliced_chunk_reader reader(comp); for(sliced_chunk_iterator it = reader.begin(); !it.is_end(); ++it) { if ((*it).first == ASSBIN_CHUNK_AIBONE) { CompareOnTheFlyBone(comp); } } } //////////////////////////////////////////////////////////////////////////////////////////////////// void CompareOnTheFlyCamera(comparer_context& comp) { scoped_chunk chunk(comp,"aiCamera"); comp.cmp<aiString>("mName"); comp.cmp<aiVector3D>("mPosition"); comp.cmp<aiVector3D>("mLookAt"); comp.cmp<aiVector3D>("mUp"); comp.cmp<float>("mHorizontalFOV"); comp.cmp<float>("mClipPlaneNear"); comp.cmp<float>("mClipPlaneFar"); comp.cmp<float>("mAspect"); } //////////////////////////////////////////////////////////////////////////////////////////////////// void CompareOnTheFlyLight(comparer_context& comp) { scoped_chunk chunk(comp,"aiLight"); comp.cmp<aiString>("mName"); const aiLightSourceType type = static_cast<aiLightSourceType>( comp.cmp<uint32_t>("mType")); 
if(type!=aiLightSource_DIRECTIONAL) { comp.cmp<float>("mAttenuationConstant"); comp.cmp<float>("mAttenuationLinear"); comp.cmp<float>("mAttenuationQuadratic"); } comp.cmp<aiVector3D>("mColorDiffuse"); comp.cmp<aiVector3D>("mColorSpecular"); comp.cmp<aiVector3D>("mColorAmbient"); if(type==aiLightSource_SPOT) { comp.cmp<float>("mAngleInnerCone"); comp.cmp<float>("mAngleOuterCone"); } } //////////////////////////////////////////////////////////////////////////////////////////////////// void CompareOnTheFlyAnimation(comparer_context& comp) { scoped_chunk chunk(comp,"aiAnimation"); comp.cmp<aiString>("mName"); comp.cmp<double>("mDuration"); comp.cmp<double>("mTicksPerSecond"); comp.cmp<uint32_t>("mNumChannels"); sliced_chunk_reader reader(comp); for(sliced_chunk_iterator it = reader.begin(); !it.is_end(); ++it) { if ((*it).first == ASSBIN_CHUNK_AINODEANIM) { CompareOnTheFlyNodeAnim(comp); } } } //////////////////////////////////////////////////////////////////////////////////////////////////// void CompareOnTheFlyTexture(comparer_context& comp) { scoped_chunk chunk(comp,"aiTexture"); const uint32_t w = comp.cmp<uint32_t>("mWidth"); const uint32_t h = comp.cmp<uint32_t>("mHeight"); (void)w; (void)h; comp.cmp<char>("achFormatHint[0]"); comp.cmp<char>("achFormatHint[1]"); comp.cmp<char>("achFormatHint[2]"); comp.cmp<char>("achFormatHint[3]"); } //////////////////////////////////////////////////////////////////////////////////////////////////// void CompareOnTheFlyNode(comparer_context& comp) { scoped_chunk chunk(comp,"aiNode"); comp.cmp<aiString>("mName"); comp.cmp<aiMatrix4x4>("mTransformation"); comp.cmp<uint32_t>("mNumChildren"); comp.cmp<uint32_t>(comp.cmp<uint32_t>("mNumMeshes"),"mMeshes"); sliced_chunk_reader reader(comp); for(sliced_chunk_iterator it = reader.begin(); !it.is_end(); ++it) { if ((*it).first == ASSBIN_CHUNK_AINODE) { CompareOnTheFlyNode(comp); } } } //////////////////////////////////////////////////////////////////////////////////////////////////// 
void CompareOnTheFlyScene(comparer_context& comp) { scoped_chunk chunk(comp,"aiScene"); comp.cmp<uint32_t>("mFlags"); comp.cmp<uint32_t>("mNumMeshes"); comp.cmp<uint32_t>("mNumMaterials"); comp.cmp<uint32_t>("mNumAnimations"); comp.cmp<uint32_t>("mNumTextures"); comp.cmp<uint32_t>("mNumLights"); comp.cmp<uint32_t>("mNumCameras"); sliced_chunk_reader reader(comp); for(sliced_chunk_iterator it = reader.begin(); !it.is_end(); ++it) { if ((*it).first == ASSBIN_CHUNK_AIMATERIAL) { CompareOnTheFlyMaterial(comp); } else if ((*it).first == ASSBIN_CHUNK_AITEXTURE) { CompareOnTheFlyTexture(comp); } else if ((*it).first == ASSBIN_CHUNK_AIMESH) { CompareOnTheFlyMesh(comp); } else if ((*it).first == ASSBIN_CHUNK_AIANIMATION) { CompareOnTheFlyAnimation(comp); } else if ((*it).first == ASSBIN_CHUNK_AICAMERA) { CompareOnTheFlyCamera(comp); } else if ((*it).first == ASSBIN_CHUNK_AILIGHT) { CompareOnTheFlyLight(comp); } else if ((*it).first == ASSBIN_CHUNK_AINODE) { CompareOnTheFlyNode(comp); } } } //////////////////////////////////////////////////////////////////////////////////////////////////// void CompareOnTheFly(comparer_context& comp) { sliced_chunk_reader reader(comp); for(sliced_chunk_iterator it = reader.begin(); !it.is_end(); ++it) { if ((*it).first == ASSBIN_CHUNK_AISCENE) { CompareOnTheFlyScene(comp); break; } } } //////////////////////////////////////////////////////////////////////////////////////////////////// void CheckHeader(comparer_context& comp) { fseek(comp.get_actual(),ASSBIN_HEADER_LENGTH,SEEK_CUR); fseek(comp.get_expect(),ASSBIN_HEADER_LENGTH,SEEK_CUR); } //////////////////////////////////////////////////////////////////////////////////////////////////// int Assimp_CompareDump (const char* const* params, unsigned int num) { // --help if ((num == 1 && !strcmp( params[0], "-h")) || !strcmp( params[0], "--help") || !strcmp( params[0], "-?") ) { printf("%s",AICMD_MSG_CMPDUMP_HELP); return 0; } // assimp cmpdump actual expected if (num < 1) { std::cout << "assimp 
cmpdump: Invalid number of arguments. " "See \'assimp cmpdump --help\'\r\n" << std::endl; return 1; } if(!strcmp(params[0],params[1])) { std::cout << "assimp cmpdump: same file, same content." << std::endl; return 0; } class file_ptr { public: file_ptr(FILE *p) : m_file(p) {} ~file_ptr() { if (m_file) { fclose(m_file); m_file = NULL; } } operator FILE *() { return m_file; } private: FILE *m_file; }; file_ptr actual(fopen(params[0],"rb")); if (!actual) { std::cout << "assimp cmpdump: Failure reading ACTUAL data from " << params[0] << std::endl; return -5; } file_ptr expected(fopen(params[1],"rb")); if (!expected) { std::cout << "assimp cmpdump: Failure reading EXPECT data from " << params[1] << std::endl; return -6; } comparer_context comp(actual,expected); try { CheckHeader(comp); CompareOnTheFly(comp); } catch(const compare_fails_exception& ex) { printf("%s",ex.what()); return -1; } catch(...) { // we don't bother checking too rigourously here, so // we might end up here ... std::cout << "Unknown failure, are the input files well-defined?"; return -3; } std::cout << "Success (totally " << std::dec << comp.get_num_chunks() << " chunks)" << std::endl; return 0; }
{ "content_hash": "7b200ffc27d2ef22f7908bcda4460672", "timestamp": "", "source": "github", "line_count": 909, "max_line_length": 117, "avg_line_length": 30.84928492849285, "alnum_prop": 0.5074174452606804, "repo_name": "leyyin/university", "id": "8739c9364d4a18d2b09a6d789aef782d51a849bc", "size": "29843", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "computer-graphics/labs/lab3-glitter/Glitter/Vendor/assimp/tools/assimp_cmd/CompareDump.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "53578" }, { "name": "Awk", "bytes": "10006" }, { "name": "C", "bytes": "113260" }, { "name": "C++", "bytes": "171138" }, { "name": "CMake", "bytes": "1458" }, { "name": "Common Lisp", "bytes": "18679" }, { "name": "D", "bytes": "30122" }, { "name": "HTML", "bytes": "244869" }, { "name": "Java", "bytes": "113685" }, { "name": "Matlab", "bytes": "16233" }, { "name": "Objective-C", "bytes": "63" }, { "name": "PLSQL", "bytes": "6236" }, { "name": "Prolog", "bytes": "16706" }, { "name": "Python", "bytes": "290447" }, { "name": "QMake", "bytes": "327" }, { "name": "SQLPL", "bytes": "2649" }, { "name": "Shell", "bytes": "7981" }, { "name": "TeX", "bytes": "11421" } ], "symlink_target": "" }