text stringlengths 2 1.04M | meta dict |
|---|---|
namespace Zu.ChromeDevTools.Fetch
{
using Newtonsoft.Json;
/// <summary>
/// Continues loading of the paused response, optionally modifying the
/// response headers. If either responseCode or headers are modified, all of them
/// must be present.
/// </summary>
public sealed class ContinueResponseCommand : ICommand
{
    // Wire-protocol method name for this command ("Fetch.continueResponse").
    private const string ChromeRemoteInterface_CommandName = "Fetch.continueResponse";
    /// <summary>
    /// Gets the protocol command name; excluded from the serialized payload.
    /// </summary>
    [JsonIgnore]
    public string CommandName
    {
        get { return ChromeRemoteInterface_CommandName; }
    }
    /// <summary>
    /// An id the client received in requestPaused event.
    /// </summary>
    [JsonProperty("requestId")]
    public string RequestId
    {
        get;
        set;
    }
    /// <summary>
    /// An HTTP response code. If absent, original response code will be used.
    /// </summary>
    [JsonProperty("responseCode", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public long? ResponseCode
    {
        get;
        set;
    }
    /// <summary>
    /// A textual representation of responseCode.
    /// If absent, a standard phrase matching responseCode is used.
    /// </summary>
    [JsonProperty("responsePhrase", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public string ResponsePhrase
    {
        get;
        set;
    }
    /// <summary>
    /// Response headers. If absent, original response headers will be used.
    /// </summary>
    [JsonProperty("responseHeaders", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public HeaderEntry[] ResponseHeaders
    {
        get;
        set;
    }
    /// <summary>
    /// Alternative way of specifying response headers as a \0-separated
    /// series of name: value pairs. Prefer the above method unless you
    /// need to represent some non-UTF8 values that can't be transmitted
    /// over the protocol as text. (Encoded as a base64 string when passed over JSON)
    /// </summary>
    [JsonProperty("binaryResponseHeaders", DefaultValueHandling = DefaultValueHandling.Ignore)]
    public string BinaryResponseHeaders
    {
        get;
        set;
    }
}
/// <summary>
/// Response to a Fetch.continueResponse command; the protocol defines no result payload.
/// </summary>
public sealed class ContinueResponseCommandResponse : ICommandResponse<ContinueResponseCommand>
{
}
} | {
"content_hash": "b33976463a377460e93d2796851dc74a",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 99,
"avg_line_length": 33.554054054054056,
"alnum_prop": 0.5972613773660894,
"repo_name": "ToCSharp/AsyncChromeDriver",
"id": "0084ffeaf13fee79a6ef9ce2bfd939ca1b857d7c",
"size": "2483",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ChromeDevToolsClient/generated/Fetch/ContinueResponseCommand.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP.NET",
"bytes": "927"
},
{
"name": "C#",
"bytes": "4143739"
},
{
"name": "CSS",
"bytes": "226"
},
{
"name": "HTML",
"bytes": "375538"
},
{
"name": "JavaScript",
"bytes": "3581"
},
{
"name": "XSLT",
"bytes": "1047"
}
],
"symlink_target": ""
} |
package collector
import (
"reflect"
"github.com/blevesearch/bleve/document"
"github.com/blevesearch/bleve/index"
"github.com/blevesearch/bleve/search"
)
// stubSearcher is a canned search.Searcher used in collector tests: it
// replays a fixed slice of document matches, tracking progress via index.
type stubSearcher struct {
	index   int // position of the next match to return
	matches []*search.DocumentMatch
}
// Size reports an approximate memory footprint: the struct itself plus the
// reported size of every non-nil queued match.
func (ss *stubSearcher) Size() int {
	total := int(reflect.TypeOf(*ss).Size())
	for _, m := range ss.matches {
		if m == nil {
			continue
		}
		total += m.Size()
	}
	return total
}
// Next hands out the next canned match, or (nil, nil) once the stub is
// exhausted. Only IndexInternalID and Score are copied into the
// pool-provided DocumentMatch.
func (ss *stubSearcher) Next(ctx *search.SearchContext) (*search.DocumentMatch, error) {
	if ss.index >= len(ss.matches) {
		return nil, nil
	}
	src := ss.matches[ss.index]
	ss.index++
	dm := ctx.DocumentMatchPool.Get()
	dm.IndexInternalID = src.IndexInternalID
	dm.Score = src.Score
	return dm, nil
}
// Advance skips every queued match that sorts before ID, then returns the
// first remaining match (or nil, nil when none remain), exactly as Next does.
func (ss *stubSearcher) Advance(ctx *search.SearchContext, ID index.IndexInternalID) (*search.DocumentMatch, error) {
	for ss.index < len(ss.matches) && ss.matches[ss.index].IndexInternalID.Compare(ID) < 0 {
		ss.index++
	}
	return ss.Next(ctx)
}
// Close releases resources; the stub holds none, so it always succeeds.
func (ss *stubSearcher) Close() error {
	return nil
}

// Weight returns the scoring weight; irrelevant for the stub.
func (ss *stubSearcher) Weight() float64 {
	return 0.0
}

// SetQueryNorm is a no-op; the stub does not participate in scoring.
func (ss *stubSearcher) SetQueryNorm(float64) {
}

// Count reports the total number of canned matches.
func (ss *stubSearcher) Count() uint64 {
	return uint64(len(ss.matches))
}

// Min is part of the searcher interface; unused by the stub.
func (ss *stubSearcher) Min() int {
	return 0
}

// DocumentMatchPoolSize reports how many DocumentMatch values this
// searcher wants preallocated; the stub requests none.
func (ss *stubSearcher) DocumentMatchPoolSize() int {
	return 0
}
// stubReader is a minimal index reader for tests: most methods return zero
// values; only the ID conversions and DocValueReader carry real behavior.
type stubReader struct{}

// Size reports the reader's memory footprint; the stub occupies nothing.
func (sr *stubReader) Size() int {
	return 0
}

// TermFieldReader always returns a nil reader; term lookups are unused here.
func (sr *stubReader) TermFieldReader(term []byte, field string, includeFreq, includeNorm, includeTermVectors bool) (index.TermFieldReader, error) {
	return nil, nil
}
// DocIDReaderAll is unused by these tests; always returns a nil reader.
func (sr *stubReader) DocIDReaderAll() (index.DocIDReader, error) {
	return nil, nil
}

// DocIDReaderOnly is unused by these tests; always returns a nil reader.
func (sr *stubReader) DocIDReaderOnly(ids []string) (index.DocIDReader, error) {
	return nil, nil
}

// FieldDict is unused by these tests; always returns a nil dictionary.
func (sr *stubReader) FieldDict(field string) (index.FieldDict, error) {
	return nil, nil
}

// FieldDictRange is unused by these tests; always returns a nil dictionary.
func (sr *stubReader) FieldDictRange(field string, startTerm []byte, endTerm []byte) (index.FieldDict, error) {
	return nil, nil
}

// FieldDictPrefix is unused by these tests; always returns a nil dictionary.
func (sr *stubReader) FieldDictPrefix(field string, termPrefix []byte) (index.FieldDict, error) {
	return nil, nil
}

// Document is unused by these tests; no stored documents exist.
func (sr *stubReader) Document(id string) (*document.Document, error) {
	return nil, nil
}

// DocumentVisitFieldTerms visits nothing; the stub stores no field terms.
func (sr *stubReader) DocumentVisitFieldTerms(id index.IndexInternalID, fields []string, visitor index.DocumentFieldTermVisitor) error {
	return nil
}

// Fields reports no indexed fields.
func (sr *stubReader) Fields() ([]string, error) {
	return nil, nil
}

// GetInternal has no backing store; always returns nil.
func (sr *stubReader) GetInternal(key []byte) ([]byte, error) {
	return nil, nil
}

// DocCount reports an empty index.
func (sr *stubReader) DocCount() (uint64, error) {
	return 0, nil
}
// ExternalID maps an internal ID to its external form; the stub treats the
// raw ID bytes as the external string.
func (sr *stubReader) ExternalID(id index.IndexInternalID) (string, error) {
	return string(id), nil
}

// InternalID is the inverse mapping: the external string's bytes become the
// internal ID.
func (sr *stubReader) InternalID(id string) (index.IndexInternalID, error) {
	return []byte(id), nil
}

// DumpAll is a debug hook; unused, so it returns a nil channel.
func (sr *stubReader) DumpAll() chan interface{} {
	return nil
}

// DumpDoc is a debug hook; unused, so it returns a nil channel.
func (sr *stubReader) DumpDoc(id string) chan interface{} {
	return nil
}

// DumpFields is a debug hook; unused, so it returns a nil channel.
func (sr *stubReader) DumpFields() chan interface{} {
	return nil
}

// Close releases resources; the stub holds none.
func (sr *stubReader) Close() error {
	return nil
}
// DocValueReader returns a doc-value reader scoped to the given fields,
// backed by this stub reader.
func (sr *stubReader) DocValueReader(fields []string) (index.DocValueReader, error) {
	return &DocValueReader{i: sr, fields: fields}, nil
}

// DocValueReader adapts stubReader's DocumentVisitFieldTerms to the
// doc-value-reader interface for a fixed field set.
type DocValueReader struct {
	i      *stubReader
	fields []string
}

// VisitDocValues forwards to the underlying reader with the configured fields.
func (dvr *DocValueReader) VisitDocValues(id index.IndexInternalID, visitor index.DocumentFieldTermVisitor) error {
	return dvr.i.DocumentVisitFieldTerms(id, dvr.fields, visitor)
}
| {
"content_hash": "db68428d55a5830d36ddfbc35a95a9ba",
"timestamp": "",
"source": "github",
"line_count": 162,
"max_line_length": 148,
"avg_line_length": 22.59259259259259,
"alnum_prop": 0.7153005464480874,
"repo_name": "steveyen/bleve",
"id": "233bc971165af8145bd7cd3a1d65ca8d92c75fb4",
"size": "4256",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "search/collector/search_test.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "2495311"
},
{
"name": "Shell",
"bytes": "241"
},
{
"name": "Yacc",
"bytes": "7287"
}
],
"symlink_target": ""
} |
@class NSMPlayerRestoration;
/// A player that can snapshot and restore its playback state.
@protocol NSMVideoPlayerProtocol <NSMPlayerProtocol>
/// Reapplies a previously captured playback state to the receiver.
- (void)restorePlayerWithRestoration:(NSMPlayerRestoration *)restoration;
/// Captures the receiver's current playback state for later restoration.
- (NSMPlayerRestoration *)savePlayerState;
@end
| {
"content_hash": "1191c6935db3bc543da8f20b2eefd6ea",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 73,
"avg_line_length": 25.875,
"alnum_prop": 0.8357487922705314,
"repo_name": "xinpianchang/NSMPlayer-ObjC",
"id": "2df2b230cffeea514e02830a5e024d9f6db6a1b1",
"size": "1377",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NSMPlayer/Classes/Protocol/NSMVideoPlayerProtocol.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3566"
},
{
"name": "Objective-C",
"bytes": "773766"
},
{
"name": "Ruby",
"bytes": "917"
},
{
"name": "Shell",
"bytes": "18774"
},
{
"name": "Swift",
"bytes": "11302"
}
],
"symlink_target": ""
} |
'use strict';
var $ = require('../internals/export');
var IS_PURE = require('../internals/is-pure');
var anObject = require('../internals/an-object');
var getSetIterator = require('../internals/get-set-iterator');
var iterate = require('../internals/iterate');
// `Set.prototype.join` method
// https://github.com/tc39/proposal-collection-methods
// `Set.prototype.join` method
// https://github.com/tc39/proposal-collection-methods
$({ target: 'Set', proto: true, real: true, forced: IS_PURE }, {
  join: function join(separator) {
    var set = anObject(this);
    var iterator = getSetIterator(set);
    var sep = separator === undefined ? ',' : String(separator);
    var parts = [];
    // Drain the set's iterator into an array, then join with the separator.
    iterate(iterator, function (value) {
      parts.push(value);
    }, { IS_ITERATOR: true });
    return parts.join(sep);
  }
});
| {
"content_hash": "55fadf0f631dda031869b5e5f6f1d95b",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 72,
"avg_line_length": 37.578947368421055,
"alnum_prop": 0.6582633053221288,
"repo_name": "cloudfoundry-community/asp.net5-buildpack",
"id": "d7bba501cbe9d4de53f915dc07463c2a5adc2e8e",
"size": "714",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "fixtures/node_apps/angular_dotnet/ClientApp/node_modules/core-js/modules/esnext.set.join.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ruby",
"bytes": "61792"
}
],
"symlink_target": ""
} |
'use strict';
import * as objects from 'vs/base/common/objects';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import { ICommandService } from 'vs/platform/commands/common/commands';
import { IContextKeyService } from 'vs/platform/contextkey/common/contextkey';
import { ICodeEditorService } from 'vs/editor/browser/services/codeEditorService';
import { ICodeEditor } from 'vs/editor/browser/editorBrowser';
import { CodeEditor } from 'vs/editor/browser/codeEditor';
import { IConfigurationChangedEvent, IEditorOptions, IDiffEditorOptions } from 'vs/editor/common/config/editorOptions';
import { IThemeService } from 'vs/platform/theme/common/themeService';
import { DiffEditorWidget } from 'vs/editor/browser/widget/diffEditorWidget';
import { IEditorWorkerService } from 'vs/editor/common/services/editorWorkerService';
import { IMessageService } from 'vs/platform/message/common/message';
/**
 * A code editor embedded inside another editor (e.g. in peek widgets).
 * It inherits the parent editor's raw configuration and layers a fixed set
 * of overwrite options on top, reapplying both whenever either changes.
 */
export class EmbeddedCodeEditorWidget extends CodeEditor {

	private _parentEditor: ICodeEditor;
	private _overwriteOptions: IEditorOptions;

	constructor(
		domElement: HTMLElement,
		options: IEditorOptions,
		parentEditor: ICodeEditor,
		@IInstantiationService instantiationService: IInstantiationService,
		@ICodeEditorService codeEditorService: ICodeEditorService,
		@ICommandService commandService: ICommandService,
		@IContextKeyService contextKeyService: IContextKeyService,
		@IThemeService themeService: IThemeService
	) {
		// Start from the parent's raw configuration so the embedded editor looks alike.
		super(domElement, parentEditor.getRawConfiguration(), instantiationService, codeEditorService, commandService, contextKeyService, themeService);
		this._parentEditor = parentEditor;
		this._overwriteOptions = options;
		// Overwrite parent's options
		super.updateOptions(this._overwriteOptions);
		// Keep this editor in sync when the parent's configuration changes.
		this._register(parentEditor.onDidChangeConfiguration((e: IConfigurationChangedEvent) => this._onParentConfigurationChanged(e)));
	}

	/** The editor this widget is embedded in. */
	getParentEditor(): ICodeEditor {
		return this._parentEditor;
	}

	// Reapply parent options first so our overwrite options always win.
	// The change event itself is unused: the full raw config is re-read.
	private _onParentConfigurationChanged(e: IConfigurationChangedEvent): void {
		super.updateOptions(this._parentEditor.getRawConfiguration());
		super.updateOptions(this._overwriteOptions);
	}

	/** Merges new options into the overwrite set, then applies the result. */
	updateOptions(newOptions: IEditorOptions): void {
		objects.mixin(this._overwriteOptions, newOptions, true);
		super.updateOptions(this._overwriteOptions);
	}
}
/**
 * A diff editor embedded inside another editor. Mirrors the parent editor's
 * configuration and layers a fixed set of diff-specific overwrite options on
 * top, reapplying both whenever either changes.
 */
export class EmbeddedDiffEditorWidget extends DiffEditorWidget {

	private _parentEditor: ICodeEditor;
	private _overwriteOptions: IDiffEditorOptions;

	constructor(
		domElement: HTMLElement,
		options: IDiffEditorOptions,
		parentEditor: ICodeEditor,
		@IEditorWorkerService editorWorkerService: IEditorWorkerService,
		@IContextKeyService contextKeyService: IContextKeyService,
		@IInstantiationService instantiationService: IInstantiationService,
		@ICodeEditorService codeEditorService: ICodeEditorService,
		@IThemeService themeService: IThemeService,
		@IMessageService messageService: IMessageService
	) {
		// Start from the parent's raw configuration so the diff editor looks alike.
		super(domElement, parentEditor.getRawConfiguration(), editorWorkerService, contextKeyService, instantiationService, codeEditorService, themeService, messageService);
		this._parentEditor = parentEditor;
		this._overwriteOptions = options;
		// Overwrite parent's options
		super.updateOptions(this._overwriteOptions);
		// Keep this editor in sync when the parent's configuration changes.
		this._register(parentEditor.onDidChangeConfiguration(e => this._onParentConfigurationChanged(e)));
	}

	/** The editor this widget is embedded in. */
	getParentEditor(): ICodeEditor {
		return this._parentEditor;
	}

	// Reapply parent options first so our overwrite options always win.
	// The change event itself is unused: the full raw config is re-read.
	private _onParentConfigurationChanged(e: IConfigurationChangedEvent): void {
		super.updateOptions(this._parentEditor.getRawConfiguration());
		super.updateOptions(this._overwriteOptions);
	}

	/** Merges new options into the overwrite set, then applies the result. */
	updateOptions(newOptions: IEditorOptions): void {
		objects.mixin(this._overwriteOptions, newOptions, true);
		super.updateOptions(this._overwriteOptions);
	}
}
| {
"content_hash": "9cd6491b00223e28fb24000116c2f92f",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 167,
"avg_line_length": 38.734693877551024,
"alnum_prop": 0.8074288724973656,
"repo_name": "Zalastax/vscode",
"id": "e1761f3b3e6ab31923e0a76976d23e7dd5c4e461",
"size": "4147",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "src/vs/editor/browser/widget/embeddedCodeEditorWidget.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5939"
},
{
"name": "C",
"bytes": "818"
},
{
"name": "C#",
"bytes": "1640"
},
{
"name": "C++",
"bytes": "1072"
},
{
"name": "CSS",
"bytes": "400935"
},
{
"name": "Clojure",
"bytes": "1206"
},
{
"name": "CoffeeScript",
"bytes": "590"
},
{
"name": "F#",
"bytes": "634"
},
{
"name": "Go",
"bytes": "628"
},
{
"name": "Groovy",
"bytes": "3928"
},
{
"name": "HLSL",
"bytes": "184"
},
{
"name": "HTML",
"bytes": "30932"
},
{
"name": "Inno Setup",
"bytes": "110496"
},
{
"name": "Java",
"bytes": "599"
},
{
"name": "JavaScript",
"bytes": "803906"
},
{
"name": "Lua",
"bytes": "252"
},
{
"name": "Makefile",
"bytes": "941"
},
{
"name": "Objective-C",
"bytes": "1387"
},
{
"name": "PHP",
"bytes": "998"
},
{
"name": "Perl",
"bytes": "857"
},
{
"name": "Perl 6",
"bytes": "1065"
},
{
"name": "PowerShell",
"bytes": "8604"
},
{
"name": "Python",
"bytes": "2119"
},
{
"name": "R",
"bytes": "362"
},
{
"name": "Ruby",
"bytes": "1703"
},
{
"name": "Rust",
"bytes": "532"
},
{
"name": "ShaderLab",
"bytes": "330"
},
{
"name": "Shell",
"bytes": "34978"
},
{
"name": "Swift",
"bytes": "220"
},
{
"name": "TypeScript",
"bytes": "15028375"
},
{
"name": "Visual Basic",
"bytes": "893"
}
],
"symlink_target": ""
} |
from __future__ import with_statement
from drmaa import *
def test_with_session():
    """'with' statement works with Session"""
    # Session supports the context-manager protocol: it initializes the
    # DRMAA session on entry and tears it down on exit, so no explicit
    # initialize()/exit() calls are needed here.
    with Session() as s:
        print s.version
        print s.contact
        print s.drmsInfo
        print s.drmaaImplementation
def test_with_jt():
"""'with' statement works with JobTemplate"""
s = Session()
s.initialize()
with s.createJobTemplate() as jt:
jt.remoteCommand = 'sleep'
jt.args = ['10']
jid = s.runJob(jt)
print s.wait(jid)
s.exit()
| {
"content_hash": "0bedd6dc4143e72f5d507992625150fb",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 49,
"avg_line_length": 24.40909090909091,
"alnum_prop": 0.5940409683426443,
"repo_name": "UCSantaCruzComputationalGenomicsLab/drmaa-python",
"id": "860a0b83537eadff8b5c034d5a16252481077575",
"size": "537",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "drmaa/test/testcontext.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
A Clojure library designed to ... well, that part is up to you.
## Usage
FIXME
## License
Copyright © 2014 FIXME
Distributed under the Eclipse Public License, the same as Clojure.
| {
"content_hash": "81f748c1034659b09e6422fa66dcebba",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 66,
"avg_line_length": 16.818181818181817,
"alnum_prop": 0.7351351351351352,
"repo_name": "mrijk/clj-zeromq",
"id": "0866e010ba9c740dac7c7dc0454a05331e2f9b72",
"size": "200",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Clojure",
"bytes": "4890"
}
],
"symlink_target": ""
} |
package com.robocubs4205.cubscout
import play.api.libs.json._
import play.api.libs.functional.syntax._
import play.api.http.Status._
// Common marker for every JSON request/response wrapper in this file.
sealed trait JsonWrapper[T]

// Per-response metadata: an optional client-supplied context echo plus a response id.
final case class ResponseCtx(context: Option[String], id: Long)

// A wrapper written back to the client; carries the echoed context and response id.
sealed trait JsonResponseWrapper[T] extends JsonWrapper[T] {
  def context: Option[String]
  def responseId: Long
}
object JsonResponseWrapper {
  // Build a single-object response from one value, using the implicit request context.
  def apply[T](data: T)(implicit ctx: ResponseCtx, ew: EtagWriter[T]) = JsonSingleResponseWrapper(data, ctx.context, ctx.id)

  // Build an array response from a collection of values.
  def apply[T](items: Iterable[T])(implicit ctx: ResponseCtx, ew: EtagWriter[T]) =
    JsonArrayResponseWrapper(items, ctx.context, ctx.id)

  // Dispatch serialization to the concrete wrapper's Writes.
  // NOTE(review): the single/array matches are unchecked at runtime due to
  // type erasure; they are distinguished only by runtime class, with the
  // error case matched first.
  implicit def JsonResponseWrapperWrites[T](implicit wt: Writes[T]): Writes[JsonResponseWrapper[T]] = {
    case e: JsonErrorResponseWrapper => JsonErrorResponseWrapper.jsonErrorResponseWrapperWrites.writes(e)
    case r: JsonSingleResponseWrapper[T] => JsonSingleResponseWrapper.jsonSingleResponseWrapperWrites[T].writes(r)
    case r: JsonArrayResponseWrapper[T] => JsonArrayResponseWrapper.jsonArrayResponseWrapperWrites[T].writes(r)
  }
}
// A wrapper parsed from an incoming request body.
sealed trait JsonRequestWrapper[T] extends JsonWrapper[T]

object JsonRequestWrapper {
  // Try the single-object shape first, then fall back to the array shape.
  implicit def JsonRequestWrapperReads[T](implicit rt: Reads[T]): Reads[JsonRequestWrapper[T]] =
    JsonSingleRequestWrapper.jsonSingleRequestWrapperReads[T].map[JsonRequestWrapper[T]](x => x) orElse
      JsonArrayRequestWrapper.jsonArrayRequestWrapperReads[T].map[JsonRequestWrapper[T]](x => x)
}

// Wrapper carrying exactly one payload value.
sealed trait JsonSingleWrapper[T] extends JsonWrapper[T] {
  def data: T
}

// Wrapper carrying a collection of payload values.
sealed trait JsonArrayWrapper[T] extends JsonWrapper[T] {
  def items: Iterable[T]
}
// Response carrying one value; an etag for the value is emitted alongside it.
final case class JsonSingleResponseWrapper[T](data: T, context: Option[String], responseId: Long)
                                             (implicit val ew: EtagWriter[T])
  extends JsonResponseWrapper[T] with JsonSingleWrapper[T]

object JsonSingleResponseWrapper {
  // Serializes as { data: { ..., etag }, context?, responseId }; the etag is
  // computed from the data via the captured EtagWriter.
  def jsonSingleResponseWrapperWrites[T](implicit wt: Writes[T]): Writes[JsonSingleResponseWrapper[T]] = (
    (JsPath \ "data").write[T] and
      (JsPath \ "context").writeNullable[String] and
      (JsPath \ "responseId").write[Long] and
      (JsPath \ "data" \ "etag").write[String]
    ) (v => (v.data, v.context, v.responseId, v.ew.etag(v.data)))
}
// Response carrying many values; each item is emitted with its own etag.
final case class JsonArrayResponseWrapper[T](items: Iterable[T], context: Option[String],
                                             responseId: Long)(implicit val ew: EtagWriter[T])
  extends JsonResponseWrapper[T] with JsonArrayWrapper[T]

object JsonArrayResponseWrapper {
  // Internal pairing of an item with its computed etag, used only for serialization.
  private case class JsonDataWithEtagWrapper[T](t: T, etag: String)

  // Writes the item inline with an added "etag" field.
  implicit private def JsonDataWithEtagWrapperWrites[T](implicit wt: Writes[T]): Writes[JsonDataWithEtagWrapper[T]] = (
    JsPath.write[T] and
      (JsPath \ "etag").write[String]
    ) (unlift(JsonDataWithEtagWrapper.unapply[T]))

  // Serializes as { data: { items: [...] }, context?, responseId }.
  def jsonArrayResponseWrapperWrites[T](implicit wt: Writes[T]): Writes[JsonArrayResponseWrapper[T]] = (
    (JsPath \ "data" \ "items").write[Iterable[JsonDataWithEtagWrapper[T]]] and
      (JsPath \ "context").writeNullable[String] and
      (JsPath \ "responseId").write[Long]
    ) (v => (v.items.map(i => JsonDataWithEtagWrapper(i, v.ew.etag(i))), v.context, v.responseId))
}
// Incoming request carrying one payload value under "data".
final case class JsonSingleRequestWrapper[T](data: T)
  extends JsonRequestWrapper[T] with JsonSingleWrapper[T]

object JsonSingleRequestWrapper {
  // Reads { data: ... } into a single-value request wrapper.
  implicit def jsonSingleRequestWrapperReads[T](implicit rt: Reads[T]): Reads[JsonSingleRequestWrapper[T]] =
    (JsPath \ "data").read[T].map(JsonSingleRequestWrapper.apply[T])
}

// Incoming request carrying many payload values under "data.items".
final case class JsonArrayRequestWrapper[T](items: Iterable[T])
  extends JsonRequestWrapper[T] with JsonArrayWrapper[T]

object JsonArrayRequestWrapper {
  // Reads { data: { items: [...] } } into an array request wrapper.
  implicit def jsonArrayRequestWrapperReads[T](implicit rt: Reads[T]): Reads[JsonArrayRequestWrapper[T]] =
    (JsPath \ "data" \ "items").read[Iterable[T]].map(JsonArrayRequestWrapper.apply[T])
}
// Error response: a list of structured errors, an overall code and message,
// plus the usual context/responseId echo.
final case class JsonErrorResponseWrapper(errors: Iterable[JsonErrorWrapper], code: Long, message: String,
                                          context: Option[String], responseId: Long)
  extends JsonResponseWrapper[Nothing]

// A single structured error that knows how to render itself to JSON.
trait JsonErrorWrapper {
  def json: JsValue
}

object JsonErrorResponseWrapper {
  // Build an error response from explicit errors, pulling context/id from the implicit ctx.
  def apply(errors: Iterable[JsonErrorWrapper], code: Long, message: String)
           (implicit ctx: ResponseCtx): JsonErrorResponseWrapper =
    JsonErrorResponseWrapper(errors, code, message, ctx.context, ctx.id)

  // Convenience: single error.
  def apply(error: JsonErrorWrapper, code: Long, message: String)
           (implicit ctx: ResponseCtx): JsonErrorResponseWrapper = apply(Seq(error), code, message)

  // Convenience: no structured errors, just a code and message.
  def apply(code: Long, message: String)(implicit ctx: ResponseCtx): JsonErrorResponseWrapper =
    apply(Seq(), code, message)

  // Convert Play JSON validation failures into a 422 response, flattening
  // each (path, error) pair into a ParseErrorWrapper.
  def apply(errors: JsError)(implicit ctx: ResponseCtx): JsonErrorResponseWrapper = apply(
    errors.errors.flatMap(e => e._2.map(f => (e._1, f))).map(e => ParseErrorWrapper(e._1, e._2)),
    UNPROCESSABLE_ENTITY, "There were parse errors when processing the request")

  // Convenience: wrap an exception's message with the given code.
  def apply(code:Long,exception:Exception)(implicit ctx: ResponseCtx):JsonErrorResponseWrapper =
    apply(code,exception.getMessage)

  // One JSON-validation failure: the offending path plus Play's error detail.
  final case class ParseErrorWrapper(path: JsPath, error: JsonValidationError) extends JsonErrorWrapper {
    override def json: JsValue = w.writes(this)
    val w: Writes[ParseErrorWrapper] = (
      (JsPath \ "path").write[String] and
        (JsPath \ "message").write[String] and
        (JsPath \ "reason").write[String]
      ) (e => (e.path.toJsonString, e.error.message, "parse error"))
  }

  // Serializes as { error: { errors?, code, message }, context?, responseId };
  // the "errors" array is omitted entirely when empty.
  implicit def jsonErrorResponseWrapperWrites[T]: Writes[JsonErrorResponseWrapper] = (
    (JsPath \ "error" \ "errors").writeNullable[Iterable[JsValue]] and
      (JsPath \ "error" \ "code").write[Long] and
      (JsPath \ "error" \ "message").write[String] and
      (JsPath \ "context").writeNullable[String] and
      (JsPath \ "responseId").write[Long]
    ) { e =>
    val es = e.errors.map(_.json).toSeq
    (if (es.isEmpty) None else Some(es), e.code, e.message, e.context, e.responseId)
  }
}
| {
"content_hash": "d1f1007a6f9d522b99da3a587e5f846e",
"timestamp": "",
"source": "github",
"line_count": 151,
"max_line_length": 124,
"avg_line_length": 40.11258278145695,
"alnum_prop": 0.7193330031368664,
"repo_name": "robocubs4205/cubscout-server",
"id": "ea3e02aae9c4e77836e328c2aa21a6a54268f002",
"size": "6057",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "common/src/main/scala/com/robocubs4205/cubscout/JsonWrappers.scala",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "716"
},
{
"name": "JavaScript",
"bytes": "383"
},
{
"name": "Scala",
"bytes": "179905"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Builds on the initial klasses schema.
    dependencies = [("klasses", "0001_initial")]

    # Add a database-level uniqueness constraint to Klass.klass_id.
    operations = [
        migrations.AlterField(
            model_name="klass", name="klass_id", field=models.IntegerField(unique=True)
        )
    ]
| {
"content_hash": "1c28bc4889b6e60c3d9d720f2d33fc14",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 87,
"avg_line_length": 23.428571428571427,
"alnum_prop": 0.6585365853658537,
"repo_name": "mitodl/bootcamp-ecommerce",
"id": "6e2e2d48620e3d3169df29c898218c87a3698841",
"size": "401",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "klasses/migrations/0002_klass_id_unique.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "325"
},
{
"name": "Dockerfile",
"bytes": "998"
},
{
"name": "HTML",
"bytes": "70605"
},
{
"name": "JavaScript",
"bytes": "491664"
},
{
"name": "Procfile",
"bytes": "293"
},
{
"name": "Python",
"bytes": "1236492"
},
{
"name": "SCSS",
"bytes": "72463"
},
{
"name": "Shell",
"bytes": "7329"
}
],
"symlink_target": ""
} |
NavBar demonstrates using UINavigationController and UIViewController classes together as building blocks to your application's user interface. Use it as a reference when starting the development of your new application. The various pages in this sample exhibit different ways of how to modify the navigation bar directly, using the appearance proxy, and by modifying the view controller's UINavigationItem. Among the levels of customization are varying appearance styles, and applying custom left and right buttons known as UIBarButtonItems.
#### Custom Right View ####
This example demonstrates placing three kinds of UIBarButtonItems on the right side of the navigation bar: a button with a title, a button with an image, and a button with a UISegmentedControl. An additional segmented control allows the user to toggle between the three. The initial bar button is defined in the storyboard, by dragging a Bar Button Item out of the object library and into the navigation bar. CustomRightViewController.m also shows how to create and add each button type using code.
NOTE: At the time of writing, Xcode (6.1) does not allow you to add multiple bar button items to either side of a navigation bar in a storyboard. See the comments in CustomRightViewController.m for a description of how to workaround this.
#### Custom Title View ####
This example demonstrates adding a UISegmentedControl as the custom title view (center) of the navigation bar.
#### Navigation Prompt ####
This example demonstrates customizing the 'prompt' property of a UINavigationItem to display a custom line of text above the navigation bar.
#### Extended Navigation Bar ####
This example demonstrates placing a custom view underneath the navigation bar in such a manner that view appears to be part of the navigation bar itself. This technique may be used to create an interface similar to the iOS Calendar app.
#### Custom Appearance ####
This example demonstrates customizing the background of a navigation bar, applying a custom bar tint color or background image.
#### Custom Back Button ####
This example demonstrates using an image as the back button without any back button text and without the chevron that normally appears next to the back button.
#### Custom Navigation Bar ####
This example demonstrates using your own UINavigationBar subclass as the navigation bar of a UINavigationController.
## Using the sample ##
The sample launches to a list of examples, each focusing on a different aspect of customizing the navigation bar.
#### Bar Style ####
Click the "Style" button to the left of the main page to change the navigation bar's style or UIBarStyle. This will take you to an action sheet where you can change the background's appearance (default, black-opaque, or black-translucent).
NOTE: A navigation controller determines its preferredStatusBarStyle based upon the navigation bar style. This is why the status bar always appears correct after changing the bar style, without any extra code required.
REQUIREMENTS
--------------------------------------------------------------------------------
### BUILD ###
Xcode 6 or later
### RUNTIME ###
iOS 7.0 or later
PACKAGING LIST
--------------------------------------------------------------------------------
**AppDelegate**: The application delegate class.
**NavigationController**: A UINavigationController subclass that always defers queries about its preferred status bar style and supported interface orientations to its child view controllers.
**MainViewController**: The application's main (initial) view controller.
**CustomRightViewController**: Demonstrates configuring various types of controls as the right bar item of the navigation bar.
**CustomTitleViewController**: Demonstrates configuring the navigation bar to use a UIView as the title.
**NavigationPromptViewController**: Demonstrates displaying text above the navigation bar.
**ExtendedNavBarView**: A UIView subclass that draws a gray hairline along its bottom border, similar to a navigation bar. This view is used as the navigation bar extension view in the Extended Navigation Bar example.
**ExtendedNavBarViewController**: Demonstrates vertically extending the navigation bar.
**CustomAppearanceViewController**: Demonstrates applying a custom background to a navigation bar.
**CustomBackButtonNavController**: UINavigationController subclass used for targeting appearance proxy changes in the Custom Back Button example.
**CustomBackButtonDetailViewController**: The detail view controller in the Custom Back Button example.
**CustomBackButtonViewController**: Demonstrates using a custom back button image with no chevron and not text.
CHANGES FROM PREVIOUS VERSIONS:
--------------------------------------------------------------------------------
+ Version 6.0
- Updated for iOS 8, the iPhone 6, and the iPhone 6 Plus.
- Added a 'Custom Navigation Bar' example.
+ Version 1.12
- Updated for iOS 7.
- Expanded the number of examples.
+ Version 1.11
- Upgraded Xcode project for iOS 5.0.
- Removed all compiler warnings/errors.
+ Version 1.9
- Upgraded project to build with the iOS 4.0 SDK.
+ Version 1.8
- Upgraded for 3.0 SDK due to deprecated APIs.
- In "cellForRowAtIndexPath" it now uses UITableViewCell's initWithStyle.
- Now uses viewDidUnload.
+ Version 1.7
- Updated for and tested with iPhone OS 2.0.
- First public release.
+ Version 1.6
- Changed bundle identifier.
+ Version 1.5
- Beta 6 Release.
- Minor UI improvements.
+ Version 1.4
- Updated for Beta 5.
- changes to UITableViewDelegate.
- Upgraded to use xib files for each UIViewController.
+ Version 1.3
- Updated for Beta 4.
- Changed to use Interface Builder xib file.
+ Version 1.2
- Updated for Beta 3: reusable UITableView cells.
- Added new use of UIViewController "presentModalViewController".
+ Version 1.1
- Minor update to the latest SDK API changes.
+ Version 1.0
- First release.
================================================================================
Copyright (C) 2008-2015 Apple Inc. All rights reserved.
| {
"content_hash": "42e3f6829f37b2389de598ab3be69166",
"timestamp": "",
"source": "github",
"line_count": 136,
"max_line_length": 545,
"avg_line_length": 45.97794117647059,
"alnum_prop": 0.7246121861506477,
"repo_name": "hayasilin/iOS-snippets",
"id": "c7f8356214f08d7df3f6fbf304fbbe0e6953f14a",
"size": "6286",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Objc/Other projects/Apple offical sample code/CustomizingUINavigationBar/ReadMe.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2870"
},
{
"name": "Objective-C",
"bytes": "872522"
},
{
"name": "Ruby",
"bytes": "265"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Adds an optional thumbnail image field to the target model."""

    dependencies = [
        ('auvsi_suas', '0006_target_blank'),
    ]

    operations = [
        migrations.AddField(
            model_name='target',
            name='thumbnail',
            # b'targets' keeps the byte-string upload path used elsewhere
            # in this (Python 2 era) codebase.
            field=models.ImageField(upload_to=b'targets', blank=True),
        ),
    ]
| {
"content_hash": "6f1de77868b3db5ed3c18d2c94b6b136",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 73,
"avg_line_length": 29.733333333333334,
"alnum_prop": 0.5291479820627802,
"repo_name": "justineaster/interop",
"id": "70a52b45e89c2177d4bd99364bcab05c2d2cdf18",
"size": "470",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/auvsi_suas/migrations/0007_target_thumbnail.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1299"
},
{
"name": "HTML",
"bytes": "10541"
},
{
"name": "JavaScript",
"bytes": "871592"
},
{
"name": "Puppet",
"bytes": "12490"
},
{
"name": "Python",
"bytes": "472511"
},
{
"name": "Ruby",
"bytes": "1207"
},
{
"name": "Shell",
"bytes": "4173"
},
{
"name": "TeX",
"bytes": "3748"
}
],
"symlink_target": ""
} |
<!-- Generated by pkgdown: do not edit by hand -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>The Log class. — Log-class • CVXR</title>
<!-- jquery -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.3.1/jquery.min.js" integrity="sha256-FgpCb/KJQlLNfOu91ta32o/NMZxltwRo8QtmkMRdAu8=" crossorigin="anonymous"></script>
<!-- Bootstrap -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.3.7/css/bootstrap.min.css" integrity="sha256-916EbMg70RQy9LHiGkXzG8hSg9EdNy97GazNG/aiY1w=" crossorigin="anonymous" />
<script src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.3.7/js/bootstrap.min.js" integrity="sha256-U5ZEeKfGNOja007MMD3YBI0A3OSZOQbeG6z2f2Y0hu8=" crossorigin="anonymous"></script>
<!-- Font Awesome icons -->
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.min.css" integrity="sha256-eZrrJcwDc/3uDhsdt61sL2oOBY362qM3lon1gyExkL0=" crossorigin="anonymous" />
<!-- clipboard.js -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/clipboard.js/2.0.4/clipboard.min.js" integrity="sha256-FiZwavyI2V6+EXO1U+xzLG3IKldpiTFf3153ea9zikQ=" crossorigin="anonymous"></script>
<!-- sticky kit -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/sticky-kit/1.1.3/sticky-kit.min.js" integrity="sha256-c4Rlo1ZozqTPE2RLuvbusY3+SU1pQaJC0TjuhygMipw=" crossorigin="anonymous"></script>
<!-- pkgdown -->
<link href="../pkgdown.css" rel="stylesheet">
<script src="../pkgdown.js"></script>
<meta property="og:title" content="The Log class. — Log-class" />
<meta property="og:description" content="This class represents the elementwise natural logarithm \(\log(x)\)." />
<meta name="twitter:card" content="summary" />
<!-- mathjax -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/MathJax.js" integrity="sha256-nvJJv9wWKEm88qvoQl9ekL2J+k/RWIsaSScxxlsrv8k=" crossorigin="anonymous"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/config/TeX-AMS-MML_HTMLorMML.js" integrity="sha256-84DKXVJXs0/F8OTMzX4UR909+jtl4G7SPypPavF+GfA=" crossorigin="anonymous"></script>
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container template-reference-topic">
<header>
<div class="navbar navbar-default navbar-fixed-top" role="navigation">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<span class="navbar-brand">
<a class="navbar-link" href="../index.html">CVXR</a>
<span class="version label label-default" data-toggle="tooltip" data-placement="bottom" title="Released version">0.99.4</span>
</span>
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li>
<a href="../index.html">
<span class="fa fa-home fa-lg"></span>
</a>
</li>
<li>
<a href="../reference/index.html">Reference</a>
</li>
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-expanded="false">
Articles
<span class="caret"></span>
</a>
<ul class="dropdown-menu" role="menu">
<li>
<a href="../articles/cvxr_intro.html">Disciplined Convex Optimization in R</a>
</li>
</ul>
</li>
<li>
<a href="../news/index.html">Changelog</a>
</li>
</ul>
<ul class="nav navbar-nav navbar-right">
</ul>
</div><!--/.nav-collapse -->
</div><!--/.container -->
</div><!--/.navbar -->
</header>
<div class="row">
<div class="col-md-9 contents">
<div class="page-header">
<h1>The Log class.</h1>
<div class="hidden name"><code>Log-class.Rd</code></div>
</div>
<div class="ref-description">
<p>This class represents the elementwise natural logarithm \(\log(x)\).</p>
</div>
<pre class="usage"><span class='fu'>Log</span>(<span class='no'>x</span>)
<span class='co'># S4 method for Log</span>
<span class='fu'><a href='to_numeric.html'>to_numeric</a></span>(<span class='no'>object</span>, <span class='no'>values</span>)
<span class='co'># S4 method for Log</span>
<span class='fu'><a href='sign_from_args.html'>sign_from_args</a></span>(<span class='no'>object</span>)
<span class='co'># S4 method for Log</span>
<span class='fu'><a href='curvature-atom.html'>is_atom_convex</a></span>(<span class='no'>object</span>)
<span class='co'># S4 method for Log</span>
<span class='fu'><a href='curvature-atom.html'>is_atom_concave</a></span>(<span class='no'>object</span>)
<span class='co'># S4 method for Log</span>
<span class='fu'><a href='curvature-comp.html'>is_incr</a></span>(<span class='no'>object</span>, <span class='no'>idx</span>)
<span class='co'># S4 method for Log</span>
<span class='fu'><a href='curvature-comp.html'>is_decr</a></span>(<span class='no'>object</span>, <span class='no'>idx</span>)
<span class='co'># S4 method for Log</span>
<span class='fu'><a href='graph_implementation.html'>graph_implementation</a></span>(<span class='no'>object</span>, <span class='no'>arg_objs</span>, <span class='no'>size</span>,
<span class='kw'>data</span> <span class='kw'>=</span> <span class='fl'>NA_real_</span>)</pre>
<h2 class="hasAnchor" id="arguments"><a class="anchor" href="#arguments"></a>Arguments</h2>
<table class="ref-arguments">
<colgroup><col class="name" /><col class="desc" /></colgroup>
<tr>
<th>x</th>
<td><p>An <a href='Expression-class.html'>Expression</a> or numeric constant.</p></td>
</tr>
<tr>
<th>object</th>
<td><p>A Log object.</p></td>
</tr>
<tr>
<th>values</th>
<td><p>A list of arguments to the atom.</p></td>
</tr>
<tr>
<th>idx</th>
<td><p>An index into the atom.</p></td>
</tr>
<tr>
<th>arg_objs</th>
<td><p>A list of linear expressions for each argument.</p></td>
</tr>
<tr>
<th>size</th>
<td><p>A vector with two elements representing the size of the resulting expression.</p></td>
</tr>
<tr>
<th>data</th>
<td><p>A list of additional data required by the atom.</p></td>
</tr>
</table>
<h2 class="hasAnchor" id="methods-by-generic-"><a class="anchor" href="#methods-by-generic-"></a>Methods (by generic)</h2>
<ul>
<li><p><code>to_numeric</code>: The elementwise natural logarithm of the input value.</p></li>
<li><p><code>sign_from_args</code>: The sign of the atom is unknown.</p></li>
<li><p><code>is_atom_convex</code>: The atom is not convex.</p></li>
<li><p><code>is_atom_concave</code>: The atom is concave.</p></li>
<li><p><code>is_incr</code>: The atom is weakly increasing.</p></li>
<li><p><code>is_decr</code>: The atom is not weakly decreasing.</p></li>
<li><p><code>graph_implementation</code>: The graph implementation of the atom.</p></li>
</ul>
<h2 class="hasAnchor" id="slots"><a class="anchor" href="#slots"></a>Slots</h2>
<dl class='dl-horizontal'>
<dt><code>x</code></dt><dd><p>An <a href='Expression-class.html'>Expression</a> or numeric constant.</p></dd>
</dl>
</div>
<div class="col-md-3 hidden-xs hidden-sm" id="sidebar">
<h2>Contents</h2>
<ul class="nav nav-pills nav-stacked">
<li><a href="#arguments">Arguments</a></li>
<li><a href="#methods-by-generic-">Methods (by generic)</a></li>
<li><a href="#slots">Slots</a></li>
</ul>
</div>
</div>
<footer>
<div class="copyright">
<p>Developed by <a href='https://web.stanford.edu/~anqif/'>Anqi Fu</a>, <a href='https://statistics.stanford.edu/people/balasubramanian-narasimhan'>Balasubramanian Narasimhan</a>, <a href='https://web.stanford.edu/~stevend2/'>Steven Diamond</a>, <a href='https://people.eecs.berkeley.edu/~miller_john/'>John Miller</a>.</p>
</div>
<div class="pkgdown">
<p>Site built with <a href="https://pkgdown.r-lib.org/">pkgdown</a> 1.3.0.</p>
</div>
</footer>
</div>
</body>
</html>
| {
"content_hash": "49b27169a24e42f17ecaa9d847321120",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 325,
"avg_line_length": 37.48908296943232,
"alnum_prop": 0.6403028538147932,
"repo_name": "anqif/cvxr",
"id": "3df5129276ea7c811128a9873b731cb59ca1d907",
"size": "8591",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/reference/Log-class.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "87390"
},
{
"name": "R",
"bytes": "721435"
},
{
"name": "Shell",
"bytes": "36"
}
],
"symlink_target": ""
} |
This sample application shows the [basic workflow](https://voucherify.readme.io/docs/voucher-lifecycle) of Voucherify and presents the supported [voucher types](https://voucherify.readme.io/docs/vouchers). You can try out the [Live Version](https://voucherify-sample-nodejs.herokuapp.com/) or deploy an instance bound to your [account](https://app.voucherify.io/#/signup?plan=standard) through Heroku button.
Implemented with our [Node.js SDK](https://voucherify.readme.io/docs/nodejs-tutorial) and [Voucherify.js](https://voucherify.readme.io/docs/voucherifyjs) client library.
---
[Voucherify](http://voucherify.io?utm_source=github&utm_medium=demo&utm_campaign=acq) is an API-first platform for software developers who are dissatisfied with high-maintenance custom coupon software. Our product is a coupon infrastructure through API that provides a quicker way to build coupon generation, distribution and tracking. Unlike legacy coupon software we have:
* an API-first SaaS platform that enables customisation of every aspect of coupon campaigns
* a management console that helps cut down maintenance and reporting overhead
* an infrastructure to scale up coupon activity in no time

## Setup
It is really simple to set up this app. All you need to do is follow the steps listed below:
1. You need a set of *Application Keys* and *Client-side Keys* to connect with the **Voucherify Platform**. Visit the Voucherify app to get them.
2. After signing up you also need to add your domain to Voucherify's whitelist.
When you go to configuration view of Voucherify account, "Your website URL" is used for allowing client requests only from given domain. You have to put there your website url or set * if you want to enable requests from any origin.

3. Press this button to create a Heroku app
[](https://heroku.com/deploy?template=https://github.com/voucherifyio/voucherify-nodejs-example)
Wait until the Deploy Window is open.
4. After opening the Deploy Window, please go to the [**Configuration**](https://app.voucherify.io/#/app/configuration) page.
Copy App Keys from the Configuration page and paste these keys into proper input fields in the Deploy Window.

5. In the Deploy Window, after filling in all required inputs, click the Deploy Button located at the end of the page. Wait until the deploy process is finished.

6. After the process finishes you can go to the Manage Panel or visit the Voucherify Example page.

## Commands
* `$ npm run start` - runs the application
## Help
* Found a bug? Have a suggestion for improvement? Want to tell us we're awesome? [**Submit an issue**](https://github.com/voucherifyio/voucherify-nodejs-example/issues/new)
* Trouble with your integration? Contact [**Voucherify Support**](https://voucherify.readme.io/docs/support) / [**support@voucherify.io**](mailto:support@voucherify.io)
* Want to contribute? [**Submit a pull request**](https://github.com/voucherifyio/voucherify-nodejs-example/compare)
## Disclaimer
This code is provided as is and is only intended to be used for illustration purposes. This code is not production-ready and is not meant to be used in a production environment. This repository is to be used as a tool to help developers learn how to integrate with Voucherify. Any use of this repository or any of its code in a production environment is highly discouraged.
| {
"content_hash": "6f56ab90ba97ce1674498e19eb8edab8",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 408,
"avg_line_length": 61.08620689655172,
"alnum_prop": 0.7713801862828111,
"repo_name": "voucherifyio/voucherify-nodejs-example",
"id": "af4f7ebc6e46b3af64ff8d4b2b7e927cf3969530",
"size": "3603",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6844"
},
{
"name": "HTML",
"bytes": "5636"
},
{
"name": "JavaScript",
"bytes": "13993"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_29) on Mon Nov 26 17:21:40 MSK 2012 -->
<TITLE>
org.apache.poi.xssf.usermodel.helpers (POI API Documentation)
</TITLE>
<META NAME="date" CONTENT="2012-11-26">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
</HEAD>
<BODY BGCOLOR="white">
<FONT size="+1" CLASS="FrameTitleFont">
<A HREF="../../../../../../org/apache/poi/xssf/usermodel/helpers/package-summary.html" target="classFrame">org.apache.poi.xssf.usermodel.helpers</A></FONT>
<TABLE BORDER="0" WIDTH="100%" SUMMARY="">
<TR>
<TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont">
Classes</FONT>
<FONT CLASS="FrameItemFont">
<BR>
<A HREF="ColumnHelper.html" title="class in org.apache.poi.xssf.usermodel.helpers" target="classFrame">ColumnHelper</A>
<BR>
<A HREF="HeaderFooterHelper.html" title="class in org.apache.poi.xssf.usermodel.helpers" target="classFrame">HeaderFooterHelper</A>
<BR>
<A HREF="XSSFFormulaUtils.html" title="class in org.apache.poi.xssf.usermodel.helpers" target="classFrame">XSSFFormulaUtils</A>
<BR>
<A HREF="XSSFRowShifter.html" title="class in org.apache.poi.xssf.usermodel.helpers" target="classFrame">XSSFRowShifter</A>
<BR>
<A HREF="XSSFSingleXmlCell.html" title="class in org.apache.poi.xssf.usermodel.helpers" target="classFrame">XSSFSingleXmlCell</A>
<BR>
<A HREF="XSSFXmlColumnPr.html" title="class in org.apache.poi.xssf.usermodel.helpers" target="classFrame">XSSFXmlColumnPr</A></FONT></TD>
</TR>
</TABLE>
</BODY>
</HTML>
| {
"content_hash": "a7e83e2b877ee7aa9a5531d4eb0574eb",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 155,
"avg_line_length": 40.11904761904762,
"alnum_prop": 0.6985163204747774,
"repo_name": "HRKN2245/CameraSunmoku",
"id": "ddaa84ea0b74b65fcf038027e3f075404e10d6f0",
"size": "1685",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ExtractSample/poi-3.9/docs/apidocs/org/apache/poi/xssf/usermodel/helpers/package-frame.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16091"
},
{
"name": "Java",
"bytes": "126912"
}
],
"symlink_target": ""
} |
class CommentCache(object):
    """Accumulates comments until they are consumed in a single batch."""

    def __init__(self):
        # Comments collected since the last consume_with() call.
        self._comments = []

    def add(self, comment):
        """Appends one comment to the cache."""
        self._comments.append(comment)

    def consume_with(self, function):
        """Applies `function` to every cached comment, then empties the cache.

        Uses an explicit loop instead of the original map() call: map() is
        a lazy iterator under Python 3, so the side-effecting callbacks
        would never actually run.
        """
        for comment in self._comments:
            function(comment)
        self._comments = []
class Comments(object):
    """Collects the non-blank, stripped comment strings from parsed rows."""

    def __init__(self):
        self._comments = []

    def add(self, row):
        """Harvests comments from `row`, keeping only non-blank entries."""
        if not row.comments:
            return
        for raw in row.comments:
            stripped = raw.strip()
            if stripped:
                self._comments.append(stripped)

    @property
    def value(self):
        """All comments gathered so far, in insertion order."""
        return self._comments
class Comment(object):
    """Wraps the raw comment data of a single row.

    Accepts either a single string or a list of strings. Note: the
    `basestring` check means this module targets Python 2.
    """

    def __init__(self, comment_data):
        # A lone string is normalized to a one-element list; any falsy
        # value (None, '', []) becomes an empty list.
        if isinstance(comment_data, basestring):
            comment_data = [comment_data] if comment_data else []
        self._comment = comment_data or []

    def __len__(self):
        # Number of comment fragments stored.
        return len(self._comment)

    def as_list(self):
        """Returns the fragments, ensuring the first starts with '# '.

        NOTE: mutates the stored list in place the first time it is
        called on uncommented data; later calls see the added marker
        and leave the list unchanged.
        """
        if self._not_commented():
            self._comment[0] = '# ' + self._comment[0]
        return self._comment

    def _not_commented(self):
        # True when there is a non-empty first fragment that does not
        # already begin with the comment marker.
        return self._comment and self._comment[0] and self._comment[0][0] != '#'
| {
"content_hash": "6e68f73199e43888026831b3038f2df9",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 80,
"avg_line_length": 24.068181818181817,
"alnum_prop": 0.5712936732766761,
"repo_name": "yamateh/robotframework",
"id": "dab42b1ce35c39a73d253687a633eda112e1af21",
"size": "1666",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/robot/parsing/comments.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
"""Operations for feeding input data using TensorFlow placeholders."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
def prepare_feed_dict(model, features, labels=None, is_training=None):
  """Prepares a feed_dict for sess.run() given a batch of features and labels.

  Args:
    model: An instance of AstroModel.
    features: Dictionary containing "time_series_features" and "aux_features".
      Each is a dictionary of named numpy arrays of shape [batch_size, length].
    labels: (Optional). Numpy array of shape [batch_size].
    is_training: (Optional). Python boolean to feed to the model.is_training
      Tensor (if None, no value is fed).

  Returns:
    feed_dict: A dictionary of input Tensor to numpy array.
  """
  feed_dict = {}

  # Map each model placeholder to the matching numpy array in `features`,
  # handling both feature groups with one loop.
  groups = [("time_series_features", model.time_series_features),
            ("aux_features", model.aux_features)]
  for group_name, placeholders in groups:
    for name, tensor in placeholders.items():
      feed_dict[tensor] = features[group_name][name]

  if labels is not None:
    feed_dict[model.labels] = labels

  if is_training is not None:
    feed_dict[model.is_training] = is_training

  return feed_dict
def build_feature_placeholders(config):
  """Builds tf.Placeholder ops for feeding model features and labels.

  Args:
    config: ConfigDict containing the feature configurations.

  Returns:
    features: A dictionary containing "time_series_features" and "aux_features",
      each of which is a dictionary of tf.Placeholders of features from the
      input configuration. All features have dtype float32 and shape
      [batch_size, length].
  """
  features = {"time_series_features": {}, "aux_features": {}}
  for feature_name, feature_spec in config.items():
    # A leading None dimension lets the batch size be chosen at feed time.
    placeholder = tf.placeholder(
        dtype=tf.float32,
        shape=[None, feature_spec.length],
        name=feature_name)
    group = ("time_series_features"
             if feature_spec.is_time_series else "aux_features")
    features[group][feature_name] = placeholder
  return features
def build_labels_placeholder():
  """Builds a tf.Placeholder op for feeding model labels.

  Returns:
    labels: An int64 tf.Placeholder with shape [batch_size].
  """
  # The None dimension lets the batch size be specified dynamically.
  return tf.placeholder(dtype=tf.int64, shape=[None], name="labels")
| {
"content_hash": "007f0513287715f2befce58cae30b530",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 80,
"avg_line_length": 33.972972972972975,
"alnum_prop": 0.7064439140811456,
"repo_name": "google-research/exoplanet-ml",
"id": "464e67bd07de88d6889a128528f965f46c5745c3",
"size": "3104",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "exoplanet-ml/astronet/ops/input_ops.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "139752"
},
{
"name": "Python",
"bytes": "570659"
},
{
"name": "Starlark",
"bytes": "21880"
}
],
"symlink_target": ""
} |
layout: post
title: rsnapshot on Raspberry Pi
tags:
- raspberrypi
- pickup.local
---
Let's backup the synology backup !
# Prerequisites
- [Backup a Synology NAS to a raspberry pi]({% post_url 2014-12-22-backup-synology-on-raspberrypi %})
# Setup new raspberry
First:
- [Headless Raspberry Pi setup]({% post_url 2013-09-17-headless_raspberrypi_setup %})
- [Send email with gmail on raspberry pi]({% post_url 2015-08-26-send-email-with-gmail-on-raspberry-pi %})
Then add a user for `synology`:
{% highlight bash %}
$ sudo useradd synology -m -G users
$ sudo passwd synology
{% endhighlight %}
Mount USB drive on `/mnt/usbdrive`:
{% highlight bash %}
$ sudo mkdir /mnt/usbdrive
$ sudo fdisk -l
$ sudo emacs /etc/fstab
{% endhighlight %}
```
/dev/sdb1 /mnt/usbdrive ext4 defaults,user 0 0
```
{% highlight bash %}
$ sudo mount -a
{% endhighlight %}
Create backup directory:
{% highlight bash %}
$ sudo mkdir /mnt/usbdrive/backup
$ sudo chmod 700 /mnt/usbdrive/backup
{% endhighlight %}
# Setup rsnapshot
Install:
{% highlight bash %}
$ sudo apt-get install rsnapshot
{% endhighlight %}
Configure:
{% highlight bash %}
$ sudo cp /etc/rsnapshot.conf /etc/rsnapshot.conf.BAK
$ sudo emacs /etc/rsnapshot.conf
{% endhighlight %}
```
snapshot_root /mnt/usbdrive/backup/
no_create_root 1
cmd_ssh /usr/bin/ssh
cmd_du /usr/bin/du
cmd_rsnapshot_diff /usr/bin/rsnapshot-diff
#retain hourly 6
#retain daily 7
retain weekly 4
retain monthly 2
verbose 3
logfile /var/log/rsnapshot.log
ssh_args -i /root/.ssh/rsnapshot_dsa
rsync_long_args --delete --numeric-ids --delete-excluded --stats
exclude /@app
exclude /incoming
exclude /installs
exclude /video
exclude @eaDir
#backup /home/ localhost/
#backup /etc/ localhost/
#backup /usr/local/ localhost/
backup synology@pickup.local:/mnt/usbdrive/chezak_backup/ chezak/
#backup synology@<REMOTE_SERVER>:/mnt/usbdrive/chezak_backup/ chezak/
```
Meaning:
- keep the 4 last "weekly" backups
- keep the 2 last "monthly" backups
# Setup reporting
{% highlight bash %}
$ sudo cp /usr/share/doc/rsnapshot/examples/utils/rsnapreport.pl.gz /usr/local/bin
$ sudo gunzip /usr/local/bin/rsnapreport.pl.gz
$ sudo chmod a+x /usr/local/bin/rsnapreport.pl
{% endhighlight %}
# Setup CRON jobs
{% highlight bash %}
$ sudo emacs /etc/cron.d/rsnapshot
{% endhighlight %}
```
# 0 */4 * * * root /usr/bin/rsnapshot hourly
# 30 3 * * * root /usr/bin/rsnapshot daily
30 1 * * 1 root /usr/bin/rsnapshot weekly 2>&1 | /usr/local/bin/rsnapreport.pl | mail -s "[pipote] rsnapshot weekly" MY@EMAIL
35 3 1 * * root /usr/bin/rsnapshot monthly 2>&1 | /usr/local/bin/rsnapreport.pl | mail -s "[pipote] rsnapshot monthly" MY@EMAIL
```
Meaning:
- launch "weekly" backup every monday at 1:30 AM
- launch "monthly" backup every first day of the month at 3:35
# Test config
{% highlight bash %}
$ sudo rsnapshot configtest
{% endhighlight %}
# Setup ssh keys
{% highlight bash %}
$ sudo ssh-keygen -N "" -f /root/.ssh/rsnapshot_dsa
$ sudo ssh-copy-id -i /root/.ssh/rsnapshot_dsa.pub synology@pickup.local
{% endhighlight %}
Test with:
{% highlight bash %}
$ sudo ssh synology@pickup.local -i /root/.ssh/rsnapshot_dsa
{% endhighlight %}
# Manually sync
First, to speed up first sync, plug the hard drive on `pickup.local` then:
{% highlight bash %}
$ sudo mkdir /mnt/usbdrive2
$ sudo fdisk -l
$ sudo emacs /etc/fstab
{% endhighlight %}
```
/dev/sdc1 /mnt/usbdrive2 ext4 defaults,user 0 0
```
{% highlight bash %}
$ sudo mount -a
{% endhighlight %}
Launch sync:
{% highlight bash %}
$ sudo mkdir -m 0755 -p /mnt/usbdrive2/backup/weekly.0/
$ screen
$ sudo /usr/bin/rsync -av --delete --numeric-ids --delete-excluded --no-relative /mnt/usbdrive/chezak_backup/ /mnt/usbdrive2/backup/weekly.0/chezak/
{% endhighlight %}
Then, when sync is finished, remove `usbdrive2` mount point from `/etc/fstab` file.
{% highlight bash %}
$ sudo umount /mnt/usbdrive2
{% endhighlight %}
Plug back the hard drive on new raspberry.
# Trigger first backup
{% highlight bash %}
$ sudo rsnapshot -t weekly
{% endhighlight %}
Then trigger rsnapshot backup:
{% highlight bash %}
$ sudo apt-get install screen
$ screen
$ sudo rsnapshot -v weekly
{% endhighlight %}
Check disk usage:
{% highlight bash %}
$ sudo rsnapshot du
{% endhighlight %}
| {
"content_hash": "c5fc8015eb504635a78457d31babfb51",
"timestamp": "",
"source": "github",
"line_count": 213,
"max_line_length": 148,
"avg_line_length": 20.671361502347416,
"alnum_prop": 0.6856688621394503,
"repo_name": "aymerick/aymerick.github.io",
"id": "8dbc4106afa842c46ba4b0648210bc66d49aab8e",
"size": "4407",
"binary": false,
"copies": "1",
"ref": "refs/heads/source",
"path": "_drafts/rsnapshot-on-raspberry-pi.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "17233"
},
{
"name": "CoffeeScript",
"bytes": "2275"
},
{
"name": "HTML",
"bytes": "35817"
},
{
"name": "Ruby",
"bytes": "6203"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="ascii"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>eicpy.encrypt</title>
<link rel="stylesheet" href="epydoc.css" type="text/css" />
<script type="text/javascript" src="epydoc.js"></script>
</head>
<body bgcolor="white" text="black" link="blue" vlink="#204080"
alink="#204080">
<!-- ==================== NAVIGATION BAR ==================== -->
<table class="navbar" border="0" width="100%" cellpadding="0"
bgcolor="#a0c0ff" cellspacing="0">
<tr valign="middle">
<!-- Tree link -->
<th> <a
href="module-tree.html">Trees</a> </th>
<!-- Index link -->
<th> <a
href="identifier-index.html">Indices</a> </th>
<!-- Help link -->
<th> <a
href="help.html">Help</a> </th>
<!-- Project homepage -->
<th class="navbar" align="right" width="100%">
<table border="0" cellpadding="0" cellspacing="0">
<tr><th class="navbar" align="center"
><a class="navbar" target="_top" href="file:///G:/Web%20Development/epydoc/">EIC Site-packages</a></th>
</tr></table></th>
</tr>
</table>
<table width="100%" cellpadding="0" cellspacing="0">
<tr valign="top">
<td width="100%">
<span class="breadcrumbs">
<a href="eicpy-module.html">Package eicpy</a> ::
Module encrypt
</span>
</td>
<td>
<table cellpadding="0" cellspacing="0">
<!-- hide/show private -->
<tr><td align="right"><span class="options">[<a href="javascript:void(0);" class="privatelink"
onclick="toggle_private();">hide private</a>]</span></td></tr>
<tr><td align="right"><span class="options"
>[<a href="frames.html" target="_top">frames</a
>] | <a href="eicpy.encrypt-module.html"
target="_top">no frames</a>]</span></td></tr>
</table>
</td>
</tr>
</table>
<!-- ==================== MODULE DESCRIPTION ==================== -->
<h1 class="epydoc">Module encrypt</h1><p class="nomargin-top"><span class="codelink"><a href="eicpy.encrypt-pysrc.html">source code</a></span></p>
<p>A replacement for SUBR.ENCRYPT; reverse-engineering ENCRYPT:</p>
<p>READY >LET A$=$00$ >CALL "ENCRYPT",A$,1 >? HTA(A$)
28 >CALL "ENCRYPT",A$,-1 >? HTA(A$) 00 >LET
A$=$00010203$ >CALL "ENCRYPT",A$,1 >? HTA(A$) 28292A2B
>CALL "ENCRYPT",A$,-1 >? HTA(A$) 00010203</p>
<!-- ==================== FUNCTIONS ==================== -->
<a name="section-Functions"></a>
<table class="summary" border="1" cellpadding="3"
cellspacing="0" width="100%" bgcolor="white">
<tr bgcolor="#70b0f0" class="table-header">
<td colspan="2" class="table-header">
<table border="0" cellpadding="0" cellspacing="0" width="100%">
<tr valign="top">
<td align="left"><span class="table-header">Functions</span></td>
<td align="right" valign="top"
><span class="options">[<a href="#section-Functions"
class="privatelink" onclick="toggle_private();"
>hide private</a>]</span></td>
</tr>
</table>
</td>
</tr>
<tr>
<td width="15%" align="right" valign="top" class="summary">
<span class="summary-type"> </span>
</td><td class="summary">
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td><span class="summary-sig"><a href="eicpy.encrypt-module.html#encrypt" class="summary-sig-name">encrypt</a>(<span class="summary-sig-arg">data</span>)</span><br />
"Encrypt" the data as the ENCRYPT BBx subroutine does</td>
<td align="right" valign="top">
<span class="codelink"><a href="eicpy.encrypt-pysrc.html#encrypt">source code</a></span>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td width="15%" align="right" valign="top" class="summary">
<span class="summary-type"> </span>
</td><td class="summary">
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr>
<td><span class="summary-sig"><a href="eicpy.encrypt-module.html#decrypt" class="summary-sig-name">decrypt</a>(<span class="summary-sig-arg">data</span>)</span><br />
Decode the data as the ENCRYPT BBx subroutine does</td>
<td align="right" valign="top">
<span class="codelink"><a href="eicpy.encrypt-pysrc.html#decrypt">source code</a></span>
</td>
</tr>
</table>
</td>
</tr>
</table>
<!-- ==================== VARIABLES ==================== -->
<a name="section-Variables"></a>
<table class="summary" border="1" cellpadding="3"
cellspacing="0" width="100%" bgcolor="white">
<tr bgcolor="#70b0f0" class="table-header">
<td colspan="2" class="table-header">
<table border="0" cellpadding="0" cellspacing="0" width="100%">
<tr valign="top">
<td align="left"><span class="table-header">Variables</span></td>
<td align="right" valign="top"
><span class="options">[<a href="#section-Variables"
class="privatelink" onclick="toggle_private();"
>hide private</a>]</span></td>
</tr>
</table>
</td>
</tr>
<tr>
<td width="15%" align="right" valign="top" class="summary">
<span class="summary-type"> </span>
</td><td class="summary">
<a name="__package__"></a><span class="summary-name">__package__</span> = <code title="None">None</code><br />
hash(x)
</td>
</tr>
</table>
<!-- ==================== FUNCTION DETAILS ==================== -->
<a name="section-FunctionDetails"></a>
<table class="details" border="1" cellpadding="3"
cellspacing="0" width="100%" bgcolor="white">
<tr bgcolor="#70b0f0" class="table-header">
<td colspan="2" class="table-header">
<table border="0" cellpadding="0" cellspacing="0" width="100%">
<tr valign="top">
<td align="left"><span class="table-header">Function Details</span></td>
<td align="right" valign="top"
><span class="options">[<a href="#section-FunctionDetails"
class="privatelink" onclick="toggle_private();"
>hide private</a>]</span></td>
</tr>
</table>
</td>
</tr>
</table>
<a name="encrypt"></a>
<div>
<table class="details" border="1" cellpadding="3"
cellspacing="0" width="100%" bgcolor="white">
<tr><td>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr valign="top"><td>
<h3 class="epydoc"><span class="sig"><span class="sig-name">encrypt</span>(<span class="sig-arg">data</span>)</span>
</h3>
</td><td align="right" valign="top"
><span class="codelink"><a href="eicpy.encrypt-pysrc.html#encrypt">source code</a></span>
</td>
</tr></table>
<p>"Encrypt" the data as the ENCRYPT BBx subroutine does</p>
<pre class="py-doctest">
<span class="py-prompt">>>> </span>data = <span class="py-string">"\x00"</span>
<span class="py-prompt">>>> </span>hex(ord(encrypt(data)))
<span class="py-output">'0x28'</span>
<span class="py-output"></span><span class="py-prompt">>>> </span>data = <span class="py-string">"\x00\x01\x02\x03"</span>
<span class="py-prompt">>>> </span>encrypt(data)
<span class="py-output">'()*+'</span></pre>
<dl class="fields">
</dl>
</td></tr></table>
</div>
<a name="decrypt"></a>
<div>
<table class="details" border="1" cellpadding="3"
cellspacing="0" width="100%" bgcolor="white">
<tr><td>
<table width="100%" cellpadding="0" cellspacing="0" border="0">
<tr valign="top"><td>
<h3 class="epydoc"><span class="sig"><span class="sig-name">decrypt</span>(<span class="sig-arg">data</span>)</span>
</h3>
</td><td align="right" valign="top"
><span class="codelink"><a href="eicpy.encrypt-pysrc.html#decrypt">source code</a></span>
</td>
</tr></table>
<p>Decode the data as the ENCRYPT BBx subroutine does</p>
<pre class="py-doctest">
<span class="py-prompt">>>> </span>data = <span class="py-string">"\x28"</span>
<span class="py-prompt">>>> </span>hex(ord(decrypt(data)))
<span class="py-output">'0x0'</span>
<span class="py-output"></span><span class="py-prompt">>>> </span>data = <span class="py-string">"\x28\x29\x2a\x2b"</span>
<span class="py-prompt">>>> </span>decrypt(data)
<span class="py-output">'\x00\x01\x02\x03'</span></pre>
<dl class="fields">
</dl>
</td></tr></table>
</div>
<br />
<!-- ==================== NAVIGATION BAR ==================== -->
<table class="navbar" border="0" width="100%" cellpadding="0"
bgcolor="#a0c0ff" cellspacing="0">
<tr valign="middle">
<!-- Tree link -->
<th> <a
href="module-tree.html">Trees</a> </th>
<!-- Index link -->
<th> <a
href="identifier-index.html">Indices</a> </th>
<!-- Help link -->
<th> <a
href="help.html">Help</a> </th>
<!-- Project homepage -->
<th class="navbar" align="right" width="100%">
<table border="0" cellpadding="0" cellspacing="0">
<tr><th class="navbar" align="center"
><a class="navbar" target="_top" href="file:///G:/Web%20Development/epydoc/">EIC Site-packages</a></th>
</tr></table></th>
</tr>
</table>
<table border="0" cellpadding="0" cellspacing="0" width="100%%">
<tr>
<td align="left" class="footer">
<a href="epydoc-log.html">Generated by Epydoc
3.0.1 on Mon Oct 8 11:15:35 2012</a>
</td>
<td align="right" class="footer">
<a target="mainFrame" href="http://epydoc.sourceforge.net"
>http://epydoc.sourceforge.net</a>
</td>
</tr>
</table>
<script type="text/javascript">
<!--
// Private objects are initially displayed (because if
// javascript is turned off then we want them to be
// visible); but by default, we want to hide them. So hide
// them unless we have a cookie that says to show them.
checkCookie();
// -->
</script>
</body>
</html>
| {
"content_hash": "939435edd4feed20a1d415912e442eea",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 176,
"avg_line_length": 39.43181818181818,
"alnum_prop": 0.581075888568684,
"repo_name": "chauncey/there-be-docs",
"id": "d682ba0f9621a43f0e04bc1c43ae868f6c7f76e6",
"size": "10410",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "static/docs/epydocs/eicpy.encrypt-module.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "436813"
},
{
"name": "HTML",
"bytes": "10639963"
},
{
"name": "JavaScript",
"bytes": "8661843"
},
{
"name": "Python",
"bytes": "4581"
}
],
"symlink_target": ""
} |
package org.egordorichev.lasttry.entity.entities.item;
import com.badlogic.gdx.utils.JsonValue;
import org.egordorichev.lasttry.entity.Entity;
import org.egordorichev.lasttry.entity.asset.Assets;
import org.egordorichev.lasttry.entity.component.DescriptionComponent;
import org.egordorichev.lasttry.entity.component.IdComponent;
import org.egordorichev.lasttry.entity.component.TextureComponent;
import org.egordorichev.lasttry.util.log.Log;
/**
* Represents an item in the game
*/
public class Item extends Entity {
	/**
	 * Creates an item and assigns its id.
	 *
	 * @param id Item id
	 */
	public Item(String id) {
		super(IdComponent.class, DescriptionComponent.class, TextureComponent.class, ItemUseComponent.class,
			StackComponent.class);

		IdComponent idComponent = this.getComponent(IdComponent.class);
		idComponent.id = id;
	}

	/**
	 * Sets fields according to the json asset.
	 * Falls back to the "missing_texture" icon when no icon is found.
	 *
	 * @param asset Json asset
	 */
	public void loadFields(JsonValue asset) {
		String iconName = asset.name().replace(':', '_');
		TextureComponent component = this.getComponent(TextureComponent.class);

		component.texture = Assets.getTexture("icons/" + iconName);

		if (component.texture != null) {
			return;
		}

		Log.warning("Failed to load texture for " + asset.name());
		component.texture = Assets.getTexture("missing_texture");
	}

	/**
	 * Counts down the remaining item use time.
	 *
	 * @param delta Time, since the last frame
	 */
	public void update(float delta) {
		ItemUseComponent use = this.getComponent(ItemUseComponent.class);

		if (use.currentTime <= 0.0f) {
			return;
		}

		use.currentTime -= delta;

		if (use.currentTime < 0.0f) {
			// Clamp so the cooldown never goes negative
			use.currentTime = 0.0f;
		}
	}

	/**
	 * Tries to use the item, starting its use cooldown on success.
	 *
	 * @param entity Item owner
	 * @return Item should be removed from inventory
	 */
	public boolean use(Entity entity) {
		ItemUseComponent use = this.getComponent(ItemUseComponent.class);

		if (use.currentTime != 0.0f || !this.canBeUsed()) {
			return false;
		}

		use.currentTime = use.useTime;
		return this.onUse(entity);
	}

	/**
	 * @return Item can be used
	 */
	protected boolean canBeUsed() {
		return true;
	}

	/**
	 * The actual item use function, invoked when the item is used.
	 *
	 * @param entity Item owner
	 * @return Item should be removed from inventory
	 */
	protected boolean onUse(Entity entity) {
		return false;
	}
} | {
"content_hash": "697ad421d179ab8765c8d1b9ba95141c",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 102,
"avg_line_length": 24.655172413793103,
"alnum_prop": 0.7132867132867133,
"repo_name": "LastTryR/LastTry",
"id": "9924804388b1dbcb5e65be32f95020c50896b982",
"size": "2145",
"binary": false,
"copies": "1",
"ref": "refs/heads/remake",
"path": "core/src/org/egordorichev/lasttry/entity/entities/item/Item.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "24"
},
{
"name": "Java",
"bytes": "191872"
}
],
"symlink_target": ""
} |
<?php namespace App\Http\Controllers;
use App\Posts;
use App\User;
use Redirect;
use App\Http\Controllers\Controller;
use App\Http\Requests\PostFormRequest;
use Illuminate\Http\Request;
// note: use true and false for active posts in postgresql database
// here '0' and '1' are used for active posts because of mysql database
class EnPostController extends Controller {

	/**
	 * Display a paginated listing of active posts, newest first.
	 *
	 * @return Response
	 */
	public function index()
	{
		$posts = Posts::where('active','1')->orderBy('created_at','desc')->paginate(5);
		$title = 'Latest Posts';
		return view('en/home')->withPosts($posts)->withTitle($title);
	}

	/**
	 * Show the form for creating a new post.
	 *
	 * Only users with posting permission may access the form.
	 *
	 * @param Request $request
	 * @return Response
	 */
	public function create(Request $request)
	{
		if($request->user()->can_post())
		{
			return view('posts.create');
		}
		else
		{
			return redirect('/en/')->withErrors('You have not sufficient permissions for writing post');
		}
	}

	/**
	 * Store a newly created post.
	 *
	 * Pressing "save" stores the post as an inactive draft; otherwise it is
	 * published immediately.
	 *
	 * @param PostFormRequest $request
	 * @return Response
	 */
	public function store(PostFormRequest $request)
	{
		$post = new Posts();
		$post->title = $request->get('title');
		$post->body = $request->get('body');
		$post->slug = str_slug($post->title);
		$post->author_id = $request->user()->id;
		if($request->has('save'))
		{
			$post->active = 0;
			$message = 'Post saved successfully';
		}
		else
		{
			$post->active = 1;
			$message = 'Post published successfully';
		}
		$post->save();
		return redirect('en/blog/'.$post->slug)->withMessage($message);
	}

	/**
	 * Display a single post identified by its slug.
	 *
	 * Inactive (draft) posts are treated as not found.
	 *
	 * @param string $slug
	 * @return Response
	 */
	public function show($slug)
	{
		$post = Posts::where('slug',$slug)->first();
		if($post)
		{
			if($post->active == false)
			{
				// Fixed: previously redirected to '/' while every other path
				// in this English controller redirects to '/en/'.
				return redirect('/en/')->withErrors('requested page not found');
			}
			$comments = $post->comments;
		}
		else
		{
			return redirect('/en/')->withErrors('requested page not found');
		}
		return view('en.posts.show')->withPost($post)->withComments($comments);
	}

	/**
	 * Show the form for editing a post.
	 *
	 * Only the post's author or an admin may edit it.
	 *
	 * @param Request $request
	 * @param string $slug
	 * @return Response
	 */
	public function edit(Request $request,$slug)
	{
		$post = Posts::where('slug',$slug)->first();
		if($post && ($request->user()->id == $post->author_id || $request->user()->is_admin()))
		{
			return view('posts.edit')->with('post',$post);
		}
		else
		{
			return redirect('/en/')->withErrors('you have not sufficient permissions');
		}
	}

	/**
	 * Update the given post.
	 *
	 * The slug is only rewritten when the new title's slug collides with this
	 * post's own existing slug; if the new slug is entirely free, the original
	 * slug (and URL) is kept.
	 * NOTE(review): confirm that keeping the old slug on a free title change
	 * is the intended URL-stability behaviour.
	 *
	 * @param Request $request
	 * @return Response
	 */
	public function update(Request $request)
	{
		$post_id = $request->input('post_id');
		$post = Posts::find($post_id);
		if($post && ($post->author_id == $request->user()->id || $request->user()->is_admin()))
		{
			$title = $request->input('title');
			$slug = str_slug($title);
			$duplicate = Posts::where('slug',$slug)->first();
			if($duplicate)
			{
				if($duplicate->id != $post_id)
				{
					// Another post already owns this slug: reject the title.
					return redirect('en/edit/'.$post->slug)->withErrors('Title already exists.')->withInput();
				}
				else
				{
					$post->slug = $slug;
				}
			}
			$post->title = $title;
			$post->body = $request->input('body');
			if($request->has('save'))
			{
				$post->active = 0;
				$message = 'Post saved successfully';
				$landing = 'en/edit/'.$post->slug;
			}
			else
			{
				$post->active = 1;
				$message = 'Post updated successfully';
				$landing = 'en/edit/'.$post->slug;
			}
			$post->save();
			return redirect($landing)->withMessage($message);
		}
		else
		{
			return redirect('/en/')->withErrors('you have not sufficient permissions');
		}
	}

	/**
	 * Remove the given post.
	 *
	 * Only the post's author or an admin may delete it.
	 *
	 * @param Request $request
	 * @param int $id
	 * @return Response
	 */
	public function destroy(Request $request, $id)
	{
		$post = Posts::find($id);
		if($post && ($post->author_id == $request->user()->id || $request->user()->is_admin()))
		{
			$post->delete();
			$data['message'] = 'Post deleted Successfully';
		}
		else
		{
			$data['errors'] = 'Invalid Operation. You have not sufficient permissions';
		}
		return redirect('/en/')->with($data);
	}
}
| {
"content_hash": "5aa631653d6754e8ef84e499f8e27e2e",
"timestamp": "",
"source": "github",
"line_count": 187,
"max_line_length": 95,
"avg_line_length": 22.28342245989305,
"alnum_prop": 0.6064314854811615,
"repo_name": "tealinuxos/tealinuxos-blog",
"id": "4f0651f7dec7424352c46ea9f034ab2362345e80",
"size": "4167",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/Http/Controllers/EnPostController.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "382"
},
{
"name": "CSS",
"bytes": "204993"
},
{
"name": "HTML",
"bytes": "401064"
},
{
"name": "JavaScript",
"bytes": "27695"
},
{
"name": "PHP",
"bytes": "416293"
}
],
"symlink_target": ""
} |
package com.blazeloader.util.version;
import com.blazeloader.util.config.IStringable;
/**
* Abstract Specialisation of MultiPartVersion with an IStringable implementation.
*
* @param <T> This version to be returned from/given to factory functions.
*/
public abstract class SaveableVersion<T extends SaveableVersion<T>> extends AbstractVersion<T> implements IStringable<T> {

	public SaveableVersion(String id, String name, BuildType buildType, int... versionParts) {
		super(id, name, buildType, versionParts);
	}

	protected SaveableVersion(AbstractVersion other) {
		super(other);
	}

	/**
	 * Parses a version from its saved string form "{id.n1.n2..., name, buildtype}".
	 * Surrounding braces and whitespace are stripped before parsing.
	 *
	 * @param string Saved version string
	 * @return The parsed version, or null if the string does not have exactly three comma-separated parts
	 * @throws IllegalArgumentException if a numeric part or the build type cannot be parsed
	 */
	@Override
	public T fromString(String string) {
		string = string.trim();

		if (string.startsWith("{")) string = string.substring(1);
		if (string.endsWith("}")) string = string.substring(0, string.length() - 1);

		string = string.trim();
		String[] parts = string.split(", ");

		if (parts.length == 3) {
			// String.split takes a regex: the dot must be escaped to split on a
			// literal '.'. The previous unescaped "." matched every character and
			// produced an empty array, crashing below with a negative array size.
			String[] versionString = parts[0].split("\\.");
			int[] versionParts = new int[versionString.length - 1];

			// versionString[0] is the id; the remaining pieces are numeric parts.
			for (int i = 1; i < versionString.length; i++) {
				try {
					versionParts[i - 1] = Integer.valueOf(versionString[i]);
				} catch (NumberFormatException e) {
					throw new IllegalArgumentException("Invalid string", e);
				}
			}

			BuildType buildType;

			try {
				buildType = BuildType.valueOf(parts[2].toUpperCase());
			} catch (Throwable e) {
				throw new IllegalArgumentException("Invalid string", e);
			}

			return createVersion(versionString[0], parts[1], buildType, versionParts);
		}

		return null;
	}

	protected abstract T createVersion(String id, String name, BuildType buildType, int... parts);
}
| {
"content_hash": "88f84255d5932f0e3d33edcc13c20f07",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 122,
"avg_line_length": 34.604166666666664,
"alnum_prop": 0.6977724262492474,
"repo_name": "Sollace/BlazeLoader",
"id": "5c52d13feadb148acb5a7d1cca8fc3f383ae4079",
"size": "1661",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/com/blazeloader/util/version/SaveableVersion.java",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "9181"
},
{
"name": "Java",
"bytes": "617455"
},
{
"name": "Python",
"bytes": "2819"
}
],
"symlink_target": ""
} |
ASP.NET Boilerplate provides background jobs and workers that are used
to execute some tasks in the **background threads** of an application.
### Background Jobs
In a **queued and persistent** manner, background jobs are used to queue some tasks to be executed in the
background. You may need background jobs for several reasons. Here are some examples:
- To perform **long-running tasks** without having the users wait. For example, a
user presses a 'report' button to start a long-running reporting
job. You add this job to the **queue** and send the report's result to
your user via email when it's completed.
- To create **re-trying** and **persistent tasks** to **guarantee** code will be **successfully
executed**. For example, you can send emails in a background job to
overcome **temporary failures** and **guarantee** that it
eventually will be sent. That way users do not wait while sending
emails.
#### About Job Persistence
See the *Background Job Store* section for more information on job
persistence.
#### Create a Background Job
We can create a background job class by either inheriting from the
**BackgroundJob<TArgs>** class or by directly implementing the
**IBackgroundJob<TArgs> interface**.
Here is the most simple background job:
public class TestJob : BackgroundJob<int>, ITransientDependency
{
public override void Execute(int number)
{
Logger.Debug(number.ToString());
}
}
A background job defines an **Execute** method that gets an input
**argument**. The argument **type** is defined as a **generic** class
parameter as shown in the example.
A background job must be registered via [dependency
injection](/Pages/Documents/Dependency-Injection). Implementing
**ITransientDependency** is the simplest way.
Let's define a more realistic job which sends emails in a background
queue:
public class SimpleSendEmailJob : BackgroundJob<SimpleSendEmailJobArgs>, ITransientDependency
{
private readonly IRepository<User, long> _userRepository;
private readonly IEmailSender _emailSender;
public SimpleSendEmailJob(IRepository<User, long> userRepository, IEmailSender emailSender)
{
_userRepository = userRepository;
_emailSender = emailSender;
}
[UnitOfWork]
public override void Execute(SimpleSendEmailJobArgs args)
{
var senderUser = _userRepository.Get(args.SenderUserId);
var targetUser = _userRepository.Get(args.TargetUserId);
_emailSender.Send(senderUser.EmailAddress, targetUser.EmailAddress, args.Subject, args.Body);
}
}
We
[injected](/Pages/Documents/Dependency-Injection#constructor-injection-pattern) the
user [repository](/Pages/Documents/Repositories) to get user emails,
and injected the email sender (a service to send emails) and simply sent the email.
**SimpleSendEmailJobArgs** is the job argument here and defined as shown
below:
[Serializable]
public class SimpleSendEmailJobArgs
{
public long SenderUserId { get; set; }
public long TargetUserId { get; set; }
public string Subject { get; set; }
public string Body { get; set; }
}
A job argument should be **serializable**, because it's **serialized and
stored** in the database. While ASP.NET Boilerplate's default background
job manager uses **JSON** serialization (which does not need the
\[Serializable\] attribute), it's better to define the **\[Serializable\]**
attribute since we may switch to another job manager in the future, for which we may use
something like .NET's built-in binary serialization.
**Keep your arguments simple** (like
[DTO](Data-Transfer-Objects.md)s), do not include
[entities](/Pages/Documents/Entities) or other non-serializable objects.
As shown in the SimpleSendEmailJob sample, we can only store the **Id** of
an entity and get the entity from the repository inside the job.
#### Add a New Job To the Queue
After defining a background job, we can inject and use
**IBackgroundJobManager** to add a job to the queue. See this example for
TestJob as defined above:
public class MyService
{
private readonly IBackgroundJobManager _backgroundJobManager;
public MyService(IBackgroundJobManager backgroundJobManager)
{
_backgroundJobManager = backgroundJobManager;
}
public void Test()
{
_backgroundJobManager.Enqueue<TestJob, int>(42);
}
}
We sent 42 as an argument while enqueuing. IBackgroundJobManager will
instantiate and execute the TestJob with 42 as the argument.
Let's add a new job for SimpleSendEmailJob, as we defined before:
[AbpAuthorize]
public class MyEmailAppService : ApplicationService, IMyEmailAppService
{
private readonly IBackgroundJobManager _backgroundJobManager;
public MyEmailAppService(IBackgroundJobManager backgroundJobManager)
{
_backgroundJobManager = backgroundJobManager;
}
public async Task SendEmail(SendEmailInput input)
{
await _backgroundJobManager.EnqueueAsync<SimpleSendEmailJob, SimpleSendEmailJobArgs>(
new SimpleSendEmailJobArgs
{
Subject = input.Subject,
Body = input.Body,
SenderUserId = AbpSession.GetUserId(),
TargetUserId = input.TargetUserId
});
}
}
The Enqueue (or EnqueueAsync) method accepts other parameters, such as
**priority** and **delay**.
#### Default Background Job Manager
IBackgroundJobManager is implemented by **BackgroundJobManager**,
by default. It can be replaced by another background job provider (see
[hangfire integration](Hangfire-Integration.md)). Some information on the
default BackgroundJobManager:
- It's a simple job queue that works as **FIFO** in a **single thread**. It
uses **IBackgroundJobStore** to persist jobs (see the next section).
- It **retries** job execution until the job **successfully runs**
    (exceptions are caught and logged) or the job **times out**. The default
    timeout is 2 days for a job.
- It **deletes** a job from the store (database) when it's successfully
    executed. If a job times out, it is marked as **abandoned** and left in the
    database.
- It **increasingly waits between retries** for a job. It waits 1 minute
for the first retry, 2 minutes for the second retry, 4 minutes for the third
retry and so on.
- It **polls** the store for jobs in fixed intervals. It queries jobs,
ordering by priority (asc) and then by try count (asc).
##### Background Job Store
The default BackgroundJobManager needs a data store to save and get jobs. If
you do not implement **IBackgroundJobStore** then it uses
**InMemoryBackgroundJobStore** which does not save jobs in a persistent
database. You can simply implement it to store jobs in a database or you
can use **[module-zero](/Pages/Documents/Zero/Overall)** which already
implements it.
If you are using a 3rd party job manager (like
[Hangfire](Hangfire-Integration.md)), there is no need to implement
IBackgroundJobStore.
#### Configuration
You can use **Configuration.BackgroundJobs** in the
[PreInitialize](/Pages/Documents/Module-System) method of your module to
configure the background job system.
##### Disabling Job Execution
You may want to disable background job execution for your application:
public class MyProjectWebModule : AbpModule
{
public override void PreInitialize()
{
Configuration.BackgroundJobs.IsJobExecutionEnabled = false;
}
//...
}
This is rarely needed. An example of this is if you're running multiple
instances of your application working on the same database (in a web
farm). In this case, each application will query the same database for jobs
and execute them. This leads to multiple executions of the same jobs and
other problems. To prevent it, you have two options:
- You can enable job execution for only one instance of the
application.
- You can disable job execution for all instances of the web
application and create a separated, standalone application (example:
a Windows Service) that executes background jobs.
#### Exception Handling
Since the default background job manager should re-try failed jobs, it
handles (and logs) all exceptions. In case you want to be informed when
an exception occurred, you can create an event handler to handle
[AbpHandledExceptionData](Handling-Exceptions.md). The background manager
triggers this event with a BackgroundJobException exception object which
wraps the real exception (get InnerException for the actual exception).
#### Hangfire Integration
The background job manager is designed to be **replaceable** by another
background job manager. See [hangfire integration
document](/Pages/Documents/Hangfire-Integration) to replace it with
[**Hangfire**](http://hangfire.io/).
### Background Workers
Background workers are different than background jobs. They are simple
**independent threads** in the application running in the background.
Generally, they run **periodically** to perform some tasks. Examples;
- A background worker can run **periodically** to **delete old logs**.
- A background worker can run **periodically** to **determine inactive users** and send emails to get users to return to your application.
#### Create a Background Worker
To create a background worker, we implement the **IBackgroundWorker**
interface. Alternatively, we can inherit from the **BackgroundWorkerBase**
or **PeriodicBackgroundWorkerBase** based on our needs.
Assume that we want to make a user passive, if he did not login to the
application in last 30 days. See the code:
public class MakeInactiveUsersPassiveWorker : PeriodicBackgroundWorkerBase, ISingletonDependency
{
private readonly IRepository<User, long> _userRepository;
public MakeInactiveUsersPassiveWorker(AbpTimer timer, IRepository<User, long> userRepository)
: base(timer)
{
_userRepository = userRepository;
Timer.Period = 5000; //5 seconds (good for tests, but normally will be more)
}
[UnitOfWork]
protected override void DoWork()
{
using (CurrentUnitOfWork.DisableFilter(AbpDataFilters.MayHaveTenant))
{
var oneMonthAgo = Clock.Now.Subtract(TimeSpan.FromDays(30));
var inactiveUsers = _userRepository.GetAllList(u =>
u.IsActive &&
((u.LastLoginTime < oneMonthAgo && u.LastLoginTime != null) || (u.CreationTime < oneMonthAgo && u.LastLoginTime == null))
);
foreach (var inactiveUser in inactiveUsers)
{
inactiveUser.IsActive = false;
Logger.Info(inactiveUser + " made passive since he/she did not login in last 30 days.");
}
CurrentUnitOfWork.SaveChanges();
}
}
}
This real code directly works in ASP.NET Boilerplate with
[module-zero](/Pages/Documents/Zero/Overall).
- If you derive from **PeriodicBackgroundWorkerBase** (as in this
sample), you should implement the **DoWork** method to perform your
periodic working code.
- If you derive from the **BackgroundWorkerBase** or directly implement
**IBackgroundWorker**, you will override/implement the **Start**,
**Stop** and **WaitToStop** methods. Start and Stop methods should
be **non-blocking**, the WaitToStop method should **wait** for the worker to
finish its current critical job.
#### Register Background Workers
After creating a background worker, add it to the
**IBackgroundWorkerManager**. The most common place is the PostInitialize
method of your module:
public class MyProjectWebModule : AbpModule
{
//...
public override void PostInitialize()
{
var workManager = IocManager.Resolve<IBackgroundWorkerManager>();
workManager.Add(IocManager.Resolve<MakeInactiveUsersPassiveWorker>());
}
}
While we generally add workers in PostInitialize, there are no
restrictions on that. You can inject IBackgroundWorkerManager anywhere
and add workers at runtime. IBackgroundWorkerManager will stop and
release all registered workers when your application is being shut down.
#### Background Worker Lifestyles
Background workers are generally implemented as a **singleton**, but there
are no restrictions to this. If you need multiple instances of the same worker class,
you can make it transient and add more than one instance to the
IBackgroundWorkerManager. In this case, your worker will probably be
parametric (say that you have a single LogCleaner class but two
LogCleaner worker instances and they watch and clear different log folders).
#### Advanced Scheduling
ASP.NET Boilerplate's background worker systems are simple. It does not have a
schedule system, except for periodic running workers as demonstrated above.
If you need more advanced scheduling features, we suggest you
check out [Quartz](Quartz-Integration.md) or another library.
### Making Your Application Always Run
Background jobs and workers only work if your application is running.
An ASP.NET application **shuts down** by default if no request is
performed to the web application for a long period of time. So, if you host the
background job execution in your web application (this is the default
behavior), you should ensure that your web application is configured to always
be running. Otherwise, background jobs only work while your
application is in use.
There are some techniques to accomplish that. The most simple way is to make
periodic requests to your web application from an external application.
Thus, you can also check if your web application is up and running. The
[Hangfire
documentation](http://docs.hangfire.io/en/latest/deployment-to-production/making-aspnet-app-always-running.html)
explains some other ways to accomplish this.
| {
"content_hash": "abfa623dfc09189fbedc6edf0044eb9a",
"timestamp": "",
"source": "github",
"line_count": 355,
"max_line_length": 141,
"avg_line_length": 40.07887323943662,
"alnum_prop": 0.7223081248242902,
"repo_name": "oceanho/aspnetboilerplate",
"id": "9c890a281c120e682ca1259d90abbd80112129c3",
"size": "14246",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "doc/WebSite/Background-Jobs-And-Workers.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "2458747"
},
{
"name": "CSS",
"bytes": "9773"
},
{
"name": "HTML",
"bytes": "347001"
},
{
"name": "JavaScript",
"bytes": "111885"
},
{
"name": "PLpgSQL",
"bytes": "1567"
},
{
"name": "PowerShell",
"bytes": "1478"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html xmlns:msxsl="urn:schemas-microsoft-com:xslt">
<head>
<meta content="en-us" http-equiv="Content-Language" />
<meta content="text/html; charset=utf-16" http-equiv="Content-Type" />
<title _locid="PortabilityAnalysis0">.NET Portability Report</title>
<style>
/* Body style, for the entire document */
body {
background: #F3F3F4;
color: #1E1E1F;
font-family: "Segoe UI", Tahoma, Geneva, Verdana, sans-serif;
padding: 0;
margin: 0;
}
/* Header1 style, used for the main title */
h1 {
padding: 10px 0px 10px 10px;
font-size: 21pt;
background-color: #E2E2E2;
border-bottom: 1px #C1C1C2 solid;
color: #201F20;
margin: 0;
font-weight: normal;
}
/* Header2 style, used for "Overview" and other sections */
h2 {
font-size: 18pt;
font-weight: normal;
padding: 15px 0 5px 0;
margin: 0;
}
/* Header3 style, used for sub-sections, such as project name */
h3 {
font-weight: normal;
font-size: 15pt;
margin: 0;
padding: 15px 0 5px 0;
background-color: transparent;
}
h4 {
font-weight: normal;
font-size: 12pt;
margin: 0;
padding: 0 0 0 0;
background-color: transparent;
}
/* Color all hyperlinks one color */
a {
color: #1382CE;
}
/* Paragraph text (for longer informational messages) */
p {
font-size: 10pt;
}
/* Table styles */
table {
border-spacing: 0 0;
border-collapse: collapse;
font-size: 10pt;
}
table th {
background: #E7E7E8;
text-align: left;
text-decoration: none;
font-weight: normal;
padding: 3px 6px 3px 6px;
}
table td {
vertical-align: top;
padding: 3px 6px 5px 5px;
margin: 0px;
border: 1px solid #E7E7E8;
background: #F7F7F8;
}
.NoBreakingChanges {
color: darkgreen;
font-weight:bold;
}
.FewBreakingChanges {
color: orange;
font-weight:bold;
}
.ManyBreakingChanges {
color: red;
font-weight:bold;
}
.BreakDetails {
margin-left: 30px;
}
.CompatMessage {
font-style: italic;
font-size: 10pt;
}
.GoodMessage {
color: darkgreen;
}
/* Local link is a style for hyperlinks that link to file:/// content, there are lots so color them as 'normal' text until the user mouse overs */
.localLink {
color: #1E1E1F;
background: #EEEEED;
text-decoration: none;
}
.localLink:hover {
color: #1382CE;
background: #FFFF99;
text-decoration: none;
}
/* Center text, used in the over views cells that contain message level counts */
.textCentered {
text-align: center;
}
/* The message cells in message tables should take up all avaliable space */
.messageCell {
width: 100%;
}
/* Padding around the content after the h1 */
#content {
padding: 0px 12px 12px 12px;
}
/* The overview table expands to width, with a max width of 97% */
#overview table {
width: auto;
max-width: 75%;
}
/* The messages tables are always 97% width */
#messages table {
width: 97%;
}
/* All Icons */
.IconSuccessEncoded, .IconInfoEncoded, .IconWarningEncoded, .IconErrorEncoded {
min-width: 18px;
min-height: 18px;
background-repeat: no-repeat;
background-position: center;
}
/* Success icon encoded */
.IconSuccessEncoded {
/* Note: Do not delete the comment below. It is used to verify the correctness of the encoded image resource below before the product is released */
/* [---XsltValidateInternal-Base64EncodedImage:IconSuccess#Begin#background-image: url(data:image/png;base64,#Separator#);#End#] */
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAABPElEQVR4Xp1Tv0vDUBi8FqeA4NpBcBLcWnQSApncOnTo4FSnjP0DsnXpH5CxiwbHDg4Zuj4oOEXiJgiC4FDcCkLWmIMc1Pfw+eMgQ77v3Xf3Pe51YKGqqisAEwCR1TIAsiAIblSo6xrdHeJR85Xle3mdmCQKb0PsfqyxxzM8K15HZADl/H5+sHpZwYfxyRjTs+kWwKBx8yoHd2mRiuzF8mkJniWH/13u3Fjrs/EdhsdDFHGB/DLXEJBDLh1MWPAhPo1BLB4WX5yQywHR+m3tVe/t97D52CB/ziG0nIgD/qDuYg8WuCcVZ2YGwlJ3YDugkpR/VNcAEx6GEKhERSr71FuO4YCM4XBdwKvecjIlkSnsO0Hyp/GxSeJAdzBKzpOtnPwyyiPdAZhpZptT04tU+zk7s8czeges//s5C5+CwqrR4/gw+AAAAABJRU5ErkJggg==);
}
/* Information icon encoded */
.IconInfoEncoded {
/* Note: Do not delete the comment below. It is used to verify the correctness of the encoded image resource below before the product is released */
/* [---XsltValidateInternal-Base64EncodedImage:IconInformation#Begin#background-image: url(data:image/png;base64,#Separator#);#End#] */
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAABHElEQVR4Xs2TsUoDQRRF7wwoziokjZUKadInhdhukR9YP8DMX1hYW+QvdsXa/QHBbcXC7W0CamWTQnclFutceIQJwwaWNLlwm5k5d94M76mmaeCrrmsLYOocY12FcxZFUeozCqKqqgYA8uevv1H6VuPxcwlfk5N92KHBxfFeCSAxxswlYAW/Xr989x/mv9gkhtyMDhcAxgzRsp7flj8B/HF1RsMXq+NZMkopaHe7lbKxQUEIGbKsYNoGn969060hZBkQex/W8oRQwsQaW2o3Ago2SVcJUzAgY3N0lTCZZm+zPS8HB51gMmS1DEYyOz9acKO1D8JWTlafKIMxdhvlfdyT94Vv5h7P8Ky7nQzACmhvKq3zk3PjW9asz9D/1oigecsioooAAAAASUVORK5CYII=);
}
/* Warning icon encoded */
.IconWarningEncoded {
/* Note: Do not delete the comment below. It is used to verify the correctness of the encoded image resource below before the product is released */
/* [---XsltValidateInternal-Base64EncodedImage:IconWarning#Begin#background-image: url(data:image/png;base64,#Separator#);#End#] */
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAA7EAAAOxAGVKw4bAAAAx0lEQVR4XpWSMQ7CMAxFf4xAyBMLCxMrO8dhaBcuwdCJS3RJBw7SA/QGTCxdWJgiQYWKXJWKIXHIlyw5lqr34tQgEOdcBsCOx5yZK3hCCKdYXneQkh4pEfqzLfu+wVDSyyzFoJjfz9NB+pAF+eizx2Vruts0k15mPgvS6GYvpVtQhB61IB/dk6AF6fS4Ben0uIX5odtFe8Q/eW1KvFeH4e8khT6+gm5B+t3juyDt7n0jpe+CANTd+oTUjN/U3yVaABnSUjFz/gFq44JaVSCXeQAAAABJRU5ErkJggg==);
}
/* Error icon encoded */
.IconErrorEncoded {
/* Note: Do not delete the comment below. It is used to verify the correctness of the encoded image resource below before the product is released */
/* [---XsltValidateInternal-Base64EncodedImage:IconError#Begin#background-image: url(data:image/png;base64,#Separator#);#End#] */
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAABQElEQVR4XqWTvUoEQRCE6wYPZUA80AfwAQz23uCMjA7MDRQEIzPBVEyNTQUFIw00vcQTTMzuAh/AxEQQT8HF/3G/oGGnEUGuoNnd6qoZuqltyKEsyzVJq5I6rnUp6SjGeGhESikzzlc1eL7opfuVbrqbU1Zw9NCgtQMaZpY0eNnaaL2fHusvTK5vKu7sjSS1Y4y3QUA6K3e3Mau5UFDyMP7tYF9o8cAHZv68vipoIJg971PZIZ5HiwdvYGGvFVFHmGmZ2MxwmQYPXubPl9Up0tfoMQGetXd6mRbvhBw+boZ6WF7Mbv1+GsHRk0fQmPAH1GfmZirbCfDJ61tw3Px8/8pZsPAG4jlVhcPgZ7adwNWBB68lkRQWFiTgFlbnLY3DGGM7izIJIyT/jjIvEJw6fdJTc6krDzh6aMwMP9bvDH4ADSsa9uSWVJkAAAAASUVORK5CYII=);
}
</style>
</head>
<body>
<h1 _locid="PortabilityReport">.NET Portability Report</h1>
<div id="content">
<div id="submissionId" style="font-size:8pt;">
<p>
<i>
Submission Id
2084d6ef-b7e1-4674-8c60-d33f38348958
</i>
</p>
</div>
<h2 _locid="SummaryTitle">
<a name="Portability Summary"></a>Portability Summary
</h2>
<div id="summary">
<table>
<tbody>
<tr>
<th>Assembly</th>
<th>ASP.NET 5,Version=v1.0</th>
<th>Windows,Version=v8.1</th>
<th>.NET Framework,Version=v4.6</th>
<th>Windows Phone,Version=v8.1</th>
</tr>
<tr>
<td><strong><a href="#protobuf-net">protobuf-net</a></strong></td>
<td class="text-center">94.25 %</td>
<td class="text-center">89.89 %</td>
<td class="text-center">100.00 %</td>
<td class="text-center">89.89 %</td>
</tr>
</tbody>
</table>
</div>
<div id="details">
<a name="protobuf-net"><h3>protobuf-net</h3></a>
<table>
<tbody>
<tr>
<th>Target type</th>
<th>ASP.NET 5,Version=v1.0</th>
<th>Windows,Version=v8.1</th>
<th>.NET Framework,Version=v4.6</th>
<th>Windows Phone,Version=v8.1</th>
<th>Recommended changes</th>
</tr>
<tr>
<td>System.IO.MemoryStream</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Use an overload that supplies a buffer and maintain that buffer outside of memory stream. If you need the internal buffer, create a copy via .ToArray();</td>
</tr>
<tr>
<td style="padding-left:2em">GetBuffer</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Use an overload that supplies a buffer and maintain that buffer outside of memory stream. If you need the internal buffer, create a copy via .ToArray();</td>
</tr>
<tr>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
</tr>
<tr>
<td>System.Reflection.Assembly</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">GetCallingAssembly</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">GetCustomAttributes(System.Boolean)</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
</tr>
<tr>
<td>System.Reflection.Binder</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Use an overload that does not take a Binder.</td>
</tr>
<tr>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
</tr>
<tr>
<td>System.Reflection.BindingFlags</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
</tr>
<tr>
<td>System.Reflection.FieldInfo</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Call FieldInfo.GetValue(object) instead.</td>
</tr>
<tr>
<td style="padding-left:2em">GetRawConstantValue</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Call FieldInfo.GetValue(object) instead.</td>
</tr>
<tr>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
</tr>
<tr>
<td>System.Reflection.MemberInfo</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">get_MemberType</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>use "is FieldInfo", "is PropertyInfo", etc</td>
</tr>
<tr>
<td style="padding-left:2em">get_ReflectedType</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Remove usage: Use reference to object being reflected on to get this value instead.</td>
</tr>
<tr>
<td style="padding-left:2em">IsDefined(System.Type,System.Boolean)</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
</tr>
<tr>
<td>System.Reflection.MemberTypes</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Use is operator. Example: if (membertype == membertypes.Field) --> if (member is FieldInfo)</td>
</tr>
<tr>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
</tr>
<tr>
<td>System.Reflection.ParameterModifier</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Use an overload that does not take a ParameterModifier array.</td>
</tr>
<tr>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
</tr>
<tr>
<td>System.Reflection.PropertyInfo</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Use PropertyInfo.GetMethod property</td>
</tr>
<tr>
<td style="padding-left:2em">GetGetMethod(System.Boolean)</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Use PropertyInfo.GetMethod property</td>
</tr>
<tr>
<td style="padding-left:2em">GetSetMethod(System.Boolean)</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Use PropertyInfo.SetMethod property</td>
</tr>
<tr>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
</tr>
<tr>
<td>System.Threading.Thread</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">get_CurrentThread</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">get_ManagedThreadId</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
</tr>
<tr>
<td>System.Type</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">EmptyTypes</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>new Type[0] (or create your own static property which returns a cached version of this)</td>
</tr>
<tr>
<td style="padding-left:2em">get_Assembly</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>.GetTypeInfo().Assembly</td>
</tr>
<tr>
<td style="padding-left:2em">get_BaseType</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>.GetTypeInfo().BaseType</td>
</tr>
<tr>
<td style="padding-left:2em">get_IsAbstract</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>.GetTypeInfo().IsAbstract</td>
</tr>
<tr>
<td style="padding-left:2em">get_IsClass</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>.GetTypeInfo().IsClass</td>
</tr>
<tr>
<td style="padding-left:2em">get_IsEnum</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>.GetTypeInfo().IsEnum</td>
</tr>
<tr>
<td style="padding-left:2em">get_IsGenericType</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>.GetTypeInfo().IsGenericType</td>
</tr>
<tr>
<td style="padding-left:2em">get_IsInterface</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>.GetTypeInfo().IsInterface</td>
</tr>
<tr>
<td style="padding-left:2em">get_IsSealed</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>.GetTypeInfo().IsSealed</td>
</tr>
<tr>
<td style="padding-left:2em">get_IsValueType</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>.GetTypeInfo().IsValueType</td>
</tr>
<tr>
<td style="padding-left:2em">GetConstructor(System.Reflection.BindingFlags,System.Reflection.Binder,System.Type[],System.Reflection.ParameterModifier[])</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Use GetConstructor(Type[]) to search for public constructors by parameter type or filter the results of GetConstructors(BindingFlags) using LINQ for other queries.</td>
</tr>
<tr>
<td style="padding-left:2em">GetConstructors(System.Reflection.BindingFlags)</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Equivalent available: Add using for System.Reflection, and reference System.Reflection.TypeExtensions </td>
</tr>
<tr>
<td style="padding-left:2em">GetFields</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">GetFields(System.Reflection.BindingFlags)</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">GetGenericArguments</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">GetInterfaces</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">GetMember(System.String,System.Reflection.BindingFlags)</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">GetMembers(System.Reflection.BindingFlags)</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">GetMethod(System.String,System.Reflection.BindingFlags)</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Equivalent available: Add using for System.Reflection, and reference System.Reflection.TypeExtensions </td>
</tr>
<tr>
<td style="padding-left:2em">GetMethod(System.String,System.Reflection.BindingFlags,System.Reflection.Binder,System.Type[],System.Reflection.ParameterModifier[])</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Use GetMethod(string, Type[]) to search for public methods by name and parameter type or filter the results of GetMethods(BindingFlags) using LINQ for other queries.</td>
</tr>
<tr>
<td style="padding-left:2em">GetMethods</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">GetMethods(System.Reflection.BindingFlags)</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">GetProperties(System.Reflection.BindingFlags)</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">GetProperty(System.String,System.Reflection.BindingFlags)</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">GetTypeCode(System.Type)</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Use Type or RuntimeTypeHandle instead.</td>
</tr>
<tr>
<td style="padding-left:2em">IsAssignableFrom(System.Type)</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td></td>
</tr>
<tr>
<td style="padding-left:2em">IsSubclassOf(System.Type)</td>
<td class="IconErrorEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>.GetTypeInfo().IsSubclassOf</td>
</tr>
<tr>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
</tr>
<tr>
<td>System.TypeCode</td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td class="IconSuccessEncoded"></td>
<td class="IconErrorEncoded"></td>
<td>Use Type or RuntimeTypeHandle instead.</td>
</tr>
<tr>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
<td> </td>
</tr>
</tbody>
</table>
<p>
<a href="#Portability Summary">Back to Summary</a>
</p>
</div>
</div>
</body>
</html> | {
"content_hash": "27b5991409faf42d9360c5d11bc8cb5b",
"timestamp": "",
"source": "github",
"line_count": 853,
"max_line_length": 562,
"avg_line_length": 45.31887456037515,
"alnum_prop": 0.4035750316889567,
"repo_name": "kuhlenh/port-to-core",
"id": "7dffb60629fc7fbc9c7b99586979367cd06af89f",
"size": "38657",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "Reports/pr/protobuf-net.2.0.0.668/protobuf-net-sl3-wp.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2323514650"
}
],
"symlink_target": ""
} |
package scalaxy.dsl
import scala.language.experimental.macros
import scala.collection.GenTraversableOnce
import scala.collection.generic.CanBuildFrom
// Monadic wrapper that lets for-comprehensions over elements of type A be
// captured as *reified* functions (ASTs) instead of plain closures: the
// public foreach / withFilter / flatMap members are macros (implemented in
// ReifiedFilterMonadicMacros) that rewrite the user's lambdas into
// ReifiedFunction values and route them to the abstract reified* methods.
trait ReifiedFilterMonadic[A, Repr] {
  // Named self-reference so the nested WithFilters class can delegate
  // back to the enclosing instance.
  self =>
  // Implementation hook: run f on every element that passes all filters.
  def reifiedForeach[U](
    f: ReifiedFunction[A, U],
    filters: List[ReifiedFunction[A, Boolean]]): Unit
  // Implementation hook: flatMap with a reified function and filters,
  // building the result collection through the implicit CanBuildFrom.
  def reifiedFlatMap[B, That](
    f: ReifiedFunction[A, GenTraversableOnce[B]],
    filters: List[ReifiedFunction[A, Boolean]])(
    implicit bf: CanBuildFrom[Repr, B, That]): That
  // Filters accumulated so far; empty for an unfiltered instance.
  def reifiedFilters: List[ReifiedFunction[A, Boolean]] = Nil
  // Macro entry points: bodies are generated at compile time and forward
  // to the reified* methods above with reified versions of the arguments.
  def foreach[U](f: A => U): Unit =
    macro ReifiedFilterMonadicMacros.foreachImpl[A, Repr, U]
  def withFilter(f: A => Boolean): ReifiedFilterMonadic[A, Repr] =
    macro ReifiedFilterMonadicMacros.withFilterImpl[A, Repr]
  def flatMap[B, That](
    f: A => GenTraversableOnce[B])(
    implicit bf: CanBuildFrom[Repr, B, That]): That =
    macro ReifiedFilterMonadicMacros.flatMapImpl[A, Repr, B, That]
  // Returns a view of this monadic carrying the given filter list.
  def withFilters(filters: List[ReifiedFunction[A, Boolean]]) =
    new WithFilters(filters)
  // Decorator that overrides reifiedFilters with a fixed list and
  // delegates the reified operations to the outer instance.
  class WithFilters(filters: List[ReifiedFunction[A, Boolean]])
    extends ReifiedFilterMonadic[A, Repr] {
    override def reifiedFilters = filters
    override def reifiedForeach[U](
      f: ReifiedFunction[A, U],
      filters: List[ReifiedFunction[A, Boolean]]) {
      self.reifiedForeach(f, filters)
    }
    override def reifiedFlatMap[B, That](
      f: ReifiedFunction[A, GenTraversableOnce[B]],
      filters: List[ReifiedFunction[A, Boolean]])(
      implicit bf: CanBuildFrom[Repr, B, That]): That =
      self.reifiedFlatMap(f, filters)
  }
}
| {
"content_hash": "ead81e9d75f94c9a6ad8a0c4c36c7563",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 67,
"avg_line_length": 33.56,
"alnum_prop": 0.6847437425506555,
"repo_name": "nativelibs4java/Scalaxy",
"id": "5db1f815825d0e3bf7f9306829ce482567a5f169",
"size": "1678",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DSL/ReifiedFilterMonadic.scala",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "5687"
},
{
"name": "CSS",
"bytes": "192"
},
{
"name": "HTML",
"bytes": "4925"
},
{
"name": "JavaScript",
"bytes": "200313"
},
{
"name": "Scala",
"bytes": "862308"
},
{
"name": "Shell",
"bytes": "7913"
}
],
"symlink_target": ""
} |
/**
* Created by ManonLoki1 on 15/8/8.
*/
// A second demo scene controller that displays an alternative background.
var AnotherBGViewController = mw.ViewController.extend({
    ctor: function () {
        this._super();
    },
    viewDidLoad: function () {
        // Load the scene: create the background sprite and center it
        // in the window.
        var winSize = cc.director.getWinSize();
        var bg = new cc.Sprite("res/another_bg.jpg");
        bg.setPosition(winSize.width / 2, winSize.height / 2);
        this.view().addChild(bg);
        mw.log("AnotherBGViewController viewDidLoad");
    },
    viewDidUnload: function () {
        // Unload the scene; nothing to release beyond the view itself.
        mw.log("AnotherBGViewController viewDidUnLoad");
    }
});
"content_hash": "b77edd764270c0387cfd0a13a8f36ed3",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 91,
"avg_line_length": 26.38095238095238,
"alnum_prop": 0.6191335740072202,
"repo_name": "wanmaple/PokemonRPG",
"id": "a8eb66c22fcfa4c1e0a1b05a7e159de1fb0a1880",
"size": "582",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/asset/src/scenes/demo/AnotherBGViewController.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "2595"
},
{
"name": "C",
"bytes": "8380610"
},
{
"name": "C++",
"bytes": "20283487"
},
{
"name": "CMake",
"bytes": "232936"
},
{
"name": "GLSL",
"bytes": "56787"
},
{
"name": "HTML",
"bytes": "4386"
},
{
"name": "Java",
"bytes": "643771"
},
{
"name": "JavaScript",
"bytes": "5895543"
},
{
"name": "Lua",
"bytes": "16064"
},
{
"name": "Makefile",
"bytes": "48929"
},
{
"name": "Objective-C",
"bytes": "2936879"
},
{
"name": "Objective-C++",
"bytes": "446393"
},
{
"name": "Python",
"bytes": "360880"
},
{
"name": "Shell",
"bytes": "26867"
}
],
"symlink_target": ""
} |
<add>
<doc>
<field name="uid">doi:10.1371/journal.pntd.0001687</field>
<field name="doi">10.1371/journal.pntd.0001687</field>
<field name="data_source">PLoS Open Access XML</field>
<field name="authors">Daniel R. Abánades, Leonardo V. Arruda, Elaine S. Arruda, José Roberto A. S. Pinto, Mario S. Palma, Dorlene Aquino, Arlene J. Caldas, Manuel Soto, Aldina Barral, Manoel Barral-Netto</field>
<field name="title">Immunodominant Antigens of Leishmania chagasi Associated with Protection against Human Visceral Leishmaniasis</field>
<field name="journal">PLoS Neglected Tropical Diseases</field>
<field name="year">2012</field>
<field name="volume">6</field>
<field name="number">6</field>
<field name="pages">e1687</field>
<field name="license">Creative Commons Attribution (CC BY)</field>
<field name="license_url">http://creativecommons.org/licenses/by/3.0/</field>
<field name="fulltext">
Abstract
Background
Protection and recovery from visceral leishmaniasis (VL) have been associated
with cell-mediated immune (CMI) responses, whereas no protective role has been
attributed to humoral responses against specific parasitic antigens. In this
report, we compared carefully selected groups of individuals with distinct
responses to Leishmania chagasi to explore antigen-recognizing IgG present in
resistant individuals.
Methodology and Principal Findings
VL patients with negative delayed-type hypersensitivity (DTH) were classified
into the susceptible group. Individuals who had recovered from VL and
converted to a DTH+ response, as well as asymptomatic infected individuals
(DTH+), were categorized into the resistant group. Sera from these groups were
used to detect antigens from L. chagasi by conventional and 2D Western blot
assays. Despite an overall reduction in the reactivity of several proteins
after DTH conversion, a specific group of proteins (approximately 110–130 kDa)
consistently reacted with sera from DTH converters. Other antigens that
specifically reacted with sera from DTH+ individuals were isolated and tandem
mass spectrometry followed by database query with the protein search engine
MASCO were used to identify antigens. The serological properties of
recombinant version of the selected antigens were tested by ELISA. Sera from
asymptomatic infected people (DTH+) reacted more strongly with a mixture of
selected recombinant antigens than with total soluble Leishmania antigen
(SLA), with less cross-reactivity against Chagas disease patients' sera.
Significance
Our results are the first evidence of leishmania proteins that are
specifically recognized by sera from individuals who are putatively resistant
to VL. In addition, these data highlight the possibility of using specific
proteins in serological tests for the identification of asymptomatic infected
individuals.
Author Summary
One of the most striking features of infection by Leishmania chagasi is that
infection leads to a spectrum of clinical outcomes ranging from asymptomatic
infection to active disease. The existence of asymptomatic infected people has
served as an incentive to believe that an effective vaccine is possible, but
unfortunately no successful immunological characterization of such cases was
obtained. Patients recovered from visceral leishmaniasis show a similar
immunological profile to asymptomatic infected individuals and both exhibit a
strong cell-mediated immune response against Leishmania antigens and are
resistant to disease. Since the past decade several approaches were undertaken
to try to shed light on the immunological profile associated with such
“resistance” to infections, notwithstanding antigenic recognition profile
associated to resistance to infection was not successfully explored. In the
present manuscript we describe a specific IgG recognizing pattern associated
with resistant individuals (asymptomatic infected people and recovery patients
to visceral leishmaniasis). These data highlight the possibility of using
specific proteins in serological tests for the identification of asymptomatic
infected individuals.
Introduction
Viscer</field></doc>
</add>
| {
"content_hash": "b4ec26d10b2ad9442d65b72fdb63196c",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 211,
"avg_line_length": 54.1578947368421,
"alnum_prop": 0.8219144800777454,
"repo_name": "rletters/solr-example",
"id": "836f30c4324c972177965f817dfc81e37148e0f8",
"size": "4124",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data/pntd_0001687.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "291"
},
{
"name": "Batchfile",
"bytes": "70942"
},
{
"name": "CSS",
"bytes": "237111"
},
{
"name": "HTML",
"bytes": "213687"
},
{
"name": "JavaScript",
"bytes": "1164747"
},
{
"name": "Shell",
"bytes": "104212"
}
],
"symlink_target": ""
} |
using Microsoft.VisualStudio.Text.Classification;
using Microsoft.VisualStudio.Utilities;
using Papyrus.Common;
using Papyrus.Features;
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Diagnostics;
using System.Linq;
using System.Windows.Media;
namespace Papyrus.Language.Components.Tokens {
[DebuggerStepThrough]
[Export(typeof(EditorFormatDefinition))]
[ClassificationType(ClassificationTypeNames = Name)]
[Name(Name)]
[UserVisible(true)]
[Order(Before = Priority.Default)]
// MEF-exported editor format definition: declares the display name and
// foreground color used to render Papyrus numeric literals in the editor.
internal sealed class NumericLiteralColorFormat : ClassificationFormatDefinition {
    // Classification type name; also referenced by the classification type
    // definition below and by NumericLiteral's ISyntaxColorable lookup.
    internal const string Name = "PapyrusLiteral";
    internal NumericLiteralColorFormat() {
        // Name shown to the user (e.g. in Fonts and Colors settings).
        DisplayName = "Papyrus Literal";
        // Light green (RGB 181,206,168) — presumably chosen to match the
        // stock VS numeric-literal color; confirm visually if changed.
        ForegroundColor = Color.FromRgb(181, 206, 168);
    }
}
[DebuggerStepThrough]
// MEF export that registers the "PapyrusLiteral" classification type with
// the editor. The field is never read by this code; MEF discovers it via
// the Export attribute, which is why it is intentionally left unassigned.
internal static class NumericLiteralColorClassificationDefinition {
    [Export(typeof(ClassificationTypeDefinition))]
    [Name(NumericLiteralColorFormat.Name)]
    private static ClassificationTypeDefinition typeDefinition;
}
//[DebuggerStepThrough]
/// <summary>
/// Token representing a Papyrus numeric literal: a decimal integer, a float,
/// or a 0x-prefixed hexadecimal number. Instances compare equal when their
/// source text matches case-insensitively (relevant for hex digits and the
/// 0x/0X prefix), and are always compile-time constants.
/// </summary>
public sealed class NumericLiteral : Token, ISyntaxColorable {
    private readonly string value;

    public NumericLiteral(string value) {
        this.value = value;
    }

    /// <summary>The literal's source text, exactly as written.</summary>
    public override string Text {
        get { return value; }
    }

    public override TokenTypeID TypeID {
        get { return TokenTypeID.NumericLiteral; }
    }

    /// <summary>Numeric literals are always compile-time constants.</summary>
    public override bool IsCompileTimeConstant {
        get { return true; }
    }

    IClassificationType ISyntaxColorable.GetClassificationType(IClassificationTypeRegistryService registry) {
        return registry.GetClassificationType(NumericLiteralColorFormat.Name);
    }

    public override int GetHashCode() {
        // Fix: must be consistent with Equals, which compares with
        // OrdinalIgnoreCase. The previous value.GetHashCode() was
        // case-sensitive, so literals that compared equal (e.g. "0xAB"
        // and "0xab") could hash differently, breaking hash-based
        // collections such as Dictionary/HashSet.
        return StringComparer.OrdinalIgnoreCase.GetHashCode(value);
    }

    public override bool Equals(object obj) {
        return obj is NumericLiteral && String.Equals(this.value, ((NumericLiteral)obj).value, StringComparison.OrdinalIgnoreCase);
    }

    public static bool operator ==(NumericLiteral x, NumericLiteral y) {
        return Equals(x, y);
    }
    public static bool operator !=(NumericLiteral x, NumericLiteral y) {
        return !Equals(x, y);
    }
    // Mixed-type overloads so comparisons against the Token base type use
    // value equality when the other operand is actually a NumericLiteral.
    public static bool operator ==(NumericLiteral x, Token y) {
        return x == y as NumericLiteral;
    }
    public static bool operator !=(NumericLiteral x, Token y) {
        return x != y as NumericLiteral;
    }
    public static bool operator ==(Token x, NumericLiteral y) {
        return x as NumericLiteral == y;
    }
    public static bool operator !=(Token x, NumericLiteral y) {
        return x as NumericLiteral != y;
    }

    /// <summary>
    /// Attempts to read a numeric literal from the start of <paramref name="source"/>.
    /// Returns a token for the longest matching prefix, or null when the text
    /// does not begin with a numeric literal. Hex is tried first so that the
    /// leading "0" of "0x..." is not consumed as a plain integer.
    /// </summary>
    public static NumericLiteral Parse(string source) {
        int length;
        if (!String.IsNullOrWhiteSpace(source)) {
            if (TryParseHexNumber(source, out length) ||
                TryParseFloat(source, out length) ||
                TryParseInteger(source, out length)) {
                return new NumericLiteral(length == source.Length ? source : source.Remove(length));
            }
        }
        return null;
    }

    private const string IntegerDigits = "0123456789";
    private const string HexNumberDigits = IntegerDigits + "aAbBcCdDeEfF";
    private const string HexNumberIdentifier = "xX";

    // Matches an optional '-' followed by one or more decimal digits.
    // On success, length is the number of characters consumed.
    private static bool TryParseInteger(string source, out int length) {
        bool negative = source.StartsWith("-");
        for (length = negative ? 1 : 0; length < source.Length; ++length) {
            if (!IntegerDigits.Contains(source[length])) {
                break;
            }
        }
        return length > (negative ? 1 : 0);
    }

    // Matches "0x"/"0X" followed by one or more hex digits.
    private static bool TryParseHexNumber(string source, out int length) {
        if (source.StartsWith("0x", StringComparison.OrdinalIgnoreCase)) {
            for (length = 2; length < source.Length; ++length) {
                if (!HexNumberDigits.Contains(source[length])) {
                    break;
                }
            }
            return length > 2;
        }
        length = 0;
        return false;
    }

    // Matches an optional '-', decimal digits, and at most one embedded '.'
    // that is not the first character after the sign. NOTE: a run of three
    // or more plain digits with no '.' also passes the final length test;
    // this is harmless because Parse would consume the identical prefix via
    // TryParseInteger, so the resulting token text is the same either way.
    private static bool TryParseFloat(string source, out int length) {
        bool negative = source.StartsWith("-");
        bool decimalPoint = false;
        for (length = negative ? 1 : 0; length < source.Length; ++length) {
            if (!decimalPoint && length > (negative ? 1 : 0) && source[length] == '.') {
                decimalPoint = true;
            }
            else if (!IntegerDigits.Contains(source[length])) {
                break;
            }
        }
        return length >= (negative ? 4 : 3);
    }
}
/// <summary>
/// TokenParser that recognizes numeric literals. Parsing is only attempted
/// while the scanner's top-level state is the global scope.
/// </summary>
internal sealed class NumericLiteralParser : TokenParser {
    public override bool TryParse(string sourceTextSpan, TokenScanner scanner, IEnumerable<Token> previousTokens, out Token token) {
        token = null;
        // Numeric literals are only recognized at global scope.
        if (scanner.TopLevelState != ScannerStates.GlobalScope) {
            return false;
        }
        token = NumericLiteral.Parse(sourceTextSpan);
        return token != null;
    }
}
}
| {
"content_hash": "4cb825e0bab3f935d3b620918b39b511",
"timestamp": "",
"source": "github",
"line_count": 151,
"max_line_length": 136,
"avg_line_length": 37.158940397350996,
"alnum_prop": 0.5883086793797897,
"repo_name": "Zebrina/PapyrusScriptEditorVSIX",
"id": "e7905c8d58126b99a855da3a8c5cd0f9fb1e6fb4",
"size": "5613",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PapyrusScriptEditorVSIX_OLD/PapyrusScriptEditorVSIX/Language/Components/Tokens/NumericLiteral.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "320"
},
{
"name": "C#",
"bytes": "473609"
},
{
"name": "CSS",
"bytes": "3736"
},
{
"name": "HTML",
"bytes": "7034"
},
{
"name": "Papyrus",
"bytes": "278"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
Index Fungorum
#### Published in
Annls mycol. 19(1/2): 174 (1921)
#### Original name
Ontotelium Syd.
### Remarks
null | {
"content_hash": "b9d815f87ac15aa1ad2644a208f6260d",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 32,
"avg_line_length": 11.307692307692308,
"alnum_prop": 0.673469387755102,
"repo_name": "mdoering/backbone",
"id": "f6e405b33d9837f60d41ddf406e5fb094657155e",
"size": "184",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Basidiomycota/Pucciniomycetes/Pucciniales/Pucciniaceae/Uromyces/ Syn. Ontotelium/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package org.zkoss.zss.essential.util;
import java.io.*;
import org.zkoss.lang.SystemException;
import org.zkoss.zk.ui.WebApps;
import org.zkoss.zss.api.*;
import org.zkoss.zss.api.model.*;
import org.zkoss.zss.api.model.Book.BookType;
/**
 * Utility methods for creating, exporting and copying spreadsheet
 * {@link Book}s backed by the ZK Spreadsheet importer/exporter API.
 */
public class BookUtil {

	/**
	 * Creates a new blank book of the given type.
	 *
	 * @param bookName name to assign to the imported book
	 * @param type XLS or XLSX
	 * @return the freshly imported blank book
	 * @throws RuntimeException wrapping any IOException from the import
	 */
	static public Book newBook(String bookName, BookType type) {
		try {
			return loadBlankBook(bookName, type);
		} catch (IOException e) {
			throw new RuntimeException(e.getMessage(), e);
		}
	}

	/**
	 * Imports the bundled blank template (/WEB-INF/books/blank.xls[x])
	 * for the requested type.
	 */
	static private Book loadBlankBook(String bookName, BookType type)
			throws IOException {
		Importer importer = Importers.getImporter();
		if (importer == null) {
			throw new RuntimeException("importer for excel not found");
		}
		InputStream is = null;
		switch (type) {
		case XLS:
			is = WebApps.getCurrent().getResourceAsStream("/WEB-INF/books/blank.xls");
			break;
		case XLSX:
			is = WebApps.getCurrent().getResourceAsStream("/WEB-INF/books/blank.xlsx");
			break;
		default:
			// fixed message: was the garbled "Unknow book type" with no separator
			throw new IllegalArgumentException("Unknown book type: " + type);
		}
		// NOTE(review): the stream is handed to the importer without an
		// explicit close here — presumably the importer closes it; confirm.
		return importer.imports(is, bookName);
	}

	// Lazily created temp directory used for exported books.
	static File workingFolder;

	/**
	 * Returns (creating on first use) the working folder under the system
	 * temp directory.
	 *
	 * NOTE(review): workingFolder is not volatile, so this double-checked
	 * initialization is not strictly safe under the Java memory model;
	 * behavior kept as-is — confirm whether concurrent first calls matter.
	 */
	static public File getWorkingFolder() {
		if (workingFolder == null) {
			synchronized (BookUtil.class) {
				if (workingFolder == null) {
					workingFolder = new File(
							System.getProperty("java.io.tmpdir"), "zsswrk");
					if (!workingFolder.exists()) {
						if (!workingFolder.mkdirs()) {
							throw new SystemException(
									"Can't get working folder:"
											+ workingFolder.getAbsolutePath());
						}
					}
				}
			}
		}
		return workingFolder;
	}

	/**
	 * Suggests a download file name for the book: its base name (extension
	 * stripped) plus the extension matching the book type.
	 */
	static public String suggestName(Book book) {
		String ext = book.getType() == BookType.XLS ? ".xls" : ".xlsx";
		return baseName(book.getBookName()) + ext;
	}

	/**
	 * Exports the book to a timestamp-named temporary file in the working
	 * folder and returns that file.
	 *
	 * (The previous version also computed the book's base name here, but the
	 * value was never used — that dead code has been removed.)
	 */
	static public File saveBookToTemp(Book book) throws IOException {
		Exporter exporter = Exporters.getExporter("excel");
		String ext = book.getType() == BookType.XLS ? ".xls" : ".xlsx";
		File f = File.createTempFile(Long.toString(System.currentTimeMillis()), ext, getWorkingFolder());
		FileOutputStream fos = null;
		try {
			fos = new FileOutputStream(f);
			exporter.export(book, fos);
		} finally {
			if (fos != null) {
				fos.close();
			}
		}
		return f;
	}

	/**
	 * Copies one sheet into a brand-new XLSX book and returns it.
	 * The template's original first sheet is removed after the clone.
	 */
	static public Book copySheetToNewBook(String bookName, Sheet sheet) {
		Book newBook = newBook(bookName, BookType.XLSX);
		Ranges.range(newBook).cloneSheetFrom(sheet.getSheetName(), sheet);
		Ranges.range(newBook.getSheetAt(0)).deleteSheet();
		return newBook;
	}

	/**
	 * Strips a trailing ".ext" from a book name; a name that is nothing but
	 * an extension (dot at index 0) becomes "book", and a name with no dot
	 * is returned unchanged. Shared by suggestName (and any future callers).
	 */
	static private String baseName(String bookName) {
		int i = bookName.lastIndexOf('.');
		if (i == 0) {
			return "book";
		}
		if (i > 0) {
			return bookName.substring(0, i);
		}
		return bookName;
	}
}
| {
"content_hash": "7637489155e68994085f613b6bfcb3e2",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 97,
"avg_line_length": 25.045871559633028,
"alnum_prop": 0.671062271062271,
"repo_name": "zkoss/zssessentials",
"id": "04be605dcd3da1dd52d91aeee79b92930d29acbc",
"size": "2730",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/zkoss/zss/essential/util/BookUtil.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "251"
},
{
"name": "Java",
"bytes": "142735"
},
{
"name": "JavaScript",
"bytes": "10004"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!--199 Real Estate Settlement Information-->
<TransactionSpecification TransactionSetIdentifierCode="199" xmlns="http://tempuri.org/X12ParserSpecification.xsd">
<Segment SegmentId="BGN" />
<Loop LoopId="N1">
<Name>Name</Name>
<StartingSegment SegmentId="N1" />
<Segment SegmentId="N2" />
<Segment SegmentId="N3" />
<Segment SegmentId="N4" />
<Segment SegmentId="REF" />
<Segment SegmentId="PER" />
</Loop>
<Loop LoopId="LX">
<Name>Assigned Number</Name>
<StartingSegment SegmentId="LX" />
<Segment SegmentId="REF" />
<Segment SegmentId="LRQ" />
<Segment SegmentId="LN1" />
<Segment SegmentId="DTM" />
<Segment SegmentId="III" />
<Segment SegmentId="NTE" />
<Loop LoopId="NX1">
<Name>Property or Entity Identification</Name>
<StartingSegment SegmentId="NX1" />
<Segment SegmentId="NX2" />
<Segment SegmentId="PDS" />
<Segment SegmentId="PDE" />
</Loop>
<Loop LoopId="IN1">
<Name>Individual Identification</Name>
<StartingSegment SegmentId="IN1" />
<Segment SegmentId="IN2" />
<Segment SegmentId="N3" />
<Segment SegmentId="N4" />
<Segment SegmentId="PER" />
</Loop>
<Loop LoopId="FGS">
<Name>Form Group</Name>
<StartingSegment SegmentId="FGS" />
<Loop LoopId="AMT">
<Name>Monetary Amount</Name>
<StartingSegment SegmentId="AMT" />
<Segment SegmentId="YNQ" />
<Segment SegmentId="NM1" />
<Segment SegmentId="NTE" />
<Segment SegmentId="DTP" />
<Segment SegmentId="QTY" />
<Segment SegmentId="PCT" />
</Loop>
</Loop>
</Loop>
</TransactionSpecification> | {
"content_hash": "483fe33648bcf5f4bb93502c6a2422ad",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 115,
"avg_line_length": 32.79245283018868,
"alnum_prop": 0.6064441887226697,
"repo_name": "ygrinev/ygrinev",
"id": "08871600ed8fd73ad10a2eb407c946d1f9ad339d",
"size": "1740",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Study/EDI/x12parser/x12parser/trunk/src/OopFactory.X12/Specifications/Ansi-199-4010Specification.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ASP",
"bytes": "52500"
},
{
"name": "Batchfile",
"bytes": "55480"
},
{
"name": "C#",
"bytes": "4763975"
},
{
"name": "CSS",
"bytes": "873906"
},
{
"name": "HTML",
"bytes": "2429022"
},
{
"name": "Java",
"bytes": "9095"
},
{
"name": "JavaScript",
"bytes": "8614632"
},
{
"name": "PHP",
"bytes": "4319"
},
{
"name": "Pascal",
"bytes": "402824"
},
{
"name": "PowerShell",
"bytes": "1379876"
},
{
"name": "Puppet",
"bytes": "2916"
},
{
"name": "Shell",
"bytes": "306"
},
{
"name": "TypeScript",
"bytes": "549603"
},
{
"name": "Visual Basic",
"bytes": "9214"
},
{
"name": "XSLT",
"bytes": "110282"
}
],
"symlink_target": ""
} |
<?php namespace models\admin;
/**
 * Admin-side data-access model for blog posts.
 *
 * All queries run through the framework's database helper exposed as
 * $this->_db by \core\model; table names are prefixed with the PREFIX
 * constant.
 */
class Posts extends \core\model {

	/**
	 * Fetches every post joined with its category title, newest first.
	 * Posts are matched to categories via catID (implicit join).
	 */
	public function getposts(){
		return $this->_db->select("
			SELECT
			".PREFIX."posts.postID,
			".PREFIX."posts.postTitle,
			".PREFIX."posts.postDate,
			".PREFIX."post_cats.catTitle
			FROM
			".PREFIX."posts,
			".PREFIX."post_cats
			WHERE
			".PREFIX."posts.catID = ".PREFIX."post_cats.catID
			ORDER BY
			postID DESC");
	}

	/**
	 * Fetches a single post by id (bound as a query parameter).
	 *
	 * @param mixed $id postID to look up
	 */
	public function getpost($id){
		return $this->_db->select("SELECT * FROM ".PREFIX."posts WHERE postID = :id",array(':id' => $id));
	}

	/**
	 * Inserts a new post row.
	 *
	 * @param array $data column => value map
	 */
	public function insert_post($data){
		$this->_db->insert(PREFIX."posts",$data);
	}

	/**
	 * Updates existing post row(s).
	 *
	 * @param array $data  column => value map
	 * @param array $where condition selecting the row(s) to update
	 */
	public function update_post($data,$where){
		$this->_db->update(PREFIX."posts",$data, $where);
	}

	/**
	 * Deletes post row(s) matching the condition.
	 *
	 * @param array $where condition map
	 */
	public function delete_post($where){
		$this->_db->delete(PREFIX."posts",$where);
	}
}
"content_hash": "0a2ccc79ebbe301129956e40da88fa6a",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 100,
"avg_line_length": 22.324324324324323,
"alnum_prop": 0.6295399515738499,
"repo_name": "simple-mvc-framework/build-a-blog",
"id": "a250d5790039e4bbaa54b07b0838ff2464632a49",
"size": "826",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/models/admin/posts.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11701"
},
{
"name": "JavaScript",
"bytes": "12146"
},
{
"name": "PHP",
"bytes": "369520"
}
],
"symlink_target": ""
} |
define(['../internal/createCompounder'], function(createCompounder) {

  /**
   * Converts `string` to camel case.
   * See [Wikipedia](https://en.wikipedia.org/wiki/CamelCase) for more details.
   *
   * The compounder callback lowercases each word; the first word is appended
   * as-is, every later word is appended with its first letter capitalized.
   *
   * @static
   * @memberOf _
   * @category String
   * @param {string} [string=''] The string to convert.
   * @returns {string} Returns the camel cased string.
   * @example
   *
   * _.camelCase('Foo Bar');
   * // => 'fooBar'
   *
   * _.camelCase('--foo-bar');
   * // => 'fooBar'
   *
   * _.camelCase('__foo_bar__');
   * // => 'fooBar'
   */
  var camelCase = createCompounder(function(result, word, index) {
    var lower = word.toLowerCase();
    if (!index) {
      return result + lower;
    }
    return result + lower.charAt(0).toUpperCase() + lower.slice(1);
  });

  return camelCase;
});
| {
"content_hash": "241bd44d32b3590e436a7818641d8e71",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 84,
"avg_line_length": 26.379310344827587,
"alnum_prop": 0.5843137254901961,
"repo_name": "tomek-f/garbage",
"id": "88f374b537991d0d2c6897bf880785466b78270b",
"size": "765",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "require-js-amd/require-zepto-lodash/static/js/lib/lodash-amd/string/camelCase.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "24"
},
{
"name": "CSS",
"bytes": "33788"
},
{
"name": "HTML",
"bytes": "215980"
},
{
"name": "JavaScript",
"bytes": "5306994"
},
{
"name": "Shell",
"bytes": "273"
}
],
"symlink_target": ""
} |
(function($) {

    var Alpaca = $.alpaca;

    Alpaca.CloudCmsConnector = Alpaca.Connector.extend(
    /**
     * @lends Alpaca.CloudCmsConnector.prototype
     */
    {
        /**
         * @constructs
         * @class Connects Alpaca to Cloud CMS
         *
         * @param {String} id Connector ID
         * @param {Object} config Connector Config
         */
        constructor: function(id, config)
        {
            if (!config) {
                config = {};
            }

            // if we're not otherwise configured to use a cache, we default to a memory cache with a 5 minute TTL
            if (!config.cache) {
                config.cache = {
                    "type": "memory",
                    "config": {
                        "ttl": 1000 * 60 * 5 // five minutes
                    }
                };
            }

            this.base(id, config);
        },

        /**
         * Makes initial connections to data source.
         *
         * Reuses Alpaca.globalContext.branch when one is already available;
         * otherwise performs a fresh Gitana connection via doConnect().
         * In both cases the branch is wrapped in a Chain and decorated with
         * the loadAlpaca* helpers before onSuccess fires.
         *
         * @param {Function} onSuccess onSuccess callback.
         * @param {Function} onError onError callback.
         */
        connect: function (onSuccess, onError)
        {
            var self = this;

            // shared completion handler for both connection paths
            var cfn = function(err, branch)
            {
                if (err)
                {
                    onError(err);
                    return;
                }

                if (branch)
                {
                    self.branch = Chain(branch);
                    self.bindHelperFunctions(self.branch);
                }

                onSuccess();
            };

            if (Alpaca.globalContext && Alpaca.globalContext.branch)
            {
                cfn(null, Alpaca.globalContext.branch);
            }
            else
            {
                self.branch = null;

                self.doConnect(function (err, branch) {
                    cfn(err, branch);
                });
            }
        },

        /**
         * Establishes a Gitana driver connection using this.config. When the
         * driver configuration requests the app helper, the "master" branch
         * of the "content" datastore is read and passed to the callback;
         * otherwise the callback fires with no branch.
         *
         * @param {Function} callback callback(err, branch)
         */
        doConnect: function(callback)
        {
            var self = this;

            // default the API key when none is supplied
            if (!this.config.key) {
                this.config.key = "default";
            }

            Gitana.connect(this.config, function(err) {

                if (err) {
                    callback(err);
                    return;
                }

                if (this.getDriver().getOriginalConfiguration().loadAppHelper)
                {
                    this.datastore("content").readBranch("master").then(function() {
                        callback(null, this);
                    });
                }
                else
                {
                    callback();
                }
            });
        },

        /**
         * Attaches loadAlpacaSchema / loadAlpacaOptions / loadAlpacaData /
         * loadAlpacaDataSource helpers onto the branch (only when not already
         * present). Schema and options responses are cached through the
         * connector's cache; data and datasource responses are not.
         *
         * @param {Object} branch Cloud CMS branch to decorate
         */
        bindHelperFunctions: function(branch)
        {
            var self = this;

            if (!branch.loadAlpacaSchema)
            {
                branch.loadAlpacaSchema = function(schemaIdentifier, resources, callback)
                {
                    // serve from the connector cache when possible
                    var cachedDocument = self.cache(schemaIdentifier);
                    if (cachedDocument) {
                        return callback.call(this, null, cachedDocument);
                    }

                    var uriFunction = function()
                    {
                        return branch.getUri() + "/alpaca/schema";
                    };

                    var params = {};
                    params["id"] = schemaIdentifier;

                    return this.chainGetResponse(this, uriFunction, params).then(function(response) {
                        self.cache(schemaIdentifier, response);
                        callback.call(this, null, response);
                    });
                };
            }

            if (!branch.loadAlpacaOptions)
            {
                branch.loadAlpacaOptions = function(optionsIdentifier, resources, callback)
                {
                    // serve from the connector cache when possible
                    var cachedDocument = self.cache(optionsIdentifier);
                    if (cachedDocument) {
                        return callback.call(this, null, cachedDocument);
                    }

                    var uriFunction = function()
                    {
                        return branch.getUri() + "/alpaca/options";
                    };

                    var params = {};
                    params["schemaId"] = resources.schemaSource;
                    params["id"] = optionsIdentifier;

                    return this.chainGetResponse(this, uriFunction, params).then(function(response) {
                        self.cache(optionsIdentifier, response);
                        callback.call(this, null, response);
                    });
                };
            }

            if (!branch.loadAlpacaData)
            {
                branch.loadAlpacaData = function(dataIdentifier, resources, callback)
                {
                    var uriFunction = function()
                    {
                        return branch.getUri() + "/alpaca/data";
                    };

                    var params = {};
                    params["id"] = dataIdentifier;

                    return this.chainGetResponse(this, uriFunction, params).then(function(response) {
                        callback.call(this, null, response);
                    });
                };
            }

            if (!branch.loadAlpacaDataSource)
            {
                branch.loadAlpacaDataSource = function(config, pagination, callback)
                {
                    // pagination travels as request params; config is POSTed
                    var params = {};
                    if (pagination)
                    {
                        Alpaca.copyInto(params, pagination);
                    }

                    var uriFunction = function()
                    {
                        return branch.getUri() + "/alpaca/datasource";
                    };

                    return this.chainPostResponse(this, uriFunction, params, config).then(function(response) {
                        callback.call(this, null, response.datasource);
                    });
                };
            }
        },

        /**
         * Loads data from Cloud CMS.
         *
         * @param {String} nodeId the node id to load
         * @param {Object} resources Map of resources
         * @param {Function} successCallback onSuccess callback
         * @param {Function} errorCallback onError callback
         */
        loadData: function (nodeId, resources, successCallback, errorCallback)
        {
            var self = this;

            // if we didn't connect to a branch, then use the default method
            if (!self.branch)
            {
                return this.base(nodeId, resources, successCallback, errorCallback);
            }

            // load from cloud cms
            self.branch.loadAlpacaData(nodeId, resources, function(err, data) {

                if (err)
                {
                    errorCallback(err);
                    return;
                }

                var obj = null;
                if (data)
                {
                    // deep copy so callers cannot mutate the shared response
                    obj = JSON.parse(JSON.stringify(data));
                }

                successCallback(obj);
            });
        },

        /**
         * Loads json schema from Cloud CMS.
         *
         * @param {Object|String} schemaIdentifier the definition qname to load
         * @param {Object} resources Map of resources
         * @param {Function} successCallback onSuccess callback.
         * @param {Function} errorCallback onError callback.
         */
        loadSchema: function (schemaIdentifier, resources, successCallback, errorCallback)
        {
            var self = this;

            // if we didn't connect to a branch, then use the default method
            if (!self.branch)
            {
                return this.base(schemaIdentifier, resources, successCallback, errorCallback);
            }

            // load from cloud cms
            self.branch.loadAlpacaSchema(schemaIdentifier, resources, function(err, schema) {

                if (err)
                {
                    return errorCallback(err);
                }

                // TODO: cleanup schema

                successCallback(schema);
            });
        },

        /**
         * Loads json options from Cloud CMS and mixes in a standard "submit"
         * button that posts the form value (JSON, with credentials) to the
         * form's action URL, rewritten relative to config.baseURL.
         *
         * @param {Object|String} optionsIdentifier the form key to load
         * @param {Object} resources Map of resources
         * @param {Function} successCallback onSuccess callback.
         * @param {Function} errorCallback onError callback.
         */
        loadOptions: function (optionsIdentifier, resources, successCallback, errorCallback)
        {
            var self = this;

            // if we didn't connect to a branch, then use the default method
            if (!self.branch)
            {
                return this.base(optionsIdentifier, resources, successCallback, errorCallback);
            }

            // load from cloud cms
            self.branch.loadAlpacaOptions(optionsIdentifier, resources, function(err, options) {

                if (err)
                {
                    return errorCallback(err);
                }

                if (!options) {
                    options = {};
                }

                // TODO: cleanup options
                // NOTE(review): the code below assumes options.form exists even
                // when the server returned nothing — confirm an empty response
                // cannot reach this point, otherwise this throws.

                // mix in buttons onto form
                options.form.buttons = {
                    "submit": {
                        "title": "Submit",
                        "click": function(e) {

                            var form = this;

                            var value = this.getValue();
                            if (!value) {
                                value = {};
                            }

                            var promise = this.ajaxSubmit({
                                "xhrFields": {
                                    "withCredentials": true
                                },
                                "crossDomain": true,
                                "processData": false,
                                "data": JSON.stringify(value),
                                "contentType": "application/json; charset=utf-8"
                            });
                            promise.done(function () {
                                form.topControl.trigger("formSubmitSuccess");
                            });
                            promise.fail(function () {
                                form.topControl.trigger("formSubmitFail");
                            });
                        }
                    }
                };

                if (typeof(options.focus) === "undefined")
                {
                    options.focus = Alpaca.defaultFocus;
                }

                // adjust the action handler relative to baseURL
                options.form.attributes.action = self.config.baseURL + options.form.attributes.action;

                successCallback(options);
            });
        },

        /**
         * Loads a referenced JSON schema.
         *
         * Supports qname://{namespace}/{localName}
         *
         * Otherwise, falls back to default implementation.
         *
         * @param {Object|String} schemaIdentifier schema to load
         * @param {Function} successCallback onSuccess callback.
         * @param {Function} errorCallback onError callback.
         */
        loadReferenceSchema: function (schemaIdentifier, successCallback, errorCallback)
        {
            var self = this;

            // if the reference comes in form "qname://{namespace}/{localName}" (which is the Cloud CMS official format)
            // then convert to basic QName which we support here within Alpaca Cloud CMS connector
            if (schemaIdentifier.indexOf("qname://") === 0)
            {
                var parts = schemaIdentifier.substring(8).split("/");
                schemaIdentifier = parts[0] + ":" + parts[1];
            }

            // is it HTTP or HTTPS?
            if ((schemaIdentifier.toLowerCase().indexOf("http://") === 0) || (schemaIdentifier.toLowerCase().indexOf("https://") === 0))
            {
                // load JSON from endpoint
                return this._handleLoadJsonResource(schemaIdentifier, successCallback, errorCallback);
            }

            var resources = null;

            // otherwise assume it is a QName
            return self.loadSchema(schemaIdentifier, resources, successCallback, errorCallback);
        },

        /**
         * Loads referenced JSON options.
         *
         * Supports qname://{namespace}/{localName}/{formKey}
         *
         * Otherwise, falls back to default implementation.
         *
         * @param {Object|String} optionsIdentifier form to load.
         * @param {Function} successCallback onSuccess callback.
         * @param {Function} errorCallback onError callback.
         */
        loadReferenceOptions: function (optionsIdentifier, successCallback, errorCallback)
        {
            var self = this;

            // is it HTTP or HTTPS?
            if ((optionsIdentifier.toLowerCase().indexOf("http://") === 0) || (optionsIdentifier.toLowerCase().indexOf("https://") === 0))
            {
                // load JSON from endpoint
                return this._handleLoadJsonResource(optionsIdentifier, successCallback, errorCallback);
            }

            var resources = null;

            // if the reference comes in form "qname://{namespace}/{localName}/{formKey}" (which is the Cloud CMS official format)
            // then convert to basic QName which we support here within Alpaca Cloud CMS connector
            if (optionsIdentifier.indexOf("qname://") === 0)
            {
                var parts = optionsIdentifier.substring(8).split("/");
                if (parts.length > 2)
                {
                    // qname
                    resources = {};
                    resources.schemaSource = parts[0] + ":" + parts[1];

                    // form id
                    optionsIdentifier = parts[2];

                    return self.loadOptions(optionsIdentifier, resources, successCallback, errorCallback);
                }
            }

            // NOTE(review): any identifier that is neither HTTP(S) nor a
            // three-part qname falls through to a null result — confirm
            // callers expect that.
            successCallback(null);
        },

        /**
         * Loads data source elements based on a content query to Cloud CMS.
         *
         * @param config datasource query config; config.pagination (if any)
         *        is extracted and sent as request parameters
         * @param successCallback
         * @param errorCallback
         * @returns {*}
         */
        loadDataSource: function (config, successCallback, errorCallback)
        {
            var self = this;

            // if we didn't connect to a branch, then use the default method
            if (!self.branch)
            {
                return this.base(config, successCallback, errorCallback);
            }

            // NOTE: this mutates the caller's config by removing pagination
            var pagination = config.pagination;
            delete config.pagination;

            return self.branch.loadAlpacaDataSource(config, pagination, function(err, array) {
                if (err) {
                    errorCallback(err);
                    return;
                }

                successCallback(array);
            });
        }

    });

    Alpaca.registerConnectorClass("cloudcms", Alpaca.CloudCmsConnector);

})(jQuery);
| {
"content_hash": "07571a1b6927c5ed0db532c695121491",
"timestamp": "",
"source": "github",
"line_count": 465,
"max_line_length": 138,
"avg_line_length": 32.83010752688172,
"alnum_prop": 0.4572907113847766,
"repo_name": "equinton/prototypephp",
"id": "69d986237f3e7f98cf51f3a495481ba0442b3854",
"size": "15266",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "display/node_modules/alpaca/src/js/connectors/cloudcms.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "22"
},
{
"name": "CSS",
"bytes": "245930"
},
{
"name": "HTML",
"bytes": "6971441"
},
{
"name": "JavaScript",
"bytes": "531684"
},
{
"name": "PHP",
"bytes": "698384"
},
{
"name": "SCSS",
"bytes": "80205"
},
{
"name": "Shell",
"bytes": "14526"
},
{
"name": "Smarty",
"bytes": "73392"
}
],
"symlink_target": ""
} |
package user
import (
"context"
"github.com/ezbuy/ezorm/v2/pkg/orm"
"go.mongodb.org/mongo-driver/bson"
"go.mongodb.org/mongo-driver/bson/primitive"
"go.mongodb.org/mongo-driver/mongo"
"go.mongodb.org/mongo-driver/mongo/options"
)
// UserIndexes declares the mongo indexes ensured for the test_user
// collection: a compound (Username, Age) index plus single-field indexes
// on Username and Age.
var UserIndexes = []mongo.IndexModel{
	{
		Keys: UserIndexKey_UsernameAge,
	},
	{
		Keys: UserIndexKey_Username,
	},
	{
		Keys: UserIndexKey_Age,
	},
}

// UserIndexesFunc creates the indexes above on the backing collection
// when invoked (typically during application setup).
var UserIndexesFunc = func() {
	orm.SetupIndexModel(Col("test_user"), UserIndexes)
}

// Index key definitions; the value 1 selects ascending order.
var UserIndexKey_UsernameAge = bson.D{
	{Key: "Username", Value: 1},
	{Key: "Age", Value: 1},
}

var UserIndexKey_Username = bson.D{
	{Key: "Username", Value: 1},
}

var UserIndexKey_Age = bson.D{
	{Key: "Age", Value: 1},
}
// init registers the by-ID lookup and removal adapters with the ezorm
// object registry under the ("mongo_e2e", "User") key.
func init() {
	orm.RegisterEzOrmObjByID("mongo_e2e", "User", newUserFindByID)
	orm.RegisterEzOrmObjRemove("mongo_e2e", "User", newUserRemoveByID)
}

// newUserFindByID adapts UserMgr.FindByID to the registry's generic signature.
func newUserFindByID(id string) (result orm.EzOrmObj, err error) {
	return UserMgr.FindByID(context.TODO(), id)
}

// newUserRemoveByID adapts UserMgr.RemoveByID to the registry's generic signature.
func newUserRemoveByID(id string) error {
	return UserMgr.RemoveByID(context.TODO(), id)
}
// =====================================
// INSERT METHODS
// =====================================
var (
	// Callbacks fired after a User is inserted or updated through Save /
	// InsertUnique. Registration is not synchronized; register at startup.
	insertUserCBs []func(obj orm.EzOrmObj)
	updateUserCBs []func(obj orm.EzOrmObj)
)

// UserAddInsertCallback registers cb to run after each successful insert.
func UserAddInsertCallback(cb func(obj orm.EzOrmObj)) {
	insertUserCBs = append(insertUserCBs, cb)
}

// UserAddUpdateCallback registers cb to run after each successful update.
func UserAddUpdateCallback(cb func(obj orm.EzOrmObj)) {
	updateUserCBs = append(updateUserCBs, cb)
}
// Id returns the document's ObjectID rendered as a hex string.
func (o *User) Id() string {
	return o.ID.Hex()
}
// Save upserts the document keyed by its ObjectID. A zero ID is replaced
// with a freshly generated one before the write so the upsert filter is
// stable. After a successful write, the registered insert or update
// callbacks fire depending on whether the object was new.
func (o *User) Save(ctx context.Context) (*mongo.UpdateResult, error) {
	isNew := o.isNew

	if o.ID == primitive.NilObjectID {
		o.ID = primitive.NewObjectID()
	}

	filter := bson.M{"_id": o.ID}
	update := bson.M{
		"$set": bson.M{
			UserMgoFieldUserId:       o.UserId,
			UserMgoFieldUsername:     o.Username,
			UserMgoFieldAge:          o.Age,
			UserMgoFieldRegisterDate: o.RegisterDate,
		},
	}

	opts := options.Update().SetUpsert(true)

	col := UserMgr.GetCol()
	ret, err := col.UpdateOne(ctx, filter, update, opts)
	if err != nil {
		return ret, err
	}

	// When the server performed an insert, adopt the ID it reports.
	if ret.UpsertedID != nil {
		if id, ok := ret.UpsertedID.(primitive.ObjectID); ok {
			o.ID = id
		}
	}

	o.isNew = false
	if isNew {
		UserInsertCallback(o)
	} else {
		UserUpdateCallback(o)
	}

	return ret, err
}
// InsertUnique inserts the document only when no document matches query,
// using $setOnInsert under an upsert so an existing match is left
// untouched. saved reports whether a new document was actually created;
// insert callbacks fire only in that case.
func (o *User) InsertUnique(ctx context.Context, query interface{}) (saved bool, err error) {
	update := bson.M{
		"$setOnInsert": bson.M{
			UserMgoFieldID:           o.ID,
			UserMgoFieldUserId:       o.UserId,
			UserMgoFieldUsername:     o.Username,
			UserMgoFieldAge:          o.Age,
			UserMgoFieldRegisterDate: o.RegisterDate,
		},
	}
	opts := options.Update().SetUpsert(true)

	col := UserMgr.GetCol()
	ret, err := col.UpdateOne(ctx, query, update, opts)
	if err != nil {
		return false, err
	}
	// UpsertedCount is non-zero only when the server inserted a document.
	if ret.UpsertedCount != 0 {
		saved = true
	}

	o.isNew = false
	if saved {
		UserInsertCallback(o)
	}
	return saved, nil
}
// UserInsertCallback runs every registered insert callback against o.
func UserInsertCallback(o *User) {
	for _, cb := range insertUserCBs {
		cb(o)
	}
}

// UserUpdateCallback runs every registered update callback against o.
func UserUpdateCallback(o *User) {
	for _, cb := range updateUserCBs {
		cb(o)
	}
}
// =====================================
// FOREIGN KEYS
// =====================================
// =====================================
// COLLECTION
// =====================================
// FindOne returns the first document matching query, optionally sorted by
// sortFields (nil means server order). The driver reports
// mongo.ErrNoDocuments when nothing matches.
func (o *_UserMgr) FindOne(ctx context.Context, query interface{}, sortFields interface{}) (result *User, err error) {
	findOpts := options.FindOne()
	if sortFields != nil {
		findOpts.SetSort(sortFields)
	}

	res := o.GetCol().FindOne(ctx, query, findOpts)
	if err = res.Err(); err != nil {
		return nil, err
	}

	err = res.Decode(&result)
	return result, err
}
// Query runs query against the collection and returns the raw cursor.
// Non-positive limit/offset values are ignored; sortFields may be nil.
func (o *_UserMgr) Query(ctx context.Context, query interface{}, limit, offset int, sortFields interface{}) (*mongo.Cursor, error) {
	findOpts := options.Find()
	if limit > 0 {
		findOpts.SetLimit(int64(limit))
	}
	if offset > 0 {
		findOpts.SetSkip(int64(offset))
	}
	if sortFields != nil {
		findOpts.SetSort(sortFields)
	}

	return o.GetCol().Find(ctx, query, findOpts)
}
// FindByUsernameAge returns documents matching both Username and Age,
// honoring limit/offset/sortFields. Delegates to Find instead of
// duplicating the Query + cursor.All boilerplate.
func (o *_UserMgr) FindByUsernameAge(ctx context.Context, Username string, Age int32, limit int, offset int, sortFields interface{}) (result []*User, err error) {
	query := bson.M{
		"Username": Username,
		"Age":      Age,
	}
	return o.Find(ctx, query, limit, offset, sortFields)
}
// FindByUsername returns documents with the given Username, honoring
// limit/offset/sortFields. Delegates to Find instead of duplicating the
// Query + cursor.All boilerplate.
func (o *_UserMgr) FindByUsername(ctx context.Context, Username string, limit int, offset int, sortFields interface{}) (result []*User, err error) {
	query := bson.M{
		"Username": Username,
	}
	return o.Find(ctx, query, limit, offset, sortFields)
}
// FindByAge returns documents with the given Age, honoring
// limit/offset/sortFields. Delegates to Find instead of duplicating the
// Query + cursor.All boilerplate.
func (o *_UserMgr) FindByAge(ctx context.Context, Age int32, limit int, offset int, sortFields interface{}) (result []*User, err error) {
	query := bson.M{
		"Age": Age,
	}
	return o.Find(ctx, query, limit, offset, sortFields)
}
// Find returns up to limit matching documents, skipping offset, sorted by
// sortFields. Non-positive limit/offset disable the corresponding option.
func (o *_UserMgr) Find(ctx context.Context, query interface{}, limit int, offset int, sortFields interface{}) (result []*User, err error) {
	cur, err := o.Query(ctx, query, limit, offset, sortFields)
	if err != nil {
		return nil, err
	}
	if err = cur.All(ctx, &result); err != nil {
		return nil, err
	}
	return result, nil
}
// FindAll returns every document matching query, sorted by sortFields.
// Delegates to Find with limit/offset disabled instead of duplicating the
// Query + cursor.All boilerplate.
func (o *_UserMgr) FindAll(ctx context.Context, query interface{}, sortFields interface{}) (result []*User, err error) {
	return o.Find(ctx, query, -1, -1, sortFields)
}
// Has reports whether at least one document matches query.
// Count errors are treated as "not found".
func (o *_UserMgr) Has(ctx context.Context, query interface{}) bool {
	n, err := o.CountE(ctx, query)
	return err == nil && n != 0
}
// Count returns the number of documents matching query.
// NOTE: any error from the underlying count is swallowed and reported as 0;
// use CountE when the error matters.
func (o *_UserMgr) Count(ctx context.Context, query interface{}) int {
	count, _ := o.CountE(ctx, query)
	return count
}
// CountE counts the documents matching query, surfacing any driver error.
func (o *_UserMgr) CountE(ctx context.Context, query interface{}) (int, error) {
	n, err := o.GetCol().CountDocuments(ctx, query)
	return int(n), err
}
// FindByIDs fetches the documents whose hex ObjectIDs appear in id.
// Malformed ids are silently skipped; result order follows sortFields,
// not the order of id.
func (o *_UserMgr) FindByIDs(ctx context.Context, id []string, sortFields interface{}) (result []*User, err error) {
	oids := make([]primitive.ObjectID, 0, len(id))
	for _, hexID := range id {
		if oid, convErr := primitive.ObjectIDFromHex(hexID); convErr == nil {
			oids = append(oids, oid)
		}
	}
	return o.FindAll(ctx, bson.M{"_id": bson.M{"$in": oids}}, sortFields)
}
// FindByID fetches a single document by its hex ObjectID.
// A malformed id is reported as mongo.ErrNoDocuments.
func (o *_UserMgr) FindByID(ctx context.Context, id string) (result *User, err error) {
	oid, convErr := primitive.ObjectIDFromHex(id)
	if convErr != nil {
		return nil, mongo.ErrNoDocuments
	}

	res := o.GetCol().FindOne(ctx, bson.M{"_id": oid})
	if err = res.Err(); err != nil {
		return nil, err
	}

	err = res.Decode(&result)
	return result, err
}
// RemoveAll deletes every document matching query (all documents when
// query is nil) and returns the number removed.
func (o *_UserMgr) RemoveAll(ctx context.Context, query interface{}) (int64, error) {
	if query == nil {
		query = bson.M{}
	}

	res, err := o.GetCol().DeleteMany(ctx, query)
	if err != nil {
		return 0, err
	}
	return res.DeletedCount, nil
}
// RemoveByID deletes the document with the given hex ObjectID.
// A malformed id is reported as mongo.ErrNoDocuments.
func (o *_UserMgr) RemoveByID(ctx context.Context, id string) (err error) {
	oid, convErr := primitive.ObjectIDFromHex(id)
	if convErr != nil {
		return mongo.ErrNoDocuments
	}

	_, err = o.GetCol().DeleteOne(ctx, bson.M{"_id": oid})
	return err
}
// GetCol returns the backing mongo collection ("test_user").
func (m *_UserMgr) GetCol() *mongo.Collection {
	return Col("test_user")
}
| {
"content_hash": "95b87600c8245e66507449b81cadb9c1",
"timestamp": "",
"source": "github",
"line_count": 317,
"max_line_length": 162,
"avg_line_length": 23.066246056782333,
"alnum_prop": 0.6396334792122538,
"repo_name": "ezbuy/ezorm",
"id": "134a64f2e568a3eae5f10b206e7e1f93f5c01701",
"size": "7312",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "e2e/mongo/user/gen_User_mongo_orm.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "251109"
},
{
"name": "Makefile",
"bytes": "1280"
}
],
"symlink_target": ""
} |
package com.google.api.client.xml;
import com.google.api.client.util.Key;
import com.google.api.client.xml.atom.Atom;
import com.google.common.collect.ImmutableMap;
import java.io.StringWriter;
import java.util.Collection;
import java.util.TreeSet;
import junit.framework.TestCase;
import org.xmlpull.v1.XmlSerializer;
/**
* Tests {@link XmlNamespaceDictionary}.
*
* @author Yaniv Inbar
*/
public class XmlNamespaceDictionaryTest extends TestCase {
private static final String EXPECTED =
"<?xml version=\"1.0\"?><feed xmlns=\"http://www.w3"
+ ".org/2005/Atom\" xmlns:gd=\"http://schemas.google.com/g/2005\"><entry gd:etag=\"abc"
+ "\"><title>One</title></entry><entry gd:etag=\"def\"><title>Two</title></entry></feed>";
private static final String EXPECTED_EMPTY_MAP =
"<?xml version=\"1.0\"?><entry xmlns" + "=\"http://www.w3.org/2005/Atom\" />";
private static final String EXPECTED_EMPTY_MAP_NS_UNDECLARED =
"<?xml version=\"1.0\"?><n1" + ":entry xmlns:n1=\"http://www.w3.org/2005/Atom\" />";
private static final String EXPECTED_EMPTY_MAP_ATOM_NS =
"<?xml version=\"1.0\"?><atom" + ":entry xmlns:atom=\"http://www.w3.org/2005/Atom\" />";
private static final String EXPECTED_UNKNOWN_NS =
"<?xml version=\"1.0\"?><feed xmlns"
+ "=\"http://unknown/\" xmlns:gd=\"http://unknown/gd\"><entry gd:etag=\"abc\"><title>One"
+ "</title></entry><entry gd:etag=\"def\"><title>Two</title></entry></feed>";
public XmlNamespaceDictionaryTest() {}
public XmlNamespaceDictionaryTest(String name) {
super(name);
}
public void testSet() {
XmlNamespaceDictionary dictionary = new XmlNamespaceDictionary();
dictionary.set("", "http://www.w3.org/2005/Atom").set("gd", "http://schemas.google.com/g/2005");
assertEquals("http://www.w3.org/2005/Atom", dictionary.getUriForAlias(""));
assertEquals("", dictionary.getAliasForUri("http://www.w3.org/2005/Atom"));
dictionary.set("", "http://www.w3.org/2006/Atom");
assertEquals("http://www.w3.org/2006/Atom", dictionary.getUriForAlias(""));
assertNull(dictionary.getAliasForUri("http://www.w3.org/2005/Atom"));
assertEquals("", dictionary.getAliasForUri("http://www.w3.org/2006/Atom"));
dictionary.set("foo", "http://www.w3.org/2006/Atom");
assertEquals("http://www.w3.org/2006/Atom", dictionary.getUriForAlias("foo"));
assertNull(dictionary.getUriForAlias(""));
assertEquals("foo", dictionary.getAliasForUri("http://www.w3.org/2006/Atom"));
dictionary.set("foo", "http://schemas.google.com/g/2005");
assertEquals("http://schemas.google.com/g/2005", dictionary.getUriForAlias("foo"));
assertNull(dictionary.getUriForAlias("gd"));
assertNull(dictionary.getAliasForUri("http://www.w3.org/2006/Atom"));
dictionary.set(null, null);
assertEquals("http://schemas.google.com/g/2005", dictionary.getUriForAlias("foo"));
dictionary.set("foo", null);
assertTrue(dictionary.getAliasToUriMap().isEmpty());
dictionary
.set("foo", "http://schemas.google.com/g/2005")
.set(null, "http://schemas.google.com/g/2005");
assertTrue(dictionary.getAliasToUriMap().isEmpty());
}
public void testSerialize() throws Exception {
Feed feed = new Feed();
feed.entries = new TreeSet<Entry>();
feed.entries.add(new Entry("One", "abc"));
feed.entries.add(new Entry("Two", "def"));
StringWriter writer = new StringWriter();
XmlSerializer serializer = Xml.createSerializer();
serializer.setOutput(writer);
XmlNamespaceDictionary namespaceDictionary = new XmlNamespaceDictionary();
namespaceDictionary.set("", Atom.ATOM_NAMESPACE);
namespaceDictionary.set("gd", "http://schemas.google.com/g/2005");
namespaceDictionary.serialize(serializer, Atom.ATOM_NAMESPACE, "feed", feed);
assertEquals(EXPECTED, writer.toString());
}
/** Serializing by element name alone (no namespace URI argument) should yield the same XML. */
public void testSerializeByName() throws Exception {
  Feed sampleFeed = new Feed();
  sampleFeed.entries = new TreeSet<Entry>();
  sampleFeed.entries.add(new Entry("One", "abc"));
  sampleFeed.entries.add(new Entry("Two", "def"));
  StringWriter out = new StringWriter();
  XmlSerializer xmlSerializer = Xml.createSerializer();
  xmlSerializer.setOutput(out);
  XmlNamespaceDictionary dictionary =
      new XmlNamespaceDictionary()
          .set("", Atom.ATOM_NAMESPACE)
          .set("gd", "http://schemas.google.com/g/2005");
  dictionary.serialize(xmlSerializer, "feed", sampleFeed);
  assertEquals(EXPECTED, out.toString());
}
/** An empty map serialized with an explicit namespace URI produces an empty element. */
public void testSerialize_emptyMap() throws Exception {
  ImmutableMap<String, String> emptyContent = ImmutableMap.of();
  StringWriter out = new StringWriter();
  XmlSerializer xmlSerializer = Xml.createSerializer();
  xmlSerializer.setOutput(out);
  XmlNamespaceDictionary dictionary = new XmlNamespaceDictionary().set("", Atom.ATOM_NAMESPACE);
  dictionary.serialize(xmlSerializer, Atom.ATOM_NAMESPACE, "entry", emptyContent);
  assertEquals(EXPECTED_EMPTY_MAP, out.toString());
}
/** An empty map serialized by element name produces the same empty element. */
public void testSerializeByName_emptyMap() throws Exception {
  ImmutableMap<String, String> emptyContent = ImmutableMap.of();
  StringWriter out = new StringWriter();
  XmlSerializer xmlSerializer = Xml.createSerializer();
  xmlSerializer.setOutput(out);
  XmlNamespaceDictionary dictionary = new XmlNamespaceDictionary().set("", Atom.ATOM_NAMESPACE);
  dictionary.serialize(xmlSerializer, "entry", emptyContent);
  assertEquals(EXPECTED_EMPTY_MAP, out.toString());
}
/** Serializing by a prefixed name ("atom:entry") should use the registered "atom" alias. */
public void testSerializeByName_emptyMapAtomNs() throws Exception {
  ImmutableMap<String, String> emptyContent = ImmutableMap.of();
  StringWriter out = new StringWriter();
  XmlSerializer xmlSerializer = Xml.createSerializer();
  xmlSerializer.setOutput(out);
  XmlNamespaceDictionary dictionary =
      new XmlNamespaceDictionary().set("atom", Atom.ATOM_NAMESPACE);
  dictionary.serialize(xmlSerializer, "atom:entry", emptyContent);
  assertEquals(EXPECTED_EMPTY_MAP_ATOM_NS, out.toString());
}
/**
 * Serializing with a namespace URI that was never declared in the dictionary:
 * an empty map still serializes (the namespace is handled inline).
 */
public void testSerialize_emptyMapNsUndeclared() throws Exception {
  ImmutableMap<String, String> emptyContent = ImmutableMap.of();
  StringWriter out = new StringWriter();
  XmlSerializer xmlSerializer = Xml.createSerializer();
  xmlSerializer.setOutput(out);
  // Intentionally no set(...) calls: the dictionary has no declared aliases.
  new XmlNamespaceDictionary()
      .serialize(xmlSerializer, Atom.ATOM_NAMESPACE, "entry", emptyContent);
  assertEquals(EXPECTED_EMPTY_MAP_NS_UNDECLARED, out.toString());
}
/**
 * Serializing an object whose fields need an undeclared default alias must fail
 * with an IllegalArgumentException naming the missing alias.
 */
public void testSerialize_errorOnUnknown() throws Exception {
  Entry sampleEntry = new Entry("One", "abc");
  StringWriter out = new StringWriter();
  XmlSerializer xmlSerializer = Xml.createSerializer();
  xmlSerializer.setOutput(out);
  XmlNamespaceDictionary emptyDictionary = new XmlNamespaceDictionary();
  try {
    emptyDictionary.serialize(xmlSerializer, Atom.ATOM_NAMESPACE, "entry", sampleEntry);
    fail("expected IllegalArgumentException");
  } catch (IllegalArgumentException expected) {
    assertEquals("unrecognized alias: (default)", expected.getMessage());
  }
}
/**
 * Serializing by name with no declared aliases must also fail with an
 * IllegalArgumentException naming the missing default alias.
 */
public void testSerializeByName_errorOnUnknown() throws Exception {
  Entry sampleEntry = new Entry("One", "abc");
  StringWriter out = new StringWriter();
  XmlSerializer xmlSerializer = Xml.createSerializer();
  xmlSerializer.setOutput(out);
  XmlNamespaceDictionary emptyDictionary = new XmlNamespaceDictionary();
  try {
    emptyDictionary.serialize(xmlSerializer, "entry", sampleEntry);
    fail("expected IllegalArgumentException");
  } catch (IllegalArgumentException expected) {
    assertEquals("unrecognized alias: (default)", expected.getMessage());
  }
}
/**
 * toStringOf with no declared namespaces should invent aliases for the unknown
 * namespaces rather than failing.
 *
 * <p>Fix: the original created a StringWriter and an XmlSerializer that were
 * never used — {@code toStringOf} does its own serialization — so the dead
 * locals are removed.
 */
public void testSerialize_unknown() throws Exception {
  Feed feed = new Feed();
  feed.entries = new TreeSet<Entry>();
  feed.entries.add(new Entry("One", "abc"));
  feed.entries.add(new Entry("Two", "def"));
  XmlNamespaceDictionary namespaceDictionary = new XmlNamespaceDictionary();
  assertEquals(EXPECTED_UNKNOWN_NS, namespaceDictionary.toStringOf("feed", feed));
}
/**
 * Sample entry with a {@code title} element and a {@code gd:etag} attribute;
 * entries sort alphabetically by title.
 */
public static class Entry implements Comparable<Entry> {

  /** Serialized as the {@code title} element. */
  @Key public String title;

  /** Serialized as the {@code @gd:etag} attribute. */
  @Key("@gd:etag")
  public String etag;

  public Entry(String title, String etag) {
    this.title = title;
    this.etag = etag;
  }

  /** Orders entries by title so a TreeSet yields deterministic XML output. */
  @Override
  public int compareTo(Entry other) {
    return title.compareTo(other.title);
  }
}
/** Sample feed: each element of {@code entries} is serialized as an {@code entry} element. */
public static class Feed {
  // Mapped to repeated <entry> elements by the @Key annotation.
  @Key("entry")
  public Collection<Entry> entries;
}
}
| {
"content_hash": "5c20532f3ebec3a704fd9ffbb31f796f",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 100,
"avg_line_length": 42.858536585365854,
"alnum_prop": 0.7029364898702481,
"repo_name": "googleapis/google-http-java-client",
"id": "569477010dc634d441ce489944e87213872621c2",
"size": "9374",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google-http-client-xml/src/test/java/com/google/api/client/xml/XmlNamespaceDictionaryTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "801"
},
{
"name": "HTML",
"bytes": "769"
},
{
"name": "Java",
"bytes": "1588457"
},
{
"name": "Python",
"bytes": "952"
},
{
"name": "Shell",
"bytes": "25995"
}
],
"symlink_target": ""
} |
namespace ApplicationGateway
{
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// VirtualNetworkGatewaysOperations operations.
/// </summary>
internal partial class VirtualNetworkGatewaysOperations : IServiceOperations<NetworkClient>, IVirtualNetworkGatewaysOperations
{
/// <summary>
/// Initializes a new instance of the VirtualNetworkGatewaysOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when <paramref name="client"/> is null.
/// </exception>
internal VirtualNetworkGatewaysOperations(NetworkClient client)
{
    if (client == null)
    {
        // nameof yields the same "client" string the original literal produced.
        throw new System.ArgumentNullException(nameof(client));
    }
    Client = client;
}
/// <summary>
/// Gets a reference to the NetworkClient that issues the HTTP requests
/// for every operation in this class.
/// </summary>
public NetworkClient Client { get; private set; }
/// <summary>
/// Creates or updates a virtual network gateway in the specified resource
/// group. This is a long-running operation: the initial PUT is sent and the
/// result is polled until the operation reaches a terminal state.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to create or update virtual network gateway operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse<VirtualNetworkGateway>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, VirtualNetworkGateway parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Kick off the PUT, then let the client runtime poll the operation to completion.
    var beginResponse = await BeginCreateOrUpdateWithHttpMessagesAsync(resourceGroupName, virtualNetworkGatewayName, parameters, customHeaders, cancellationToken).ConfigureAwait(false);
    return await Client.GetPutOrPatchOperationResultAsync(beginResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets the specified virtual network gateway by resource group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<VirtualNetworkGateway>> GetWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required parameters and client settings before doing any work.
    if (resourceGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (virtualNetworkGatewayName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkGatewayName");
    }
    if (Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing: capture an invocation id only when tracing is globally enabled.
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("virtualNetworkGatewayName", virtualNetworkGatewayName);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
    }
    // Construct URL: fill the route template, escaping each path segment.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{virtualNetworkGatewayName}", System.Uri.EscapeDataString(virtualNetworkGatewayName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects.
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: a fresh client-request id per call when the client asks for one.
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    // Caller-supplied headers override any header set above with the same name.
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: GET has no body, but the variable is still needed for error wrapping.
    string _requestContent = null;
    // Set Credentials: lets the credential object sign/augment the request.
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Anything other than 200 is an error; try to extract a CloudError body,
    // fall back to a generic message if the body isn't valid JSON.
    if ((int)_statusCode != 200)
    {
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: keep the generic status-code message.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Dispose transport objects before throwing so the connection can be reused.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<VirtualNetworkGateway>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<VirtualNetworkGateway>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Deletes the specified virtual network gateway. This is a long-running
/// operation: the initial DELETE is sent and the result is polled until the
/// operation reaches a terminal state.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Start the DELETE, then let the client runtime poll until completion.
    var beginResponse = await BeginDeleteWithHttpMessagesAsync(resourceGroupName, virtualNetworkGatewayName, customHeaders, cancellationToken).ConfigureAwait(false);
    return await Client.GetPostOrDeleteOperationResultAsync(beginResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets all virtual network gateways by resource group. Returns the first
/// page of results; the page's NextPageLink drives subsequent requests.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<VirtualNetworkGateway>>> ListWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required parameters and client settings before doing any work.
    if (resourceGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing: capture an invocation id only when tracing is globally enabled.
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters);
    }
    // Construct URL: fill the route template, escaping each path segment.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects.
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("GET");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: a fresh client-request id per call when the client asks for one.
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    // Caller-supplied headers override any header set above with the same name.
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: GET has no body, but the variable is still needed for error wrapping.
    string _requestContent = null;
    // Set Credentials: lets the credential object sign/augment the request.
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // Anything other than 200 is an error; try to extract a CloudError body,
    // fall back to a generic message if the body isn't valid JSON.
    if ((int)_statusCode != 200)
    {
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: keep the generic status-code message.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Dispose transport objects before throwing so the connection can be reused.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<IPage<VirtualNetworkGateway>>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response: concrete Page<T> is deserialized, exposed as IPage<T>.
    if ((int)_statusCode == 200)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<VirtualNetworkGateway>>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// Resets the primary of the virtual network gateway in the specified resource
/// group. This is a long-running operation: the initial POST is sent and the
/// result is polled until the operation reaches a terminal state.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='gatewayVip'>
/// Virtual network gateway vip address supplied to the begin reset of the
/// active-active feature enabled gateway.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse<VirtualNetworkGateway>> ResetWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, string gatewayVip = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Start the reset, then let the client runtime poll until completion.
    var beginResponse = await BeginResetWithHttpMessagesAsync(resourceGroupName, virtualNetworkGatewayName, gatewayVip, customHeaders, cancellationToken).ConfigureAwait(false);
    return await Client.GetPostOrDeleteOperationResultAsync(beginResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Generates VPN client package for P2S client of the virtual network gateway
/// in the specified resource group. The service accepts the request with 202
/// and the response body is a string (package download URL, per service contract
/// — TODO confirm against the REST API docs).
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the generate virtual network gateway VPN client
/// package operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<string>> GeneratevpnclientpackageWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, VpnClientParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Validate required parameters and client settings before doing any work.
    if (resourceGroupName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
    }
    if (virtualNetworkGatewayName == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkGatewayName");
    }
    if (parameters == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "parameters");
    }
    // NOTE(review): this null check is redundant — parameters was just verified
    // non-null above. Kept as-is because the file is generated code.
    if (parameters != null)
    {
        parameters.Validate();
    }
    if (Client.ApiVersion == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
    }
    if (Client.SubscriptionId == null)
    {
        throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
    }
    // Tracing: capture an invocation id only when tracing is globally enabled.
    bool _shouldTrace = ServiceClientTracing.IsEnabled;
    string _invocationId = null;
    if (_shouldTrace)
    {
        _invocationId = ServiceClientTracing.NextInvocationId.ToString();
        Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
        tracingParameters.Add("resourceGroupName", resourceGroupName);
        tracingParameters.Add("virtualNetworkGatewayName", virtualNetworkGatewayName);
        tracingParameters.Add("parameters", parameters);
        tracingParameters.Add("cancellationToken", cancellationToken);
        ServiceClientTracing.Enter(_invocationId, this, "Generatevpnclientpackage", tracingParameters);
    }
    // Construct URL: fill the route template, escaping each path segment.
    var _baseUrl = Client.BaseUri.AbsoluteUri;
    var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage").ToString();
    _url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
    _url = _url.Replace("{virtualNetworkGatewayName}", System.Uri.EscapeDataString(virtualNetworkGatewayName));
    _url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
    List<string> _queryParameters = new List<string>();
    if (Client.ApiVersion != null)
    {
        _queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
    }
    if (_queryParameters.Count > 0)
    {
        _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
    }
    // Create HTTP transport objects.
    var _httpRequest = new HttpRequestMessage();
    HttpResponseMessage _httpResponse = null;
    _httpRequest.Method = new HttpMethod("POST");
    _httpRequest.RequestUri = new System.Uri(_url);
    // Set Headers: a fresh client-request id per call when the client asks for one.
    if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
    {
        _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
    }
    if (Client.AcceptLanguage != null)
    {
        if (_httpRequest.Headers.Contains("accept-language"))
        {
            _httpRequest.Headers.Remove("accept-language");
        }
        _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
    }
    // Caller-supplied headers override any header set above with the same name.
    if (customHeaders != null)
    {
        foreach(var _header in customHeaders)
        {
            if (_httpRequest.Headers.Contains(_header.Key))
            {
                _httpRequest.Headers.Remove(_header.Key);
            }
            _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
        }
    }
    // Serialize Request: JSON-encode the parameters as the POST body.
    string _requestContent = null;
    if(parameters != null)
    {
        _requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(parameters, Client.SerializationSettings);
        _httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
        _httpRequest.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
    }
    // Set Credentials: lets the credential object sign/augment the request.
    if (Client.Credentials != null)
    {
        cancellationToken.ThrowIfCancellationRequested();
        await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    }
    // Send Request
    if (_shouldTrace)
    {
        ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
    }
    cancellationToken.ThrowIfCancellationRequested();
    _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
    if (_shouldTrace)
    {
        ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
    }
    HttpStatusCode _statusCode = _httpResponse.StatusCode;
    cancellationToken.ThrowIfCancellationRequested();
    string _responseContent = null;
    // The only expected status here is 202 Accepted; anything else is an error.
    if ((int)_statusCode != 202)
    {
        var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
        try
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
            if (_errorBody != null)
            {
                ex = new CloudException(_errorBody.Message);
                ex.Body = _errorBody;
            }
        }
        catch (JsonException)
        {
            // Ignore the exception: keep the generic status-code message.
        }
        ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
        ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
        if (_httpResponse.Headers.Contains("x-ms-request-id"))
        {
            ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Error(_invocationId, ex);
        }
        // Dispose transport objects before throwing so the connection can be reused.
        _httpRequest.Dispose();
        if (_httpResponse != null)
        {
            _httpResponse.Dispose();
        }
        throw ex;
    }
    // Create Result
    var _result = new AzureOperationResponse<string>();
    _result.Request = _httpRequest;
    _result.Response = _httpResponse;
    if (_httpResponse.Headers.Contains("x-ms-request-id"))
    {
        _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
    }
    // Deserialize Response: the body is a plain JSON string.
    if ((int)_statusCode == 202)
    {
        _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        try
        {
            _result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<string>(_responseContent, Client.DeserializationSettings);
        }
        catch (JsonException ex)
        {
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
        }
    }
    if (_shouldTrace)
    {
        ServiceClientTracing.Exit(_invocationId, _result);
    }
    return _result;
}
/// <summary>
/// The GetBgpPeerStatus operation retrieves the status of all BGP peers.
/// This is a long-running operation: the initial POST is sent and the result
/// is polled until the operation reaches a terminal state.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='peer'>
/// The IP address of the peer to retrieve the status of.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse<BgpPeerStatusListResult>> GetBgpPeerStatusWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, string peer = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Start the status query, then let the client runtime poll until completion.
    var beginResponse = await BeginGetBgpPeerStatusWithHttpMessagesAsync(resourceGroupName, virtualNetworkGatewayName, peer, customHeaders, cancellationToken).ConfigureAwait(false);
    return await Client.GetPostOrDeleteOperationResultAsync(beginResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// This operation retrieves a list of routes the virtual network gateway has
/// learned, including routes learned from BGP peers. This is a long-running
/// operation: the initial POST is sent and the result is polled until the
/// operation reaches a terminal state.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse<GatewayRouteListResult>> GetLearnedRoutesWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // Start the route query, then let the client runtime poll until completion.
    var beginResponse = await BeginGetLearnedRoutesWithHttpMessagesAsync(resourceGroupName, virtualNetworkGatewayName, customHeaders, cancellationToken).ConfigureAwait(false);
    return await Client.GetPostOrDeleteOperationResultAsync(beginResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// This operation retrieves a list of routes the virtual network gateway is
/// advertising to the specified peer.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='peer'>
/// The IP address of the peer
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public async Task<AzureOperationResponse<GatewayRouteListResult>> GetAdvertisedRoutesWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, string peer, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Issue the initial Begin* request for the long-running operation and let
// the client's result helper produce the completed response.
var _initialResponse = await BeginGetAdvertisedRoutesWithHttpMessagesAsync(resourceGroupName, virtualNetworkGatewayName, peer, customHeaders, cancellationToken).ConfigureAwait(false);
return await Client.GetPostOrDeleteOperationResultAsync(_initialResponse, customHeaders, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Creates or updates a virtual network gateway in the specified resource
/// group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to create or update virtual network gateway operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<VirtualNetworkGateway>> BeginCreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, VirtualNetworkGateway parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Validate required method parameters and client-level settings up front;
// each missing value fails fast with a ValidationException naming the field.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (virtualNetworkGatewayName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkGatewayName");
}
if (parameters == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "parameters");
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("virtualNetworkGatewayName", virtualNetworkGatewayName);
tracingParameters.Add("parameters", parameters);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "BeginCreateOrUpdate", tracingParameters);
}
// Construct URL: fill the route template from the escaped path parameters.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{virtualNetworkGatewayName}", System.Uri.EscapeDataString(virtualNetworkGatewayName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects; this is a PUT (create-or-update) request.
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("PUT");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers; custom headers replace any same-named header set above.
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request: the gateway model becomes the UTF-8 JSON body.
string _requestContent = null;
if(parameters != null)
{
_requestContent = Microsoft.Rest.Serialization.SafeJsonConvert.SerializeObject(parameters, Client.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
_httpRequest.Content.Headers.ContentType =System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
}
// Set Credentials (e.g. attach an auth header) before sending.
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// Only 200 and 201 are accepted; anything else becomes a CloudException.
// The error body is parsed best-effort as a CloudError for a richer message.
if ((int)_statusCode != 200 && (int)_statusCode != 201)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
// Dispose transport objects before surfacing the failure.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<VirtualNetworkGateway>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response (status 200): body is the gateway model; a malformed
// body raises SerializationException after disposing the transport objects.
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<VirtualNetworkGateway>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
// Deserialize Response (status 201): same handling as 200.
if ((int)_statusCode == 201)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<VirtualNetworkGateway>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Deletes the specified virtual network gateway.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Validate required method parameters and client-level settings up front.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (virtualNetworkGatewayName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkGatewayName");
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("virtualNetworkGatewayName", virtualNetworkGatewayName);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "BeginDelete", tracingParameters);
}
// Construct URL: fill the route template from the escaped path parameters.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{virtualNetworkGatewayName}", System.Uri.EscapeDataString(virtualNetworkGatewayName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects; this is a DELETE request with no body.
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("DELETE");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers; custom headers replace any same-named header set above.
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request (no request body for this operation).
string _requestContent = null;
// Set Credentials (e.g. attach an auth header) before sending.
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// 204, 202 and 200 are the accepted responses for delete; anything else
// becomes a CloudException with a best-effort parsed CloudError body.
if ((int)_statusCode != 204 && (int)_statusCode != 202 && (int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
// Dispose transport objects before surfacing the failure.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result (no body is deserialized for this operation).
var _result = new AzureOperationResponse();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Resets the primary of the virtual network gateway in the specified resource
/// group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='gatewayVip'>
/// Virtual network gateway vip address supplied to the begin reset of the
/// active-active feature enabled gateway.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<VirtualNetworkGateway>> BeginResetWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, string gatewayVip = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Validate required method parameters and client-level settings up front.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (virtualNetworkGatewayName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkGatewayName");
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("virtualNetworkGatewayName", virtualNetworkGatewayName);
tracingParameters.Add("gatewayVip", gatewayVip);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "BeginReset", tracingParameters);
}
// Construct URL for the /reset action; the optional gatewayVip travels as a
// query parameter rather than in the path.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{virtualNetworkGatewayName}", System.Uri.EscapeDataString(virtualNetworkGatewayName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (gatewayVip != null)
{
_queryParameters.Add(string.Format("gatewayVip={0}", System.Uri.EscapeDataString(gatewayVip)));
}
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects; this action is a POST with no body.
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers; custom headers replace any same-named header set above.
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request (no request body for this operation).
string _requestContent = null;
// Set Credentials (e.g. attach an auth header) before sending.
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// 202 and 200 are the accepted responses; anything else becomes a
// CloudException with a best-effort parsed CloudError body.
if ((int)_statusCode != 202 && (int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
// Dispose transport objects before surfacing the failure.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<VirtualNetworkGateway>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response: only a 200 carries a gateway body; a malformed body
// raises SerializationException after disposing the transport objects.
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<VirtualNetworkGateway>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// The GetBgpPeerStatus operation retrieves the status of all BGP peers.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='peer'>
/// The IP address of the peer to retrieve the status of.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<BgpPeerStatusListResult>> BeginGetBgpPeerStatusWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, string peer = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (virtualNetworkGatewayName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkGatewayName");
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("virtualNetworkGatewayName", virtualNetworkGatewayName);
tracingParameters.Add("peer", peer);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "BeginGetBgpPeerStatus", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{virtualNetworkGatewayName}", System.Uri.EscapeDataString(virtualNetworkGatewayName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (peer != null)
{
_queryParameters.Add(string.Format("peer={0}", System.Uri.EscapeDataString(peer)));
}
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200 && (int)_statusCode != 202)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<BgpPeerStatusListResult>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<BgpPeerStatusListResult>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// This operation retrieves a list of routes the virtual network gateway has
/// learned, including routes learned from BGP peers.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<GatewayRouteListResult>> BeginGetLearnedRoutesWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Validate required parameters and client configuration up front so an
// invalid call fails before anything is sent on the wire.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (virtualNetworkGatewayName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkGatewayName");
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("virtualNetworkGatewayName", virtualNetworkGatewayName);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "BeginGetLearnedRoutes", tracingParameters);
}
// Construct URL: path segments are filled in via Replace with escaped values.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{virtualNetworkGatewayName}", System.Uri.EscapeDataString(virtualNetworkGatewayName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects. The getLearnedRoutes action is a POST
// with no request body.
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers. Custom headers override any defaults with the same name.
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials (e.g. attaches the Authorization header).
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// 200 carries the final result; 202 means the long-running operation was
// accepted. Anything else is surfaced as a CloudException.
if ((int)_statusCode != 200 && (int)_statusCode != 202)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
// Best effort: enrich the exception with the CloudError payload; if the
// body is not valid JSON the generic message above is kept.
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
// Dispose transport objects before throwing; on success they are handed
// to the caller inside the result instead.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<GatewayRouteListResult>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response. Only a 200 carries a GatewayRouteListResult body;
// a 202 response leaves _result.Body unset.
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<GatewayRouteListResult>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// This operation retrieves a list of routes the virtual network gateway is
/// advertising to the specified peer.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayName'>
/// The name of the virtual network gateway.
/// </param>
/// <param name='peer'>
/// The IP address of the peer
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<GatewayRouteListResult>> BeginGetAdvertisedRoutesWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayName, string peer, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Validate required parameters and client configuration up front so an
// invalid call fails before anything is sent on the wire.
if (resourceGroupName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "resourceGroupName");
}
if (virtualNetworkGatewayName == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "virtualNetworkGatewayName");
}
if (peer == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "peer");
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SubscriptionId");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("resourceGroupName", resourceGroupName);
tracingParameters.Add("virtualNetworkGatewayName", virtualNetworkGatewayName);
tracingParameters.Add("peer", peer);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "BeginGetAdvertisedRoutes", tracingParameters);
}
// Construct URL: path segments are filled in via Replace with escaped values.
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes").ToString();
_url = _url.Replace("{resourceGroupName}", System.Uri.EscapeDataString(resourceGroupName));
_url = _url.Replace("{virtualNetworkGatewayName}", System.Uri.EscapeDataString(virtualNetworkGatewayName));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(Client.SubscriptionId));
List<string> _queryParameters = new List<string>();
// peer is always non-null here (validated above); the guard is a code
// generator artifact kept for uniformity with optional query parameters.
if (peer != null)
{
_queryParameters.Add(string.Format("peer={0}", System.Uri.EscapeDataString(peer)));
}
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects. The getAdvertisedRoutes action is a POST
// with no request body.
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers. Custom headers override any defaults with the same name.
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials (e.g. attaches the Authorization header).
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// 200 carries the final result; 202 means the long-running operation was
// accepted. Anything else is surfaced as a CloudException.
if ((int)_statusCode != 200 && (int)_statusCode != 202)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
// Best effort: enrich the exception with the CloudError payload; if the
// body is not valid JSON the generic message above is kept.
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
// Dispose transport objects before throwing; on success they are handed
// to the caller inside the result instead.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<GatewayRouteListResult>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response. Only a 200 carries a GatewayRouteListResult body;
// a 202 response leaves _result.Body unset.
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<GatewayRouteListResult>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Gets all virtual network gateways by resource group.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<VirtualNetworkGateway>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// nextPageLink is the absolute continuation URL returned by the previous
// List call; it is used verbatim (and deliberately NOT escaped below).
if (nextPageLink == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "nextPageLink");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("nextPageLink", nextPageLink);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters);
}
// Construct URL
string _url = "{nextLink}";
_url = _url.Replace("{nextLink}", nextPageLink);
// No query parameters are added here; the list stays empty and the block
// below is a code generator artifact.
List<string> _queryParameters = new List<string>();
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects. Paging continuation is a plain GET.
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers. Custom headers override any defaults with the same name.
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials (e.g. attaches the Authorization header).
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
// 200 is the only success code for a paging GET.
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
// Best effort: enrich the exception with the CloudError payload; if the
// body is not valid JSON the generic message above is kept.
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
// Dispose transport objects before throwing; on success they are handed
// to the caller inside the result instead.
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<IPage<VirtualNetworkGateway>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response into the concrete Page<T>, exposed as IPage<T>.
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Microsoft.Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<VirtualNetworkGateway>>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
}
}
| {
"content_hash": "a236693e2e98d4db4901b782f87c79f3",
"timestamp": "",
"source": "github",
"line_count": 2185,
"max_line_length": 329,
"avg_line_length": 48.01006864988558,
"alnum_prop": 0.5668338067911003,
"repo_name": "lmazuel/autorest",
"id": "b4341a9da0bd251c3bcbba7621ae62f6d3cfae04",
"size": "105058",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Samples/test/end-to-end/network/Client/VirtualNetworkGatewaysOperations.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "36"
},
{
"name": "C#",
"bytes": "15043916"
},
{
"name": "CSS",
"bytes": "110"
},
{
"name": "CoffeeScript",
"bytes": "64212"
},
{
"name": "Go",
"bytes": "149926"
},
{
"name": "HTML",
"bytes": "274"
},
{
"name": "Java",
"bytes": "7894733"
},
{
"name": "JavaScript",
"bytes": "6955414"
},
{
"name": "PowerShell",
"bytes": "41223"
},
{
"name": "Python",
"bytes": "2111184"
},
{
"name": "Ruby",
"bytes": "182108"
},
{
"name": "Shell",
"bytes": "196"
},
{
"name": "TypeScript",
"bytes": "465386"
}
],
"symlink_target": ""
} |
<?php
/* FOSUserBundle:Registration:confirmed.html.twig */
/**
 * Compiled Twig cache class for FOSUserBundle:Registration:confirmed.html.twig.
 * Auto-generated by the Twig compiler; do not edit by hand — regenerate by
 * clearing the Symfony cache. The "// line N" comments below map emitted code
 * back to lines of the original template.
 */
class __TwigTemplate_b43e02ca4b3d0a86dc7116b276b79647 extends Twig_Template
{
public function __construct(Twig_Environment $env)
{
parent::__construct($env);
// Register the template blocks this template overrides.
$this->blocks = array(
'fos_user_content' => array($this, 'block_fos_user_content'),
);
}
// Parent template ({% extends %}) of this template.
protected function doGetParent(array $context)
{
return "FOSUserBundle::layout.html.twig";
}
// Delegates rendering to the parent layout, passing our block overrides.
protected function doDisplay(array $context, array $blocks = array())
{
$this->getParent($context)->display($context, array_merge($this->blocks, $blocks));
}
// line 3
// Renders the "registration confirmed" message and, when a session holds a
// "_security.target_path", a link back to the page the user came from.
public function block_fos_user_content($context, array $blocks = array())
{
// line 4
echo "    <p>";
echo twig_escape_filter($this->env, $this->env->getExtension('translator')->trans("registration.confirmed", array("%username%" => $this->getAttribute($this->getContext($context, "user"), "username")), "FOSUserBundle"), "html", null, true);
echo "</p>
";
// line 5
if ((!twig_test_empty($this->getAttribute($this->getContext($context, "app"), "session")))) {
// line 6
echo "        ";
$context["targetUrl"] = $this->getAttribute($this->getAttribute($this->getContext($context, "app"), "session"), "get", array(0 => "_security.target_path"), "method");
// line 7
echo "        ";
if ((!twig_test_empty($this->getContext($context, "targetUrl")))) {
echo "<p><a href=\"";
echo twig_escape_filter($this->env, $this->getContext($context, "targetUrl"), "html", null, true);
echo "\">";
echo twig_escape_filter($this->env, $this->env->getExtension('translator')->trans("registration.back", array(), "FOSUserBundle"), "html", null, true);
echo "</a></p>";
}
// line 8
echo "    ";
}
}
public function getTemplateName()
{
return "FOSUserBundle:Registration:confirmed.html.twig";
}
public function isTraitable()
{
return false;
}
}
| {
"content_hash": "fb19cc6f68c94dcf7cb457d370d12c99",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 247,
"avg_line_length": 35.91803278688525,
"alnum_prop": 0.5572797809219534,
"repo_name": "dziorki/Intimisi",
"id": "13b24d19e46b93ed7987db189deebcacd3f236ed",
"size": "2191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/cache/dev/twig/b4/3e/02ca4b3d0a86dc7116b276b79647.php",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1208458"
},
{
"name": "PHP",
"bytes": "120805"
},
{
"name": "Perl",
"bytes": "5948"
},
{
"name": "Shell",
"bytes": "95"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "2b8acd091297b4f09da46cecc9eb8863",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.307692307692308,
"alnum_prop": 0.6940298507462687,
"repo_name": "mdoering/backbone",
"id": "fd75a32107cd9908cbe73d78b81ab2d216eb7783",
"size": "182",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Liliopsida/Poales/Cyperaceae/Carex/Carex sinomairei/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/webui/chromeos/login/core_oobe_handler.h"
#include <type_traits>
#include "ash/constants/ash_features.h"
#include "ash/public/ash_interfaces.h"
#include "ash/public/cpp/event_rewriter_controller.h"
#include "ash/public/cpp/shelf_config.h"
#include "ash/public/cpp/tablet_mode.h"
#include "ash/shell.h"
#include "base/bind.h"
#include "base/command_line.h"
#include "base/strings/utf_string_conversions.h"
#include "base/values.h"
#include "build/branding_buildflags.h"
#include "chrome/browser/ash/login/configuration_keys.h"
#include "chrome/browser/ash/login/demo_mode/demo_session.h"
#include "chrome/browser/ash/login/demo_mode/demo_setup_controller.h"
#include "chrome/browser/ash/login/helper.h"
#include "chrome/browser/ash/login/lock/screen_locker.h"
#include "chrome/browser/ash/login/screens/reset_screen.h"
#include "chrome/browser/ash/login/ui/login_display_host.h"
#include "chrome/browser/ash/login/ui/oobe_dialog_size_utils.h"
#include "chrome/browser/ash/login/wizard_controller.h"
#include "chrome/browser/ash/policy/enrollment/enrollment_requisition_manager.h"
#include "chrome/browser/ash/system/input_device_settings.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/browser_process_platform_part.h"
#include "chrome/browser/ui/ash/ash_util.h"
#include "chrome/browser/ui/ash/keyboard/chrome_keyboard_controller_client.h"
#include "chrome/browser/ui/webui/chromeos/login/demo_setup_screen_handler.h"
#include "chrome/browser/ui/webui/chromeos/login/eula_screen_handler.h"
#include "chrome/browser/ui/webui/chromeos/login/oobe_ui.h"
#include "chrome/browser/ui/webui/chromeos/login/reset_screen_handler.h"
#include "chrome/browser/ui/webui/chromeos/login/signin_screen_handler.h"
#include "chrome/common/channel_info.h"
#include "chrome/common/chrome_constants.h"
#include "chrome/common/pref_names.h"
#include "chrome/grit/chromium_strings.h"
#include "chrome/grit/generated_resources.h"
#include "components/login/base_screen_handler_utils.h"
#include "components/login/localized_values_builder.h"
#include "components/prefs/pref_service.h"
#include "components/strings/grit/components_strings.h"
#include "components/version_info/version_info.h"
#include "google_apis/google_api_keys.h"
#include "ui/aura/window_tree_host.h"
#include "ui/display/screen.h"
#include "ui/events/event_sink.h"
#include "ui/gfx/geometry/size.h"
namespace chromeos {
// Note that show_oobe_ui_ defaults to false because WizardController assumes
// OOBE UI is not visible by default.
CoreOobeHandler::CoreOobeHandler(JSCallsContainer* js_calls_container)
: BaseWebUIHandler(js_calls_container), version_info_updater_(this) {
DCHECK(js_calls_container);
// Observe tablet-mode changes for the lifetime of the handler.
ash::TabletMode::Get()->AddObserver(this);
ash::BindCrosDisplayConfigController(
cros_display_config_.BindNewPipeAndPassReceiver());
// Per the API name, registration also fires once with the current
// configuration.
OobeConfiguration::Get()->AddAndFireObserver(this);
}
CoreOobeHandler::~CoreOobeHandler() {
OobeConfiguration::Get()->RemoveObserver(this);
// Ash may be released before us.
if (ash::TabletMode::Get())
ash::TabletMode::Get()->RemoveObserver(this);
}
// Registers the translated strings this handler exposes to the OOBE WebUI.
void CoreOobeHandler::DeclareLocalizedValues(
::login::LocalizedValuesBuilder* builder) {
builder->Add("title", IDS_SHORT_PRODUCT_NAME);
builder->Add("productName", IDS_SHORT_PRODUCT_NAME);
builder->Add("learnMore", IDS_LEARN_MORE);
// Strings for Asset Identifier shown in version string.
builder->Add("assetIdLabel", IDS_OOBE_ASSET_ID_LABEL);
builder->AddF("missingAPIKeysNotice", IDS_LOGIN_API_KEYS_NOTICE,
base::ASCIIToUTF16(google_apis::kAPIKeysDevelopersHowToURL));
builder->Add("playAnimationAriaLabel", IDS_OOBE_PLAY_ANIMATION_MESSAGE);
builder->Add("pauseAnimationAriaLabel", IDS_OOBE_PAUSE_ANIMATION_MESSAGE);
}
// One-time setup after the page is ready: syncs visibility, version info,
// virtual-keyboard state, and the client area size with the UI.
void CoreOobeHandler::Initialize() {
UpdateOobeUIVisibility();
#if BUILDFLAG(GOOGLE_CHROME_BRANDING)
// Branded builds request the full version info update.
version_info_updater_.StartUpdate(true);
#else
version_info_updater_.StartUpdate(false);
#endif
UpdateKeyboardState();
UpdateClientAreaSize(
display::Screen::GetScreen()->GetPrimaryDisplay().size());
}
// Adds non-localized flags the OOBE frontend reads at load time (tablet mode,
// demo-mode availability, flow type, quick-start feature state).
void CoreOobeHandler::GetAdditionalParameters(base::DictionaryValue* dict) {
dict->SetKey("isInTabletMode",
base::Value(ash::TabletMode::Get()->InTabletMode()));
dict->SetKey("isDemoModeEnabled",
base::Value(DemoSetupController::IsDemoModeAllowed()));
if (policy::EnrollmentRequisitionManager::IsRemoraRequisition()) {
dict->SetKey("flowType", base::Value("meet"));
}
dict->SetKey("isQuickStartEnabled",
base::Value(ash::features::IsOobeQuickStartEnabled()));
}
// Wires WebUI messages from the OOBE page to their Handle* callbacks.
void CoreOobeHandler::RegisterMessages() {
AddCallback("screenStateInitialize", &CoreOobeHandler::HandleInitialized);
AddCallback("updateCurrentScreen",
&CoreOobeHandler::HandleUpdateCurrentScreen);
AddCallback("skipToLoginForTesting",
&CoreOobeHandler::HandleSkipToLoginForTesting);
AddCallback("launchHelpApp", &CoreOobeHandler::HandleLaunchHelpApp);
AddCallback("toggleResetScreen", &CoreOobeHandler::HandleToggleResetScreen);
AddCallback("raiseTabKeyEvent", &CoreOobeHandler::HandleRaiseTabKeyEvent);
// Note: Used by enterprise_RemoraRequisitionDisplayUsage.py:
// TODO(felixe): Use chrome.system.display or cros_display_config.mojom,
// https://crbug.com/858958.
AddRawCallback("getPrimaryDisplayNameForTesting",
&CoreOobeHandler::HandleGetPrimaryDisplayNameForTesting);
AddCallback("startDemoModeSetupForTesting",
&CoreOobeHandler::HandleStartDemoModeSetupForTesting);
AddCallback("updateOobeUIState", &CoreOobeHandler::HandleUpdateOobeUIState);
AddCallback("enableShelfButtons", &CoreOobeHandler::HandleEnableShelfButtons);
}
// Thin wrappers that forward state changes to the cr.ui.Oobe JS API.
void CoreOobeHandler::FocusReturned(bool reverse) {
CallJS("cr.ui.Oobe.focusReturned", reverse);
}
void CoreOobeHandler::ReloadContent(const base::DictionaryValue& dictionary) {
CallJS("cr.ui.Oobe.reloadContent", dictionary);
}
void CoreOobeHandler::SetVirtualKeyboardShown(bool shown) {
CallJS("cr.ui.Oobe.setVirtualKeyboardShown", shown);
}
// The three setters below can be invoked before JS is ready, so they guard
// with IsSafeToCallJavascript() and log the drop instead of crashing.
void CoreOobeHandler::SetShelfHeight(int height) {
// TODO(crbug.com/1180291) - Remove once OOBE JS calls are fixed.
if (IsSafeToCallJavascript()) {
CallJS("cr.ui.Oobe.setShelfHeight", height);
} else {
LOG(ERROR) << "Silently dropping SetShelfHeight request.";
}
}
void CoreOobeHandler::SetOrientation(bool is_horizontal) {
// TODO(crbug.com/1180291) - Remove once OOBE JS calls are fixed.
if (IsSafeToCallJavascript()) {
CallJS("cr.ui.Oobe.setOrientation", is_horizontal);
} else {
LOG(ERROR) << "Silently dropping SetOrientation request.";
}
}
void CoreOobeHandler::SetDialogSize(int width, int height) {
// TODO(crbug.com/1180291) - Remove once OOBE JS calls are fixed.
if (IsSafeToCallJavascript()) {
CallJS("cr.ui.Oobe.setDialogSize", width, height);
} else {
LOG(ERROR) << "Silently dropping SetDialogSize request.";
}
}
// Called by JS once the page has initialized; unlocks JS calls and lets the
// rest of the OOBE handlers initialize.
void CoreOobeHandler::HandleInitialized() {
VLOG(3) << "CoreOobeHandler::HandleInitialized";
AllowJavascript();
GetOobeUI()->InitializeHandlers();
}
void CoreOobeHandler::HandleUpdateCurrentScreen(
const std::string& screen_name) {
const OobeScreenId screen(screen_name);
GetOobeUI()->CurrentScreenChanged(screen);
// Arrow-to-tab rewriting is only wanted on the EULA screen.
ash::EventRewriterController::Get()->SetArrowToTabRewritingEnabled(
screen == EulaView::kScreenId);
}
void CoreOobeHandler::HandleEnableShelfButtons(bool enable) {
if (LoginDisplayHost::default_host())
LoginDisplayHost::default_host()->SetShelfButtonsEnabled(enable);
}
// Test-only shortcut; no-op until the wizard controller is initialized.
void CoreOobeHandler::HandleSkipToLoginForTesting() {
WizardController* controller = WizardController::default_controller();
if (controller && controller->is_initialized())
WizardController::default_controller()->SkipToLoginForTesting();
}
// Asynchronously checks whether powerwash is allowed before showing the
// reset screen; the result arrives in HandleToggleResetScreenCallback.
void CoreOobeHandler::HandleToggleResetScreen() {
base::OnceCallback<void(bool, absl::optional<tpm_firmware_update::Mode>)>
callback =
base::BindOnce(&CoreOobeHandler::HandleToggleResetScreenCallback,
weak_ptr_factory_.GetWeakPtr());
ResetScreen::CheckIfPowerwashAllowed(std::move(callback));
}
// Completion of HandleToggleResetScreen: starts the reset wizard when
// powerwash is allowed, optionally forcing a TPM firmware update mode.
void CoreOobeHandler::HandleToggleResetScreenCallback(
bool is_reset_allowed,
absl::optional<tpm_firmware_update::Mode> tpm_firmware_update_mode) {
if (!is_reset_allowed)
return;
if (tpm_firmware_update_mode.has_value()) {
// Force the TPM firmware update option to be enabled.
g_browser_process->local_state()->SetInteger(
prefs::kFactoryResetTPMFirmwareUpdateMode,
static_cast<int>(tpm_firmware_update_mode.value()));
}
DCHECK(LoginDisplayHost::default_host());
LoginDisplayHost::default_host()->StartWizard(ResetView::kScreenId);
}
// Records the desired visibility; the JS side is only updated once the page
// reports ready (see UpdateOobeUIVisibility).
void CoreOobeHandler::ShowOobeUI(bool show) {
if (show == show_oobe_ui_)
return;
show_oobe_ui_ = show;
if (page_is_ready())
UpdateOobeUIVisibility();
}
void CoreOobeHandler::SetLoginUserCount(int user_count) {
CallJS("cr.ui.Oobe.setLoginUserCount", user_count);
}
void CoreOobeHandler::ForwardAccelerator(std::string accelerator_name) {
CallJS("cr.ui.Oobe.handleAccelerator", accelerator_name);
}
void CoreOobeHandler::UpdateOobeUIVisibility() {
const std::string& display = GetOobeUI()->display_type();
bool has_api_keys_configured = google_apis::HasAPIKeyConfigured() &&
google_apis::HasOAuthClientConfigured();
CallJS("cr.ui.Oobe.showAPIKeysNotice",
!has_api_keys_configured && (display == OobeUI::kOobeDisplay ||
display == OobeUI::kLoginDisplay));
// Don't show version label on the stable channel by default.
bool should_show_version = true;
version_info::Channel channel = chrome::GetChannel();
if (channel == version_info::Channel::STABLE ||
channel == version_info::Channel::BETA) {
should_show_version = false;
}
CallJS("cr.ui.Oobe.showVersion", should_show_version);
CallJS("cr.ui.Oobe.showOobeUI", show_oobe_ui_);
if (system::InputDeviceSettings::Get()->ForceKeyboardDrivenUINavigation())
CallJS("cr.ui.Oobe.enableKeyboardFlow", true);
}
// Updates the "version" label with the new OS version text.
void CoreOobeHandler::OnOSVersionLabelTextUpdated(
    const std::string& os_version_label_text) {
  UpdateLabel("version", os_version_label_text);
}
// Intentionally empty: enterprise info is not surfaced by this handler.
void CoreOobeHandler::OnEnterpriseInfoUpdated(const std::string& message_text,
                                              const std::string& asset_id) {
  // Not relevant in OOBE mode.
}
// Pushes the device's Bluetooth name to the page.
void CoreOobeHandler::OnDeviceInfoUpdated(const std::string& bluetooth_name) {
  CallJS("cr.ui.Oobe.setBluetoothDeviceInfo", bluetooth_name);
}
// Returns the event sink of the primary root window's host.
ui::EventSink* CoreOobeHandler::GetEventSink() {
  return ash::Shell::GetPrimaryRootWindow()->GetHost()->GetEventSink();
}
// Sets the text of the label identified by |id| on the JS side. The request
// is dropped (with an error log) when calling JavaScript is not yet safe.
void CoreOobeHandler::UpdateLabel(const std::string& id,
                                  const std::string& text) {
  // TODO(crbug.com/1180291) - Remove once OOBE JS calls are fixed.
  if (!IsSafeToCallJavascript()) {
    LOG(ERROR) << "Silently dropping UpdateLabel request.";
    return;
  }
  CallJS("cr.ui.Oobe.setLabelText", id, text);
}
// Mirrors the current virtual-keyboard visibility into this handler's state.
void CoreOobeHandler::UpdateKeyboardState() {
  const bool is_keyboard_shown =
      ChromeKeyboardControllerClient::Get()->is_keyboard_visible();
  SetVirtualKeyboardShown(is_keyboard_shown);
}
// Notifies the page that tablet mode started.
void CoreOobeHandler::OnTabletModeStarted() {
  CallJS("cr.ui.Oobe.setTabletModeState", true);
}
// Notifies the page that tablet mode ended.
void CoreOobeHandler::OnTabletModeEnded() {
  CallJS("cr.ui.Oobe.setTabletModeState", false);
}
// Recomputes layout-dependent state from the new client area |size|:
// shelf height, screen orientation and the OOBE dialog dimensions.
void CoreOobeHandler::UpdateClientAreaSize(const gfx::Size& size) {
  // Fetch the shelf size once instead of querying ShelfConfig twice.
  const int shelf_height = ash::ShelfConfig::Get()->shelf_size();
  SetShelfHeight(shelf_height);

  const gfx::Size display_size =
      display::Screen::GetScreen()->GetPrimaryDisplay().size();
  const bool is_horizontal = display_size.width() > display_size.height();
  SetOrientation(is_horizontal);

  const gfx::Size dialog_size =
      CalculateOobeDialogSize(size, shelf_height, is_horizontal);
  SetDialogSize(dialog_size.width(), dialog_size.height());
}
// Filters the current OOBE configuration down to the keys intended for the
// JS side and ships the result to the page.
void CoreOobeHandler::OnOobeConfigurationChanged() {
  base::Value configuration(base::Value::Type::DICTIONARY);
  configuration::FilterConfiguration(
      OobeConfiguration::Get()->GetConfiguration(),
      configuration::ConfigurationHandlerSide::HANDLER_JS, configuration);

  CallJS("cr.ui.Oobe.updateOobeConfiguration", configuration);
}
// WebUI callback: shows the help topic identified by |help_topic_id|
// (received from JS as a double), lazily creating the launcher on first use.
void CoreOobeHandler::HandleLaunchHelpApp(double help_topic_id) {
  // Braces added: the original multi-line `if` body was unbraced, which is
  // error-prone and against style for bodies that span lines.
  if (!help_app_.get()) {
    help_app_ = new HelpAppLauncher(
        LoginDisplayHost::default_host()->GetNativeWindow());
  }
  help_app_->ShowHelpTopic(
      static_cast<HelpAppLauncher::HelpTopic>(help_topic_id));
}
// Synthesizes a Tab key press (Shift+Tab when |reverse| is true) and
// dispatches it via SendEventToSink().
void CoreOobeHandler::HandleRaiseTabKeyEvent(bool reverse) {
  ui::KeyEvent event(ui::ET_KEY_PRESSED, ui::VKEY_TAB, ui::EF_NONE);
  if (reverse)
    event.set_flags(ui::EF_SHIFT_DOWN);
  SendEventToSink(&event);
}
// Test-only WebUI callback. Expects exactly one argument (the JS callback
// id) and resolves it asynchronously with the primary display's name via
// GetPrimaryDisplayNameCallback().
void CoreOobeHandler::HandleGetPrimaryDisplayNameForTesting(
    const base::ListValue* args) {
  CHECK_EQ(1U, args->GetListDeprecated().size());
  const base::Value& callback_id = args->GetListDeprecated()[0];

  cros_display_config_->GetDisplayUnitInfoList(
      false /* single_unified */,
      base::BindOnce(&CoreOobeHandler::GetPrimaryDisplayNameCallback,
                     weak_ptr_factory_.GetWeakPtr(), callback_id.Clone()));
}
// Resolves the pending JS callback with the name of the primary display
// found in |info_list|. DCHECKs that a primary display was present.
void CoreOobeHandler::GetPrimaryDisplayNameCallback(
    const base::Value& callback_id,
    std::vector<ash::mojom::DisplayUnitInfoPtr> info_list) {
  AllowJavascript();

  std::string primary_name;
  for (const auto& unit : info_list) {
    if (unit->is_primary) {
      primary_name = unit->name;
      break;
    }
  }
  DCHECK(!primary_name.empty());
  ResolveJavascriptCallback(callback_id, base::Value(primary_name));
}
// Test-only WebUI callback: simulates a demo-mode setup ("online" or
// "offline") and advances the wizard to the demo-setup screen, unless the
// login screen has already started.
void CoreOobeHandler::HandleStartDemoModeSetupForTesting(
    const std::string& demo_config) {
  DemoSession::DemoModeConfig config;
  if (demo_config == "online") {
    config = DemoSession::DemoModeConfig::kOnline;
  } else if (demo_config == "offline") {
    config = DemoSession::DemoModeConfig::kOffline;
  } else {
    NOTREACHED() << "Unknown demo config passed for tests";
    // Bail out: in release builds NOTREACHED() does not terminate, and
    // falling through would read |config| uninitialized.
    return;
  }

  WizardController* wizard_controller = WizardController::default_controller();
  if (wizard_controller && !wizard_controller->login_screen_started()) {
    wizard_controller->SimulateDemoModeSetupForTesting(config);
    wizard_controller->AdvanceToScreen(DemoSetupScreenView::kScreenId);
  }
}
// WebUI callback: converts the raw |state| int from JS into
// ash::OobeDialogState and relays it to the LoginDisplayHost.
void CoreOobeHandler::HandleUpdateOobeUIState(int state) {
  if (LoginDisplayHost::default_host()) {
    auto dialog_state = static_cast<ash::OobeDialogState>(state);
    LoginDisplayHost::default_host()->UpdateOobeDialogState(dialog_state);
  }
}
} // namespace chromeos
| {
"content_hash": "ba151b8e212e68deea63d233195da048",
"timestamp": "",
"source": "github",
"line_count": 394,
"max_line_length": 80,
"avg_line_length": 37.78934010152284,
"alnum_prop": 0.7335616898381355,
"repo_name": "scheib/chromium",
"id": "148ebe93aeb00f4814a0f8cdaf03d32f33181897",
"size": "14889",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "chrome/browser/ui/webui/chromeos/login/core_oobe_handler.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
'use strict';
// Spec for RegisterCtrl: boots the 'infoApp' module, instantiates the
// controller against a fresh child scope and checks the scaffolded state.
describe('Controller: RegisterCtrl', function () {

  // load the controller's module
  beforeEach(module('infoApp'));

  var RegisterCtrl,
    scope;

  // Initialize the controller and a mock scope
  beforeEach(inject(function ($controller, $rootScope) {
    scope = $rootScope.$new();
    RegisterCtrl = $controller('RegisterCtrl', {
      $scope: scope
      // place here mocked dependencies
    });
  }));

  // NOTE(review): despite the title, this reads awesomeThings off the
  // controller instance rather than the scope -- assumes RegisterCtrl
  // assigns it to `this`; confirm against the controller implementation.
  it('should attach a list of awesomeThings to the scope', function () {
    expect(RegisterCtrl.awesomeThings.length).toBe(3);
  });
});
| {
"content_hash": "0d74b4de23b0779be9cbf168cc4e04bb",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 72,
"avg_line_length": 24.608695652173914,
"alnum_prop": 0.6625441696113075,
"repo_name": "ZhiRuYan/info",
"id": "ded405bcfd1e8defba0857ddd3da0f63ac1f7730",
"size": "566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/spec/controllers/register.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "382576"
},
{
"name": "HTML",
"bytes": "29072"
},
{
"name": "JavaScript",
"bytes": "73609"
}
],
"symlink_target": ""
} |
package org.datavec.api.records.reader.impl;
import org.datavec.api.records.SequenceRecord;
import org.datavec.api.records.metadata.RecordMetaData;
import org.datavec.api.records.reader.SequenceRecordReader;
import org.datavec.api.records.reader.impl.csv.CSVSequenceRecordReader;
import org.datavec.api.split.InputSplit;
import org.datavec.api.split.NumberedFileInputSplit;
import org.datavec.api.util.ClassPathResource;
import org.datavec.api.writable.Writable;
import org.datavec.api.writable.WritableType;
import org.junit.Test;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import static org.junit.Assert.assertEquals;
/**
 * Unit tests for {@link CSVSequenceRecordReader}: basic sequence reading,
 * {@code reset()} behaviour, record-metadata round-trips and integration
 * with {@link NumberedFileInputSplit}.
 *
 * <p>The reader is constructed with 1 header line to skip; each
 * csvsequence_N.txt fixture then yields 4 time steps of 3 comma-separated
 * values, where the value at (sequence, line, column) is
 * {@code 100*sequence + 10*line + column}.
 */
public class CSVSequenceRecordReaderTest {

    /** Verifies every cell of all three sequences against the value encoding above. */
    @Test
    public void test() throws Exception {
        CSVSequenceRecordReader seqReader = new CSVSequenceRecordReader(1, ",");
        seqReader.initialize(new TestInputSplit());

        int sequenceCount = 0;
        while (seqReader.hasNext()) {
            List<List<Writable>> sequence = seqReader.sequenceRecord();
            assertEquals(4, sequence.size()); //4 lines, plus 1 header line

            Iterator<List<Writable>> timeStepIter = sequence.iterator();
            int lineCount = 0;
            while (timeStepIter.hasNext()) {
                List<Writable> timeStep = timeStepIter.next();
                assertEquals(3, timeStep.size());

                Iterator<Writable> lineIter = timeStep.iterator();
                int countInLine = 0;
                while (lineIter.hasNext()) {
                    Writable entry = lineIter.next();
                    // Expected cell value encodes its position.
                    int expValue = 100 * sequenceCount + 10 * lineCount + countInLine;
                    assertEquals(String.valueOf(expValue), entry.toString());
                    countInLine++;
                }
                lineCount++;
            }
            sequenceCount++;
        }
    }

    /** reset() must allow the same three sequences to be re-read repeatedly. */
    @Test
    public void testReset() throws Exception {
        CSVSequenceRecordReader seqReader = new CSVSequenceRecordReader(1, ",");
        seqReader.initialize(new TestInputSplit());

        int nTests = 5;
        for (int i = 0; i < nTests; i++) {
            seqReader.reset();
            int sequenceCount = 0;
            while (seqReader.hasNext()) {
                List<List<Writable>> sequence = seqReader.sequenceRecord();
                assertEquals(4, sequence.size()); //4 lines, plus 1 header line

                Iterator<List<Writable>> timeStepIter = sequence.iterator();
                int lineCount = 0;
                while (timeStepIter.hasNext()) {
                    timeStepIter.next();
                    lineCount++;
                }
                sequenceCount++;
                assertEquals(4, lineCount);
            }
            assertEquals(3, sequenceCount);
        }
    }

    /**
     * Sequences reloaded via {@code loadSequenceFromMetaData} must match both
     * the plain {@code sequenceRecord()} output and {@code nextSequence()}.
     */
    @Test
    public void testMetaData() throws Exception {
        CSVSequenceRecordReader seqReader = new CSVSequenceRecordReader(1, ",");
        seqReader.initialize(new TestInputSplit());

        // First pass: collect all sequences via sequenceRecord().
        List<List<List<Writable>>> l = new ArrayList<>();
        while (seqReader.hasNext()) {
            List<List<Writable>> sequence = seqReader.sequenceRecord();
            assertEquals(4, sequence.size()); //4 lines, plus 1 header line

            Iterator<List<Writable>> timeStepIter = sequence.iterator();
            int lineCount = 0;
            while (timeStepIter.hasNext()) {
                timeStepIter.next();
                lineCount++;
            }
            assertEquals(4, lineCount);

            l.add(sequence);
        }

        // Second pass: re-read with nextSequence(), collecting metadata.
        List<SequenceRecord> l2 = new ArrayList<>();
        List<RecordMetaData> meta = new ArrayList<>();
        seqReader.reset();
        while (seqReader.hasNext()) {
            SequenceRecord sr = seqReader.nextSequence();
            l2.add(sr);
            meta.add(sr.getMetaData());
        }
        assertEquals(3, l2.size());

        // Loading from the collected metadata must reproduce the sequences.
        List<SequenceRecord> fromMeta = seqReader.loadSequenceFromMetaData(meta);
        for (int i = 0; i < 3; i++) {
            assertEquals(l.get(i), l2.get(i).getSequenceRecord());
            assertEquals(l.get(i), fromMeta.get(i).getSequenceRecord());
        }
    }

    /**
     * Minimal InputSplit over the three csvsequence fixture files. Only the
     * location accessors and reset() are usable; serialization and the
     * value-conversion methods all throw UnsupportedOperationException.
     */
    private static class TestInputSplit implements InputSplit {

        @Override
        public long length() {
            return 3; // number of fixture files
        }

        @Override
        public URI[] locations() {
            URI[] arr = new URI[3];
            try {
                arr[0] = new ClassPathResource("csvsequence_0.txt").getFile().toURI();
                arr[1] = new ClassPathResource("csvsequence_1.txt").getFile().toURI();
                arr[2] = new ClassPathResource("csvsequence_2.txt").getFile().toURI();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
            return arr;
        }

        @Override
        public Iterator<URI> locationsIterator() {
            return Arrays.asList(locations()).iterator();
        }

        @Override
        public Iterator<String> locationsPathIterator() {
            URI[] loc = locations();
            String[] arr = new String[loc.length];
            for (int i = 0; i < loc.length; i++) {
                arr[i] = loc[i].toString();
            }
            return Arrays.asList(arr).iterator();
        }

        @Override
        public void reset() {
            //No op
        }

        @Override
        public void write(DataOutput out) throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        public void writeType(DataOutput out) throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        public double toDouble() {
            throw new UnsupportedOperationException();
        }

        @Override
        public float toFloat() {
            throw new UnsupportedOperationException();
        }

        @Override
        public int toInt() {
            throw new UnsupportedOperationException();
        }

        @Override
        public long toLong() {
            throw new UnsupportedOperationException();
        }

        @Override
        public WritableType getType() {
            throw new UnsupportedOperationException();
        }
    }

    /**
     * Smoke test: a CSVSequenceRecordReader initialized from a
     * NumberedFileInputSplit must iterate over all fixtures without error.
     */
    @Test
    public void testCsvSeqAndNumberedFileSplit() throws Exception {
        //Simple sanity check unit test
        for (int i = 0; i < 3; i++) {
            new org.nd4j.linalg.io.ClassPathResource(String.format("csvsequence_%d.txt", i)).getTempFileFromArchive();
        }

        //Load time series from CSV sequence files; compare to SequenceRecordReaderDataSetIterator
        org.nd4j.linalg.io.ClassPathResource resource = new org.nd4j.linalg.io.ClassPathResource("csvsequence_0.txt");
        // NOTE(review): replaceAll substitutes every '0' in the absolute
        // path, not just the file-name index -- fragile if the temp
        // directory path happens to contain a '0'.
        String featuresPath = resource.getTempFileFromArchive().getAbsolutePath().replaceAll("0", "%d");

        SequenceRecordReader featureReader = new CSVSequenceRecordReader(1, ",");
        featureReader.initialize(new NumberedFileInputSplit(featuresPath, 0, 2));
        while(featureReader.hasNext()){
            featureReader.nextSequence();
        }
    }
}
| {
"content_hash": "1d58ee0ffce974fe592cb2e8fbba81ad",
"timestamp": "",
"source": "github",
"line_count": 225,
"max_line_length": 118,
"avg_line_length": 32.99111111111111,
"alnum_prop": 0.5856122861376802,
"repo_name": "huitseeker/DataVec",
"id": "e3fca263a0f0ae89e078eca19aacff790298f90a",
"size": "8086",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "datavec-api/src/test/java/org/datavec/api/records/reader/impl/CSVSequenceRecordReaderTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "FreeMarker",
"bytes": "28528"
},
{
"name": "Java",
"bytes": "3898724"
},
{
"name": "Shell",
"bytes": "10037"
}
],
"symlink_target": ""
} |
<!doctype html public "-//W3C//DTD HTML 4.0 Transitional//EN" "http://www.w3.org/TR/REC-html40/loose.dtd">
<html>
<head>
<title>PHPXRef 0.7.1 : Unnamed Project : Function Reference: form_prep()</title>
<link rel="stylesheet" href="../sample.css" type="text/css">
<link rel="stylesheet" href="../sample-print.css" type="text/css" media="print">
<style id="hilight" type="text/css"></style>
<meta http-equiv="content-type" content="text/html;charset=iso-8859-1">
</head>
<body bgcolor="#ffffff" text="#000000" link="#801800" vlink="#300540" alink="#ffffff">
<table class="pagetitle" width="100%">
<tr>
<td valign="top" class="pagetitle">
[ <a href="../index.html">Index</a> ]
</td>
<td align="right" class="pagetitle">
<h2 style="margin-bottom: 0px">PHP Cross Reference of Unnamed Project</h2>
</td>
</tr>
</table>
<!-- Generated by PHPXref 0.7.1 at Thu Oct 23 19:31:09 2014 -->
<!-- PHPXref (c) 2000-2010 Gareth Watts - gareth@omnipotent.net -->
<!-- http://phpxref.sourceforge.net/ -->
<script src="../phpxref.js" type="text/javascript"></script>
<script language="JavaScript" type="text/javascript">
<!--
ext='.html';
relbase='../';
subdir='_functions';
filename='index.html';
cookiekey='phpxref';
handleNavFrame(relbase, subdir, filename);
logFunction('form_prep');
// -->
</script>
<script language="JavaScript" type="text/javascript">
if (gwGetCookie('xrefnav')=='off')
document.write('<p class="navlinks">[ <a href="javascript:navOn()">Show Explorer<\/a> ]<\/p>');
else
document.write('<p class="navlinks">[ <a href="javascript:navOff()">Hide Explorer<\/a> ]<\/p>');
</script>
<noscript>
<p class="navlinks">
[ <a href="../nav.html" target="_top">Show Explorer</a> ]
[ <a href="index.html" target="_top">Hide Navbar</a> ]
</p>
</noscript>
[<a href="../index.html">Top level directory</a>]<br>
<script language="JavaScript" type="text/javascript">
<!--
document.writeln('<table align="right" class="searchbox-link"><tr><td><a class="searchbox-link" href="javascript:void(0)" onMouseOver="showSearchBox()">Search</a><br>');
document.writeln('<table border="0" cellspacing="0" cellpadding="0" class="searchbox" id="searchbox">');
document.writeln('<tr><td class="searchbox-title">');
document.writeln('<a class="searchbox-title" href="javascript:showSearchPopup()">Search History +</a>');
document.writeln('<\/td><\/tr>');
document.writeln('<tr><td class="searchbox-body" id="searchbox-body">');
document.writeln('<form name="search" style="margin:0px; padding:0px" onSubmit=\'return jump()\'>');
document.writeln('<a class="searchbox-body" href="../_classes/index.html">Class<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="classname"><br>');
document.writeln('<a id="funcsearchlink" class="searchbox-body" href="../_functions/index.html">Function<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="funcname"><br>');
document.writeln('<a class="searchbox-body" href="../_variables/index.html">Variable<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="varname"><br>');
document.writeln('<a class="searchbox-body" href="../_constants/index.html">Constant<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="constname"><br>');
document.writeln('<a class="searchbox-body" href="../_tables/index.html">Table<\/a>: ');
document.writeln('<input type="text" size=10 value="" name="tablename"><br>');
document.writeln('<input type="submit" class="searchbox-button" value="Search">');
document.writeln('<\/form>');
document.writeln('<\/td><\/tr><\/table>');
document.writeln('<\/td><\/tr><\/table>');
// -->
</script>
<div id="search-popup" class="searchpopup"><p id="searchpopup-title" class="searchpopup-title">title</p><div id="searchpopup-body" class="searchpopup-body">Body</div><p class="searchpopup-close"><a href="javascript:gwCloseActive()">[close]</a></p></div>
<h3>Function and Method Cross Reference</h3>
<h2><a href="index.html#form_prep">form_prep()</a></h2>
<b>Defined at:</b><ul>
<li><a href="../system/helpers/form_helper.php.html#form_prep">/system/helpers/form_helper.php</a> -> <a onClick="logFunction('form_prep', '/system/helpers/form_helper.php.source.html#l606')" href="../system/helpers/form_helper.php.source.html#l606"> line 606</a></li>
</ul>
<b>Referenced 6 times:</b><ul>
<li><a href="../system/helpers/form_helper.php.html">/system/helpers/form_helper.php</a> -> <a href="../system/helpers/form_helper.php.source.html#l147"> line 147</a></li>
<li><a href="../system/helpers/form_helper.php.html">/system/helpers/form_helper.php</a> -> <a href="../system/helpers/form_helper.php.source.html#l265"> line 265</a></li>
<li><a href="../system/helpers/form_helper.php.html">/system/helpers/form_helper.php</a> -> <a href="../system/helpers/form_helper.php.source.html#l626"> line 626</a></li>
<li><a href="../system/helpers/form_helper.php.html">/system/helpers/form_helper.php</a> -> <a href="../system/helpers/form_helper.php.source.html#l684"> line 684</a></li>
<li><a href="../system/helpers/form_helper.php.html">/system/helpers/form_helper.php</a> -> <a href="../system/helpers/form_helper.php.source.html#l687"> line 687</a></li>
<li><a href="../system/helpers/form_helper.php.html">/system/helpers/form_helper.php</a> -> <a href="../system/helpers/form_helper.php.source.html#l948"> line 948</a></li>
</ul>
<!-- A link to the phpxref site in your customized footer file is appreciated ;-) -->
<br><hr>
<table width="100%">
<tr><td>Generated: Thu Oct 23 19:31:09 2014</td>
<td align="right"><i>Cross-referenced by <a href="http://phpxref.sourceforge.net/">PHPXref 0.7.1</a></i></td>
</tr>
</table>
</body></html>
| {
"content_hash": "a95761819657a38a3fca275fb793c661",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 269,
"avg_line_length": 56.53465346534654,
"alnum_prop": 0.670753064798599,
"repo_name": "inputx/code-ref-doc",
"id": "0b6c0e98db7802ed4e7f8afd7e91c22a5f31588a",
"size": "5710",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "codeigniter/_functions/form_prep.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "17952"
},
{
"name": "JavaScript",
"bytes": "255489"
}
],
"symlink_target": ""
} |
<?php
namespace Nibbletech\Support\Facades;
use Illuminate\Support\Facades\Facade;
class Feedback extends Facade {

    /**
     * Get the registered name of the component in the service container.
     *
     * @return string
     */
    protected static function getFacadeAccessor()
    {
        return 'feedback';
    }
}
"content_hash": "d02040608b66b9a17e74581e489d6b6f",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 72,
"avg_line_length": 19.133333333333333,
"alnum_prop": 0.6794425087108014,
"repo_name": "NibbleTech/feedback-messages",
"id": "7a67d95da8bc29a38ae793c28b66e57cec71e27b",
"size": "287",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Nibbletech/Support/Facades/Feedback.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "15067"
}
],
"symlink_target": ""
} |
package com.navercorp.pinpoint.profiler.util;
import com.navercorp.pinpoint.bootstrap.util.StringUtils;
import com.navercorp.pinpoint.thrift.dto.*;
import org.apache.thrift.TBase;
/**
 * Maps plain Java values onto the thrift {@link TAnnotationValue} union of a
 * {@link TAnnotation}. Unsupported objects fall back to their (abbreviated)
 * {@code toString()} representation; {@link TBase} instances are rejected.
 *
 * @author emeroad
 */
public final class AnnotationValueMapper {

    private AnnotationValueMapper() {
    }

    /**
     * Stores {@code value} into {@code annotation}. A {@code null} value
     * leaves the annotation untouched.
     *
     * @throws IllegalArgumentException if {@code value} is a {@link TBase}
     */
    public static void mappingValue(TAnnotation annotation, Object value) {
        if (value == null) {
            return;
        }
        annotation.setValue(toAnnotationValue(value));
    }

    // Converts a raw value into the matching TAnnotationValue variant.
    private static TAnnotationValue toAnnotationValue(Object value) {
        if (value instanceof String) {
            return TAnnotationValue.stringValue((String) value);
        }
        if (value instanceof Integer) {
            return TAnnotationValue.intValue((Integer) value);
        }
        if (value instanceof Long) {
            return TAnnotationValue.longValue((Long) value);
        }
        if (value instanceof Boolean) {
            return TAnnotationValue.boolValue((Boolean) value);
        }
        if (value instanceof Byte) {
            return TAnnotationValue.byteValue((Byte) value);
        }
        if (value instanceof Float) {
            // thrift does not contain "float" type
            return TAnnotationValue.doubleValue((Float) value);
        }
        if (value instanceof Double) {
            return TAnnotationValue.doubleValue((Double) value);
        }
        if (value instanceof byte[]) {
            return TAnnotationValue.binaryValue((byte[]) value);
        }
        if (value instanceof Short) {
            return TAnnotationValue.shortValue((Short) value);
        }
        if (value instanceof TBase) {
            throw new IllegalArgumentException("TBase not supported. Class:" + value.getClass());
        }
        return TAnnotationValue.stringValue(StringUtils.abbreviate(value.toString()));
    }
}
| {
"content_hash": "8e8ba1318bdb8d400bb15d55df1bae3a",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 97,
"avg_line_length": 35.48275862068966,
"alnum_prop": 0.6282798833819242,
"repo_name": "sjmittal/pinpoint",
"id": "af14b5282e90b627f0eb0f7076dddeadb84063ce",
"size": "2652",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "profiler/src/main/java/com/navercorp/pinpoint/profiler/util/AnnotationValueMapper.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "22853"
},
{
"name": "CSS",
"bytes": "148240"
},
{
"name": "CoffeeScript",
"bytes": "10124"
},
{
"name": "Groovy",
"bytes": "1423"
},
{
"name": "HTML",
"bytes": "627927"
},
{
"name": "Java",
"bytes": "9799087"
},
{
"name": "JavaScript",
"bytes": "4822318"
},
{
"name": "Makefile",
"bytes": "5246"
},
{
"name": "PLSQL",
"bytes": "4156"
},
{
"name": "Python",
"bytes": "3523"
},
{
"name": "Ruby",
"bytes": "943"
},
{
"name": "Shell",
"bytes": "30663"
},
{
"name": "Thrift",
"bytes": "9043"
}
],
"symlink_target": ""
} |
// Two-canvas renderer: static geometry is redrawn into |staticCanvas| only
// when the Field flags new content, while |dynamicCanvas| is cleared and
// redrawn on every animation frame.
var Renderer = Class.create(DisplayObject, {

  // Caches both canvases and their 2D contexts, builds the Field and resets
  // the animation bookkeeping.
  initialize: function($super, staticCanvas, dynamicCanvas) {
    $super();

    this.staticCanvas = staticCanvas;
    this.dynamicCanvas = dynamicCanvas;

    this.staticContext = this.staticCanvas.getContext('2d');
    this.dynamicContext = this.dynamicCanvas.getContext('2d');

    this.initField();

    this.timeoutID = null;
    this.isAnimated = false;

    //this.staticImageData = null;
  },

  // Creates the Field, attaches this renderer as its parent and positions it.
  initField: function() {
    this.field = new Field();
    this.field.parent = this;
    this.field.x = 64;
    this.field.y = Brick.SIZE;
    this.field.setup();
  },

  // Hook overridden by subclasses; the base renderer has no HTML controls.
  initializeHTMLInterface: function() {},

  // Toggles the field's debug overlay.
  debug: function() {
    this.field.debugMode = !this.field.debugMode;
  },

  // Starts the animation loop (idempotent while already running).
  startRender: function() {
    if (!this.isAnimated) {
      this.isAnimated = true;
      this.animate();
    }
  },

  // Stops scheduling further animation frames.
  stopRender: function() {
    this.isAnimated = false;
  },

  // Full shutdown: stop rendering, halt the Box2D simulation and cancel any
  // pending timeout.
  quit: function() {
    this.stopRender();
    this.field.stopBox2D();

    if (this.timeoutID) {
      clearTimeout(this.timeoutID);
      this.timeoutID = null;
    }
  },

  init: function() {
    this.startRender();
  },

  // Called when the ball leaves the track; stops the physics simulation.
  onBallExit: function() {
    this.field.stopBox2D();
  },

  // Clears the given canvas completely and begins a fresh path.
  clearCanvas: function(canvas) {
    var context = canvas.getContext('2d');
    context.clearRect(0, 0, canvas.width, canvas.height);
    context.beginPath();
  },

  // Animation loop: schedules the next frame while animated, then draws.
  animate: function() {
    if (this.isAnimated) {
      var myScope = this;
      requestAnimFrame(function() {
        myScope.animate();
      });
    }
    this.draw();
  },

  draw: function() {
    this.drawDynamics();
    this.drawStatics();
    // this.staticContext.putImageData(this.staticImageData, 0, 0);
    // NOTE(review): the getImageData(0,0,1,1) below discards its result --
    // presumably forces the canvas pipeline to flush; confirm.
    this.dynamicContext.getImageData(0, 0, 1, 1);
  },

  // Redraws the static layer only when the field flags new content
  // (field.renderNew). Uses a half-pixel translate for crisper 1px strokes.
  drawStatics: function() {
    if (this.field.renderNew) {
      this.staticContext.save();
      this.clearCanvas(this.staticCanvas);
      this.staticContext.translate(0.5, 0.5);

      this.field.drawStatics(this.staticContext);
      //this.staticImageData = this.staticContext.getImageData(0, 0, this.staticCanvas.width, this.staticCanvas.height);

      this.staticContext.restore();
    }
  },

  // Clears and redraws the dynamic layer; adds the field's debug drawing
  // when debug mode is on.
  drawDynamics: function() {
    this.dynamicContext.save();
    this.clearDynamicCanvas();
    this.dynamicContext.translate(0.5, 0.5);

    this.field.drawDynamics(this.dynamicContext);
    if (this.field.debugMode) {
      this.field.draw(this.dynamicContext);
    }

    this.dynamicContext.restore();
  },

  // NOTE(review): clearRectangles()/clearRects are not standard canvas API --
  // presumably added to the 2D context elsewhere in the project (dirty-rect
  // clearing); confirm.
  clearDynamicCanvas: function() {
    this.dynamicContext.clearRectangles();
    this.dynamicContext.clearRects = [];
  }
});
"content_hash": "39c465049e0355f479f71293025b4e59",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 122,
"avg_line_length": 18.72,
"alnum_prop": 0.5886752136752137,
"repo_name": "MathiasPaumgarten/marblerun",
"id": "76fe31352bd923c4c21854fb3adbbd42d4c75b39",
"size": "2808",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "public/javascripts/marblerun/renderer.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "14989"
},
{
"name": "HTML",
"bytes": "28342"
},
{
"name": "JavaScript",
"bytes": "131382"
},
{
"name": "Ruby",
"bytes": "39584"
}
],
"symlink_target": ""
} |
package com.mikepenz.materialdrawer.holder;
import android.support.annotation.StringRes;
/**
 * Drawer-local alias of the Materialize
 * {@code com.mikepenz.materialize.holder.StringHolder}: wraps either a
 * literal {@link CharSequence} or a string resource id.
 * <p>
 * Created by mikepenz on 13.07.15.
 */
public class StringHolder extends com.mikepenz.materialize.holder.StringHolder {

    /** Holds a literal text value. */
    public StringHolder(CharSequence text) {
        super(text);
    }

    /** Holds a string resource reference. */
    public StringHolder(@StringRes int textRes) {
        super(textRes);
    }
}
| {
"content_hash": "e175cf685c27fbe00b1ae1065ebeb82b",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 80,
"avg_line_length": 23.1875,
"alnum_prop": 0.7115902964959568,
"repo_name": "yunarta/MaterialDrawer",
"id": "60f3a40d870f681340436ee0ea6c65a1c30e95db",
"size": "371",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "library/src/main/java/com/mikepenz/materialdrawer/holder/StringHolder.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "516690"
},
{
"name": "Ruby",
"bytes": "25"
}
],
"symlink_target": ""
} |
from swgpy.object import *
def create(kernel):
	"""Build the shared_signal_rerouter tangible template.

	Args:
		kernel: engine kernel handle (unused here, but part of the
			template-loader calling convention).

	Returns:
		Tangible: the configured template object.
	"""
	result = Tangible()

	result.template = "object/tangible/component/item/quest_item/shared_signal_rerouter.iff"
	result.attribute_template_id = -1
	result.stfName("craft_item_ingredients_n","signal_rerouter")

	# Generated file: custom changes belong between these markers.
	#### BEGIN MODIFICATIONS ####

	#### END MODIFICATIONS ####

	return result
"content_hash": "c2b9ccb9b6eb54cf3290786a793d01e6",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 89,
"avg_line_length": 26.153846153846153,
"alnum_prop": 0.7147058823529412,
"repo_name": "anhstudios/swganh",
"id": "71c87990042a74d53384f9e237060a801bb501f5",
"size": "485",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/tangible/component/item/quest_item/shared_signal_rerouter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
} |
/**
* @author Kobi
* @date 2020/5/12
*/
// Specs for the bi.multilayer_icon_arrow_node widget: keyword red-marking,
// selection state, click handling and icon-driven expansion.
describe("test node.multilayer.icon.arrow", function () {

    /**
     * test_author_kobi
     **/
    // doRedMark should highlight the keyword; unRedMark should remove it.
    it("doRedMark 和 unRedMark", function () {
        var widget = BI.Test.createWidget({
            type: "bi.multilayer_icon_arrow_node",
            text: "要标红的AAA",
            layer: 3,
        });
        expect(widget.isOnce()).to.equal(true);
        widget.doRedMark("AAA");
        expect(widget.element.find(".bi-keyword-red-mark").length).to.not.equal(0);
        widget.unRedMark();
        expect(widget.element.find(".bi-keyword-red-mark").length).to.equal(0);
        widget.destroy();
    });

    /**
     * test_author_kobi
     **/
    // setSelected(true) should add the "active" class and be reflected by
    // isSelected().
    it("isSelected 和 setSelected", function () {
        var widget = BI.Test.createWidget({
            type: "bi.multilayer_icon_arrow_node",
            text: "AAA",
            layer: 3,
        });
        widget.setSelected(true);
        expect(widget.element.find(".active").length).to.not.equal(0);
        expect(widget.isSelected()).to.equal(true);
        widget.destroy();
    });

    /**
     * test_author_kobi
     **/
    // Clicking the widget element should select the node.
    it("doClick", function (done) {
        var widget = BI.Test.createWidget({
            type: "bi.multilayer_icon_arrow_node",
            text: "AAA",
            layer: 3,
        });
        BI.nextTick(function () {
            widget.element.click();
            expect(widget.isSelected()).to.equal(true);
            widget.destroy();
            done();
        });
    });

    /**
     * test_author_kobi
     **/
    // Clicking the expander icon should switch it to the expanded
    // (down-arrow) state.
    it("点击图标", function (done) {
        var widget = BI.Test.createWidget({
            type: "bi.multilayer_icon_arrow_node",
            text: "AAA",
            layer: 3,
        });
        BI.nextTick(function () {
            widget.node.element.click();
            expect(widget.element.find(".expander-down-font").length).to.not.equal(0);
            widget.destroy();
            done();
        });
    });
});
| {
"content_hash": "da4413b84a99de6608f0f0139c55a95a",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 86,
"avg_line_length": 27.28767123287671,
"alnum_prop": 0.5,
"repo_name": "fanruan/fineui",
"id": "c060e5499f473bcef2237eb14c419ceb4fc6d0ad",
"size": "2012",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/case/button/node/__test__/node.multilayer.icon.arrow.test.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "24"
},
{
"name": "HTML",
"bytes": "615"
},
{
"name": "JavaScript",
"bytes": "3472897"
},
{
"name": "Less",
"bytes": "218501"
},
{
"name": "Shell",
"bytes": "24"
},
{
"name": "TypeScript",
"bytes": "164533"
}
],
"symlink_target": ""
} |
// See http://www.boost.org/libs/test for the library home page.
//
// File : $RCSfile$
//
// Version : $Revision$
//
// Description : included (vs. linked ) version of Program Execution Monitor
// ***************************************************************************
#ifndef BOOST_INCLUDED_PRG_EXEC_MONITOR_HPP_071894GER
#define BOOST_INCLUDED_PRG_EXEC_MONITOR_HPP_071894GER
#include <boost/test/impl/execution_monitor.ipp>
#include <boost/test/impl/debug.ipp>
#include <boost/test/impl/cpp_main.ipp>
#define BOOST_TEST_INCLUDED
#include <boost/test/prg_exec_monitor.hpp>
#endif // BOOST_INCLUDED_PRG_EXEC_MONITOR_HPP_071894GER
| {
"content_hash": "6a855ef3571f15cc5d68aa98458bb8e1",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 78,
"avg_line_length": 32.19047619047619,
"alnum_prop": 0.6198224852071006,
"repo_name": "Franky666/programmiersprachen-raytracer",
"id": "828ab019b43a317b8cf925c05764a8c00a25686f",
"size": "886",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "external/boost_1_59_0/boost/test/included/prg_exec_monitor.hpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "905071"
},
{
"name": "C++",
"bytes": "46207"
},
{
"name": "CMake",
"bytes": "4419"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_75) on Tue May 19 17:15:49 PDT 2015 -->
<title>Uses of Class org.apache.hadoop.mapred.lib.TotalOrderPartitioner (Hadoop 2.6.0-mr1-cdh5.4.2 API)</title>
<meta name="date" content="2015-05-19">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.hadoop.mapred.lib.TotalOrderPartitioner (Hadoop 2.6.0-mr1-cdh5.4.2 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../org/apache/hadoop/mapred/lib/TotalOrderPartitioner.html" title="class in org.apache.hadoop.mapred.lib">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/apache/hadoop/mapred/lib/class-use/TotalOrderPartitioner.html" target="_top">Frames</a></li>
<li><a href="TotalOrderPartitioner.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.apache.hadoop.mapred.lib.TotalOrderPartitioner" class="title">Uses of Class<br>org.apache.hadoop.mapred.lib.TotalOrderPartitioner</h2>
</div>
<div class="classUseContainer">No usage of org.apache.hadoop.mapred.lib.TotalOrderPartitioner</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../org/apache/hadoop/mapred/lib/TotalOrderPartitioner.html" title="class in org.apache.hadoop.mapred.lib">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/apache/hadoop/mapred/lib/class-use/TotalOrderPartitioner.html" target="_top">Frames</a></li>
<li><a href="TotalOrderPartitioner.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2009 The Apache Software Foundation</small></p>
</body>
</html>
| {
"content_hash": "1ee454531abbcbeda98ecef82a8f3265",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 163,
"avg_line_length": 38.5948275862069,
"alnum_prop": 0.6191646191646192,
"repo_name": "ZhangXFeng/hadoop",
"id": "262cdb9a25754f5c92d4bde153aaebdbfeaaded9",
"size": "4477",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "share/doc/hadoop-mapreduce1/api/org/apache/hadoop/mapred/lib/class-use/TotalOrderPartitioner.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AspectJ",
"bytes": "189381"
},
{
"name": "Batchfile",
"bytes": "215694"
},
{
"name": "C",
"bytes": "3575939"
},
{
"name": "C++",
"bytes": "2163041"
},
{
"name": "CMake",
"bytes": "100256"
},
{
"name": "CSS",
"bytes": "621096"
},
{
"name": "HTML",
"bytes": "96504707"
},
{
"name": "Java",
"bytes": "111573402"
},
{
"name": "JavaScript",
"bytes": "228374"
},
{
"name": "Makefile",
"bytes": "7278"
},
{
"name": "Objective-C",
"bytes": "118273"
},
{
"name": "PHP",
"bytes": "152555"
},
{
"name": "Perl",
"bytes": "187872"
},
{
"name": "Protocol Buffer",
"bytes": "561225"
},
{
"name": "Python",
"bytes": "1166492"
},
{
"name": "Ruby",
"bytes": "28485"
},
{
"name": "Shell",
"bytes": "912677"
},
{
"name": "Smalltalk",
"bytes": "56562"
},
{
"name": "TeX",
"bytes": "45082"
},
{
"name": "Thrift",
"bytes": "3965"
},
{
"name": "XSLT",
"bytes": "183042"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en-us">
<head>
<link href="http://gmpg.org/xfn/11" rel="profile">
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="description" content="startups, venture capital, big ideas">
<!-- Enable responsiveness on mobile devices-->
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1">
<title>
Are Big Ideas of the Past Now? · Afzal Jasani
</title>
<!-- CSS -->
<link rel="stylesheet" href="/public/css/poole.css">
<link rel="stylesheet" href="/public/css/syntax.css">
<link rel="stylesheet" href="/public/css/lanyon.css">
<link rel="stylesheet" href="http://fonts.googleapis.com/css?family=PT+Serif:400,400italic,700|PT+Sans:400">
<!-- Icons -->
<link rel="apple-touch-icon-precomposed" sizes="144x144" href="/public/apple-touch-icon-144-precomposed.png">
<link rel="shortcut icon" href="/public/favicon.ico">
<!-- RSS -->
<link rel="alternate" type="application/rss+xml" title="RSS" href="/atom.xml">
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-55017909-1', 'auto');
ga('send', 'pageview');
</script>
</head>
<body>
<!-- Target for toggling the sidebar `.sidebar-checkbox` is for regular
styles, `#sidebar-checkbox` for behavior. -->
<input type="checkbox" class="sidebar-checkbox" id="sidebar-checkbox">
<!-- Toggleable sidebar -->
<div class="sidebar" id="sidebar">
<div class="sidebar-item">
<p>Learning and exploring new things</p>
</div>
<nav class="sidebar-nav">
<a class="sidebar-nav-item" href="/">Home</a>
<a class="sidebar-nav-item" href="/about.html">About</a>
<a class="sidebar-nav-item" href="/archive.html">Archive</a>
<a class="sidebar-nav-item" href="/photos.html">Photos</a>
<a class="sidebar-nav-item" href="/projects.html">Projects</a>
<span class="sidebar-nav-item">Currently v2.0.0</span>
</nav>
<div class="sidebar-item">
<p>
© 2015. All rights reserved.
</p>
</div>
</div>
<!-- Wrap is the content to shift when toggling the sidebar. We wrap the
content to avoid any CSS collisions with our real content. -->
<div class="wrap">
<div class="masthead">
<div class="container">
<label for="sidebar-checkbox" class="sidebar-toggle"></label>
<h3 class="masthead-title">
<a href="/" title="Home">Afzal Jasani</a>
<small></small>
</h3>
</div>
</div>
<div class="container content">
<div class="post">
<h1 class="post-title">Are Big Ideas of the Past Now?</h1>
<span class="post-date">21 Jun 2013</span>
<p>As more and more individuals jump on the bandwagon of starting their own companies and pitching their ideas to VCs, the more dulled down ideas are starting to appear. Across the industry in different fields, growing competitors are hacking and improving but are dismissing the “big ideas.” It could be because most startups have a 99% fail rate, so why not play it safe and hack something that has already been done before. However, it seems that the growing number of startups is lacking the innovation and quality they once had over 10 years ago. Chamath Palihapitiya, a venture capitalist and founder of The Social+Capital Partnership has stated the following: “We are at an absolute minimum in terms of things that are being started.” There are several sectors which could use more innovation and growth such as health care, education, and energy. But the lack of big ideas is making it a struggle for both investors and founders.</p>
<p>While looking back at the past and comparing it to the present might not be the best analogy, it does prove a point. VCs and founders are playing it safe to make minimal improvements to platforms which have been around since 2003. For huge revolutions, the ideas must stem from somewhere else, not the start up scene. It is not wrong to want bigger and better ideas, but given the circumstances we can not hope for something of the past to reappear.</p>
<p>Instead maybe we should wait and see what comes about in the next few years. There could perhaps be an idea we have never thought of or maybe a product which could tie many open ends together, maybe it is 3D printing, maybe it is a transformation in online education. Maybe Google will bring the next revolution in online connectivity. But until then we can only focus on making what we have more sustainable and scalable in order to make these ideas expand beyond ourselves.</p>
<p><em>Reference Links</em></p>
<ul>
<li><a href="http://finance.fortune.cnn.com/tag/chamath-palihapitiya/">http://finance.fortune.cnn.com/tag/chamath-palihapitiya/</a></li>
<li><a href="http://techcrunch.com/2013/04/29/chamath-palihapitiya-disrupt/">http://techcrunch.com/2013/04/29/chamath-palihapitiya-disrupt/</a></li>
</ul>
<div id="disqus_thread"></div>
<script type="text/javascript">
/* * * CONFIGURATION VARIABLES: EDIT BEFORE PASTING INTO YOUR WEBPAGE * * */
var disqus_shortname = 'afzaljasanicom'; // required: replace example with your forum shortname
/* * * DON'T EDIT BELOW THIS LINE * * */
(function() {
var dsq = document.createElement('script'); dsq.type = 'text/javascript'; dsq.async = true;
dsq.src = '//' + disqus_shortname + '.disqus.com/embed.js';
(document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq);
})();
</script>
<noscript>Please enable JavaScript to view the <a href="http://disqus.com/?ref_noscript">comments powered by Disqus.</a></noscript>
<a href="http://disqus.com" class="dsq-brlink">comments powered by <span class="logo-disqus">Disqus</span></a>
</div>
<div class="related">
<h2>Related Posts</h2>
<ul class="related-posts">
<li>
<h3>
<a href="/blog/Writting-A-Twitter-Bot">
Writing a Twitter Bot
<small>01 May 2015</small>
</a>
</h3>
</li>
<li>
<h3>
<a href="/blog/Installing-Google-Analytics-on-a-Jekyll-Site">
Installing Google Analytics on Jekyll Site
<small>01 Oct 2014</small>
</a>
</h3>
</li>
<li>
<h3>
<a href="/blog/Funding-At-Its-Finest">
                  Funding At Its Finest
<small>23 Jul 2014</small>
</a>
</h3>
</li>
</ul>
</div>
</div>
</div>
</body>
</html>
| {
"content_hash": "30f4550ceeb03168997aaf4b20996447",
"timestamp": "",
"source": "github",
"line_count": 197,
"max_line_length": 944,
"avg_line_length": 36.52791878172589,
"alnum_prop": 0.6346581434130072,
"repo_name": "aj786123/aj786123.github.io",
"id": "134c4676cd045d9ebfacac4aee1c8e84746b239e",
"size": "7204",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_site/blog/Big-Ideas/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "41386"
},
{
"name": "HTML",
"bytes": "143469"
}
],
"symlink_target": ""
} |
#include "elst2.h"
#include <cstdlib>
namespace tesseract {
/***********************************************************************
* ELIST2::internal_clear
*
* Used by the destructor and the "clear" member function of derived list
* classes to destroy all the elements on the list.
* The calling function passes a "zapper" function which can be called to
* delete each element of the list, regardless of its derived type. This
* technique permits a generic clear function to destroy elements of
* different derived types correctly, without requiring virtual functions and
* the consequential memory overhead.
**********************************************************************/
void ELIST2::internal_clear( // destroy all links
void (*zapper)(void *)) {
// ptr to zapper functn
ELIST2_LINK *ptr;
ELIST2_LINK *next;
if (!empty()) {
ptr = last->next; // set to first
last->next = nullptr; // break circle
last = nullptr; // set list empty
while (ptr) {
next = ptr->next;
zapper(ptr);
ptr = next;
}
}
}
/***********************************************************************
* ELIST2::assign_to_sublist
*
* The list is set to a sublist of another list. "This" list must be empty
* before this function is invoked. The two iterators passed must refer to
* the same list, different from "this" one. The sublist removed is the
* inclusive list from start_it's current position to end_it's current
* position. If this range passes over the end of the source list then the
* source list has its end set to the previous element of start_it. The
* extracted sublist is unaffected by the end point of the source list, its
* end point is always the end_it position.
**********************************************************************/
void ELIST2::assign_to_sublist( // to this list
    ELIST2_ITERATOR *start_it, // from list start
    ELIST2_ITERATOR *end_it) { // from list end
  // A sublist may only be moved into an empty destination list.
  constexpr ERRCODE LIST_NOT_EMPTY("Destination list must be empty before extracting a sublist");
  if (!empty()) {
    LIST_NOT_EMPTY.error("ELIST2.assign_to_sublist", ABORT);
  }
  // extract_sublist unlinks the inclusive range [start_it, end_it] from the
  // source list and returns the last element of the new circular chain.
  last = start_it->extract_sublist(end_it);
}
/***********************************************************************
* ELIST2::sort
*
* Sort elements on list
* NB If you don't like the const declarations in the comparator, coerce yours:
* (int (*)(const void *, const void *)
**********************************************************************/
void ELIST2::sort( // sort elements
int comparator( // comparison routine
const void *, const void *)) {
// Allocate an array of pointers, one per list element.
auto count = length();
if (count > 0) {
// ptr array to sort
std::vector<ELIST2_LINK *> base;
base.reserve(count);
ELIST2_ITERATOR it(this);
// Extract all elements, putting the pointers in the array.
for (it.mark_cycle_pt(); !it.cycled_list(); it.forward()) {
base.push_back(it.extract());
}
// Sort the pointer array.
qsort(&base[0], count, sizeof(base[0]), comparator);
// Rebuild the list from the sorted pointers.
for (auto current : base) {
it.add_to_end(current);
}
}
}
// Assuming list has been sorted already, insert new_link to
// keep the list sorted according to the same comparison function.
// Comparison function is the same as used by sort, i.e. uses double
// indirection. Time is O(1) to add to beginning or end.
// Time is linear to add pre-sorted items to an empty list.
void ELIST2::add_sorted(int comparator(const void *, const void *), ELIST2_LINK *new_link) {
  // Check for adding at the end.
  // NOTE: comparator receives pointers-to-pointers (ELIST2_LINK **), the
  // same double indirection used by ELIST2::sort's qsort call.
  if (last == nullptr || comparator(&last, &new_link) < 0) {
    if (last == nullptr) {
      // List was empty: new_link becomes a self-referencing singleton.
      new_link->next = new_link;
      new_link->prev = new_link;
    } else {
      // Splice new_link between the current last element and the first.
      new_link->next = last->next;
      new_link->prev = last;
      last->next = new_link;
      new_link->next->prev = new_link;
    }
    last = new_link;
  } else {
    // Need to use an iterator.
    // Walk forward to the first element that compares greater than
    // new_link, then insert just before it.
    ELIST2_ITERATOR it(this);
    for (it.mark_cycle_pt(); !it.cycled_list(); it.forward()) {
      ELIST2_LINK *link = it.data();
      if (comparator(&link, &new_link) > 0) {
        break;
      }
    }
    if (it.cycled_list()) {
      // No strictly-greater element found: new_link goes at the end.
      it.add_to_end(new_link);
    } else {
      it.add_before_then_move(new_link);
    }
  }
}
/***********************************************************************
* MEMBER FUNCTIONS OF CLASS: ELIST2_ITERATOR
* ==========================================
**********************************************************************/
/***********************************************************************
* ELIST2_ITERATOR::forward
*
* Move the iterator to the next element of the list.
* REMEMBER: ALL LISTS ARE CIRCULAR.
**********************************************************************/
ELIST2_LINK *ELIST2_ITERATOR::forward() {
#ifndef NDEBUG
  if (!list)
    NO_LIST.error("ELIST2_ITERATOR::forward", ABORT);
#endif
  if (list->empty()) {
    return nullptr;
  }
  if (current) { // not removed so
    // set previous
    prev = current;
    started_cycling = true;
    // In case next is deleted by another iterator, get it from the current.
    current = current->next;
  } else {
    // current was extracted: resume from the remembered "next" link.
    if (ex_current_was_cycle_pt) {
      // The extracted element was the cycle point, so the cycle point
      // advances with the iterator.
      cycle_pt = next;
    }
    current = next;
  }
#ifndef NDEBUG
  if (!current)
    NULL_DATA.error("ELIST2_ITERATOR::forward", ABORT);
#endif
  // Re-cache the successor so that removal of current by another iterator
  // cannot strand this one.
  next = current->next;
#ifndef NDEBUG
  if (!next) {
    NULL_NEXT.error("ELIST2_ITERATOR::forward", ABORT,
                    "This is: %p Current is: %p",
                    static_cast<void *>(this),
                    static_cast<void *>(current));
  }
#endif
  return current;
}
/***********************************************************************
* ELIST2_ITERATOR::backward
*
* Move the iterator to the previous element of the list.
* REMEMBER: ALL LISTS ARE CIRCULAR.
**********************************************************************/
ELIST2_LINK *ELIST2_ITERATOR::backward() {
#ifndef NDEBUG
  if (!list)
    NO_LIST.error("ELIST2_ITERATOR::backward", ABORT);
#endif
  if (list->empty()) {
    return nullptr;
  }
  if (current) { // not removed so
    // set previous
    next = current;
    started_cycling = true;
    // In case prev is deleted by another iterator, get it from current.
    current = current->prev;
  } else {
    // current was extracted: resume from the remembered "prev" link.
    if (ex_current_was_cycle_pt) {
      // The extracted element was the cycle point, so the cycle point
      // moves back with the iterator.
      cycle_pt = prev;
    }
    current = prev;
  }
#ifndef NDEBUG
  if (!current)
    NULL_DATA.error("ELIST2_ITERATOR::backward", ABORT);
  if (!prev) {
    NULL_PREV.error("ELIST2_ITERATOR::backward", ABORT,
                    "This is: %p Current is: %p",
                    static_cast<void *>(this),
                    static_cast<void *>(current));
  }
#endif
  // Re-cache the predecessor so that removal of current by another iterator
  // cannot strand this one.
  prev = current->prev;
  return current;
}
/***********************************************************************
* ELIST2_ITERATOR::data_relative
*
* Return the data pointer to the element "offset" elements from current.
* (This function can't be INLINEd because it contains a loop)
**********************************************************************/
ELIST2_LINK *ELIST2_ITERATOR::data_relative( // get data + or - ..
    int8_t offset) { // offset from current
#ifndef NDEBUG
  if (!list)
    NO_LIST.error("ELIST2_ITERATOR::data_relative", ABORT);
  if (list->empty())
    EMPTY_LIST.error("ELIST2_ITERATOR::data_relative", ABORT);
#endif
  // Start from the current element; if it has been removed, fall back to
  // the cached neighbour on the appropriate side, then walk |offset| steps.
  ELIST2_LINK *link;
  if (offset < 0) {
    link = current ? current : next;
    while (offset < 0) {
      link = link->prev;
      ++offset;
    }
  } else {
    link = current ? current : prev;
    while (offset > 0) {
      link = link->next;
      --offset;
    }
  }
#ifndef NDEBUG
  if (!link)
    NULL_DATA.error("ELIST2_ITERATOR::data_relative", ABORT);
#endif
  return link;
}
/***********************************************************************
* ELIST2_ITERATOR::exchange()
*
* Given another iterator, whose current element is a different element on
* the same list list OR an element of another list, exchange the two current
* elements. On return, each iterator points to the element which was the
* other iterators current on entry.
* (This function hasn't been in-lined because its a bit big!)
**********************************************************************/
void ELIST2_ITERATOR::exchange( // positions of 2 links
    ELIST2_ITERATOR *other_it) { // other iterator
  constexpr ERRCODE DONT_EXCHANGE_DELETED("Can't exchange deleted elements of lists");
  ELIST2_LINK *old_current;
#ifndef NDEBUG
  if (!list)
    NO_LIST.error("ELIST2_ITERATOR::exchange", ABORT);
  if (!other_it)
    BAD_PARAMETER.error("ELIST2_ITERATOR::exchange", ABORT, "other_it nullptr");
  if (!(other_it->list))
    NO_LIST.error("ELIST2_ITERATOR::exchange", ABORT, "other_it");
#endif
  /* Do nothing if either list is empty or if both iterators reference the
     same link */
  if ((list->empty()) || (other_it->list->empty()) || (current == other_it->current)) {
    return;
  }
  /* Error if either current element is deleted */
  if (!current || !other_it->current) {
    DONT_EXCHANGE_DELETED.error("ELIST2_ITERATOR.exchange", ABORT);
  }
  /* Now handle the 4 cases: doubleton list; non-doubleton adjacent elements
     (other before this); non-doubleton adjacent elements (this before other);
     non-adjacent elements. */
  // adjacent links
  if ((next == other_it->current) || (other_it->next == current)) {
    // doubleton list: each element's neighbour is the other, so only the
    // iterators' cached neighbours need updating.
    if ((next == other_it->current) && (other_it->next == current)) {
      prev = next = current;
      other_it->prev = other_it->next = other_it->current;
    } else { // non-doubleton with
      // adjacent links
      // other before this
      if (other_it->next == current) {
        other_it->prev->next = current;
        other_it->current->next = next;
        other_it->current->prev = current;
        current->next = other_it->current;
        current->prev = other_it->prev;
        next->prev = other_it->current;
        other_it->next = other_it->current;
        prev = current;
      } else { // this before other
        prev->next = other_it->current;
        current->next = other_it->next;
        current->prev = other_it->current;
        other_it->current->next = current;
        other_it->current->prev = prev;
        other_it->next->prev = current;
        next = current;
        other_it->prev = other_it->current;
      }
    }
  } else { // no overlap
    // Non-adjacent (possibly on different lists): rewire all six
    // neighbour pointers around each element symmetrically.
    prev->next = other_it->current;
    current->next = other_it->next;
    current->prev = other_it->prev;
    next->prev = other_it->current;
    other_it->prev->next = current;
    other_it->current->next = next;
    other_it->current->prev = prev;
    other_it->next->prev = current;
  }
  /* update end of list pointer when necessary (remember that the 2 iterators
     may iterate over different lists!) */
  if (list->last == current) {
    list->last = other_it->current;
  }
  if (other_it->list->last == other_it->current) {
    other_it->list->last = current;
  }
  if (current == cycle_pt) {
    cycle_pt = other_it->cycle_pt;
  }
  // NOTE(review): if BOTH currents are their iterators' cycle points, the
  // assignment above has already overwritten cycle_pt, so other_it here
  // receives its own original value rather than this iterator's original
  // cycle point - possible latent bug; confirm intended semantics.
  if (other_it->current == other_it->cycle_pt) {
    other_it->cycle_pt = cycle_pt;
  }
  /* The actual exchange - in all cases*/
  old_current = current;
  current = other_it->current;
  other_it->current = old_current;
}
/***********************************************************************
* ELIST2_ITERATOR::extract_sublist()
*
* This is a private member, used only by ELIST2::assign_to_sublist.
* Given another iterator for the same list, extract the links from THIS to
* OTHER inclusive, link them into a new circular list, and return a
* pointer to the last element.
* (Can't inline this function because it contains a loop)
**********************************************************************/
ELIST2_LINK *ELIST2_ITERATOR::extract_sublist( // from this current
    ELIST2_ITERATOR *other_it) { // to other current
#ifndef NDEBUG
  constexpr ERRCODE BAD_EXTRACTION_PTS("Can't extract sublist from points on different lists");
  constexpr ERRCODE DONT_EXTRACT_DELETED("Can't extract a sublist marked by deleted points");
#endif
  constexpr ERRCODE BAD_SUBLIST("Can't find sublist end point in original list");
  ELIST2_ITERATOR temp_it = *this;
  ELIST2_LINK *end_of_new_list;
#ifndef NDEBUG
  if (!other_it)
    BAD_PARAMETER.error("ELIST2_ITERATOR::extract_sublist", ABORT, "other_it nullptr");
  if (!list)
    NO_LIST.error("ELIST2_ITERATOR::extract_sublist", ABORT);
  if (list != other_it->list)
    BAD_EXTRACTION_PTS.error("ELIST2_ITERATOR.extract_sublist", ABORT);
  if (list->empty())
    EMPTY_LIST.error("ELIST2_ITERATOR::extract_sublist", ABORT);
  if (!current || !other_it->current)
    DONT_EXTRACT_DELETED.error("ELIST2_ITERATOR.extract_sublist", ABORT);
#endif
  // Reset the "extracted element" bookkeeping on both iterators before the
  // walk below recomputes it for the sublist being removed.
  ex_current_was_last = other_it->ex_current_was_last = false;
  ex_current_was_cycle_pt = false;
  other_it->ex_current_was_cycle_pt = false;
  temp_it.mark_cycle_pt();
  // Walk from this->current towards other_it->current, recording whether
  // the sublist contains the source list's last element or either
  // iterator's cycle point.
  do { // walk sublist
    if (temp_it.cycled_list()) { // can't find end pt
      // Wrapped all the way round without meeting other_it->current.
      BAD_SUBLIST.error("ELIST2_ITERATOR.extract_sublist", ABORT);
    }
    if (temp_it.at_last()) {
      // The source list's last element is inside the sublist, so the
      // element before the sublist becomes the new last.
      list->last = prev;
      ex_current_was_last = other_it->ex_current_was_last = true;
    }
    if (temp_it.current == cycle_pt) {
      ex_current_was_cycle_pt = true;
    }
    if (temp_it.current == other_it->cycle_pt) {
      other_it->ex_current_was_cycle_pt = true;
    }
    temp_it.forward();
  }
  // do INCLUSIVE list
  while (temp_it.prev != other_it->current);
  // circularise sublist
  other_it->current->next = current;
  // circularise sublist
  current->prev = other_it->current;
  end_of_new_list = other_it->current;
  // sublist = whole list
  if (prev == other_it->current) {
    // The sublist was the entire source list: leave the source empty and
    // both iterators with no current/neighbour elements.
    list->last = nullptr;
    prev = current = next = nullptr;
    other_it->prev = other_it->current = other_it->next = nullptr;
  } else {
    // Close the gap in the source list and leave both iterators in the
    // "current removed" state, positioned around the gap.
    prev->next = other_it->next;
    other_it->next->prev = prev;
    current = other_it->current = nullptr;
    next = other_it->next;
    other_it->prev = prev;
  }
  return end_of_new_list;
}
} // namespace tesseract
| {
"content_hash": "f6b50c45c92a3de4eeac25aca37912f1",
"timestamp": "",
"source": "github",
"line_count": 460,
"max_line_length": 97,
"avg_line_length": 31.339130434782607,
"alnum_prop": 0.5677025527192009,
"repo_name": "amitdo/tesseract",
"id": "64d22fdb659bc586cc15d6bb6ce88a3d21afd772",
"size": "15314",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "src/ccutil/elst2.cpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "83843"
},
{
"name": "C++",
"bytes": "7576183"
},
{
"name": "CMake",
"bytes": "55272"
},
{
"name": "Dockerfile",
"bytes": "582"
},
{
"name": "Java",
"bytes": "74135"
},
{
"name": "M4",
"bytes": "3378"
},
{
"name": "Makefile",
"bytes": "68108"
},
{
"name": "Python",
"bytes": "700"
},
{
"name": "Shell",
"bytes": "27567"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<application xmlns="http://ns.adobe.com/air/application/17.0">
<!-- Adobe AIR Application Descriptor File Template.
Specifies parameters for identifying, installing, and launching AIR applications.
xmlns - The Adobe AIR namespace: http://ns.adobe.com/air/application/3.5
The last segment of the namespace specifies the version
of the AIR runtime required for this application to run.
minimumPatchLevel - The minimum patch level of the AIR runtime required to run
the application. Optional.
-->
<!-- A universally unique application identifier. Must be unique across all AIR applications.
Using a reverse DNS-style name as the id is recommended. (Eg. com.example.ExampleApplication.) Required. -->
<id>de.cware.apps.DukeConMobile</id>
<!-- Used as the filename for the application. Required. -->
<filename>DukeConMobile</filename>
<!-- The name that is displayed in the AIR application installer.
May have multiple values for each language. See samples or xsd schema file. Optional. -->
<name>DukeConMobile</name>
<!-- A string value of the format <0-999>.<0-999>.<0-999> that represents application version which can be used to check for application upgrade.
Values can also be 1-part or 2-part. It is not necessary to have a 3-part value.
An updated version of application must have a versionNumber value higher than the previous version. Required for namespace >= 2.5 . -->
<versionNumber>1.0.0</versionNumber>
<!-- A string value (such as "v1", "2.5", or "Alpha 1") that represents the version of the application, as it should be shown to users. Optional. -->
<!-- <versionLabel></versionLabel> -->
<!-- Description, displayed in the AIR application installer.
May have multiple values for each language. See samples or xsd schema file. Optional. -->
<!-- <description></description> -->
<!-- Copyright information. Optional -->
<!-- <copyright></copyright> -->
<!-- Publisher ID. Used if you're updating an application created prior to 1.5.3 -->
<!-- <publisherID></publisherID> -->
<!-- Settings for the application's initial window. Required. -->
<initialWindow>
<!-- The main SWF or HTML file of the application. Required. -->
<!-- Note: In Flash Builder, the SWF reference is set automatically. -->
<content>${output}</content>
<!-- The title of the main window. Optional. -->
<!-- <title></title> -->
<!-- The type of system chrome to use (either "standard" or "none"). Optional. Default standard. -->
<!-- <systemChrome></systemChrome> -->
<!-- Whether the window is transparent. Only applicable when systemChrome is none. Optional. Default false. -->
<!-- <transparent></transparent> -->
<!-- Whether the window is initially visible. Optional. Default false. -->
<!-- <visible></visible> -->
<!-- Whether the user can minimize the window. Optional. Default true. -->
<!-- <minimizable></minimizable> -->
<!-- Whether the user can maximize the window. Optional. Default true. -->
<!-- <maximizable></maximizable> -->
<!-- Whether the user can resize the window. Optional. Default true. -->
<!-- <resizable></resizable> -->
<!-- The window's initial width in pixels. Optional. -->
<!-- <width></width> -->
<!-- The window's initial height in pixels. Optional. -->
<!-- <height></height> -->
<!-- The window's initial x position. Optional. -->
<!-- <x></x> -->
<!-- The window's initial y position. Optional. -->
<!-- <y></y> -->
<!-- The window's minimum size, specified as a width/height pair in pixels, such as "400 200". Optional. -->
<!-- <minSize></minSize> -->
<!-- The window's initial maximum size, specified as a width/height pair in pixels, such as "1600 1200". Optional. -->
<!-- <maxSize></maxSize> -->
<!-- The aspect ratio of the app ("portrait" or "landscape" or "any"). Optional. Mobile only. Default is the natural orientation of the device -->
<!-- <aspectRatio></aspectRatio> -->
<!-- Whether the app will begin auto-orienting on launch. Optional. Mobile only. Default false -->
<!-- <autoOrients></autoOrients> -->
<!-- Whether the app launches in full screen. Optional. Mobile only. Default false -->
<!-- <fullScreen></fullScreen> -->
<!-- The render mode for the app (either auto, cpu, gpu, or direct). Optional. Default auto -->
<!-- <renderMode></renderMode> -->
<!-- Whether the default direct mode rendering context allocates storage for depth and stencil buffers. Optional. Default false. -->
<!-- <depthAndStencil></depthAndStencil> -->
<!-- Whether or not to pan when a soft keyboard is raised or lowered (either "pan" or "none"). Optional. Defaults "pan." -->
<!-- <softKeyboardBehavior></softKeyboardBehavior> -->
<!-- Display Resolution for the app (either "standard" or "high"). Optional, OSX-only. Default "standard" -->
<!-- <requestedDisplayResolution></requestedDisplayResolution> -->
<autoOrients>true</autoOrients>
<fullScreen>false</fullScreen>
<visible>true</visible>
<softKeyboardBehavior>none</softKeyboardBehavior>
</initialWindow>
<!-- We recommend omitting the supportedProfiles element, -->
<!-- which in turn permits your application to be deployed to all -->
<!-- devices supported by AIR. If you wish to restrict deployment -->
<!-- (i.e., to only mobile devices) then add this element and list -->
<!-- only the profiles which your application does support. -->
<!-- <supportedProfiles>${supportedProfiles}</supportedProfiles> -->
<!-- Languages supported by application -->
<!-- Only these languages can be specified -->
<!-- <supportedLanguages>en de cs es fr it ja ko nl pl pt ru sv tr zh</supportedLanguages> -->
<!-- The subpath of the standard default installation location to use. Optional. -->
<!-- <installFolder></installFolder> -->
<!-- The subpath of the Programs menu to use. (Ignored on operating systems without a Programs menu.) Optional. -->
<!-- <programMenuFolder></programMenuFolder> -->
<!-- The icon the system uses for the application. For at least one resolution,
specify the path to a PNG file included in the AIR package. Optional. -->
<!-- <icon>
<image16x16></image16x16>
<image29x29></image29x29>
<image32x32></image32x32>
<image36x36></image36x36>
<image48x48></image48x48>
<image50x50></image50x50>
<image57x57></image57x57>
<image58x58></image58x58>
<image72x72></image72x72>
<image96x96></image96x96>
<image100x100></image100x100>
<image114x114></image114x114>
<image128x128></image128x128>
<image144x144></image144x144>
<image512x512></image512x512>
<image732x412></image732x412>
<image1024x1024></image1024x1024>
</icon> -->
<!-- Whether the application handles the update when a user double-clicks an update version
of the AIR file (true), or the default AIR application installer handles the update (false).
Optional. Default false. -->
<!-- <customUpdateUI></customUpdateUI> -->
<!-- Whether the application can be launched when the user clicks a link in a web browser.
Optional. Default false. -->
<!-- <allowBrowserInvocation></allowBrowserInvocation> -->
<!-- Listing of file types for which the application can register. Optional. -->
<!-- <fileTypes> -->
<!-- Defines one file type. Optional. -->
<!-- <fileType> -->
<!-- The name that the system displays for the registered file type. Required. -->
<!-- <name></name> -->
<!-- The extension to register. Required. -->
<!-- <extension></extension> -->
<!-- The description of the file type. Optional. -->
<!-- <description></description> -->
<!-- The MIME content type. -->
<!-- <contentType></contentType> -->
<!-- The icon to display for the file type. Optional. -->
<!-- <icon>
<image16x16></image16x16>
<image32x32></image32x32>
<image48x48></image48x48>
<image128x128></image128x128>
</icon> -->
<!-- </fileType> -->
<!-- </fileTypes> -->
<!-- iOS specific capabilities -->
<!-- <iPhone> -->
<!-- A list of plist key/value pairs to be added to the application Info.plist -->
<!-- <InfoAdditions>
<![CDATA[
<key>UIDeviceFamily</key>
<array>
<string>1</string>
<string>2</string>
</array>
<key>UIStatusBarStyle</key>
<string>UIStatusBarStyleBlackOpaque</string>
<key>UIRequiresPersistentWiFi</key>
<string>YES</string>
]]>
</InfoAdditions> -->
<!-- A list of plist key/value pairs to be added to the application Entitlements.plist -->
<!-- <Entitlements>
<![CDATA[
<key>keychain-access-groups</key>
<array>
<string></string>
<string></string>
</array>
]]>
</Entitlements> -->
<!-- Display Resolution for the app (either "standard" or "high"). Optional. Default "standard" -->
<!-- <requestedDisplayResolution></requestedDisplayResolution> -->
<!-- Forcing Render Mode CPU for the devices mentioned. Optional -->
<!-- <forceCPURenderModeForDevices></forceCPURenderModeForDevices> -->
<!-- File containing line separated list of external swf paths. These swfs won't be
packaged inside the application and corresponding stripped swfs will be output in
externalStrippedSwfs folder. -->
<!-- <externalSwfs></externalSwfs> -->
<!-- </iPhone> -->
<!-- Specify Android specific tags that get passed to AndroidManifest.xml file. -->
<!--<android> -->
<!-- <manifestAdditions>
<![CDATA[
<manifest android:installLocation="auto">
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION"/>
<uses-feature android:required="true" android:name="android.hardware.touchscreen.multitouch"/>
<application android:enabled="true">
<activity android:excludeFromRecents="false">
<intent-filter>
<action android:name="android.intent.action.MAIN"/>
<category android:name="android.intent.category.LAUNCHER"/>
</intent-filter>
</activity>
</application>
</manifest>
]]>
</manifestAdditions> -->
<!-- Color depth for the app (either "32bit" or "16bit"). Optional. Default 16bit before namespace 3.0, 32bit after -->
<!-- <colorDepth></colorDepth> -->
<!-- Indicates if the app contains video or not. Necessary for ordering of video planes with graphics plane, especially in Jellybean - if you app does video this must be set to true - valid values are true or false -->
<!-- <containsVideo></containsVideo> -->
<!-- </android> -->
<!-- End of the schema for adding the android specific tags in AndroidManifest.xml file -->
<android>
<colorDepth>32bit</colorDepth>
<manifestAdditions><![CDATA[
<manifest android:installLocation="auto">
<!--See the Adobe AIR documentation for more information about setting Google Android permissions-->
<!--Removing the permission android.permission.INTERNET will have the side effect
of preventing you from debugging your application on your device-->
<uses-permission android:name="android.permission.INTERNET"/>
<!--<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>-->
<!--<uses-permission android:name="android.permission.READ_PHONE_STATE"/>-->
<!--<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION"/>-->
<!--The DISABLE_KEYGUARD and WAKE_LOCK permissions should be toggled together
in order to access AIR's SystemIdleMode APIs-->
<!--<uses-permission android:name="android.permission.DISABLE_KEYGUARD"/>-->
<!--<uses-permission android:name="android.permission.WAKE_LOCK"/>-->
<!--<uses-permission android:name="android.permission.CAMERA"/>-->
<!--<uses-permission android:name="android.permission.RECORD_AUDIO"/>-->
<!--The ACCESS_NETWORK_STATE and ACCESS_WIFI_STATE permissions should be toggled
together in order to use AIR's NetworkInfo APIs-->
<!--<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE"/>-->
<!--<uses-permission android:name="android.permission.ACCESS_WIFI_STATE"/>-->
</manifest>
]]></manifestAdditions>
</android>
<iPhone>
<requestedDisplayResolution>high</requestedDisplayResolution>
</iPhone>
</application>
| {
"content_hash": "bc84d23f3a9004bd2486f6b1dab82a51",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 226,
"avg_line_length": 43.824137931034485,
"alnum_prop": 0.6585884019198993,
"repo_name": "jugda/dukecon_flex",
"id": "de88797aee92261b3d70286029ddea4b606781b3",
"size": "12709",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/src/main/air/descriptor.xml",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "105106"
},
{
"name": "CSS",
"bytes": "1380"
},
{
"name": "Groovy",
"bytes": "12439"
}
],
"symlink_target": ""
} |
""" Grid layout.
Layout a series of widgets in a grid. The grid has a specified number of columns.
Example:
.. UIExample:: 300
from flexx import flx
class Example(flx.Widget):
def init(self):
with flx.HSplit():
with flx.GridLayout(ncolumns=3):
flx.Button(text='A')
flx.Button(text='B')
flx.Button(text='C')
flx.Button(text='D')
flx.Button(text='E')
flx.Button(text='F')
with flx.GridLayout(ncolumns=2):
flx.Button(text='A', flex=(1, 1)) # Set flex for 1st row and col
flx.Button(text='B', flex=(2, 1)) # Set flex for 2nd col
flx.Button(text='C', flex=(1, 1)) # Set flex for 2nd row
flx.Button(text='D')
flx.Button(text='E', flex=(1, 2)) # Set flex for 3d row
flx.Button(text='F')
"""
from ... import event
from . import Layout
class GridLayout(Layout):
    """ A layout widget that places its children in a grid with a certain number
    of columns. The flex values of the children in the first row determine the
    sizing of the columns. The flex values of the first child of each row
    determine the sizing of the rows.

    The ``node`` of this widget is a
    `<div> <https://developer.mozilla.org/docs/Web/HTML/Element/div>`_,
    which lays out its child widgets and their labels using
    `CSS grid <https://css-tricks.com/snippets/css/complete-guide-grid/>`_.
    """

    CSS = """
    .flx-GridLayout {
        display: grid;
        justify-content: stretch;
        align-content: stretch;
        justify-items: stretch;
        align-items: stretch;
    }
    """

    ncolumns = event.IntProp(2, settable=True, doc="""
        The number of columns of the grid.
        """)

    @event.reaction
    def _on_columns(self):
        # Recompute the CSS grid templates from ncolumns and the children's
        # flex values, and push them onto the DOM node's style.
        ncolumns = self.ncolumns
        children = self.children
        column_templates = []
        row_templates = []
        # Column widths come from the flex of the children in the first row.
        for i in range(min(ncolumns, len(children))):
            flex = children[i].flex[0]
            # NOTE(review): this reaction runs as transpiled JavaScript
            # (flexx/PScript), where number + string concatenates, e.g.
            # 2 + "fr" -> "2fr"; in plain CPython this would raise TypeError.
            column_templates.append(flex + "fr" if flex > 0 else "auto")
        # Row heights come from the flex of the first child of each row.
        for i in range(0, len(children), ncolumns):
            flex = children[i].flex[1]
            row_templates.append(flex + "fr" if flex > 0 else "auto")
        self.node.style['grid-template-rows'] = " ".join(row_templates)
        self.node.style['grid-template-columns'] = " ".join(column_templates)

    def _query_min_max_size(self):
        """ Overload to also take child limits into account.
        """
        # Collect contributions of child widgets into a 4-element list
        # (appears to be [min-w, max-w, min-h, max-h] -- confirm against
        # Layout._query_min_max_size): the first two are combined with
        # max/min, the last two are summed over children.
        mima1 = [0, 1e9, 0, 0]
        for child in self.children:
            mima2 = child._size_limits
            mima1[0] = max(mima1[0], mima2[0])
            mima1[1] = min(mima1[1], mima2[1])
            mima1[2] += mima2[2]
            mima1[3] += mima2[3]
        # Don't forget padding and spacing
        extra_padding = 2
        extra_spacing = 2
        for i in range(4):
            mima1[i] += extra_padding
        mima1[2] += extra_spacing
        mima1[3] += extra_spacing
        # Own limits
        mima3 = super()._query_min_max_size()
        # Combine own limits with limits of children
        return [max(mima1[0], mima3[0]),
                min(mima1[1], mima3[1]),
                max(mima1[2], mima3[2]),
                min(mima1[3], mima3[3])]
| {
"content_hash": "d4672f51f83c2e33b8a9aadb0cd5b875",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 85,
"avg_line_length": 33.24761904761905,
"alnum_prop": 0.5439702091091377,
"repo_name": "zoofIO/flexx",
"id": "068c89ee9d7c22b8b936232f06f1e1e1e3bc27c8",
"size": "3491",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "flexx/ui/layouts/_grid.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "JavaScript",
"bytes": "2989"
},
{
"name": "Python",
"bytes": "1265638"
}
],
"symlink_target": ""
} |
// Code generated by go-swagger; DO NOT EDIT.
package service
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"net/http"
"time"
"golang.org/x/net/context"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
strfmt "github.com/go-openapi/strfmt"
)
// NewGetServiceParams returns GetServiceParams initialized with the
// client's default timeout.
func NewGetServiceParams() *GetServiceParams {
	params := GetServiceParams{timeout: cr.DefaultTimeout}
	return &params
}
// NewGetServiceParamsWithTimeout returns GetServiceParams that will use
// the given per-request timeout instead of the client default.
func NewGetServiceParamsWithTimeout(timeout time.Duration) *GetServiceParams {
	params := GetServiceParams{timeout: timeout}
	return &params
}
// NewGetServiceParamsWithContext returns GetServiceParams bound to the
// given context for cancellation and deadlines.
func NewGetServiceParamsWithContext(ctx context.Context) *GetServiceParams {
	params := GetServiceParams{Context: ctx}
	return &params
}
// NewGetServiceParamsWithHTTPClient returns GetServiceParams that will
// perform the request with the supplied HTTP client.
func NewGetServiceParamsWithHTTPClient(client *http.Client) *GetServiceParams {
	params := GetServiceParams{HTTPClient: client}
	return &params
}
// GetServiceParams contains all the parameters to send to the API endpoint
// for the get service operation. Typically these are written to a http.Request.
type GetServiceParams struct {
	// timeout bounds how long the request may take; it is applied to the
	// outgoing request in WriteToRequest.
	timeout time.Duration
	// Context carries cancellation and deadlines for the request.
	Context context.Context
	// HTTPClient, when non-nil, overrides the client used to perform the request.
	HTTPClient *http.Client
}
// WithTimeout sets the request timeout on the get service params and
// returns the receiver so calls can be chained.
func (o *GetServiceParams) WithTimeout(timeout time.Duration) *GetServiceParams {
	o.timeout = timeout
	return o
}
// SetTimeout sets the per-request timeout on the get service params.
func (o *GetServiceParams) SetTimeout(timeout time.Duration) {
	o.timeout = timeout
}
// WithContext attaches ctx to the get service params and returns the
// receiver so calls can be chained.
func (o *GetServiceParams) WithContext(ctx context.Context) *GetServiceParams {
	o.Context = ctx
	return o
}
// SetContext attaches ctx to the get service params.
func (o *GetServiceParams) SetContext(ctx context.Context) {
	o.Context = ctx
}
// WithHTTPClient sets the HTTP client used for the request and returns
// the receiver so calls can be chained.
func (o *GetServiceParams) WithHTTPClient(client *http.Client) *GetServiceParams {
	o.HTTPClient = client
	return o
}
// SetHTTPClient sets the HTTP client used to perform the request.
func (o *GetServiceParams) SetHTTPClient(client *http.Client) {
	o.HTTPClient = client
}
// WriteToRequest writes these params to a swagger request.
//
// The get service operation carries no operation-specific parameters, so
// only the timeout is applied; there is nothing to validate. (The
// generated, always-empty []error accumulation was dead code and has been
// removed.)
func (o *GetServiceParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
	if err := r.SetTimeout(o.timeout); err != nil {
		return err
	}
	return nil
}
| {
"content_hash": "6a97ffe1b27a9f92c2f2bd47fc2443a9",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 96,
"avg_line_length": 26.885964912280702,
"alnum_prop": 0.7745513866231648,
"repo_name": "scanf/cilium",
"id": "353f28dbcf89971585366b58d7759b1aa57175d8",
"size": "3065",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "api/v1/client/service/get_service_parameters.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "660212"
},
{
"name": "C++",
"bytes": "6177"
},
{
"name": "Dockerfile",
"bytes": "4492"
},
{
"name": "Go",
"bytes": "4476462"
},
{
"name": "Makefile",
"bytes": "25535"
},
{
"name": "Perl 6",
"bytes": "4948"
},
{
"name": "Python",
"bytes": "10259"
},
{
"name": "Ruby",
"bytes": "11622"
},
{
"name": "Shell",
"bytes": "225848"
},
{
"name": "TeX",
"bytes": "416"
},
{
"name": "sed",
"bytes": "4191"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<!-- IntelliJ IDEA project configuration listing the modules that make up
     this workspace: the root BluetoothSmart+ module and the bluetoothsmart
     library module. -->
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/BluetoothSmart+.iml" filepath="$PROJECT_DIR$/BluetoothSmart+.iml" />
      <module fileurl="file://$PROJECT_DIR$/bluetoothsmart/bluetoothsmart.iml" filepath="$PROJECT_DIR$/bluetoothsmart/bluetoothsmart.iml" />
    </modules>
  </component>
</project>
"content_hash": "1df94e942aece5d0c6067196343e955b",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 140,
"avg_line_length": 45.666666666666664,
"alnum_prop": 0.7007299270072993,
"repo_name": "jlandrum/bluetoothsmart",
"id": "de76618984e7322b8052af48b4b3f8efe6953f80",
"size": "411",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": ".idea/modules.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "84704"
}
],
"symlink_target": ""
} |
#ifndef ENGINE_CONFIG_HPP
#define ENGINE_CONFIG_HPP

#include "storage/storage_config.hpp"

#include <boost/filesystem/path.hpp>

#include <string>

namespace osrm
{
namespace engine
{

/**
 * Configures an OSRM instance.
 *
 * You can customize the storage OSRM uses for auxiliary files specifying a storage config.
 *
 * You can further set service constraints.
 * These are the maximum number of allowed locations (-1 for unlimited) for the services:
 *  - Trip
 *  - Route
 *  - Table
 *  - Match
 *  - Nearest
 *
 * In addition, shared memory can be used for datasets loaded with osrm-datastore.
 *
 * You can choose between three algorithms:
 *  - Algorithm::CH
 *    Contraction Hierarchies, extremely fast queries but slow pre-processing. The default right
 *    now.
 *  - Algorithm::CoreCH
 *    Contraction Hierarchies with partial contraction for faster pre-processing but slower queries.
 *  - Algorithm::MLD
 *    Multi Level Dijkstra which is experimental and moderately fast in both pre-processing and
 *    query.
 *
 * If Algorithm::CH is specified we will automatically upgrade to CoreCH if we find the data for it.
 * If Algorithm::CoreCH is specified and we don't find the speedup data, we fail hard.
 *
 * \see OSRM, StorageConfig
 */
struct EngineConfig final
{
    bool IsValid() const;

    enum class Algorithm
    {
        CH,     // will upgrade to CoreCH if it finds core data
        CoreCH, // will fail hard if there is no core data
        MLD
    };

    storage::StorageConfig storage_config;
    // Per-service location limits; -1 means unlimited.
    int max_locations_trip = -1;
    int max_locations_viaroute = -1;
    int max_locations_distance_table = -1;
    int max_locations_map_matching = -1;
    int max_results_nearest = -1;
    bool use_shared_memory = true;
    Algorithm algorithm = Algorithm::CH;
};
} // namespace engine
} // namespace osrm

#endif // ENGINE_CONFIG_HPP
| {
"content_hash": "62f330bddb13dff4d796e8f48012e584",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 99,
"avg_line_length": 25.64788732394366,
"alnum_prop": 0.6968698517298187,
"repo_name": "duizendnegen/osrm-backend",
"id": "e2e8d8ae897a8481633b9e2d09e7bb87c8cda311",
"size": "3125",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "include/engine/engine_config.hpp",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "6616"
},
{
"name": "C++",
"bytes": "2961564"
},
{
"name": "CMake",
"bytes": "99471"
},
{
"name": "Gherkin",
"bytes": "1053654"
},
{
"name": "JavaScript",
"bytes": "196416"
},
{
"name": "Lua",
"bytes": "95749"
},
{
"name": "Makefile",
"bytes": "3170"
},
{
"name": "Python",
"bytes": "5717"
},
{
"name": "Shell",
"bytes": "15158"
}
],
"symlink_target": ""
} |
# Connects to the CouchDB/Cloudant server named in Settings[:db] and ensures
# that the application database (Settings[:APP_NAME]) exists, creating it if
# necessary. Exits the process when the connection info is missing.
# Returns a CouchRest database handle.
def set_db
  if (Settings[:db] && Settings[:APP_NAME])
    puts "CouchDB Server: #{Settings[:db]}"
    @server = CouchRest.new(server = Settings[:db])
  else
    puts "No database info defined. Please set the environment variable CLOUDANT_URL."
    exit
  end
  # Check to see if the database exists. If it does, assume it's
  # correctly initialized. If not, create it.
  # Note: despite its name, define_available_database is not just a boolean
  # check -- by default it creates the database when it does not exist.
  begin
    @server.define_available_database(:default, Settings[:APP_NAME])
  rescue Exception => e
    puts "Couldn't use or create the database (#{Settings[:db]}/#{Settings[:APP_NAME]})."
    puts "Check to make sure that CLOUDANT_URL is correct."
    puts e
  end
  db = CouchRest.database("#{Settings[:db]}/#{Settings[:APP_NAME]}")
  return db
end
# We support three different search methods, in order of preference:
#   1. Cloudant Search 2.0
#   2. couchdb-lucene proxy
#   3. couchdb-lucene Python hooks
# We determine which one to use by querying the database.
# Picks the search endpoint to use based on what the configured database
# supports, probing the server with HTTP requests. Returns the relative
# path of the search design doc/view.
def set_view
  # Are we a cloudant database?
  if (Settings[:db].include? "cloudant.com")
    return "#{Settings[:APP_NAME]}/_design/search/_search/by_content"
  else
    # Assuming a couchdb-lucene database: probe the two known URL layouts.
    begin
      if RestClient.get "#{Settings[:db]}/_fti/local"
        return "_fti/local/#{Settings[:APP_NAME]}/_design/lucene_search/by_content"
      end
    rescue => e
      begin
        if RestClient.get "#{Settings[:db]}/#{Settings[:APP_NAME]}/_fti/_design/lucene_search/by_content"
          return "#{Settings[:APP_NAME]}/_fti/_design/lucene_search/by_content"
        end
      rescue => e2
        # Neither probe succeeded; fall through to the default below.
      end
    end
    # Unable to find a search design doc. This may happen when the database
    # is empty. Start the app with a default search view.
    return "#{Settings[:APP_NAME]}/_fti/_design/lucene_search/by_content"
  end
end
# Loads a Java-style property file and merges every entry into the global
# configliere Settings hash. A missing file is silently ignored.
#
# file_name - the path of the property file to read
def load_config_file(file_name)
  return unless File.file?(file_name)
  File.open(file_name) do |prop_file|
    prop_file.each_line do |raw|
      line = raw.strip
      # Skip comment lines and lines that begin with '='.
      next if line.start_with?('#', '=')
      key, value = line.split('=', 2)
      if value
        Settings[key.strip] = value.strip
      else
        # No '=' present: record the bare line with an empty value.
        Settings[line] = ''
      end
    end
  end
end
################################################################################
# Application bootstrap: wire up configliere, load configuration from the
# YAML file, an optional .env file, and the process environment, then
# connect to the database.

Settings.use :env_var, :commandline
#Environment variables
# Read the other config settings
Settings.read('config/app_config.yaml')
load_config_file(".env") #This overrides the configs in the above files
Settings({
#take the config from the environment if it's set, OR from a .env file if it's not set
:db => ENV['CLOUDANT_URL'].nil? ? Settings[:CLOUDANT_URL] : ENV['CLOUDANT_URL'],
:DEFAULT_TOPIC => ENV['DEFAULT_TOPIC'].nil? ? Settings[:DEFAULT_TOPIC] : ENV['DEFAULT_TOPIC'],
:CURRENT_VERSION => ENV['CURRENT_VERSION'].nil? ? Settings[:CURRENT_VERSION] : ENV['CURRENT_VERSION'],
:LANDING_PAGE => ENV['LANDING_PAGE'].nil? ? Settings[:LANDING_PAGE] : ENV['LANDING_PAGE'],
:APP_TYPE => ENV['APP_TYPE'].nil? ? Settings[:APP_TYPE] : ENV['APP_TYPE'],
:BETA => ENV['BETA'].nil? ? Settings[:BETA] : ENV['BETA'],
:LOGIN_USERNAME => ENV['LOGIN_USERNAME'].nil? ? Settings[:LOGIN_USERNAME] : ENV['LOGIN_USERNAME'],
:LOGIN_PASSWORD => ENV['LOGIN_PASSWORD'].nil? ? Settings[:LOGIN_PASSWORD] : ENV['LOGIN_PASSWORD']
})
Settings.resolve!
# Setup the database connection eagerly; exit if the server is not configured.
if (Settings[:db] && Settings[:APP_NAME])
puts "CouchDB Server: #{Settings[:db]}"
@server = CouchRest.new(server = Settings[:db])
else
puts "No database info defined. Please set the environment variable CLOUDANT_URL."
exit
end
| {
"content_hash": "fe51234e769523bdcb6f97c17cc86f59",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 136,
"avg_line_length": 36.916666666666664,
"alnum_prop": 0.6501128668171557,
"repo_name": "ashleybrown415/doc",
"id": "c09cb78798c19c73217b459baa7c31e22fafc2af",
"size": "5320",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "config/setup.rb",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "17731"
},
{
"name": "JavaScript",
"bytes": "7279"
},
{
"name": "Perl",
"bytes": "6050"
},
{
"name": "Ruby",
"bytes": "86787"
},
{
"name": "Shell",
"bytes": "68"
},
{
"name": "TeX",
"bytes": "3164"
},
{
"name": "XSLT",
"bytes": "6243"
}
],
"symlink_target": ""
} |
<?php
/**
 * Local database configuration consumed by the Zend service manager.
 * Registers a factory that builds the PDO-backed Zend\Db adapter from the
 * connection parameters below.
 */
$dbConfig = array(
    'database' => 'zend',
    'username' => 'root',
    'password' => '',
    'hostname' => 'localhost',
);

return array(
    'service_manager' => array(
        'factories' => array(
            'Zend\Db\Adapter\Adapter' => function ($sm) use ($dbConfig) {
                // Assemble the PDO DSN: mysql:dbname=<db>;host=<host>
                $dsn = sprintf('mysql:dbname=%s;host=%s', $dbConfig['database'], $dbConfig['hostname']);
                return new Zend\Db\Adapter\Adapter(array(
                    'driver'   => 'pdo',
                    'dsn'      => $dsn,
                    'database' => $dbConfig['database'],
                    'username' => $dbConfig['username'],
                    'password' => $dbConfig['password'],
                    'hostname' => $dbConfig['hostname'],
                ));
            },
        ),
    ),
);
"content_hash": "b09c073cf03be5cec61d7041615b84ed",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 104,
"avg_line_length": 31.24,
"alnum_prop": 0.43405889884763127,
"repo_name": "betogm/zend-test",
"id": "38bf5b8d1e6a3730df0e6119251ed5d91cbf71cf",
"size": "781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "config/autoload/database.local.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "PHP",
"bytes": "28104"
}
],
"symlink_target": ""
} |
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { UnidirectionalIslComponent } from './unidirectional-isl.component';
describe('UnidirectionalIslComponent', () => {
  let component: UnidirectionalIslComponent;
  let fixture: ComponentFixture<UnidirectionalIslComponent>;

  // Compile the component's template/styles before each spec.
  beforeEach(async(() => {
    TestBed.configureTestingModule({
      declarations: [ UnidirectionalIslComponent ]
    })
    .compileComponents();
  }));

  // Create a fresh component instance and run initial change detection.
  beforeEach(() => {
    fixture = TestBed.createComponent(UnidirectionalIslComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  // Smoke test: the component can be instantiated.
  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
| {
"content_hash": "678bc06c5e3491cb532d9d7705e12db7",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 76,
"avg_line_length": 28.24,
"alnum_prop": 0.7053824362606232,
"repo_name": "telstra/open-kilda",
"id": "6f773de97dc1c940712bb188b2e90a124c00d604",
"size": "706",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src-gui/ui/src/app/modules/topology/unidirectional-isl/unidirectional-isl.component.spec.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "89798"
},
{
"name": "CMake",
"bytes": "4314"
},
{
"name": "CSS",
"bytes": "233390"
},
{
"name": "Dockerfile",
"bytes": "30541"
},
{
"name": "Groovy",
"bytes": "2234079"
},
{
"name": "HTML",
"bytes": "362166"
},
{
"name": "Java",
"bytes": "14631453"
},
{
"name": "JavaScript",
"bytes": "369015"
},
{
"name": "Jinja",
"bytes": "937"
},
{
"name": "Makefile",
"bytes": "20500"
},
{
"name": "Python",
"bytes": "367364"
},
{
"name": "Shell",
"bytes": "62664"
},
{
"name": "TypeScript",
"bytes": "867537"
}
],
"symlink_target": ""
} |
import * as React from 'react';
import styled from 'styled-components';
import applyMarkdown from '@utils/apply-markdown';
// Bold red styling applied to text wrapped in '*' markers.
const Important = styled.span`
  color: #f1002b;
  font-weight: bold;
`;

// Italic styling applied to text wrapped in '"' markers.
const Quote = styled.span`
  font-style: italic;
`;

// Markdown-ish renderer: maps each marker character to the styled component
// that wraps the marked text (see @utils/apply-markdown for the mechanics).
const mark = applyMarkdown({
  '*': Important,
  '"': Quote
});

export default mark;
| {
"content_hash": "424a376f82f820defd30ff6364847563",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 50,
"avg_line_length": 17.736842105263158,
"alnum_prop": 0.6884272997032641,
"repo_name": "ubery/change-case",
"id": "75b5d782a664a3536195f705a09c063de57b5ff2",
"size": "337",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/options/components/layout/mark.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4045"
},
{
"name": "HTML",
"bytes": "1301"
},
{
"name": "JavaScript",
"bytes": "48122"
}
],
"symlink_target": ""
} |
#pragma once
#include <aws/servicediscovery/ServiceDiscovery_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <utility>
namespace Aws
{
template<typename RESULT_TYPE>
class AmazonWebServiceResult;
namespace Utils
{
namespace Json
{
class JsonValue;
} // namespace Json
} // namespace Utils
namespace ServiceDiscovery
{
namespace Model
{
/**
 * Result of a Cloud Map DeregisterInstance call. The call is asynchronous;
 * this result carries only the id of the operation that performs the
 * deregistration, which can be polled with GetOperation.
 */
class AWS_SERVICEDISCOVERY_API DeregisterInstanceResult
{
  public:
    DeregisterInstanceResult();
    DeregisterInstanceResult(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);
    DeregisterInstanceResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);


    /**
     * <p>A value that you can use to determine whether the request completed
     * successfully. For more information, see <a
     * href="https://docs.aws.amazon.com/cloud-map/latest/api/API_GetOperation.html">GetOperation</a>.</p>
     */
    inline const Aws::String& GetOperationId() const{ return m_operationId; }

    /**
     * Sets the operation id (copy overload). See GetOperationId for semantics.
     */
    inline void SetOperationId(const Aws::String& value) { m_operationId = value; }

    /**
     * Sets the operation id (move overload). See GetOperationId for semantics.
     */
    inline void SetOperationId(Aws::String&& value) { m_operationId = std::move(value); }

    /**
     * Sets the operation id (C-string overload). See GetOperationId for semantics.
     */
    inline void SetOperationId(const char* value) { m_operationId.assign(value); }

    /**
     * Fluent setter (copy overload); returns *this for chaining.
     */
    inline DeregisterInstanceResult& WithOperationId(const Aws::String& value) { SetOperationId(value); return *this;}

    /**
     * Fluent setter (move overload); returns *this for chaining.
     */
    inline DeregisterInstanceResult& WithOperationId(Aws::String&& value) { SetOperationId(std::move(value)); return *this;}

    /**
     * Fluent setter (C-string overload); returns *this for chaining.
     */
    inline DeregisterInstanceResult& WithOperationId(const char* value) { SetOperationId(value); return *this;}

  private:

    // Id of the asynchronous deregistration operation returned by the service.
    Aws::String m_operationId;
};
} // namespace Model
} // namespace ServiceDiscovery
} // namespace Aws
| {
"content_hash": "cd11a3acf0609e2adba0eb8f1f9cb9f5",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 124,
"avg_line_length": 37.05681818181818,
"alnum_prop": 0.702851885924563,
"repo_name": "jt70471/aws-sdk-cpp",
"id": "26f9cda22af9a500ac7108826dc014fe32b14dab",
"size": "3380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-cpp-sdk-servicediscovery/include/aws/servicediscovery/model/DeregisterInstanceResult.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "13452"
},
{
"name": "C++",
"bytes": "278594037"
},
{
"name": "CMake",
"bytes": "653931"
},
{
"name": "Dockerfile",
"bytes": "5555"
},
{
"name": "HTML",
"bytes": "4471"
},
{
"name": "Java",
"bytes": "302182"
},
{
"name": "Python",
"bytes": "110380"
},
{
"name": "Shell",
"bytes": "4674"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN"
"http://www.hibernate.org/dtd/hibernate-mapping-3.0.dtd">
<!--
Mapping file autogenerated by MyEclipse Persistence Tools
-->
<hibernate-mapping>
<!--
  Maps the HmUser entity to table housing_market.hm_user.
  NOTE: the <comment> elements below are emitted by Hibernate into the
  generated DDL as column comments; their (Chinese) text is data, not
  documentation, and is deliberately left unchanged. English glosses are
  provided alongside as XML comments.
-->
<class name="com.dale.ms.entities.HmUser" table="hm_user" catalog="housing_market">
    <!-- Primary key, generated with Hibernate's "increment" strategy. -->
    <id name="userId" type="java.lang.Long">
        <column name="USER_ID" />
        <generator class="increment" />
    </id>
    <!-- 名字 = display name (required, max 11 chars) -->
    <property name="userName" type="java.lang.String">
        <column name="USER_NAME" length="11" not-null="true">
            <comment>名字</comment>
        </column>
    </property>
    <!-- Login password (required). -->
    <property name="password" type="java.lang.String">
        <column name="PASSWORD" not-null="true" />
    </property>
    <!-- 用户头像 = user avatar (image reference) -->
    <property name="userImg" type="java.lang.String">
        <column name="USER_IMG">
            <comment>用户头像</comment>
        </column>
    </property>
    <property name="gender" type="java.lang.Integer">
        <column name="GENDER" />
    </property>
    <!-- National id card number (18 chars). -->
    <property name="idCard" type="java.lang.String">
        <column name="ID_CARD" length="18" />
    </property>
    <!-- 用户创建时间 = account creation time (required) -->
    <property name="createTime" type="java.sql.Timestamp">
        <column name="CREATE_TIME" length="19" not-null="true">
            <comment>用户创建时间</comment>
        </column>
    </property>
    <!-- 联系方式 = contact phone number -->
    <property name="mobile" type="java.lang.String">
        <column name="MOBILE" length="20">
            <comment>联系方式</comment>
        </column>
    </property>
    <!-- 收货地址 = shipping address -->
    <property name="address" type="java.lang.String">
        <column name="ADDRESS">
            <comment>收货地址</comment>
        </column>
    </property>
    <!-- 上次登录时间 = last login time -->
    <property name="lastLoginTime" type="java.sql.Timestamp">
        <column name="LAST_LOGIN_TIME" length="19">
            <comment>上次登录时间</comment>
        </column>
    </property>
    <!-- 商家id = merchant/store id -->
    <property name="storeId" type="java.lang.Long">
        <column name="STORE_ID">
            <comment>商家id</comment>
        </column>
    </property>
    <!-- 是否是商家 = whether this user is a merchant -->
    <property name="isStore" type="java.lang.Integer">
        <column name="IS_STORE">
            <comment>是否是商家</comment>
        </column>
    </property>
    <!-- 商家许可证 = merchant business license -->
    <property name="bussinessLicense" type="java.lang.String">
        <column name="BUSSINESS_LICENSE">
            <comment>商家许可证</comment>
        </column>
    </property>
    <!-- 商家地址 = store address -->
    <property name="storeAddress" type="java.lang.String">
        <column name="STORE_ADDRESS">
            <comment>商家地址</comment>
        </column>
    </property>
    <!-- 店名 = store name -->
    <property name="storeName" type="java.lang.String">
        <column name="STORE_NAME">
            <comment>店名</comment>
        </column>
    </property>
    <!-- 商家联系方式 = store contact phone number -->
    <property name="storeMobile" type="java.lang.String">
        <column name="STORE_MOBILE">
            <comment>商家联系方式</comment>
        </column>
    </property>
    <!-- 个推clientid = Getui push-notification client id -->
    <property name="getuiClientId" type="java.lang.String">
        <column name="GETUI_CLIENT_ID" length="100">
            <comment>个推clientid</comment>
        </column>
    </property>
    <!-- 客户端类型 = client/device type -->
    <property name="deviceInfo" type="java.lang.String">
        <column name="DEVICE_INFO" length="10">
            <comment>客户端类型</comment>
        </column>
    </property>
</class>
</hibernate-mapping>
| {
"content_hash": "d8266b1ff1528da325c633ffe771d6f3",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 87,
"avg_line_length": 38.54838709677419,
"alnum_prop": 0.5347280334728034,
"repo_name": "noob/HousingMarket-Server",
"id": "a8f16d1909343d4ab40da6ff195b19c96481f7cf",
"size": "3699",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "target/classes/com/dale/ms/entities/HmUser.hbm.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2449124"
},
{
"name": "HTML",
"bytes": "3457026"
},
{
"name": "Java",
"bytes": "770543"
},
{
"name": "JavaScript",
"bytes": "5115486"
},
{
"name": "Makefile",
"bytes": "285"
},
{
"name": "PHP",
"bytes": "7937"
},
{
"name": "Ruby",
"bytes": "3879"
},
{
"name": "Shell",
"bytes": "680"
}
],
"symlink_target": ""
} |
#ifndef _MINIX_SYS_CONFIG_H
#define _MINIX_SYS_CONFIG_H 1
/* This is a modified sys_config.h for compiling a small Minix system
 * with only the options described in the text, Operating Systems Design and
 * Implementation, 3rd edition. See the sys_config.h in the full
 * source code directory for information on alternatives omitted here.
 */
/*===========================================================================*
 * This section contains user-settable parameters                            *
 *===========================================================================*/
#define _MINIX_MACHINE _MACHINE_IBM_PC
#define _MACHINE_IBM_PC 1 /* any 8088 or 80x86-based system */
/* Word size in bytes (a constant equal to sizeof(int)). */
#if __ACK__ || __GNUC__
#define _WORD_SIZE _EM_WSIZE
#define _PTR_SIZE _EM_WSIZE
#endif
/* Sizes of the kernel process table: user processes and system processes. */
#define _NR_PROCS 64
#define _NR_SYS_PROCS 32
/* Set the CHIP type based on the machine selected. The symbol CHIP is actually
 * indicative of more than just the CPU. For example, machines for which
 * CHIP == INTEL are expected to have 8259A interrupt controllers and the
 * other properties of IBM PC/XT/AT/386 type machines in general. */
#define _CHIP_INTEL 1 /* CHIP type for PC, XT, AT, 386 and clones */
/* Set the FP_FORMAT type based on the machine selected, either hw or sw */
#define _FP_NONE 0 /* no floating point support */
#define _FP_IEEE 1 /* conform IEEE floating point standard */
#define _MINIX_CHIP _CHIP_INTEL
#define _MINIX_FP_FORMAT _FP_NONE
/* Sanity checks. NOTE(review): the bare `error "..."` lines below are not
 * `#error` directives; they deliberately trigger a compile-time syntax error
 * whose diagnostic includes the string. This matches the original Minix
 * sources -- confirm before "fixing" them. The _MINIX_CHIP check reports a
 * bad _MINIX_MACHINE because CHIP is derived from the MACHINE selection. */
#ifndef _MINIX_MACHINE
error "In <minix/sys_config.h> please define _MINIX_MACHINE"
#endif
#ifndef _MINIX_CHIP
error "In <minix/sys_config.h> please define _MINIX_MACHINE to have a legal value"
#endif
#if (_MINIX_MACHINE == 0)
error "_MINIX_MACHINE has incorrect value (0)"
#endif
#endif /* _MINIX_SYS_CONFIG_H */
| {
"content_hash": "ae8247cc88a0e32d99a6a988b91a8653",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 82,
"avg_line_length": 34.629629629629626,
"alnum_prop": 0.6368983957219251,
"repo_name": "freudshow/learnc",
"id": "1c0c2f96f399f21ccc4e856dbd323b1caea24441",
"size": "1870",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Operating.Systems-Design.and.Implementation.3e/include/minix/sys_config.h",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "39610"
},
{
"name": "C",
"bytes": "1430865"
},
{
"name": "C++",
"bytes": "208661"
},
{
"name": "HTML",
"bytes": "8988"
},
{
"name": "Lex",
"bytes": "119"
},
{
"name": "Makefile",
"bytes": "13633"
}
],
"symlink_target": ""
} |
wswsh - [w]eb [s]ucks [w]ithout [sh]ell
---------------------------------------
wswsh is a static website script using shell. It means [w]eb [s]ucks
[w]ithout [sh]ell. Simple name for a simple script.
It has many advantages:
* Lightweight
* Only requires a shell + UNIX utilities
* Compatible with [ahrf](https://github.com/Ypnose/ahrf)
* Easily "hackable" with external scripts / interpreters
* Less than 140 LOC (without external layouts)
* Human readable configuration
*You can read another howto with examples [here](http://ywstd.fr/blog/2013/blogging-shell.html) (might be a good intro).*
How to use it?
--------------
Create a directory including the following files:
includes/layout
wswsh
wswsh.conf.default
You'll need a config file. Copy the file `wswsh.conf.default` to
`wswsh.conf`. Edit it according to your needs. The comments explain
almost everything.
A typical hierarchy contains a `src` directory, with your website inside
it.
.
├── includes/
│ └── layout
├── src/
│ ├── css/
│ │ └── style.css
│ ├── blog/
│ │ └── my_post.txt
│ ├── me/
│ │ └── john_doe.txt
│ └── foo/
│ └── baz/
│ └── this_is_a_test.txt
├── wswsh
└── wswsh.conf
Each directory in `src` will be reproduced in a new directory `dest`.
There is no default interpreter, only `cat` is called. It involves posts
written in HTML.
**wswsh** also supports [ahrf](https://github.com/Ypnose/ahrf).
When you're ready, launch `./wswsh "$PWD"`. Using the previous example,
we now have:
.
├── includes/
│ └── layout
├── dest/
│ ├── css/
│ │ └── style.css
│ ├── blog/
│ │ └── my_post.html
│ ├── me/
│ │ └── john_doe.html
│ └── foo/
│ └── baz/
│ └── this_is_a_test.html
├── src/
│ ├── css/
│ │ └── style.css
│ ├── blog/
│ │ └── my_post.txt
│ ├── me/
│ │ └── john_doe.txt
│ └── foo/
│ └── baz/
│ └── this_is_a_test.txt
├── wswsh
└── wswsh.conf
`dest` is your generated website. You can upload it anywhere.
Note(s)
-------
If you want to have the same `wswsh` executable for all your blogs
(assuming you have several websites), it's possible to put `wswsh` in
your `PATH`, instead of having a "redundant" file, in every directory.
The default behavior allows you to modify `wswsh` for your websites. So,
it's possible to write custom modifications per site.
The true "power" of `wswsh` resides in hooks launched at many different
steps. Those hooks are sourced inside `wswsh` env allowing you to use
inherited variables to launch all kind of specific actions.
An "interpreter" can be run if it's placed outside your `PATH`. Write the
full path to the executable, within `wswsh.conf`:
WSH_INTERP="/home/foo/my_exec"
Why not provide a script sh compliant (or even bash)?
-----------------------------------------------------
A few months ago, I still wanted to write a `sh` compliant version but I
decided to drop that idea. At the moment, fewer than 5 people
use it. Creating a second version would be a waste of time, especially
when the users already switched to `mksh`.
Maintaining two redundant versions isn't easy and I do not want to work
for nothing. If you're still interested, you're free to adapt to `sh`.
It shouldn't be complicated.
`awk` compatibility
-------------------
When I added awk regexes, one of my goals was to support `nawk`, `mawk`
and `gawk`. `gawk` is very common among Linux distributions, so I didn't
have the choice. The regexes were "created" on `mawk`. I tested the
compatibility and it worked flawlessly with the required implementations.
So, you can gain some precious seconds if you're brave enough to use
`nawk` or `mawk`.
Copyright
---------
Code created by Ypnose, under BSD (3-Clause) License.
Website(s)
----------
Powering:
* http://ywstd.fr/blog/
* http://savoirvivre.intraaktion.de
You decided to adopt wswsh for your website(s)? Please contact me. I
would be happy to add it in the README.
| {
"content_hash": "4a691ba76858fd9d92031f058173de81",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 121,
"avg_line_length": 28.848920863309353,
"alnum_prop": 0.6476309226932668,
"repo_name": "Ypnose/wswsh",
"id": "7f831555d4618d380fee93f1a7d718ec239df052",
"size": "4314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
//=================================================================================================
//=================================================================================================
#ifndef _BLAZE_MATH_SIMD_CONJ_H_
#define _BLAZE_MATH_SIMD_CONJ_H_
//*************************************************************************************************
// Includes
//*************************************************************************************************
#include <blaze/math/simd/BasicTypes.h>
#include <blaze/system/Inline.h>
#include <blaze/system/Vectorization.h>
namespace blaze {
//=================================================================================================
//
// 8-BIT INTEGRAL SIMD TYPES
//
//=================================================================================================
//*************************************************************************************************
/*!\brief Complex conjugate of a vector of 8-bit integral values.
// \ingroup simd
//
// \param a The vector of 8-bit integral values.
// \return The complex conjugate values.
//
// Non-complex integral values have no imaginary part, so conjugation is the
// identity operation; the operand is returned unchanged.
*/
template< typename T > // Type of the operand
BLAZE_ALWAYS_INLINE const T conj( const SIMDi8<T>& a ) noexcept
{
   return a;
}
//*************************************************************************************************
//=================================================================================================
//
// 16-BIT INTEGRAL SIMD TYPES
//
//=================================================================================================
//*************************************************************************************************
/*!\brief Complex conjugate of a vector of 16-bit integral values.
// \ingroup simd
//
// \param a The vector of 16-bit integral values.
// \return The complex conjugate values.
//
// Non-complex integral values have no imaginary part, so conjugation is the
// identity operation; the operand is returned unchanged.
*/
template< typename T > // Type of the operand
BLAZE_ALWAYS_INLINE const T conj( const SIMDi16<T>& a ) noexcept
{
   return a;
}
//*************************************************************************************************
//*************************************************************************************************
/*!\brief Complex conjugate of a vector of 16-bit integral complex values.
// \ingroup simd
//
// \param a The vector of 16-bit integral complex values.
// \return The complex conjugate values.
//
// Conjugation negates the imaginary parts, i.e. the odd-indexed 16-bit
// elements (complex values are stored as interleaved (real,imag) pairs).
//
// This operation is only available for SSE2, AVX2, and AVX-512.
*/
BLAZE_ALWAYS_INLINE const SIMDcint16 conj( const SIMDcint16& a ) noexcept
#if BLAZE_AVX512BW_MODE
{
   // Mask 0xAAAAAAAA selects the odd-indexed elements, which are replaced
   // by 0 - element, i.e. negated; even-indexed elements pass through.
   return _mm512_mask_sub_epi16( a.value, 0XAAAAAAAA, _mm512_setzero_si512(), a.value );
}
#elif BLAZE_AVX2_MODE
{
   // Multiply by the pattern (1,-1,1,-1,...); note _mm256_set_epi16 lists
   // arguments from the highest element down.
   return _mm256_mullo_epi16( a.value, _mm256_set_epi16( -1, 1, -1, 1, -1, 1, -1, 1,
                                                         -1, 1, -1, 1, -1, 1, -1, 1 ) );
}
#elif BLAZE_SSE2_MODE
{
   return _mm_mullo_epi16( a.value, _mm_set_epi16( -1, 1, -1, 1, -1, 1, -1, 1 ) );
}
#else
= delete;
#endif
//*************************************************************************************************
//=================================================================================================
//
// 32-BIT INTEGRAL SIMD TYPES
//
//=================================================================================================
//*************************************************************************************************
/*!\brief Complex conjugate of a vector of 32-bit integral values.
// \ingroup simd
//
// \param a The vector of 32-bit integral values.
// \return The complex conjugate values.
//
// Non-complex integral values have no imaginary part, so conjugation is the
// identity operation; the operand is returned unchanged.
*/
template< typename T > // Type of the operand
BLAZE_ALWAYS_INLINE const T conj( const SIMDi32<T>& a ) noexcept
{
   return a;
}
//*************************************************************************************************
//*************************************************************************************************
/*!\brief Complex conjugate of a vector of 32-bit integral complex values.
// \ingroup simd
//
// \param a The vector of 32-bit integral complex values.
// \return The complex conjugate values.
//
// Conjugation negates the imaginary parts, i.e. the odd-indexed 32-bit
// elements (complex values are stored as interleaved (real,imag) pairs).
//
// This operation is only available for SSE4, AVX2, MIC, and AVX-512.
*/
BLAZE_ALWAYS_INLINE const SIMDcint32 conj( const SIMDcint32& a ) noexcept
#if BLAZE_AVX512F_MODE || BLAZE_MIC_MODE
{
   // Multiply by the pattern (1,-1,1,-1,...); note _mm512_set_epi32 lists
   // arguments from the highest element down.
   return _mm512_mullo_epi32( a.value, _mm512_set_epi32( -1, 1, -1, 1, -1, 1, -1, 1,
                                                         -1, 1, -1, 1, -1, 1, -1, 1 ) );
}
#elif BLAZE_AVX2_MODE
{
   return _mm256_mullo_epi32( a.value, _mm256_set_epi32( -1, 1, -1, 1, -1, 1, -1, 1 ) );
}
#elif BLAZE_SSE4_MODE
{
   return _mm_mullo_epi32( a.value, _mm_set_epi32( -1, 1, -1, 1 ) );
}
#else
= delete;
#endif
//*************************************************************************************************
//=================================================================================================
//
// 64-BIT INTEGRAL SIMD TYPES
//
//=================================================================================================
//*************************************************************************************************
/*!\brief Complex conjugate of a vector of 64-bit integral values.
// \ingroup simd
//
// \param a The vector of 64-bit integral values.
// \return The complex conjugate values.
//
// Non-complex integral values have no imaginary part, so conjugation is the
// identity operation; the operand is returned unchanged.
*/
template< typename T > // Type of the operand
BLAZE_ALWAYS_INLINE const T conj( const SIMDi64<T>& a ) noexcept
{
   return a;
}
//*************************************************************************************************
//*************************************************************************************************
/*!\brief Complex conjugate of a vector of 64-bit integral complex values.
// \ingroup simd
//
// \param a The vector of 64-bit integral complex values.
// \return The complex conjugate values.
//
// This operation is only available for AVX-512.
*/
BLAZE_ALWAYS_INLINE const SIMDcint64 conj( const SIMDcint64& a ) noexcept
#if BLAZE_AVX512F_MODE
{
   // Mask 0xAA selects the odd-indexed (imaginary) elements, which are
   // replaced by 0 - element, i.e. negated; real parts pass through.
   return _mm512_mask_sub_epi64( a.value, 0XAA, _mm512_setzero_si512(), a.value );
}
#else
= delete;
#endif
//*************************************************************************************************
//=================================================================================================
//
// 32-BIT FLOATING POINT SIMD TYPES
//
//=================================================================================================
//*************************************************************************************************
/*!\brief Complex conjugate of a vector of single precision floating point values.
// \ingroup simd
//
// \param a The vector of single precision floating point values.
// \return The complex conjugate values.
//
// Real-valued elements have no imaginary part, so conjugation is the identity
// operation; the (~a) merely converts the CRTP base reference back to the
// concrete SIMD type without modifying any element.
*/
template< typename T > // Type of the operand
BLAZE_ALWAYS_INLINE const T conj( const SIMDf32<T>& a ) noexcept
{
   return (~a);
}
//*************************************************************************************************
//*************************************************************************************************
/*!\brief Complex conjugate of a vector of single precision complex values.
// \ingroup simd
//
// \param a The vector of single precision complex values.
// \return The complex conjugate values.
//
// Conjugation negates the imaginary parts, i.e. the odd-indexed float
// elements (complex values are stored as interleaved (real,imag) pairs).
//
// This operation is only available for SSE2, AVX, MIC, and AVX-512.
*/
BLAZE_ALWAYS_INLINE const SIMDcfloat conj( const SIMDcfloat& a ) noexcept
#if BLAZE_AVX512F_MODE || BLAZE_MIC_MODE
{
   // Mask 0xAAAA selects the odd-indexed elements, which are replaced by
   // 0.0F - element, i.e. negated; real parts pass through.
   return _mm512_mask_sub_ps( a.value, 0XAAAA, _mm512_setzero_ps(), a.value );
}
#elif BLAZE_AVX_MODE
{
   // Multiply by the pattern (1,-1,1,-1,...); note _mm256_set_ps lists
   // arguments from the highest element down.
   return _mm256_mul_ps( a.value, _mm256_set_ps( -1.0F, 1.0F, -1.0F, 1.0F, -1.0F, 1.0F, -1.0F, 1.0F ) );
}
#elif BLAZE_SSE2_MODE
{
   return _mm_mul_ps( a.value, _mm_set_ps( -1.0F, 1.0F, -1.0F, 1.0F ) );
}
#else
= delete;
#endif
//*************************************************************************************************
//=================================================================================================
//
// 64-BIT FLOATING POINT SIMD TYPES
//
//=================================================================================================
//*************************************************************************************************
/*!\brief Complex conjugate of a vector of double precision floating point values.
// \ingroup simd
//
// \param a The vector of double precision floating point values.
// \return The complex conjugate values.
//
// Real-valued elements have no imaginary part, so conjugation is the identity
// operation; the (~a) merely converts the CRTP base reference back to the
// concrete SIMD type without modifying any element.
*/
template< typename T > // Type of the operand
BLAZE_ALWAYS_INLINE const T conj( const SIMDf64<T>& a ) noexcept
{
   return (~a);
}
//*************************************************************************************************
//*************************************************************************************************
/*!\brief Complex conjugate of a vector of double precision complex values.
// \ingroup simd
//
// \param a The vector of double precision complex values.
// \return The complex conjugate values.
//
// Conjugation negates the imaginary parts, i.e. the odd-indexed double
// elements (complex values are stored as interleaved (real,imag) pairs).
//
// This operation is only available for SSE2, AVX, MIC, and AVX-512.
*/
BLAZE_ALWAYS_INLINE const SIMDcdouble conj( const SIMDcdouble& a ) noexcept
#if BLAZE_AVX512F_MODE || BLAZE_MIC_MODE
{
   // Mask 0xAA selects the odd-indexed elements, which are replaced by
   // 0.0 - element, i.e. negated; real parts pass through.
   return _mm512_mask_sub_pd( a.value, 0XAA, _mm512_setzero_pd(), a.value );
}
#elif BLAZE_AVX_MODE
{
   // Multiply by the pattern (1,-1,1,-1); note _mm256_set_pd lists
   // arguments from the highest element down.
   return _mm256_mul_pd( a.value, _mm256_set_pd( -1.0, 1.0, -1.0, 1.0 ) );
}
#elif BLAZE_SSE2_MODE
{
   return _mm_mul_pd( a.value, _mm_set_pd( -1.0, 1.0 ) );
}
#else
= delete;
#endif
//*************************************************************************************************
} // namespace blaze
#endif
| {
"content_hash": "d85580b4c41b73b3f2cf5135c19a9809",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 104,
"avg_line_length": 33.313793103448276,
"alnum_prop": 0.40275333816375114,
"repo_name": "camillescott/boink",
"id": "0655779f472eaf5396e41f8a23a30466b4cc5b37",
"size": "11499",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "include/goetia/sketches/sketch/vec/blaze/blaze/math/simd/Conj.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "100250"
},
{
"name": "C++",
"bytes": "1054510"
},
{
"name": "CMake",
"bytes": "302273"
},
{
"name": "Jupyter Notebook",
"bytes": "17489756"
},
{
"name": "Python",
"bytes": "267582"
},
{
"name": "Shell",
"bytes": "98"
}
],
"symlink_target": ""
} |

[](https://github.com/mgechev/angular2-style-guide)
[](https://travis-ci.org/NathanWalker/angular2-seed-advanced)
[](http://opensource.org/licenses/MIT)
[](https://david-dm.org/NathanWalker/angular2-seed-advanced)
[](https://david-dm.org/NathanWalker/angular2-seed-advanced#info=devDependencies)
[](http://stackshare.io/NathanWalker/angular-2-seed-advanced)
[](https://waffle.io/NathanWalker/angular2-seed-advanced)
#### Considering [Angular 2](https://angular.io/) for a large project? Do you need potential i18n support? Enhanced testing support? Oh and building for multiple platforms too? Web, *native* Mobile (Android/iOS), and even Desktop (Mac, Windows and Linux)?
This is an **advanced** seed project for Angular 2 apps based on [Minko Gechev's](https://github.com/mgechev) [angular2-seed](https://github.com/mgechev/angular2-seed) that expands on all of its great features to include core support for:
#### Integration with:
- [ngrx/store](https://github.com/ngrx/store) RxJS powered state management, inspired by **Redux**
- [ngrx-store-router](https://github.com/CodeSequence/ngrx-store-router) middleware for syncing state with Angular 2 Router.
- [ng2-translate](https://github.com/ocombe/ng2-translate) for i18n
- Usage is optional but on by default
- Up to you and your team how you want to utilize it. It can be easily removed if not needed.
- [angulartics2](https://github.com/angulartics/angulartics2) Vendor-agnostic analytics for Angular2 applications.
- Out of box support for [Segment](https://segment.com/)
- When using the seed, be sure to change your `write_key` [here](https://github.com/NathanWalker/angular2-seed-advanced/blob/master/src/index.html#L18)
- Can be changed to any vendor, [learn more here](https://github.com/angulartics/angulartics2#supported-providers)
- [lodash](https://lodash.com/) Helps reduce blocks of code down to single lines and enhances readability
- [NativeScript](https://www.nativescript.org/) cross platform mobile (w/ native UI) apps. [Setup instructions here](#nativescript-app).
- [Electron](http://electron.atom.io/) cross platform desktop apps (Mac, Windows and Linux). [Setup instructions here](#electron-app).
|  |
| :---: |
| *The zen of multiple platforms.* Chrome, Android and iPhone all running the same code. |
|  |
| :---: |
| *Programming Nirvana.* Mac and Windows desktop both running the same code. |
# Table of Contents
- [Enhanced development workflow](#enhanced-development-workflow)
- [Enhanced testing support options](#enhanced-testing-support-options)
- [Additional features coming soon...](#additional-features-coming-soon)
- [Prerequisites](#prerequisites)
- [Usage](#usage)
- [Progressive Web Apps](#progressive-web-apps)
- [NativeScript App](#nativescript-app)
- [Electron App](#electron-app)
- [Testing](#testing)
- [Web Configuration Options](#web-configuration-options)
- [How best to use for your project](#how-best-to-use-for-your-project)
- [Contributing](#contributing)
- [License](#license)
#### Enhanced development workflow
- Decorators for components which reduce boilerplate for common component setups
- Introduction of `frameworks` to help organize your code for different platforms:
- `app.framework`: your shared application architecture code (grow your app here or create new frameworks)
- `core.framework`: foundation layer (decorators and low-level services)
- `analytics.framework`: analytics provided by [Segment](https://segment.com/)
- Only reports data in **production** build
- `i18n.framework`: internationalization features
- `nativescript.framework`: [NativeScript](https://www.nativescript.org/) specific code
- `electron.framework`: [Electron](http://electron.atom.io/) specific code
- `test.framework`: test specific code providing conveniences to make testing your code easier and faster
#### Enhanced testing support options
- mocks for various services
- configurable provider blocks for easy test setup of common application providers
- tired of setting up similar providers over and over again for different tests?
- configure a reusable test provider which can be configured on a case-by-base basis
- see [example here](https://github.com/NathanWalker/angular2-seed-advanced/blob/master/src/client/app/frameworks/test.framework/providers/component.ts)
- watch [video explanation **coming soon**](https://github.com/NathanWalker/angular2-seed-advanced)
- helpers for end-to-end (e2e, integration) tests
- convenient shorthand to reduce test setup boilerplate and enhance speed of writing tests
- are your test cases buried by multiple import lines requiring you to scroll just to get to the substance of the test?
- removes noise allowing you to better focus on the substance of the test
- provides full intellisense support
- allows your team to add unique shorthands for various testing scenarios specific to your application needs
- see [example here **coming soon**](https://github.com/NathanWalker/angular2-seed-advanced)
- watch [video explanation **coming soon**](https://github.com/NathanWalker/angular2-seed-advanced)
- plays nice with `tslint` options like `"no-unused-variable": true` as the api hangs off a plain `Object` instead of globals
- what's the value of that you ask? have you ever isolated a test with `iit` or `ddescribe` but didn't import those or vice versa, used `iit` leaving an unused `it` now in your tests? yeah, `tslint` will be all over you :/
- avoids `unused` variable warnings altogether in tests since you are always using a valid key from the shorthand `Object`
- see [example here](https://github.com/NathanWalker/angular2-seed-advanced/blob/master/src/client/app/frameworks/test.framework/shorthand/ng2-jasmine.ts)
- watch [video explanation **coming soon**](https://github.com/NathanWalker/angular2-seed-advanced)
**Advice**: If your project is intended to target a single platform (i.e, web only), then [angular2-seed](https://github.com/mgechev/angular2-seed) is likely more than suitable for your needs. However if your project goals are to target multiple platforms (web, native mobile and native desktop), with powerful out of the box library support and highly configurable/flexible testing options, then you might want to keep reading.
#### Additional features coming soon...
- [ ] provider for LocalStorage (abstraction for IndexedDB, WebSQL, localStorage, perhaps a port of localForage)
- [ ] sophisticated setup for Service Worker
### Prerequisites
* node v5.x.x or higher and npm 3 or higher.
* To run the NativeScript app:
```
npm install -g nativescript
npm install -g typescript
```
## Usage
```bash
git clone --depth 1 https://github.com/NathanWalker/angular2-seed-advanced.git
cd angular2-seed-advanced
# install the project's dependencies
npm install
# watches your files and uses livereload by default
npm start
# api document for the app
npm run serve.docs
# dev build
npm run build.dev
# prod build
npm run build.prod
```
## NativeScript App
#### Setup
```
npm install -g nativescript
```
#### Dev Workflow
You can make changes to files in `src/client` or `nativescript` folders. A symbolic link exists between the web `src/client` and the `nativescript` folder so changes in either location are mirrored because they are the same directory inside.
Create `.tns.html` NativeScript view files for every web component view file you have. You will see an example of the `app.component.html` as a [NativeScript view file here](https://github.com/NathanWalker/angular2-seed-advanced/blob/master/src/client/app/components/app/app.component.tns.html).
#### Run
```
npm run start.ios // iOS simulator
npm run start.livesync.ios // iOS with livesync dev workflow
// or...
npm run start.android // Android emulator*
npm run start.livesync.android // Android with livesync dev workflow
```
* Requires an image setup via AVD Manager. [Learn more here](http://developer.android.com/intl/zh-tw/tools/devices/managing-avds.html) and [here](https://github.com/NativeScript/nativescript-cli#the-commands).
## Electron App
#### Develop
* Mac:
```
npm run start.desktop
```
* Windows:
```
npm run start.desktop.windows
```
#### Release: Package Electron App for Mac, Windows or Linux
* Mac:
```
npm run build.desktop.mac
```
* Windows:
```
npm run build.desktop.windows
```
* Linux:
```
npm run build.desktop.linux
```
## Testing
```bash
npm test
# Debug - In two different shell windows
npm run build.test.watch # 1st window
npm run karma.start # 2nd window
# code coverage (istanbul)
# auto-generated at the end of `npm test`
# view coverage report:
npm run serve.coverage
# e2e (aka. end-to-end, integration) - In three different shell windows
# Make sure you don't have a global instance of Protractor
# npm run webdriver-update <- You will need to run this the first time
npm run webdriver-start
npm run serve.e2e
npm run e2e
# e2e live mode - Protractor interactive mode
# Instead of last command above, you can use:
npm run e2e.live
```
You can learn more about [Protractor Interactive Mode here](https://github.com/angular/protractor/blob/master/docs/debugging.md#testing-out-protractor-interactively)
## Web Configuration Options
Default application server configuration
```javascript
var PORT = 5555;
var LIVE_RELOAD_PORT = 4002;
var DOCS_PORT = 4003;
var APP_BASE = '/';
```
Configure at runtime
```bash
npm start -- --port 8080 --reload-port 4000 --base /my-app/
```
## How best to use for your project
#### Setup
1. Download a zip of the seed. *This allows you to manually setup origin/upstream*
2. `git remote add origin ....your private repo....`
3. `git remote add upstream https://github.com/NathanWalker/angular2-seed-advanced.git`
4. Create a new `framework` for your application in `src/client/app/frameworks` to build your codebase out. Say your app is called `AwesomeApp`, then create `awesomeapp.framework` and start building out all your components and services in there. Create other frameworks as you see fit to organize.
5. If you don't want an integration that comes out of the box with this seed — for example, let's say you don't want to use i18n — then just delete the `i18n.framework`, remove `ng2-translate` as a dependency from the root `package.json` and from `nativescript/package.json`, and then remove the references to `i18n` throughout.
6. Remove `src/components` since those are just samples and create a new folder for your components, let's say `src/pages`. It's not absolutely necessary to remove and create a new differently named folder for your components but it might make merging in upstream changes a bit smoother.
You can read more about [configuring a remote for a fork here](https://help.github.com/articles/configuring-a-remote-for-a-fork/)
#### Merging latest upstream changes
1. `git fetch upstream`
2. `git merge upstream/master` *you could rebase, but it wouldn't be worth it as the conflict resolution can often be more painful if there are conflicts*
3. Handle any conflicts to get latest upstream into your application. If you removed `src/components` as mentioned above, they may show back up when merging in latest upstream. You can just remove the folder again.
4. Continue building your app.
You can read more about [syncing a fork here](https://help.github.com/articles/syncing-a-fork/).
If you have any suggestions to this workflow, please post [here](https://github.com/NathanWalker/angular2-seed-advanced/issues).
## Contributing
Please see the [CONTRIBUTING](https://github.com/NathanWalker/angular2-seed-advanced/blob/master/CONTRIBUTING.md) file for guidelines.
# Awesome Contributors
[<img alt="mgechev" src="https://avatars.githubusercontent.com/u/455023?v=3&s=117" width="117">](https://github.com/mgechev)[<img alt="ludohenin" src="https://avatars.githubusercontent.com/u/1011516?v=3&s=117" width="117">](https://github.com/ludohenin)[<img alt="NathanWalker" src="https://avatars.githubusercontent.com/u/457187?v=3&s=117" width="117">](https://github.com/NathanWalker)[<img alt="d3viant0ne" src="https://avatars.githubusercontent.com/u/8420490?v=3&s=117" width="117">](https://github.com/d3viant0ne)[<img alt="tarlepp" src="https://avatars.githubusercontent.com/u/595561?v=3&s=117" width="117">](https://github.com/tarlepp)[<img alt="nareshbhatia" src="https://avatars.githubusercontent.com/u/1220198?v=3&s=117" width="117">](https://github.com/nareshbhatia)
[<img alt="jesperronn" src="https://avatars.githubusercontent.com/u/6267?v=3&s=117" width="117">](https://github.com/jesperronn)[<img alt="Shyam-Chen" src="https://avatars.githubusercontent.com/u/13535256?v=3&s=117" width="117">](https://github.com/Shyam-Chen)[<img alt="TheDonDope" src="https://avatars.githubusercontent.com/u/1188033?v=3&s=117" width="117">](https://github.com/TheDonDope)[<img alt="aboeglin" src="https://avatars.githubusercontent.com/u/8297302?v=3&s=117" width="117">](https://github.com/aboeglin)[<img alt="the-ult" src="https://avatars.githubusercontent.com/u/4863062?v=3&s=117" width="117">](https://github.com/the-ult)[<img alt="gkalpak" src="https://avatars.githubusercontent.com/u/8604205?v=3&s=117" width="117">](https://github.com/gkalpak)
[<img alt="ryzy" src="https://avatars.githubusercontent.com/u/994940?v=3&s=117" width="117">](https://github.com/ryzy)[<img alt="pgrzeszczak" src="https://avatars.githubusercontent.com/u/3300099?v=3&s=117" width="117">](https://github.com/pgrzeszczak)[<img alt="natarajanmca11" src="https://avatars.githubusercontent.com/u/9244766?v=3&s=117" width="117">](https://github.com/natarajanmca11)[<img alt="njs50" src="https://avatars.githubusercontent.com/u/55112?v=3&s=117" width="117">](https://github.com/njs50)[<img alt="JakePartusch" src="https://avatars.githubusercontent.com/u/6424140?v=3&s=117" width="117">](https://github.com/JakePartusch)[<img alt="domfarolino" src="https://avatars.githubusercontent.com/u/9669289?v=3&s=117" width="117">](https://github.com/domfarolino)
[<img alt="larsthorup" src="https://avatars.githubusercontent.com/u/1202959?v=3&s=117" width="117">](https://github.com/larsthorup)[<img alt="LuxDie" src="https://avatars.githubusercontent.com/u/12536671?v=3&s=117" width="117">](https://github.com/LuxDie)[<img alt="jerryorta-dev" src="https://avatars.githubusercontent.com/u/341155?v=3&s=117" width="117">](https://github.com/jerryorta-dev)[<img alt="tsm91" src="https://avatars.githubusercontent.com/u/4459551?v=3&s=117" width="117">](https://github.com/tsm91)[<img alt="e-oz" src="https://avatars.githubusercontent.com/u/526352?v=3&s=117" width="117">](https://github.com/e-oz)[<img alt="JohnCashmore" src="https://avatars.githubusercontent.com/u/2050794?v=3&s=117" width="117">](https://github.com/JohnCashmore)
[<img alt="ouq77" src="https://avatars.githubusercontent.com/u/1796191?v=3&s=117" width="117">](https://github.com/ouq77)[<img alt="devanp92" src="https://avatars.githubusercontent.com/u/4533277?v=3&s=117" width="117">](https://github.com/devanp92)[<img alt="hAWKdv" src="https://avatars.githubusercontent.com/u/4449497?v=3&s=117" width="117">](https://github.com/hAWKdv)[<img alt="c-ice" src="https://avatars.githubusercontent.com/u/347238?v=3&s=117" width="117">](https://github.com/c-ice)[<img alt="markharding" src="https://avatars.githubusercontent.com/u/851436?v=3&s=117" width="117">](https://github.com/markharding)[<img alt="gotenxds" src="https://avatars.githubusercontent.com/u/3519520?v=3&s=117" width="117">](https://github.com/gotenxds)
[<img alt="evanplaice" src="https://avatars.githubusercontent.com/u/303159?v=3&s=117" width="117">](https://github.com/evanplaice)[<img alt="Nightapes" src="https://avatars.githubusercontent.com/u/15911153?v=3&s=117" width="117">](https://github.com/Nightapes)[<img alt="TuiKiken" src="https://avatars.githubusercontent.com/u/959821?v=3&s=117" width="117">](https://github.com/TuiKiken)[<img alt="juristr" src="https://avatars.githubusercontent.com/u/542458?v=3&s=117" width="117">](https://github.com/juristr)[<img alt="ip512" src="https://avatars.githubusercontent.com/u/1699735?v=3&s=117" width="117">](https://github.com/ip512)[<img alt="Yonet" src="https://avatars.githubusercontent.com/u/3523671?v=3&s=117" width="117">](https://github.com/Yonet)
[<img alt="ojacquemart" src="https://avatars.githubusercontent.com/u/1189345?v=3&s=117" width="117">](https://github.com/ojacquemart)[<img alt="mjwwit" src="https://avatars.githubusercontent.com/u/4455124?v=3&s=117" width="117">](https://github.com/mjwwit)[<img alt="nulldev07" src="https://avatars.githubusercontent.com/u/2115712?v=3&s=117" width="117">](https://github.com/nulldev07)[<img alt="allenhwkim" src="https://avatars.githubusercontent.com/u/1437734?v=3&s=117" width="117">](https://github.com/allenhwkim)[<img alt="yassirh" src="https://avatars.githubusercontent.com/u/4649139?v=3&s=117" width="117">](https://github.com/yassirh)[<img alt="brendanbenson" src="https://avatars.githubusercontent.com/u/866866?v=3&s=117" width="117">](https://github.com/brendanbenson)
[<img alt="briantopping" src="https://avatars.githubusercontent.com/u/158115?v=3&s=117" width="117">](https://github.com/briantopping)[<img alt="cadriel" src="https://avatars.githubusercontent.com/u/205520?v=3&s=117" width="117">](https://github.com/cadriel)[<img alt="dszymczuk" src="https://avatars.githubusercontent.com/u/539352?v=3&s=117" width="117">](https://github.com/dszymczuk)[<img alt="dstockhammer" src="https://avatars.githubusercontent.com/u/1156637?v=3&s=117" width="117">](https://github.com/dstockhammer)[<img alt="dwido" src="https://avatars.githubusercontent.com/u/154235?v=3&s=117" width="117">](https://github.com/dwido)[<img alt="totev" src="https://avatars.githubusercontent.com/u/4454638?v=3&s=117" width="117">](https://github.com/totev)
[<img alt="koodikindral" src="https://avatars.githubusercontent.com/u/6285484?v=3&s=117" width="117">](https://github.com/koodikindral)[<img alt="Green-Cat" src="https://avatars.githubusercontent.com/u/3328823?v=3&s=117" width="117">](https://github.com/Green-Cat)[<img alt="alexweber" src="https://avatars.githubusercontent.com/u/14409?v=3&s=117" width="117">](https://github.com/alexweber)[<img alt="hpinsley" src="https://avatars.githubusercontent.com/u/750098?v=3&s=117" width="117">](https://github.com/hpinsley)[<img alt="jeffbcross" src="https://avatars.githubusercontent.com/u/463703?v=3&s=117" width="117">](https://github.com/jeffbcross)[<img alt="Jimmysh" src="https://avatars.githubusercontent.com/u/230652?v=3&s=117" width="117">](https://github.com/Jimmysh)
[<img alt="johnjelinek" src="https://avatars.githubusercontent.com/u/873610?v=3&s=117" width="117">](https://github.com/johnjelinek)[<img alt="justindujardin" src="https://avatars.githubusercontent.com/u/101493?v=3&s=117" width="117">](https://github.com/justindujardin)[<img alt="lihaibh" src="https://avatars.githubusercontent.com/u/4681233?v=3&s=117" width="117">](https://github.com/lihaibh)[<img alt="Brooooooklyn" src="https://avatars.githubusercontent.com/u/3468483?v=3&s=117" width="117">](https://github.com/Brooooooklyn)[<img alt="tandu" src="https://avatars.githubusercontent.com/u/273313?v=3&s=117" width="117">](https://github.com/tandu)[<img alt="inkidotcom" src="https://avatars.githubusercontent.com/u/100466?v=3&s=117" width="117">](https://github.com/inkidotcom)
[<img alt="amaltsev" src="https://avatars.githubusercontent.com/u/2480962?v=3&s=117" width="117">](https://github.com/amaltsev)[<img alt="ocombe" src="https://avatars.githubusercontent.com/u/265378?v=3&s=117" width="117">](https://github.com/ocombe)[<img alt="gdi2290" src="https://avatars.githubusercontent.com/u/1016365?v=3&s=117" width="117">](https://github.com/gdi2290)[<img alt="typekpb" src="https://avatars.githubusercontent.com/u/499820?v=3&s=117" width="117">](https://github.com/typekpb)[<img alt="philipooo" src="https://avatars.githubusercontent.com/u/1702399?v=3&s=117" width="117">](https://github.com/philipooo)[<img alt="pidupuis" src="https://avatars.githubusercontent.com/u/2828353?v=3&s=117" width="117">](https://github.com/pidupuis)
[<img alt="redian" src="https://avatars.githubusercontent.com/u/816941?v=3&s=117" width="117">](https://github.com/redian)[<img alt="Bigous" src="https://avatars.githubusercontent.com/u/6886560?v=3&s=117" width="117">](https://github.com/Bigous)[<img alt="robbatt" src="https://avatars.githubusercontent.com/u/1379424?v=3&s=117" width="117">](https://github.com/robbatt)[<img alt="robertpenner" src="https://avatars.githubusercontent.com/u/79827?v=3&s=117" width="117">](https://github.com/robertpenner)[<img alt="sclausen" src="https://avatars.githubusercontent.com/u/916076?v=3&s=117" width="117">](https://github.com/sclausen)[<img alt="heavymery" src="https://avatars.githubusercontent.com/u/3417123?v=3&s=117" width="117">](https://github.com/heavymery)
[<img alt="tapas4java" src="https://avatars.githubusercontent.com/u/2254963?v=3&s=117" width="117">](https://github.com/tapas4java)[<img alt="gitter-badger" src="https://avatars.githubusercontent.com/u/8518239?v=3&s=117" width="117">](https://github.com/gitter-badger)[<img alt="blackheart01" src="https://avatars.githubusercontent.com/u/1414277?v=3&s=117" width="117">](https://github.com/blackheart01)[<img alt="butterfieldcons" src="https://avatars.githubusercontent.com/u/12204784?v=3&s=117" width="117">](https://github.com/butterfieldcons)[<img alt="jgolla" src="https://avatars.githubusercontent.com/u/1542447?v=3&s=117" width="117">](https://github.com/jgolla)[<img alt="sebfag" src="https://avatars.githubusercontent.com/u/6400825?v=3&s=117" width="117">](https://github.com/sebfag)
[<img alt="ultrasonicsoft" src="https://avatars.githubusercontent.com/u/4145169?v=3&s=117" width="117">](https://github.com/ultrasonicsoft)[<img alt="sonicparke" src="https://avatars.githubusercontent.com/u/1139721?v=3&s=117" width="117">](https://github.com/sonicparke)
## License
MIT | {
"content_hash": "13b9d734a05ed4abc28c3f2a90d4e205",
"timestamp": "",
"source": "github",
"line_count": 288,
"max_line_length": 791,
"avg_line_length": 79.90277777777777,
"alnum_prop": 0.7453502520424127,
"repo_name": "JohnMagtoto/dev-bookmark",
"id": "29fed98a3e68b3fcdb7d698550a7774d88993e97",
"size": "23012",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3755"
},
{
"name": "HTML",
"bytes": "8601"
},
{
"name": "JavaScript",
"bytes": "8894"
},
{
"name": "TypeScript",
"bytes": "86256"
}
],
"symlink_target": ""
} |
const unquote = require('unquote')
const systemFontKeywords = require('css-system-font-keywords')
const fontWeightKeywords = require('css-font-weight-keywords')
const fontStyleKeywords = require('css-font-style-keywords')
const fontStretchKeywords = require('css-font-stretch-keywords')
import * as cssListHelpers from 'css-list-helpers'
import * as helpers from './helpers'
/**
 * Result returned when the input is a CSS system font keyword
 * (e.g. `caption`, `menu`), which cannot be decomposed into longhands.
 */
export interface ISystemFont {
	/** The system font keyword exactly as supplied. */
	system: string
}
/**
 * Decomposed CSS `font` shorthand. After a successful parse, `style`,
 * `variant`, `weight`, `stretch` and `lineHeight` default to `'normal'`
 * when not given explicitly.
 */
export interface IFont {
	/** `font-style` keyword. */
	style?: string
	/** `font-variant` value; multiple variant tokens are space-joined. */
	variant?: string
	/** `font-weight` keyword. */
	weight?: string
	/** `font-stretch` keyword. */
	stretch?: string
	/** `font-size` as written (e.g. `'12px'`, `'large'`). */
	size?: string
	/** `line-height`: a number when purely numeric, otherwise the raw string. */
	lineHeight?: string | number
	/** `font-family` list, unquoted, in declaration order. */
	family?: string[]
}
// Prefix prepended to every error message thrown by this module.
const errorPrefix = '[parse-css-font]'
/**
 * The longhands that may appear before font-size and that default to
 * 'normal' when omitted (the tuple type keeps the keys type-safe when
 * indexing into IFont).
 */
const firstDeclarations: ['style', 'weight', 'stretch', 'variant'] = [
	'style',
	'weight',
	'stretch',
	'variant',
]
/**
 * Parses a CSS `font` shorthand value into its longhand components.
 *
 * @param value A CSS `font` shorthand, e.g. `'italic bold 12px/1.4 serif'`.
 * @returns An `ISystemFont` when `value` is a system font keyword,
 *          otherwise an `IFont` with omitted longhands defaulted to 'normal'.
 * @throws TypeError when `value` is not a string.
 * @throws Error when `value` is empty or lacks font-size / font-family.
 */
export default function parseCSSFont(value: string) {
	if (typeof value !== 'string') {
		throw error('Expected a string.', TypeError)
	}
	if (value === '') {
		throw error('Cannot parse an empty string.')
	}
	// System fonts (e.g. 'caption', 'menu') cannot be decomposed further.
	if (systemFontKeywords.indexOf(value) !== -1) {
		return { system: value } as ISystemFont
	}
	const font: IFont = {
		lineHeight: 'normal',
		stretch: '',
		style: '',
		variant: '',
		weight: '',
	}
	// Each token before font-size is claimed by the first consumer that
	// recognizes it; unclaimed non-'normal' tokens fall through to variant.
	const consumers = [style, weight, stretch, variant]
	const tokens = cssListHelpers.splitBySpaces(value)
	nextToken: for (
		let token = tokens.shift();
		!!token;
		token = tokens.shift()
	) {
		// 'normal' is a valid value for all four first declarations; skip it
		// and let the defaulting loop below fill the fields in.
		if (token === 'normal') {
			continue
		}
		for (const consume of consumers) {
			if (consume(token)) {
				continue nextToken
			}
		}
		// First unconsumed token must be font-size, optionally followed by a
		// line-height: either fused in this token ('12px/1.4') or split
		// across tokens ('12px / 1.4').
		const parts = cssListHelpers.split(token, ['/'])
		font.size = parts[0]
		if (!!parts[1]) {
			font.lineHeight = parseLineHeight(parts[1])
		} else if (tokens[0] === '/') {
			tokens.shift()
			font.lineHeight = parseLineHeight(tokens.shift() as string)
		}
		if (!tokens.length) {
			throw error('Missing required font-family.')
		}
		// Everything after font-size[/line-height] is the font-family list.
		font.family = cssListHelpers.splitByCommas(tokens.join(' ')).map(unquote)
		// Any first declaration never seen (or seen as 'normal') gets 'normal'.
		for (const name of firstDeclarations) {
			font[name] = font[name] || 'normal'
		}
		return font
	}
	// The loop consumed every token without ever reaching a font-size.
	throw error('Missing required font-size.')
	// The consumers below close over `font`; as hoisted function declarations
	// they are in scope above despite following the throw.
	function style(token: string) {
		if (fontStyleKeywords.indexOf(token) === -1) {
			return
		}
		if (font.style) {
			throw error('Font style already defined.')
		}
		return (font.style = token)
	}
	function weight(token: string) {
		if (fontWeightKeywords.indexOf(token) === -1) {
			return
		}
		if (font.weight) {
			throw error('Font weight already defined.')
		}
		return (font.weight = token)
	}
	function stretch(token: string) {
		if (fontStretchKeywords.indexOf(token) === -1) {
			return
		}
		if (font.stretch) {
			throw error('Font stretch already defined.')
		}
		return (font.stretch = token)
	}
	function variant(token: string) {
		// Anything that is not a size is accumulated as font-variant;
		// multiple variant tokens are space-joined.
		return (
			!helpers.isSize(token) &&
			(font.variant = font.variant ? [font.variant, token].join(' ') : token)
		)
	}
}
/**
 * Builds an error of the given type whose message carries the module
 * prefix, keeping throw sites terse.
 */
function error(
	message: string,
	ErrorType: typeof Error | typeof TypeError = Error,
) {
	const prefixed = errorPrefix + ' ' + message
	return new ErrorType(prefixed)
}
/**
 * Converts a line-height token to a number when it is purely numeric
 * (e.g. '1.2'); any other token (e.g. '1.2em', 'normal') is returned
 * unchanged as a string.
 */
function parseLineHeight(value: string) {
	const asNumber = parseFloat(value)
	return String(asNumber) === value ? asNumber : value
}
// Make the default export directly require()-able from CommonJS
// (`require('parse-css-font')` is the parse function itself) while keeping
// the named exports attached to it as properties. The ts-ignore silences
// the resulting ESM/CJS interop type clash.
// @ts-ignore
module.exports = Object.assign(exports.default, exports)
| {
"content_hash": "cfa5e4fcb4845d7701a5864b0a9dd84a",
"timestamp": "",
"source": "github",
"line_count": 146,
"max_line_length": 75,
"avg_line_length": 21.89041095890411,
"alnum_prop": 0.6664580725907384,
"repo_name": "jedmao/parse-css-font",
"id": "268111e92a47ed08c91f0b1c905ab7f62532a960",
"size": "3198",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/index.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "TypeScript",
"bytes": "8186"
}
],
"symlink_target": ""
} |
package ro.uaic.info.nlptools.corpus;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import java.util.*;
/**
 * In-memory sentence of a corpus: an ordered list of {@link Token}s plus
 * sentence-level features and span annotations covering token ranges.
 * Can be built empty or parsed from a sentence XML element.
 */
public class InmemorySentence implements INlpSentence {
    // Span annotations over token ranges; when built from inline XML,
    // enclosing (outer) spans are inserted before the spans they contain.
    private List<SpanAnnotation> spanAnnotations = new ArrayList<>();
    private INlpCorpus parentCorpus;
    // Index of this sentence within the parent corpus.
    private int sentenceIndex;
    // Corpus-wide index of this sentence's first token.
    private int startTokenIndex;
    /** Returns the sentence-level feature map (attribute name -> value). */
    public Map<String, String> getFeatures() {
        return features;
    }
    protected Map<String, String> features;
    List<Token> tokens;
    @Override
    public boolean hasInputAnnotations() {
        return !spanAnnotations.isEmpty();
    }
    @Override
    public INlpCorpus getParentCorpus() {
        return parentCorpus;
    }
    @Override
    public int getSentenceIndexInCorpus() {
        return sentenceIndex;
    }
    @Override
    public int getTokenCount() {
        return tokens.size();
    }
    @Override
    public Token getToken(int indexInSentence) {
        return tokens.get(indexInSentence);
    }
    @Override
    public int getFirstTokenIndexInCorpus() {
        return startTokenIndex;
    }
    /**
     * Builds a sentence from a sentence XML element. The element's attributes
     * become sentence features; each leaf descendant element becomes a
     * {@link Token}; intermediate elements between a token and the sentence
     * element are materialised as inline span annotations.
     *
     * @param node           the sentence XML element
     * @param tokensToRefIds out-parameter collecting tokens by their
     *                       "GGS:RefId" attribute (the attribute itself is
     *                       removed from the token's features)
     */
    InmemorySentence(Node node, Map<String, Token> tokensToRefIds) {
        tokens = new ArrayList<>();
        features = new TreeMap<>();
        for (int i = 0; i < node.getAttributes().getLength(); i++) {
            features.put(node.getAttributes().item(i).getNodeName(), node.getAttributes().item(i).getNodeValue());
        }
        NodeList allChildNodes = ((Element) node).getElementsByTagName("*");
        Map<Node, SpanAnnotation> xmlNodesToAnnotations = new HashMap<>(); //used for inline spanAnnotations
        for (int i = 0; i < allChildNodes.getLength(); i++) {
            Node childNode = allChildNodes.item(i);
            if (childNode.getNodeType() == Node.TEXT_NODE) {
                continue;
            }
            // Only leaf elements (no children, or a single text child) are
            // tokens; non-leaf elements are annotation wrappers handled below.
            NodeList grandChildrenNodes = childNode.getChildNodes();
            if (grandChildrenNodes.getLength() > 1 || (grandChildrenNodes.getLength() == 1 && grandChildrenNodes.item(0).getNodeType() != Node.TEXT_NODE))// if not leaf node
                continue;
            Token token = new Token(childNode);
            String refId = token.getFeatures().get("GGS:RefId");
            if (refId != null) {
                tokensToRefIds.put(refId, token);
                token.getFeatures().remove("GGS:RefId");
            }
            addToken(token);
            // Walk up from the token to the sentence element, creating (or
            // extending) one span annotation per intermediate wrapper element.
            Node parent = childNode.getParentNode();
            int annotationsSoFar = spanAnnotations.size();
            while (parent != node) {
                SpanAnnotation annotation = xmlNodesToAnnotations.get(parent);
                if (annotation == null) {
                    //create new input annotation
                    annotation = new SpanAnnotation();
                    annotation.setPreferInlineRepresentation(true);
                    annotation.setStartTokenIndex(tokens.size() - 1);
                    // End index starts one before the start index; the
                    // setEndTokenIndex(+1) below brings it up to the start.
                    annotation.setEndTokenIndex(annotation.getStartTokenIndex() - 1);
                    annotation.setSentence(this);
                    annotation.setName(parent.getNodeName());
                    xmlNodesToAnnotations.put(parent, annotation);
                    for (int j = 0; j < parent.getAttributes().getLength(); j++)
                        annotation.features.put(parent.getAttributes().item(j).getNodeName(), parent.getAttributes().item(j).getNodeValue());
                    this.spanAnnotations.add(annotationsSoFar, annotation);
                }
                annotation.setEndTokenIndex(annotation.getEndTokenIndex() + 1);
                // Prepend so the token's parentAnnotations lists outermost
                // wrappers first.
                token.parentAnnotations.add(0, annotation);
                parent = parent.getParentNode();
            }
        }
    }
    /** Creates an empty sentence with no tokens and no features. */
    public InmemorySentence() {
        tokens = new ArrayList<>();
        features = new TreeMap<>();
    }
    public String toString() {
        return features.toString() + "\n" + tokens.toString();
    }
    /** Appends the given tokens, in order, to this sentence. */
    public void addTokens(List<Token> tokens) {
        for (Token w : tokens) {
            addToken(w);
        }
    }
    /**
     * Appends one token, fixing up its back-references into this sentence
     * and invalidating the parent corpus' cached token list, if any.
     */
    public void addToken(Token token) {
        token.indexInSentence = tokens.size();
        token.parentSentence = this;
        if (getParentCorpus() != null)
            ((InmemoryCorpus)getParentCorpus()).allTokensDirty = true;
        tokens.add(token);
    }
    /** Returns the live (mutable) list of span annotations. */
    public List<SpanAnnotation> getSpanAnnotations() {
        return spanAnnotations;
    }
    public void setSentenceIndex(int sentenceIndex) {
        this.sentenceIndex = sentenceIndex;
    }
    public void setParentCorpus(InmemoryCorpus parentCorpus) {
        this.parentCorpus = parentCorpus;
    }
    public void setStartTokenIndex(int startTokenIndex) {
        this.startTokenIndex = startTokenIndex;
    }
}
| {
"content_hash": "50397cd1d0b95e621f6cc407f5c6e415",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 173,
"avg_line_length": 33.0625,
"alnum_prop": 0.6032346145767696,
"repo_name": "radsimu/UaicNlpToolkit",
"id": "229df93c2ab3ab7f8af3ee3e146d1dbeb94caf9e",
"size": "5507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Modules/CorpusTools/src/main/java/ro/uaic/info/nlptools/corpus/InmemorySentence.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "878435"
},
{
"name": "JavaScript",
"bytes": "2607"
}
],
"symlink_target": ""
} |
#ifndef __PSCI_H__
#define __PSCI_H__
/*******************************************************************************
* Defines for runtime services func ids
******************************************************************************/
#define PSCI_VERSION 0x84000000
#define PSCI_CPU_SUSPEND_AARCH32 0x84000001
#define PSCI_CPU_SUSPEND_AARCH64 0xc4000001
#define PSCI_CPU_OFF 0x84000002
#define PSCI_CPU_ON_AARCH32 0x84000003
#define PSCI_CPU_ON_AARCH64 0xc4000003
#define PSCI_AFFINITY_INFO_AARCH32 0x84000004
#define PSCI_AFFINITY_INFO_AARCH64 0xc4000004
#define PSCI_MIG_AARCH32 0x84000005
#define PSCI_MIG_AARCH64 0xc4000005
#define PSCI_MIG_INFO_TYPE 0x84000006
#define PSCI_MIG_INFO_UP_CPU_AARCH32 0x84000007
#define PSCI_MIG_INFO_UP_CPU_AARCH64 0xc4000007
#define PSCI_SYSTEM_OFF 0x84000008
#define PSCI_SYSTEM_RESET 0x84000009
/*
* Number of PSCI calls (above) implemented. System off and reset aren't
* implemented as yet
*/
#define PSCI_NUM_CALLS 13
/*******************************************************************************
* PSCI Migrate and friends
******************************************************************************/
#define PSCI_TOS_UP_MIG_CAP 0
#define PSCI_TOS_NOT_UP_MIG_CAP 1
#define PSCI_TOS_NOT_PRESENT_MP 2
/*******************************************************************************
 * PSCI CPU_SUSPEND 'power_state' parameter specific defines
 ******************************************************************************/
/* Bit positions of the three fields packed into 'power_state'. */
#define PSTATE_ID_SHIFT		0
#define PSTATE_TYPE_SHIFT	16
#define PSTATE_AFF_LVL_SHIFT	24

/* Field widths, expressed as masks applied after shifting. */
#define PSTATE_ID_MASK		0xffff
#define PSTATE_TYPE_MASK	0x1
#define PSTATE_AFF_LVL_MASK	0x3
/* Bits which must be zero for a 'power_state' value to be valid. */
#define PSTATE_VALID_MASK	0xFCFE0000

/* Values of the 'type' field: retention standby vs. full powerdown. */
#define PSTATE_TYPE_STANDBY	0x0
#define PSTATE_TYPE_POWERDOWN	0x1

/*
 * Field extractors for 'power_state'. Both the whole expansion and the
 * argument are parenthesised so the macros compose safely inside larger
 * expressions: the previous unparenthesised expansions mis-associated at
 * call sites such as 'psci_get_pstate_id(ps) == x', where '==' binds
 * tighter than '&'.
 */
#define psci_get_pstate_id(pstate)	(((pstate) >> PSTATE_ID_SHIFT) & \
					PSTATE_ID_MASK)
#define psci_get_pstate_type(pstate)	(((pstate) >> PSTATE_TYPE_SHIFT) & \
					PSTATE_TYPE_MASK)
#define psci_get_pstate_afflvl(pstate)	(((pstate) >> PSTATE_AFF_LVL_SHIFT) & \
					PSTATE_AFF_LVL_MASK)
/*******************************************************************************
* PSCI version
******************************************************************************/
#define PSCI_MAJOR_VER (0 << 16)
#define PSCI_MINOR_VER 0x2
/*******************************************************************************
* PSCI error codes
******************************************************************************/
#define PSCI_E_SUCCESS 0
#define PSCI_E_NOT_SUPPORTED -1
#define PSCI_E_INVALID_PARAMS -2
#define PSCI_E_DENIED -3
#define PSCI_E_ALREADY_ON -4
#define PSCI_E_ON_PENDING -5
#define PSCI_E_INTERN_FAIL -6
#define PSCI_E_NOT_PRESENT -7
#define PSCI_E_DISABLED -8
/*******************************************************************************
 * PSCI affinity state related constants. An affinity instance could be present
 * or absent physically to cater for asymmetric topologies. If present then it
 * could in one of the 4 further defined states.
 ******************************************************************************/
#define PSCI_STATE_SHIFT	1
#define PSCI_STATE_MASK		0xff

#define PSCI_AFF_ABSENT		0x0
#define PSCI_AFF_PRESENT	0x1
#define PSCI_STATE_ON		0x0
#define PSCI_STATE_OFF		0x1
#define PSCI_STATE_ON_PENDING	0x2
#define PSCI_STATE_SUSPEND	0x3

/* Sentinel meaning "no valid data recorded". */
#define PSCI_INVALID_DATA -1

/*
 * Map a PSCI affinity state to a physical on/off state: only a fully ON
 * instance is physically on; ON_PENDING/SUSPEND/OFF all map to OFF. The
 * argument is parenthesised so expression arguments evaluate correctly
 * (previously 'get_phys_state(s & m)' would have bound 'm != PSCI_STATE_ON'
 * first, since '!=' binds tighter than '&').
 */
#define get_phys_state(x)	(((x) != PSCI_STATE_ON) ? \
				PSCI_STATE_OFF : PSCI_STATE_ON)

/* Non-zero iff 'pstate' sets bits outside the architecturally valid ones. */
#define psci_validate_power_state(pstate)	((pstate) & PSTATE_VALID_MASK)

/* Number of affinity instances whose state this psci imp. can track */
#define PSCI_NUM_AFFS		32ull
#ifndef __ASSEMBLY__
/*******************************************************************************
* Structure populated by platform specific code to export routines which
* perform common low level pm functions
******************************************************************************/
/*
 * Hooks supplied by the platform port to implement the low level power
 * management operations for an affinity instance. NOTE(review): parameter
 * meanings are not visible in this header — confirm each signature against
 * the platform porting documentation.
 */
typedef struct {
	/* Place the calling cpu in a standby state described by the argument. */
	int (*affinst_standby)(unsigned int);
	/* Power an affinity instance on. */
	int (*affinst_on)(unsigned long,
			  unsigned long,
			  unsigned long,
			  unsigned int,
			  unsigned int);
	/* Power an affinity instance off. */
	int (*affinst_off)(unsigned long, unsigned int, unsigned int);
	/* Suspend an affinity instance. */
	int (*affinst_suspend)(unsigned long,
			       unsigned long,
			       unsigned long,
			       unsigned int,
			       unsigned int);
	/* Complete an earlier power-on, running on the woken cpu. */
	int (*affinst_on_finish)(unsigned long, unsigned int, unsigned int);
	/* Complete an earlier suspend, running on the woken cpu. */
	int (*affinst_suspend_finish)(unsigned long,
				      unsigned int,
				      unsigned int);
} plat_pm_ops;
/*******************************************************************************
* Optional structure populated by the Secure Payload Dispatcher to be given a
* chance to perform any bookkeeping before PSCI executes a power mgmt.
* operation. It also allows PSCI to determine certain properties of the SP e.g.
* migrate capability etc.
******************************************************************************/
/* Per-event bookkeeping hooks registered by a Secure Payload Dispatcher;
 * each mirrors the PSCI power management operation of the same name. */
typedef struct {
	void (*svc_on)(uint64_t target_cpu);
	int32_t (*svc_off)(uint64_t __unused);
	void (*svc_suspend)(uint64_t power_state);
	void (*svc_on_finish)(uint64_t __unused);
	void (*svc_suspend_finish)(uint64_t suspend_level);
	void (*svc_migrate)(uint64_t __unused1, uint64_t __unused2);
	int32_t (*svc_migrate_info)(uint64_t *__unused);
} spd_pm_ops;
/*******************************************************************************
* Function & Data prototypes
******************************************************************************/
/* Runtime service entry points implementing the PSCI calls. */
extern unsigned int psci_version(void);
extern int __psci_cpu_suspend(unsigned int, unsigned long, unsigned long);
extern int __psci_cpu_off(void);
extern int psci_affinity_info(unsigned long, unsigned int);
extern int psci_migrate(unsigned int);
extern unsigned int psci_migrate_info_type(void);
extern unsigned long psci_migrate_info_up_cpu(void);
extern void psci_system_off(void);
extern void psci_system_reset(void);
extern int psci_cpu_on(unsigned long,
		       unsigned long,
		       unsigned long);
/* Warm boot entry points, run on a cpu coming out of OFF / SUSPEND. */
extern void psci_aff_on_finish_entry(void);
extern void psci_aff_suspend_finish_entry(void);
/* Called by a Secure Payload Dispatcher to register its pm hooks. */
extern void psci_register_spd_pm_hook(const spd_pm_ops *);
extern int psci_get_suspend_stateid(unsigned long mpidr);
extern int psci_get_suspend_afflvl(unsigned long mpidr);
#endif /*__ASSEMBLY__*/
#endif /* __PSCI_H__ */
| {
"content_hash": "7bbec130b62be234e6e6e61b9927977b",
"timestamp": "",
"source": "github",
"line_count": 169,
"max_line_length": 80,
"avg_line_length": 37.34319526627219,
"alnum_prop": 0.5637775312945651,
"repo_name": "soby-mathew/arm-trusted-firmware",
"id": "e29079372f6b255277034c723af190a7b183efde",
"size": "7872",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "include/psci.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using Stripe;
namespace SaaSPro.Infrastructure.Payment
{
public class StripeAdapter:IStripeService
{
public IEnumerable<StripePlan> GetAvailablePlans()
{
var planService = new StripePlanService();
return planService.List();
}
public StripeCustomer GetCustomer(string id)
{
var customerService = new StripeCustomerService();
return customerService.Get(id);
}
public StripePlan GetCustomerPlan(string customerId)
{
var customerService = new StripeCustomerService();
var customer = customerService.Get(customerId);
if (customer.StripeSubscriptionList != null)
return customer.StripeSubscriptionList.Data.Any()
? customer.StripeSubscriptionList.Data.First().StripePlan
: null;
else
return null;
}
public IEnumerable<StripeCharge> GetCustomerPayments(int limit, string customerId)
{
var chargeService = new StripeChargeService();
return chargeService.List(new StripeChargeListOptions
{
Limit = limit,
CustomerId = customerId
});
}
public StripeCharge GetCustomerPayment(string paymentId)
{
var chargeService = new StripeChargeService();
return chargeService.Get(paymentId);
}
public StripeCustomer CreateCustomer(string name, string email)
{
var newCustomer = new StripeCustomerCreateOptions
{
Email = email,
Description = $"{name} ({email})"
};
var customerService = new StripeCustomerService();
var stripeCustomer = customerService.Create(newCustomer);
return stripeCustomer;
}
public string AssignCustomerPlan(string customerId, string planId, string cardNumber,
string cardCvc, int cardExpirationMonth, int cardExpirationYear)
{
// Create token
var token = CreateToken(cardNumber, cardCvc, cardExpirationMonth, cardExpirationYear);
var subscriptionService = new StripeSubscriptionService();
var subscription = subscriptionService.List(customerId).FirstOrDefault();
if (subscription == null)
{
var options = new StripeSubscriptionCreateOptions
{
Card = new StripeCreditCardOptions
{
TokenId = token.Id
},
PlanId = planId
};
subscription = subscriptionService.Create(customerId, planId, options);
}
else
{
var options = new StripeSubscriptionUpdateOptions
{
Card = new StripeCreditCardOptions
{
TokenId = token.Id
},
PlanId = planId
};
subscription = subscriptionService.Update(customerId, subscription.Id, options);
}
return subscription.Status;
}
private static StripeToken CreateToken(string cardNumber, string cardCvc, int cardExpMonth,
int cardExpYear)
{
var myToken = new StripeTokenCreateOptions
{
Card = new StripeCreditCardOptions
{
Cvc = cardCvc,
ExpirationMonth = cardExpMonth.ToString(CultureInfo.InvariantCulture),
ExpirationYear = cardExpYear.ToString(CultureInfo.InvariantCulture),
Number = cardNumber
}
};
var tokenService = new StripeTokenService();
var stripeToken = tokenService.Create(myToken);
return stripeToken;
}
public void DeleteCustomer(string customerId)
{
var customerService = new StripeCustomerService();
var customer = GetCustomer(customerId);
if (string.IsNullOrWhiteSpace(customer.Email))
customerService.Delete(customerId);
}
public string CloseCustomerPlan(string customerId)
{
var subscriptionService = new StripeSubscriptionService();
var subscription = subscriptionService.List(customerId).FirstOrDefault();
if (subscription == null) throw new NullReferenceException("Subscription for stripe customer is not found");
subscription = subscriptionService.Cancel(customerId, subscription.Id);
return subscription.Status;
}
public void Refund(string paymentId, int amount)
{
var chargeService = new StripeRefundService();
new StripeRefundCreateOptions().Amount = amount;
chargeService.Create(paymentId);
}
}
}
| {
"content_hash": "f9a21d70adcefe8330701b9d1299cab8",
"timestamp": "",
"source": "github",
"line_count": 151,
"max_line_length": 120,
"avg_line_length": 34.496688741721854,
"alnum_prop": 0.566903436360146,
"repo_name": "buenadigital/SaaSPro",
"id": "8e1ec546a23aa67adfec19c1c3962420ffe89d20",
"size": "5211",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/SaaSPro.Infrastructure/Payment/StripeAdapter.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "322"
},
{
"name": "C#",
"bytes": "766007"
}
],
"symlink_target": ""
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace TaskClient.Models
{
/// <summary>
/// Model representing a single task item exchanged with the task service.
/// </summary>
public class Task
{
/// <summary>Numeric identifier of the task.</summary>
public int Id { get; set; }
/// <summary>Owner of the task (presumably a user identifier — confirm against the service).</summary>
public string Owner { get; set; }
/// <summary>Free-form text describing the task.</summary>
public string Text { get; set; }
/// <summary>True when the task has been marked completed.</summary>
public bool Completed { get; set; }
/// <summary>When the task was last modified.</summary>
public DateTime DateModified { get; set; }
}
}
| {
"content_hash": "ed6ad059a61e6ef8e4f6b39e9a3cadd8",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 50,
"avg_line_length": 23.41176470588235,
"alnum_prop": 0.6381909547738693,
"repo_name": "AzureADQuickStarts/B2C-NativeClient-DotNet",
"id": "a462a9353aa375fcc128ba5aae183f8fb58010f7",
"size": "400",
"binary": false,
"copies": "1",
"ref": "refs/heads/complete",
"path": "TaskClient/Models/Task.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "21616"
}
],
"symlink_target": ""
} |
package com.opengamma.util;
import java.lang.ref.Reference;
import java.lang.ref.WeakReference;
import java.util.Iterator;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.Lifecycle;
/**
* Implementation of {@link Executor} that allows jobs to run in a group with a single consumer receiving results for them.
* <p>
* The maximum number of additional threads is limited, but the thread which submitted jobs may temporarily join the pool to allow its tasks to complete.
*/
public class PoolExecutor implements Executor, Lifecycle {
private static final Logger LOGGER = LoggerFactory.getLogger(PoolExecutor.class);
/**
* Callback interface for receiving results of a pooled execution.
*/
public interface CompletionListener<T> {
void success(T result);
void failure(Throwable error);
}
/**
* Implementation of a {@link ExecutorService} that is associated with a group.
*/
public class Service<T> implements Executor {
// Number of jobs submitted to this group that have not yet completed.
private final AtomicInteger _pending = new AtomicInteger();
// Receives results/errors; may be null if the caller does not want them.
private final CompletionListener<T> _listener;
// Once set, further results/errors are discarded and run() short-circuits.
private volatile boolean _shutdown;
// True while a thread is blocked in join(); guarded by "this".
private boolean _joining;
protected Service(final CompletionListener<T> listener) {
LOGGER.info("Created thread pool service {}", this);
_listener = listener;
}
// Called after every job (success or failure); wakes a joining thread
// when the last pending job of this group completes.
protected void decrementAndNotify() {
if (_pending.decrementAndGet() == 0) {
synchronized (this) {
if (_joining) {
notifyAll();
}
}
}
}
// Delivers a successful result to the listener unless the group is shut
// down or has no listener, in which case the result is dropped.
protected void postResult(final T result) {
if (_listener != null && !_shutdown) {
LOGGER.debug("Result available from {} - {} remaining", this, _pending);
_listener.success(result);
} else {
LOGGER.debug("Discarding result from {} - {} remaining", this, _pending);
}
}
// Delivers a failure to the listener unless the group is shut down or has
// no listener, in which case the error is dropped.
protected void postException(final Throwable error) {
if (_listener != null && !_shutdown) {
LOGGER.debug("Error available from {} - {} remaining", this, _pending);
_listener.failure(error);
} else {
LOGGER.debug("Discarding result from {} - {} remaining", this, _pending);
}
}
/**
* Submits a job for execution, posting the result when it completes.
* <p>
* This must not be used after {@link #shutdown} or {@link #join} have been called.
*
* @param command the job to execute, not null
* @param result the result to post
*/
public void execute(final Runnable command, final T result) {
_pending.incrementAndGet();
PoolExecutor.this.execute(new ExecuteRunnable<>(this, command, result));
}
/**
* Submits a job for execution, posting its result when it completes.
* <p>
* This must not be used after {@link #shutdown} or {@link #join} have been called.
*
* @param command the job to execute, not null
*/
public void execute(final Callable<T> command) {
_pending.incrementAndGet();
PoolExecutor.this.execute(new ExecuteCallable<>(this, command));
}
/**
* Discards any outstanding jobs. This will return immediately; to wait for jobs to be discarded or completed, call {@link #join} afterwards.
*/
public synchronized void shutdown() {
LOGGER.info("Shutting down {}", this);
if (_shutdown) {
return;
}
_shutdown = true;
if (_joining) {
notifyAll();
}
// Remove this group's not-yet-started jobs from the shared pool queue.
// markExecuted() claims each job so a pool thread that has already
// dequeued the same job will not also run it.
final Iterator<Runnable> itrQueue = getQueue().iterator();
while (itrQueue.hasNext()) {
final Runnable entry = itrQueue.next();
if (entry instanceof Execute) {
final Execute<?> execute = (Execute<?>) entry;
if (execute._service == this && execute.markExecuted()) {
LOGGER.debug("Discarding {}", execute);
_pending.decrementAndGet();
itrQueue.remove();
}
}
}
}
/**
* Waits for all submitted jobs to complete. This thread may execute one or more of the submitted jobs.
*
* @throws InterruptedException
* if there is an interruption before the jobs are complete
*/
public void join() throws InterruptedException {
LOGGER.info("Joining");
Execute<?> inline = null;
try {
Iterator<Runnable> itrQueue = null;
do {
synchronized (this) {
_joining = true;
try {
if (_pending.get() == 0) {
LOGGER.info("No pending tasks");
_shutdown = true;
return;
}
if (itrQueue == null || !itrQueue.hasNext()) {
itrQueue = getQueue().iterator();
}
// Try to claim one of this group's queued jobs so the joining
// thread can execute it inline instead of just waiting.
while (itrQueue.hasNext()) {
final Runnable entry = itrQueue.next();
if (entry instanceof Execute) {
final Execute<?> execute = (Execute<?>) entry;
if (execute._service == this && execute.markExecuted()) {
LOGGER.debug("Inline execution of {}", execute);
itrQueue.remove();
inline = execute;
break;
}
}
}
if (inline == null) {
// Nothing claimable: block until decrementAndNotify() wakes us.
LOGGER.info("No inline executions available, waiting for {} remaining tasks", _pending);
wait();
}
} finally {
_joining = false;
}
}
// Run the claimed job outside the monitor so completing jobs can notify.
if (inline != null) {
inline.runImpl();
inline = null;
}
} while (true);
} finally {
// Defensive: if a job was claimed but never run, return it to the queue.
// NOTE(review): the re-queued job was already marked executed, so run()
// will skip it and _pending is never decremented - confirm intended.
if (inline != null) {
getQueue().add(inline);
}
}
}
// Executor
/**
* Submit a job for execution to the group. This is the same as calling {@link #execute(Runnable,Object)}.
* <p>
* This must not be used after {@link #shutdown} or {@link #join} have been called.
*
* @param command the job to execute, not null
*/
@Override
public void execute(final Runnable command) {
execute(command, null);
}
// Object
@Override
public String toString() {
return Integer.toHexString(hashCode());
}
}
// A queued job: pairs the work with its owning Service and a claim flag
// guaranteeing the job is run (or discarded) at most once.
private abstract static class Execute<T> implements Runnable {
private final Service<T> _service;
private final AtomicBoolean _executed = new AtomicBoolean();
protected Execute(final Service<T> service) {
_service = service;
}
// Atomically claims the job; returns true for exactly one caller.
public boolean markExecuted() {
return !_executed.getAndSet(true);
}
protected abstract T callImpl() throws Throwable;
// Runs the job, posting its result or exception and always updating the
// owning group's pending count.
protected void runImpl() {
try {
LOGGER.debug("Executing {}", this);
_service.postResult(callImpl());
} catch (final Throwable t) {
_service.postException(t);
} finally {
_service.decrementAndNotify();
}
}
@Override
public void run() {
if (_service._shutdown) {
return;
}
if (markExecuted()) {
runImpl();
} else {
LOGGER.debug("Already executed or cancelled {}", this);
}
}
@Override
public String toString() {
return _service.toString();
}
}
// Job wrapping a Runnable plus the fixed result to post on completion.
private static final class ExecuteRunnable<T> extends Execute<T> {
private final Runnable _runnable;
private final T _result;
ExecuteRunnable(final Service<T> service, final Runnable runnable, final T result) {
super(service);
ArgumentChecker.notNull(runnable, "runnable");
_runnable = runnable;
_result = result;
}
@Override
protected T callImpl() {
_runnable.run();
return _result;
}
@Override
public String toString() {
return super.toString() + "/" + _runnable;
}
}
// Job wrapping a Callable whose return value is posted on completion.
private static final class ExecuteCallable<T> extends Execute<T> {
private final Callable<T> _callable;
ExecuteCallable(final Service<T> service, final Callable<T> callable) {
super(service);
ArgumentChecker.notNull(callable, "callable");
_callable = callable;
}
@Override
protected T callImpl() throws Throwable {
return _callable.call();
}
@Override
public String toString() {
return super.toString() + "/" + _callable;
}
}
// Per-thread registration of a pool (set for worker threads automatically,
// or explicitly via setInstance).
private static final ThreadLocal<Reference<PoolExecutor>> INSTANCE = new ThreadLocal<>();
// Weak self-reference handed to worker threads; cleared by stop() so that
// isRunning() flips to false and threads do not keep the pool reachable.
private final Reference<PoolExecutor> _me = new WeakReference<>(this);
private final BlockingQueue<Runnable> _queue = new LinkedBlockingQueue<>();
// Null when the pool was created with zero threads; jobs then sit in the
// queue until a joining thread executes them inline.
private final ThreadPoolExecutor _underlying;
// Worker thread that registers its owning pool in INSTANCE before running.
private static final class ExecutorThread extends Thread {
private final Reference<PoolExecutor> _owner;
private ExecutorThread(final Reference<PoolExecutor> owner, final ThreadGroup group, final Runnable runnable, final String threadName,
final int stackSize) {
super(group, runnable, threadName, stackSize);
_owner = owner;
}
@Override
public void run() {
INSTANCE.set(_owner);
super.run();
}
}
// Thread factory producing ExecutorThread instances bound to this pool.
private static final class ExecutorThreadFactory extends NamedThreadPoolFactory {
private final Reference<PoolExecutor> _owner;
private ExecutorThreadFactory(final Reference<PoolExecutor> owner, final String name) {
super(name, true);
_owner = owner;
}
@Override
protected Thread createThread(final ThreadGroup group, final Runnable runnable, final String threadName, final int stackSize) {
return new ExecutorThread(_owner, group, runnable, threadName, stackSize);
}
}
/**
* Creates a new execution pool with the given (maximum) number of threads.
* <p>
* This can be created with no threads. Tasks submitted will never be executed unless they arrive from a pool and another thread
* then joins that pool to complete its execution.
*
* @param maxThreads the maximum number of threads to put in the pool
* @param name the diagnostic name to use for the pool
*/
public PoolExecutor(final int maxThreads, final String name) {
if (maxThreads > 0) {
final ThreadFactory factory = new ExecutorThreadFactory(_me, name);
_underlying = new MdcAwareThreadPoolExecutor(maxThreads, maxThreads, 60, TimeUnit.SECONDS, _queue, factory);
// Let idle threads time out so an unused pool holds no threads.
_underlying.allowCoreThreadTimeOut(true);
} else {
_underlying = null;
}
}
// Safety net: release pool threads if the owner never called stop().
@Override
protected void finalize() {
if (_underlying != null) {
_underlying.shutdown();
}
}
protected BlockingQueue<Runnable> getQueue() {
return _queue;
}
/**
* Creates a service group with a listener to handle results from that group.
*
* @param <T> the result type for jobs submitted to the group
* @param listener the listener to receive results from jobs in the group, or null if the results are not wanted
* @return the service group to submit further jobs to
*/
public <T> Service<T> createService(final CompletionListener<T> listener) {
return new Service<>(listener);
}
// Exposes the underlying executor; null when created with zero threads.
public ExecutorService asService() {
return _underlying;
}
/**
* Registers an instance with the current thread, returning the previously registered instance (if any).
*
* @param instance the instance to register, or null for none
* @return the previously registered instance, or null for none
*/
public static PoolExecutor setInstance(final PoolExecutor instance) {
final Reference<PoolExecutor> previous = INSTANCE.get();
if (instance != null) {
INSTANCE.set(instance._me);
} else {
INSTANCE.set(null);
}
if (previous != null) {
// May still be null if the previous pool was stopped or collected.
return previous.get();
}
return null;
}
/**
* Returns the instance registered with the current thread, if any.
*
* @return the registered instance, or null for none
*/
public static PoolExecutor instance() {
final Reference<PoolExecutor> executor = INSTANCE.get();
if (executor != null) {
return executor.get();
}
return null;
}
// Executor
/**
* Submits a job to the underlying execution pool.
*
* @param command the job to execute, not null
*/
@Override
public void execute(final Runnable command) {
LOGGER.debug("Submitting {}", command);
if (_underlying != null) {
_underlying.execute(command);
} else {
// Thread-less pool: queue the job for inline execution by a joiner.
getQueue().add(command);
}
}
// Lifecycle
/**
* Dummy {@link Lifecycle#start} method; this object is implicitly started at construction and it is not possible to restart it after a {@link #stop} request.
*/
@Override
public void start() {
if (!isRunning()) {
throw new IllegalStateException("Can't restart service after explicit stop");
}
}
@Override
public void stop() {
// Clearing the weak self-reference is what marks the pool as stopped.
_me.clear();
if (_underlying != null) {
_underlying.shutdown();
}
}
@Override
public boolean isRunning() {
return _me.get() != null;
}
}
| {
"content_hash": "eac7edf82892d95baeaaa61aaf38e180",
"timestamp": "",
"source": "github",
"line_count": 464,
"max_line_length": 160,
"avg_line_length": 28.767241379310345,
"alnum_prop": 0.6232394366197183,
"repo_name": "McLeodMoores/starling",
"id": "ecb71e88ab616cb202b6916e5e7132b740123256",
"size": "13485",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "projects/util/src/main/java/com/opengamma/util/PoolExecutor.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "2505"
},
{
"name": "CSS",
"bytes": "213501"
},
{
"name": "FreeMarker",
"bytes": "310184"
},
{
"name": "GAP",
"bytes": "1490"
},
{
"name": "Groovy",
"bytes": "11518"
},
{
"name": "HTML",
"bytes": "318295"
},
{
"name": "Java",
"bytes": "79541905"
},
{
"name": "JavaScript",
"bytes": "1511230"
},
{
"name": "PLSQL",
"bytes": "398"
},
{
"name": "PLpgSQL",
"bytes": "26901"
},
{
"name": "Shell",
"bytes": "11481"
},
{
"name": "TSQL",
"bytes": "604117"
}
],
"symlink_target": ""
} |
title: Thoughts on Developer Evangelism
category: post
slug: thoughts-on-developer-evangelism
date: 2015-01-30
link: http://kevinwhinnery.com/evangelism
author: Kevin Whinnery
github: https://github.com/kwhinnery
Stub.
| {
"content_hash": "a2a476a02e25343d77520d4f94af7cb8",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 41,
"avg_line_length": 22.1,
"alnum_prop": 0.8009049773755657,
"repo_name": "makaimc/devangel.io",
"id": "dc96e7a92436dc7115d9b6a378b8736e92effd01",
"size": "221",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "source/content/posts/150131-thoughts-developer-evangelism.markdown",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "134068"
},
{
"name": "HTML",
"bytes": "71516"
},
{
"name": "Makefile",
"bytes": "154"
},
{
"name": "Python",
"bytes": "629"
}
],
"symlink_target": ""
} |
package org.apache.hadoop.hdfs.server.namenode;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HANDLER_COUNT_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HANDLER_COUNT_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIFELINE_HANDLER_COUNT_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIFELINE_HANDLER_COUNT_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_HANDLER_COUNT_DEFAULT;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_HANDLER_COUNT_KEY;
import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.MAX_PATH_DEPTH;
import static org.apache.hadoop.hdfs.server.common.HdfsServerConstants.MAX_PATH_LENGTH;
import static org.apache.hadoop.util.Time.now;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.Collection;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.google.common.collect.Lists;
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CryptoProtocolVersion;
import org.apache.hadoop.fs.BatchedRemoteIterator.BatchedEntries;
import org.apache.hadoop.fs.CacheFlag;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FsServerDefaults;
import org.apache.hadoop.fs.InvalidPathException;
import org.apache.hadoop.fs.Options;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.fs.UnresolvedLinkException;
import org.apache.hadoop.fs.XAttr;
import org.apache.hadoop.fs.XAttrSetFlag;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.PermissionStatus;
import org.apache.hadoop.ha.HAServiceStatus;
import org.apache.hadoop.ha.HealthCheckFailedException;
import org.apache.hadoop.ha.ServiceFailedException;
import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceProtocolService;
import org.apache.hadoop.ha.protocolPB.HAServiceProtocolPB;
import org.apache.hadoop.ha.protocolPB.HAServiceProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HDFSPolicyProvider;
import org.apache.hadoop.hdfs.inotify.EventBatch;
import org.apache.hadoop.hdfs.inotify.EventBatchList;
import org.apache.hadoop.hdfs.protocol.AclException;
import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
import org.apache.hadoop.hdfs.protocol.CorruptFileBlocks;
import org.apache.hadoop.hdfs.protocol.DSQuotaExceededException;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.protocol.EncryptionZone;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.FSLimitException;
import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.RollingUpgradeAction;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.protocol.NSQuotaExceededException;
import org.apache.hadoop.hdfs.protocol.QuotaByStorageTypeExceededException;
import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
import org.apache.hadoop.hdfs.protocol.RecoveryInProgressException;
import org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
import org.apache.hadoop.hdfs.protocol.UnregisteredNodeException;
import org.apache.hadoop.hdfs.protocol.UnresolvedPathException;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeProtocolService;
import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.NamenodeProtocolService;
import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManagerFaultInjector;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
import org.apache.hadoop.hdfs.server.common.IncorrectVersionException;
import org.apache.hadoop.hdfs.server.namenode.NameNode.OperationCategory;
import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics;
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
import org.apache.hadoop.hdfs.server.protocol.BlockReportContext;
import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
import org.apache.hadoop.hdfs.server.protocol.DatanodeCommand;
import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;
import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport;
import org.apache.hadoop.hdfs.server.protocol.FinalizeCommand;
import org.apache.hadoop.hdfs.server.protocol.HeartbeatResponse;
import org.apache.hadoop.hdfs.server.protocol.NamenodeCommand;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.NodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
import org.apache.hadoop.hdfs.server.protocol.StorageBlockReport;
import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
import org.apache.hadoop.hdfs.server.protocol.StorageReport;
import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
import org.apache.hadoop.io.EnumSetWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RetriableException;
import org.apache.hadoop.ipc.RetryCache;
import org.apache.hadoop.ipc.RetryCache.CacheEntry;
import org.apache.hadoop.ipc.RetryCache.CacheEntryWithPayload;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.ipc.WritableRpcEngine;
import org.apache.hadoop.ipc.RefreshRegistry;
import org.apache.hadoop.ipc.RefreshResponse;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.security.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshAuthorizationPolicyProtocolService;
import org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshUserMappingsProtocolService;
import org.apache.hadoop.security.protocolPB.RefreshAuthorizationPolicyProtocolPB;
import org.apache.hadoop.security.protocolPB.RefreshAuthorizationPolicyProtocolServerSideTranslatorPB;
import org.apache.hadoop.security.protocolPB.RefreshUserMappingsProtocolPB;
import org.apache.hadoop.security.protocolPB.RefreshUserMappingsProtocolServerSideTranslatorPB;
import org.apache.hadoop.ipc.protocolPB.RefreshCallQueueProtocolPB;
import org.apache.hadoop.ipc.protocolPB.RefreshCallQueueProtocolServerSideTranslatorPB;
import org.apache.hadoop.ipc.proto.RefreshCallQueueProtocolProtos.RefreshCallQueueProtocolService;
import org.apache.hadoop.ipc.protocolPB.GenericRefreshProtocolPB;
import org.apache.hadoop.ipc.protocolPB.GenericRefreshProtocolServerSideTranslatorPB;
import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshProtocolService;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService;
import org.apache.hadoop.tools.protocolPB.GetUserMappingsProtocolPB;
import org.apache.hadoop.tools.protocolPB.GetUserMappingsProtocolServerSideTranslatorPB;
import org.apache.hadoop.tracing.SpanReceiverInfo;
import org.apache.hadoop.tracing.TraceAdminPB.TraceAdminService;
import org.apache.hadoop.tracing.TraceAdminProtocolPB;
import org.apache.hadoop.tracing.TraceAdminProtocolServerSideTranslatorPB;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.util.VersionUtil;
import org.slf4j.Logger;
import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.BlockingService;
/**
* This class is responsible for handling all of the RPC calls to the NameNode.
* It is created, started, and stopped by {@link NameNode}.
*/
class NameNodeRpcServer implements NamenodeProtocols {
private static final Logger LOG = NameNode.LOG;
private static final Logger stateChangeLog = NameNode.stateChangeLog;
private static final Logger blockStateChangeLog = NameNode
.blockStateChangeLog;
// Dependencies from other parts of NN.
protected final FSNamesystem namesystem;
protected final NameNode nn;
private final NameNodeMetrics metrics;
private final RetryCache retryCache;
private final boolean serviceAuthEnabled;
/** The RPC server that listens to requests from DataNodes */
private final RPC.Server serviceRpcServer;
private final InetSocketAddress serviceRPCAddress;
/** The RPC server that listens to lifeline requests */
private final RPC.Server lifelineRpcServer;
private final InetSocketAddress lifelineRPCAddress;
/** The RPC server that listens to requests from clients */
protected final RPC.Server clientRpcServer;
protected final InetSocketAddress clientRpcAddress;
private final String minimumDataNodeVersion;
public NameNodeRpcServer(Configuration conf, NameNode nn)
throws IOException {
this.nn = nn;
this.namesystem = nn.getNamesystem();
this.retryCache = namesystem.getRetryCache();
this.metrics = NameNode.getNameNodeMetrics();
int handlerCount =
conf.getInt(DFS_NAMENODE_HANDLER_COUNT_KEY,
DFS_NAMENODE_HANDLER_COUNT_DEFAULT);
RPC.setProtocolEngine(conf, ClientNamenodeProtocolPB.class,
ProtobufRpcEngine.class);
ClientNamenodeProtocolServerSideTranslatorPB
clientProtocolServerTranslator =
new ClientNamenodeProtocolServerSideTranslatorPB(this);
BlockingService clientNNPbService = ClientNamenodeProtocol.
newReflectiveBlockingService(clientProtocolServerTranslator);
DatanodeProtocolServerSideTranslatorPB dnProtoPbTranslator =
new DatanodeProtocolServerSideTranslatorPB(this);
BlockingService dnProtoPbService = DatanodeProtocolService
.newReflectiveBlockingService(dnProtoPbTranslator);
NamenodeProtocolServerSideTranslatorPB namenodeProtocolXlator =
new NamenodeProtocolServerSideTranslatorPB(this);
BlockingService NNPbService = NamenodeProtocolService
.newReflectiveBlockingService(namenodeProtocolXlator);
RefreshAuthorizationPolicyProtocolServerSideTranslatorPB refreshAuthPolicyXlator =
new RefreshAuthorizationPolicyProtocolServerSideTranslatorPB(this);
BlockingService refreshAuthService = RefreshAuthorizationPolicyProtocolService
.newReflectiveBlockingService(refreshAuthPolicyXlator);
RefreshUserMappingsProtocolServerSideTranslatorPB refreshUserMappingXlator =
new RefreshUserMappingsProtocolServerSideTranslatorPB(this);
BlockingService refreshUserMappingService = RefreshUserMappingsProtocolService
.newReflectiveBlockingService(refreshUserMappingXlator);
RefreshCallQueueProtocolServerSideTranslatorPB refreshCallQueueXlator =
new RefreshCallQueueProtocolServerSideTranslatorPB(this);
BlockingService refreshCallQueueService = RefreshCallQueueProtocolService
.newReflectiveBlockingService(refreshCallQueueXlator);
GenericRefreshProtocolServerSideTranslatorPB genericRefreshXlator =
new GenericRefreshProtocolServerSideTranslatorPB(this);
BlockingService genericRefreshService = GenericRefreshProtocolService
.newReflectiveBlockingService(genericRefreshXlator);
GetUserMappingsProtocolServerSideTranslatorPB getUserMappingXlator =
new GetUserMappingsProtocolServerSideTranslatorPB(this);
BlockingService getUserMappingService = GetUserMappingsProtocolService
.newReflectiveBlockingService(getUserMappingXlator);
HAServiceProtocolServerSideTranslatorPB haServiceProtocolXlator =
new HAServiceProtocolServerSideTranslatorPB(this);
BlockingService haPbService = HAServiceProtocolService
.newReflectiveBlockingService(haServiceProtocolXlator);
TraceAdminProtocolServerSideTranslatorPB traceAdminXlator =
new TraceAdminProtocolServerSideTranslatorPB(this);
BlockingService traceAdminService = TraceAdminService
.newReflectiveBlockingService(traceAdminXlator);
WritableRpcEngine.ensureInitialized();
InetSocketAddress serviceRpcAddr = nn.getServiceRpcServerAddress(conf);
if (serviceRpcAddr != null) {
String bindHost = nn.getServiceRpcServerBindHost(conf);
if (bindHost == null) {
bindHost = serviceRpcAddr.getHostName();
}
LOG.info("Service RPC server is binding to " + bindHost + ":" +
serviceRpcAddr.getPort());
int serviceHandlerCount =
conf.getInt(DFS_NAMENODE_SERVICE_HANDLER_COUNT_KEY,
DFS_NAMENODE_SERVICE_HANDLER_COUNT_DEFAULT);
this.serviceRpcServer = new RPC.Builder(conf)
.setProtocol(
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB.class)
.setInstance(clientNNPbService)
.setBindAddress(bindHost)
.setPort(serviceRpcAddr.getPort()).setNumHandlers(serviceHandlerCount)
.setVerbose(false)
.setSecretManager(namesystem.getDelegationTokenSecretManager())
.build();
// Add all the RPC protocols that the namenode implements
DFSUtil.addPBProtocol(conf, HAServiceProtocolPB.class, haPbService,
serviceRpcServer);
DFSUtil.addPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
serviceRpcServer);
DFSUtil.addPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
serviceRpcServer);
DFSUtil.addPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class,
refreshAuthService, serviceRpcServer);
DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
refreshUserMappingService, serviceRpcServer);
// We support Refreshing call queue here in case the client RPC queue is full
DFSUtil.addPBProtocol(conf, RefreshCallQueueProtocolPB.class,
refreshCallQueueService, serviceRpcServer);
DFSUtil.addPBProtocol(conf, GenericRefreshProtocolPB.class,
genericRefreshService, serviceRpcServer);
DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class,
getUserMappingService, serviceRpcServer);
DFSUtil.addPBProtocol(conf, TraceAdminProtocolPB.class,
traceAdminService, serviceRpcServer);
// Update the address with the correct port
InetSocketAddress listenAddr = serviceRpcServer.getListenerAddress();
serviceRPCAddress = new InetSocketAddress(
serviceRpcAddr.getHostName(), listenAddr.getPort());
nn.setRpcServiceServerAddress(conf, serviceRPCAddress);
} else {
serviceRpcServer = null;
serviceRPCAddress = null;
}
InetSocketAddress lifelineRpcAddr = nn.getLifelineRpcServerAddress(conf);
if (lifelineRpcAddr != null) {
RPC.setProtocolEngine(conf, HAServiceProtocolPB.class,
ProtobufRpcEngine.class);
String bindHost = nn.getLifelineRpcServerBindHost(conf);
if (bindHost == null) {
bindHost = lifelineRpcAddr.getHostName();
}
LOG.info("Lifeline RPC server is binding to {}:{}", bindHost,
lifelineRpcAddr.getPort());
int lifelineHandlerCount = conf.getInt(
DFS_NAMENODE_LIFELINE_HANDLER_COUNT_KEY,
DFS_NAMENODE_LIFELINE_HANDLER_COUNT_DEFAULT);
lifelineRpcServer = new RPC.Builder(conf)
.setProtocol(HAServiceProtocolPB.class)
.setInstance(haPbService)
.setBindAddress(bindHost)
.setPort(lifelineRpcAddr.getPort())
.setNumHandlers(lifelineHandlerCount)
.setVerbose(false)
.setSecretManager(namesystem.getDelegationTokenSecretManager())
.build();
// Update the address with the correct port
InetSocketAddress listenAddr = lifelineRpcServer.getListenerAddress();
lifelineRPCAddress = new InetSocketAddress(lifelineRpcAddr.getHostName(),
listenAddr.getPort());
nn.setRpcLifelineServerAddress(conf, lifelineRPCAddress);
} else {
lifelineRpcServer = null;
lifelineRPCAddress = null;
}
InetSocketAddress rpcAddr = nn.getRpcServerAddress(conf);
String bindHost = nn.getRpcServerBindHost(conf);
if (bindHost == null) {
bindHost = rpcAddr.getHostName();
}
LOG.info("RPC server is binding to " + bindHost + ":" + rpcAddr.getPort());
this.clientRpcServer = new RPC.Builder(conf)
.setProtocol(
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB.class)
.setInstance(clientNNPbService).setBindAddress(bindHost)
.setPort(rpcAddr.getPort()).setNumHandlers(handlerCount)
.setVerbose(false)
.setSecretManager(namesystem.getDelegationTokenSecretManager()).build();
// Add all the RPC protocols that the namenode implements
DFSUtil.addPBProtocol(conf, HAServiceProtocolPB.class, haPbService,
clientRpcServer);
DFSUtil.addPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
clientRpcServer);
DFSUtil.addPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
clientRpcServer);
DFSUtil.addPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class,
refreshAuthService, clientRpcServer);
DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
refreshUserMappingService, clientRpcServer);
DFSUtil.addPBProtocol(conf, RefreshCallQueueProtocolPB.class,
refreshCallQueueService, clientRpcServer);
DFSUtil.addPBProtocol(conf, GenericRefreshProtocolPB.class,
genericRefreshService, clientRpcServer);
DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class,
getUserMappingService, clientRpcServer);
DFSUtil.addPBProtocol(conf, TraceAdminProtocolPB.class,
traceAdminService, clientRpcServer);
// set service-level authorization security policy
if (serviceAuthEnabled =
conf.getBoolean(
CommonConfigurationKeys.HADOOP_SECURITY_AUTHORIZATION, false)) {
clientRpcServer.refreshServiceAcl(conf, new HDFSPolicyProvider());
if (serviceRpcServer != null) {
serviceRpcServer.refreshServiceAcl(conf, new HDFSPolicyProvider());
}
if (lifelineRpcServer != null) {
lifelineRpcServer.refreshServiceAcl(conf, new HDFSPolicyProvider());
}
}
// The rpc-server port can be ephemeral... ensure we have the correct info
InetSocketAddress listenAddr = clientRpcServer.getListenerAddress();
clientRpcAddress = new InetSocketAddress(
rpcAddr.getHostName(), listenAddr.getPort());
nn.setRpcServerAddress(conf, clientRpcAddress);
minimumDataNodeVersion = conf.get(
DFSConfigKeys.DFS_NAMENODE_MIN_SUPPORTED_DATANODE_VERSION_KEY,
DFSConfigKeys.DFS_NAMENODE_MIN_SUPPORTED_DATANODE_VERSION_DEFAULT);
// Set terse exception whose stack trace won't be logged
this.clientRpcServer.addTerseExceptions(SafeModeException.class,
FileNotFoundException.class,
HadoopIllegalArgumentException.class,
FileAlreadyExistsException.class,
InvalidPathException.class,
ParentNotDirectoryException.class,
UnresolvedLinkException.class,
AlreadyBeingCreatedException.class,
QuotaExceededException.class,
RecoveryInProgressException.class,
AccessControlException.class,
InvalidToken.class,
LeaseExpiredException.class,
NSQuotaExceededException.class,
DSQuotaExceededException.class,
QuotaByStorageTypeExceededException.class,
AclException.class,
FSLimitException.PathComponentTooLongException.class,
FSLimitException.MaxDirectoryItemsExceededException.class,
UnresolvedPathException.class);
clientRpcServer.setTracer(nn.tracer);
if (serviceRpcServer != null) {
serviceRpcServer.setTracer(nn.tracer);
}
if (lifelineRpcServer != null) {
lifelineRpcServer.setTracer(nn.tracer);
}
}
/**
 * Allow access to the lifeline RPC server for testing.
 * Null when no lifeline RPC address was configured.
 */
@VisibleForTesting
RPC.Server getLifelineRpcServer() {
  return lifelineRpcServer;
}
/**
 * Allow access to the client RPC server for testing.
 * This server is always created and handles ClientProtocol calls.
 */
@VisibleForTesting
RPC.Server getClientRpcServer() {
  return clientRpcServer;
}
/**
 * Allow access to the service RPC server for testing.
 * Null when no separate service RPC address was configured.
 */
@VisibleForTesting
RPC.Server getServiceRpcServer() {
  return serviceRpcServer;
}
/**
 * Start all configured RPC servers: the client server unconditionally,
 * then the optional service and lifeline servers when present.
 */
void start() {
  clientRpcServer.start();
  // The service and lifeline servers are optional; start whichever exist.
  for (RPC.Server optional :
      new RPC.Server[] { serviceRpcServer, lifelineRpcServer }) {
    if (optional != null) {
      optional.start();
    }
  }
}
/**
 * Block until every running RPC server has shut down.
 *
 * @throws InterruptedException if the calling thread is interrupted
 */
void join() throws InterruptedException {
  clientRpcServer.join();
  // Wait on the optional servers only when they were created.
  for (RPC.Server optional :
      new RPC.Server[] { serviceRpcServer, lifelineRpcServer }) {
    if (optional != null) {
      optional.join();
    }
  }
}
/**
 * Stop the client, service, and lifeline RPC servers, skipping any
 * that were never created.
 */
void stop() {
  // All three servers are null-checked: stop() may run before start()
  // completed, so even clientRpcServer could be unset here.
  for (RPC.Server server : new RPC.Server[] {
      clientRpcServer, serviceRpcServer, lifelineRpcServer }) {
    if (server != null) {
      server.stop();
    }
  }
}
/** @return the bound lifeline RPC address, or null if not configured. */
InetSocketAddress getLifelineRpcAddress() {
  return lifelineRPCAddress;
}
/** @return the bound service RPC address, or null if not configured. */
InetSocketAddress getServiceRpcAddress() {
  return serviceRPCAddress;
}
/** @return the bound client RPC address (port resolved after bind). */
InetSocketAddress getRpcAddress() {
  return clientRpcAddress;
}
/** Convenience delegate: the UGI of the current RPC caller. */
private static UserGroupInformation getRemoteUser() throws IOException {
  return NameNode.getRemoteUser();
}
/////////////////////////////////////////////////////
// NamenodeProtocol
/////////////////////////////////////////////////////
/**
 * Return blocks stored on the given datanode whose cumulative size is at
 * least {@code size} bytes. Superuser-only (used by the balancer).
 *
 * @throws IllegalArgumentException if {@code size} is not positive
 */
@Override // NamenodeProtocol
public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size)
    throws IOException {
  // Validate before any namesystem access: a non-positive size is a
  // caller bug, not a server-state problem.
  if (size <= 0) {
    throw new IllegalArgumentException(
        "Unexpected not positive size: " + size);
  }
  checkNNStartup();
  namesystem.checkSuperuserPrivilege();
  final BlockManager blockManager = namesystem.getBlockManager();
  return blockManager.getBlocks(datanode, size);
}
/**
 * Return the current block access keys. Superuser-only.
 */
@Override // NamenodeProtocol
public ExportedBlockKeys getBlockKeys() throws IOException {
  checkNNStartup();
  namesystem.checkSuperuserPrivilege();
  final ExportedBlockKeys keys = namesystem.getBlockManager().getBlockKeys();
  return keys;
}
/**
 * Handle an error report from a subordinate namenode. Always logged;
 * a FATAL error additionally releases the reporting backup node.
 */
@Override // NamenodeProtocol
public void errorReport(NamenodeRegistration registration,
    int errorCode, String msg) throws IOException {
  checkNNStartup();
  namesystem.checkOperation(OperationCategory.UNCHECKED);
  namesystem.checkSuperuserPrivilege();
  verifyRequest(registration);
  LOG.info("Error report from " + registration + ": " + msg);
  // Non-fatal reports are informational only.
  if (errorCode != FATAL) {
    return;
  }
  namesystem.releaseBackupNode(registration);
}
/**
 * Register a subordinate (backup/checkpoint) namenode and hand back this
 * namenode's own registration. Superuser-only.
 */
@Override // NamenodeProtocol
public NamenodeRegistration registerSubordinateNamenode(
    NamenodeRegistration registration) throws IOException {
  checkNNStartup();
  namesystem.checkSuperuserPrivilege();
  // Reject subordinates running an incompatible layout version.
  verifyLayoutVersion(registration.getVersion());
  final NamenodeRegistration selfRegistration = nn.setRegistration();
  namesystem.registerBackupNode(registration, selfRegistration);
  return selfRegistration;
}
/**
 * Begin a checkpoint on behalf of a registered subordinate namenode.
 * Only an ACTIVE namenode may serve this; retried RPCs get the cached
 * NamenodeCommand payload instead of starting a second checkpoint.
 */
@Override // NamenodeProtocol
public NamenodeCommand startCheckpoint(NamenodeRegistration registration)
    throws IOException {
  checkNNStartup();
  namesystem.checkSuperuserPrivilege();
  verifyRequest(registration);
  if(!nn.isRole(NamenodeRole.NAMENODE))
    throw new IOException("Only an ACTIVE node can invoke startCheckpoint.");
  // Retry-cache lookup: a completed prior attempt returns its payload.
  CacheEntryWithPayload cacheEntry = RetryCache.waitForCompletion(retryCache,
      null);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return (NamenodeCommand) cacheEntry.getPayload();
  }
  NamenodeCommand ret = null;
  try {
    ret = namesystem.startCheckpoint(registration, nn.setRegistration());
  } finally {
    // Success is inferred from a non-null command; the command itself
    // becomes the cached payload for retries.
    RetryCache.setState(cacheEntry, ret != null, ret);
  }
  return ret;
}
/**
 * Complete a checkpoint previously started via startCheckpoint.
 * Idempotent through the retry cache: a retried successful call is a no-op.
 */
@Override // NamenodeProtocol
public void endCheckpoint(NamenodeRegistration registration,
    CheckpointSignature sig) throws IOException {
  checkNNStartup();
  namesystem.checkSuperuserPrivilege();
  CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return; // Return previous response
  }
  boolean success = false;
  try {
    namesystem.endCheckpoint(registration, sig);
    success = true;
  } finally {
    // Record the outcome so retries of this RPC short-circuit above.
    RetryCache.setState(cacheEntry, success);
  }
}
/** Issue a new delegation token with the given renewer. */
@Override // ClientProtocol
public Token<DelegationTokenIdentifier> getDelegationToken(Text renewer)
    throws IOException {
  checkNNStartup();
  return namesystem.getDelegationToken(renewer);
}
/** Renew an existing delegation token; returns the new expiry time. */
@Override // ClientProtocol
public long renewDelegationToken(Token<DelegationTokenIdentifier> token)
    throws InvalidToken, IOException {
  checkNNStartup();
  return namesystem.renewDelegationToken(token);
}
/** Cancel a delegation token so it can no longer be used or renewed. */
@Override // ClientProtocol
public void cancelDelegationToken(Token<DelegationTokenIdentifier> token)
    throws IOException {
  checkNNStartup();
  namesystem.cancelDelegationToken(token);
}
/**
 * Locate the blocks covering the byte range [offset, offset+length)
 * of the file at {@code src}, counted in the getBlockLocations metric.
 */
@Override // ClientProtocol
public LocatedBlocks getBlockLocations(String src,
    long offset, long length) throws IOException {
  checkNNStartup();
  metrics.incrGetBlockLocations();
  final String clientMachine = getClientMachine();
  return namesystem.getBlockLocations(clientMachine, src, offset, length);
}
/** Return the server-side default values (block size, replication, etc.). */
@Override // ClientProtocol
public FsServerDefaults getServerDefaults() throws IOException {
  checkNNStartup();
  return namesystem.getServerDefaults();
}
/**
 * Create a new file in the namespace. Idempotent via the retry cache:
 * a retried successful create returns the cached HdfsFileStatus payload
 * rather than re-running the create.
 */
@Override // ClientProtocol
public HdfsFileStatus create(String src, FsPermission masked,
    String clientName, EnumSetWritable<CreateFlag> flag,
    boolean createParent, short replication, long blockSize,
    CryptoProtocolVersion[] supportedVersions)
    throws IOException {
  checkNNStartup();
  String clientMachine = getClientMachine();
  if (stateChangeLog.isDebugEnabled()) {
    stateChangeLog.debug("*DIR* NameNode.create: file "
        +src+" for "+clientName+" at "+clientMachine);
  }
  // Enforce the configured path length/depth limits before any state change.
  if (!checkPathLength(src)) {
    throw new IOException("create: Pathname too long. Limit "
        + MAX_PATH_LENGTH + " characters, " + MAX_PATH_DEPTH + " levels.");
  }
  namesystem.checkOperation(OperationCategory.WRITE);
  CacheEntryWithPayload cacheEntry = RetryCache.waitForCompletion(retryCache, null);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return (HdfsFileStatus) cacheEntry.getPayload();
  }
  HdfsFileStatus status = null;
  try {
    // Owner is the RPC caller; group left null (inherited downstream).
    PermissionStatus perm = new PermissionStatus(getRemoteUser()
        .getShortUserName(), null, masked);
    status = namesystem.startFile(src, perm, clientName, clientMachine,
        flag.get(), createParent, replication, blockSize, supportedVersions,
        cacheEntry != null);
  } finally {
    // Non-null status means success; the status is the cached retry payload.
    RetryCache.setState(cacheEntry, status != null, status);
  }
  // Metrics only after a successful (non-throwing) create.
  metrics.incrFilesCreated();
  metrics.incrCreateFileOps();
  return status;
}
/**
 * Reopen an existing file for append. Retried successful calls return
 * the cached LastBlockWithStatus payload from the retry cache.
 */
@Override // ClientProtocol
public LastBlockWithStatus append(String src, String clientName,
    EnumSetWritable<CreateFlag> flag) throws IOException {
  checkNNStartup();
  String clientMachine = getClientMachine();
  if (stateChangeLog.isDebugEnabled()) {
    stateChangeLog.debug("*DIR* NameNode.append: file "
        +src+" for "+clientName+" at "+clientMachine);
  }
  namesystem.checkOperation(OperationCategory.WRITE);
  CacheEntryWithPayload cacheEntry = RetryCache.waitForCompletion(retryCache,
      null);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return (LastBlockWithStatus) cacheEntry.getPayload();
  }
  LastBlockWithStatus info = null;
  boolean success = false;
  try {
    info = namesystem.appendFile(src, clientName, clientMachine, flag.get(),
        cacheEntry != null);
    success = true;
  } finally {
    // Record outcome and payload so a retry short-circuits above.
    RetryCache.setState(cacheEntry, success, info);
  }
  metrics.incrFilesAppended();
  return info;
}
/**
 * Attempt lease recovery on {@code src} for the named client.
 *
 * @return true if the lease was recovered and the file closed
 */
@Override // ClientProtocol
public boolean recoverLease(String src, String clientName) throws IOException {
  checkNNStartup();
  return namesystem.recoverLease(src, clientName, getClientMachine());
}
/** Change the replication factor of an existing file. */
@Override // ClientProtocol
public boolean setReplication(String src, short replication)
    throws IOException {
  checkNNStartup();
  return namesystem.setReplication(src, replication);
}
/** Assign the named storage policy to the given path. */
@Override
public void setStoragePolicy(String src, String policyName)
    throws IOException {
  checkNNStartup();
  namesystem.setStoragePolicy(src, policyName);
}
/** Look up the storage policy currently in effect for a path. */
@Override
public BlockStoragePolicy getStoragePolicy(String path) throws IOException {
  checkNNStartup();
  return namesystem.getStoragePolicy(path);
}
/** Return every storage policy known to this namenode. */
@Override
public BlockStoragePolicy[] getStoragePolicies() throws IOException {
  checkNNStartup();
  return namesystem.getStoragePolicies();
}
/** Set POSIX-style permission bits on a path. */
@Override // ClientProtocol
public void setPermission(String src, FsPermission permissions)
    throws IOException {
  checkNNStartup();
  namesystem.setPermission(src, permissions);
}
/** Set the owner and/or group of a path (null leaves a field unchanged). */
@Override // ClientProtocol
public void setOwner(String src, String username, String groupname)
    throws IOException {
  checkNNStartup();
  namesystem.setOwner(src, username, groupname);
}
/**
 * Allocate the next block for a file being written. Increments the
 * addBlock metric only when a block is actually returned.
 */
@Override
public LocatedBlock addBlock(String src, String clientName,
    ExtendedBlock previous, DatanodeInfo[] excludedNodes, long fileId,
    String[] favoredNodes)
    throws IOException {
  checkNNStartup();
  final LocatedBlock allocated = namesystem.getAdditionalBlock(src, fileId,
      clientName, previous, excludedNodes, favoredNodes);
  if (allocated != null) {
    metrics.incrAddBlockOps();
  }
  return allocated;
}
/**
 * Pick additional datanodes for an existing pipeline (used when a
 * pipeline node fails mid-write). Excluded nodes are converted to a
 * Set before delegating to the namesystem.
 */
@Override // ClientProtocol
public LocatedBlock getAdditionalDatanode(final String src,
    final long fileId, final ExtendedBlock blk,
    final DatanodeInfo[] existings, final String[] existingStorageIDs,
    final DatanodeInfo[] excludes,
    final int numAdditionalNodes, final String clientName
    ) throws IOException {
  checkNNStartup();
  if (LOG.isDebugEnabled()) {
    LOG.debug("getAdditionalDatanode: src=" + src
        + ", fileId=" + fileId
        + ", blk=" + blk
        + ", existings=" + Arrays.asList(existings)
        + ", excludes=" + Arrays.asList(excludes)
        + ", numAdditionalNodes=" + numAdditionalNodes
        + ", clientName=" + clientName);
  }
  metrics.incrGetAdditionalDatanodeOps();
  // A null excludes array maps to a null set (not an empty one).
  Set<Node> excludeSet = null;
  if (excludes != null) {
    excludeSet = new HashSet<Node>(excludes.length);
    for (Node node : excludes) {
      excludeSet.add(node);
    }
  }
  return namesystem.getAdditionalDatanode(src, fileId, blk, existings,
      existingStorageIDs, excludeSet, numAdditionalNodes, clientName);
}
/**
 * The client needs to give up on the block: remove the abandoned block
 * from the file so a replacement can be allocated.
 */
@Override // ClientProtocol
public void abandonBlock(ExtendedBlock b, long fileId, String src,
    String holder) throws IOException {
  checkNNStartup();
  namesystem.abandonBlock(b, fileId, src, holder);
}
/**
 * Finish writing a file. Returns false if the file cannot yet be closed
 * (e.g. last block not fully replicated) so the client should retry.
 */
@Override // ClientProtocol
public boolean complete(String src, String clientName,
    ExtendedBlock last, long fileId)
    throws IOException {
  checkNNStartup();
  return namesystem.completeFile(src, clientName, last, fileId);
}
/**
 * The client has detected an error on the specified located blocks
 * and is reporting them to the server. For now, the namenode will
 * mark the block as corrupt. In the future we might
 * check the blocks are actually corrupt.
 */
@Override // ClientProtocol, DatanodeProtocol
public void reportBadBlocks(LocatedBlock[] blocks) throws IOException {
  checkNNStartup();
  namesystem.reportBadBlocks(blocks);
}
/** Get an updated block (new generation stamp) for pipeline recovery. */
@Override // ClientProtocol
public LocatedBlock updateBlockForPipeline(ExtendedBlock block, String clientName)
    throws IOException {
  checkNNStartup();
  return namesystem.updateBlockForPipeline(block, clientName);
}
/**
 * Commit an updated write pipeline after recovery. Idempotent through
 * the retry cache: a retried successful call returns immediately.
 */
@Override // ClientProtocol
public void updatePipeline(String clientName, ExtendedBlock oldBlock,
    ExtendedBlock newBlock, DatanodeID[] newNodes, String[] newStorageIDs)
    throws IOException {
  checkNNStartup();
  namesystem.checkOperation(OperationCategory.WRITE);
  CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return; // Return previous response
  }
  boolean success = false;
  try {
    namesystem.updatePipeline(clientName, oldBlock, newBlock, newNodes,
        newStorageIDs, cacheEntry != null);
    success = true;
  } finally {
    // Publish the outcome for retries of this RPC.
    RetryCache.setState(cacheEntry, success);
  }
}
/**
 * Commit the result of block recovery reported by the primary datanode:
 * new generation stamp/length, optionally closing the file or deleting
 * the block, and the surviving target replicas.
 */
@Override // DatanodeProtocol
public void commitBlockSynchronization(ExtendedBlock block,
    long newgenerationstamp, long newlength,
    boolean closeFile, boolean deleteblock, DatanodeID[] newtargets,
    String[] newtargetstorages)
    throws IOException {
  checkNNStartup();
  namesystem.commitBlockSynchronization(block, newgenerationstamp,
      newlength, closeFile, deleteblock, newtargets, newtargetstorages);
}
/** Return the preferred block size recorded for the given file. */
@Override // ClientProtocol
public long getPreferredBlockSize(String filename)
    throws IOException {
  checkNNStartup();
  return namesystem.getPreferredBlockSize(filename);
}
/**
 * Legacy rename (no Options.Rename flags). Deprecated in favor of
 * rename2. A retried successful call returns true from the retry cache.
 */
@Deprecated
@Override // ClientProtocol
public boolean rename(String src, String dst) throws IOException {
  checkNNStartup();
  if(stateChangeLog.isDebugEnabled()) {
    stateChangeLog.debug("*DIR* NameNode.rename: " + src + " to " + dst);
  }
  // The destination path must respect the length/depth limits.
  if (!checkPathLength(dst)) {
    throw new IOException("rename: Pathname too long. Limit "
        + MAX_PATH_LENGTH + " characters, " + MAX_PATH_DEPTH + " levels.");
  }
  namesystem.checkOperation(OperationCategory.WRITE);
  CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return true; // Return previous response
  }
  boolean ret = false;
  try {
    ret = namesystem.renameTo(src, dst, cacheEntry != null);
  } finally {
    RetryCache.setState(cacheEntry, ret);
  }
  // Metric counted only on an actual rename, not on cached retries.
  if (ret) {
    metrics.incrFilesRenamed();
  }
  return ret;
}
/**
 * Concatenate the {@code src} files onto the target file {@code trg}.
 * Idempotent through the retry cache.
 */
@Override // ClientProtocol
public void concat(String trg, String[] src) throws IOException {
  checkNNStartup();
  namesystem.checkOperation(OperationCategory.WRITE);
  CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return; // Return previous response
  }
  boolean success = false;
  try {
    namesystem.concat(trg, src, cacheEntry != null);
    success = true;
  } finally {
    RetryCache.setState(cacheEntry, success);
  }
}
/**
 * Rename with Options.Rename semantics (e.g. OVERWRITE). Retried
 * successful calls return immediately from the retry cache.
 */
@Override // ClientProtocol
public void rename2(String src, String dst, Options.Rename... options)
    throws IOException {
  checkNNStartup();
  if(stateChangeLog.isDebugEnabled()) {
    stateChangeLog.debug("*DIR* NameNode.rename: " + src + " to " + dst);
  }
  if (!checkPathLength(dst)) {
    throw new IOException("rename: Pathname too long. Limit "
        + MAX_PATH_LENGTH + " characters, " + MAX_PATH_DEPTH + " levels.");
  }
  namesystem.checkOperation(OperationCategory.WRITE);
  CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return; // Return previous response
  }
  boolean success = false;
  try {
    namesystem.renameTo(src, dst, cacheEntry != null, options);
    success = true;
  } finally {
    RetryCache.setState(cacheEntry, success);
  }
  // Reached only on success: a failed renameTo throws out of the method.
  metrics.incrFilesRenamed();
}
/**
 * Truncate the file {@code src} down to {@code newLength} bytes.
 *
 * @return true if truncation completed immediately; false if block
 *         recovery was scheduled and the caller must wait
 * @throws IOException if the namenode is not yet started or the
 *         truncate fails
 */
@Override // ClientProtocol
public boolean truncate(String src, long newLength, String clientName)
    throws IOException {
  // FIX: every other ClientProtocol handler verifies the NameNode has
  // finished starting before touching namesystem; truncate was the one
  // method missing this guard, letting an early RPC reach an
  // uninitialized namesystem instead of being rejected consistently.
  checkNNStartup();
  if (stateChangeLog.isDebugEnabled()) {
    stateChangeLog.debug("*DIR* NameNode.truncate: " + src + " to " +
        newLength);
  }
  String clientMachine = getClientMachine();
  try {
    return namesystem.truncate(
        src, newLength, clientName, clientMachine, now());
  } finally {
    // NOTE(review): counted even when truncate throws — preserved as-is
    // since the metric's existing semantics may be relied upon.
    metrics.incrFilesTruncated();
  }
}
/**
 * Delete a file or directory (recursively when requested). Retried
 * successful calls return true from the retry cache.
 */
@Override // ClientProtocol
public boolean delete(String src, boolean recursive) throws IOException {
  checkNNStartup();
  if (stateChangeLog.isDebugEnabled()) {
    stateChangeLog.debug("*DIR* Namenode.delete: src=" + src
        + ", recursive=" + recursive);
  }
  namesystem.checkOperation(OperationCategory.WRITE);
  CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return true; // Return previous response
  }
  boolean ret = false;
  try {
    ret = namesystem.delete(src, recursive, cacheEntry != null);
  } finally {
    RetryCache.setState(cacheEntry, ret);
  }
  // Metric only for deletes that actually removed something.
  if (ret)
    metrics.incrDeleteFileOps();
  return ret;
}
/**
 * Check path length does not exceed maximum. Returns true if
 * length and depth are okay. Returns false if length is too long
 * or depth is too great.
 */
private boolean checkPathLength(String src) {
  // Construct the Path first, matching the original evaluation order
  // (an invalid path string fails here regardless of its length).
  final Path srcPath = new Path(src);
  if (src.length() > MAX_PATH_LENGTH) {
    return false;
  }
  return srcPath.depth() <= MAX_PATH_DEPTH;
}
/**
 * Create the directory {@code src} (and parents when requested), owned
 * by the RPC caller with the supplied permission mask.
 */
@Override // ClientProtocol
public boolean mkdirs(String src, FsPermission masked, boolean createParent)
    throws IOException {
  checkNNStartup();
  if (stateChangeLog.isDebugEnabled()) {
    stateChangeLog.debug("*DIR* NameNode.mkdirs: " + src);
  }
  if (!checkPathLength(src)) {
    throw new IOException("mkdirs: Pathname too long. Limit "
        + MAX_PATH_LENGTH + " characters, " + MAX_PATH_DEPTH + " levels.");
  }
  // Owner is the caller's short name; group left null.
  final PermissionStatus perm = new PermissionStatus(
      getRemoteUser().getShortUserName(), null, masked);
  return namesystem.mkdirs(src, perm, createParent);
}
/** Renew all leases held by the given client (its write heartbeat). */
@Override // ClientProtocol
public void renewLease(String clientName) throws IOException {
  checkNNStartup();
  namesystem.renewLease(clientName);
}
/**
 * List a directory starting after {@code startAfter}, optionally with
 * block locations. Listing metrics are updated only on a non-null result.
 */
@Override // ClientProtocol
public DirectoryListing getListing(String src, byte[] startAfter,
    boolean needLocation) throws IOException {
  checkNNStartup();
  final DirectoryListing listing = namesystem.getListing(
      src, startAfter, needLocation);
  if (listing == null) {
    return null;
  }
  metrics.incrGetListingOps();
  metrics.incrFilesInGetListingOps(listing.getPartialListing().length);
  return listing;
}
/**
 * Return the status of a file or directory, resolving symlinks
 * (second argument true).
 */
@Override // ClientProtocol
public HdfsFileStatus getFileInfo(String src) throws IOException {
  checkNNStartup();
  metrics.incrFileInfoOps();
  final HdfsFileStatus status = namesystem.getFileInfo(src, true);
  return status;
}
/** @return true if the file at {@code src} is closed (not under lease). */
@Override // ClientProtocol
public boolean isFileClosed(String src) throws IOException{
  checkNNStartup();
  return namesystem.isFileClosed(src);
}
/**
 * Return the status of a path without resolving a final-component
 * symlink (second argument false).
 */
@Override // ClientProtocol
public HdfsFileStatus getFileLinkInfo(String src) throws IOException {
  checkNNStartup();
  metrics.incrFileInfoOps();
  final HdfsFileStatus status = namesystem.getFileInfo(src, false);
  return status;
}
/** Return aggregate filesystem statistics (capacity, used, remaining, ...). */
@Override // ClientProtocol
public long[] getStats() throws IOException {
  checkNNStartup();
  namesystem.checkOperation(OperationCategory.READ);
  return namesystem.getStats();
}
/**
 * Report datanodes matching the requested type (LIVE, DEAD, ...).
 */
@Override // ClientProtocol
public DatanodeInfo[] getDatanodeReport(DatanodeReportType type)
    throws IOException {
  checkNNStartup();
  return namesystem.datanodeReport(type);
}
/**
 * Per-storage usage report for datanodes of the requested type.
 */
@Override // ClientProtocol
public DatanodeStorageReport[] getDatanodeStorageReport(
    DatanodeReportType type) throws IOException {
  checkNNStartup();
  return namesystem.getDatanodeStorageReport(type);
}
/**
 * Enter, leave, or query safe mode. When {@code isChecked}, a GET is
 * validated as a READ operation and everything else as a WRITE;
 * otherwise the operation category is UNCHECKED.
 */
@Override // ClientProtocol
public boolean setSafeMode(SafeModeAction action, boolean isChecked)
    throws IOException {
  checkNNStartup();
  OperationCategory opCategory = OperationCategory.UNCHECKED;
  if (isChecked) {
    opCategory = (action == SafeModeAction.SAFEMODE_GET)
        ? OperationCategory.READ
        : OperationCategory.WRITE;
  }
  namesystem.checkOperation(opCategory);
  return namesystem.setSafeMode(action);
}
/** Enable/disable/check restoring of failed storage replicas. */
@Override // ClientProtocol
public boolean restoreFailedStorage(String arg) throws IOException {
  checkNNStartup();
  return namesystem.restoreFailedStorage(arg);
}
/**
 * Save the current namespace image to disk. Idempotent via the retry
 * cache: a retried successful save is a no-op.
 */
@Override // ClientProtocol
public void saveNamespace() throws IOException {
  checkNNStartup();
  CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return; // Return previous response
  }
  boolean success = false;
  try {
    namesystem.saveNamespace();
    success = true;
  } finally {
    RetryCache.setState(cacheEntry, success);
  }
}
/**
 * Roll the edit log and return the transaction id of the new segment.
 */
@Override // ClientProtocol
public long rollEdits() throws AccessControlException, IOException {
  checkNNStartup();
  return namesystem.rollEditLog().getCurSegmentTxId();
}
/** Re-read the include/exclude host lists and update datanode membership. */
@Override // ClientProtocol
public void refreshNodes() throws IOException {
  checkNNStartup();
  namesystem.refreshNodes();
}
/** Last applied-or-written transaction id of the FSImage. Superuser-only. */
@Override // NamenodeProtocol
public long getTransactionID() throws IOException {
  checkNNStartup();
  namesystem.checkOperation(OperationCategory.UNCHECKED);
  namesystem.checkSuperuserPrivilege();
  return namesystem.getFSImage().getLastAppliedOrWrittenTxId();
}
/** Transaction id of the most recent checkpoint. Superuser-only. */
@Override // NamenodeProtocol
public long getMostRecentCheckpointTxId() throws IOException {
  checkNNStartup();
  namesystem.checkOperation(OperationCategory.UNCHECKED);
  namesystem.checkSuperuserPrivilege();
  return namesystem.getFSImage().getMostRecentCheckpointTxId();
}
/** Roll the edit log and return the checkpoint signature. Superuser-only. */
@Override // NamenodeProtocol
public CheckpointSignature rollEditLog() throws IOException {
  checkNNStartup();
  namesystem.checkSuperuserPrivilege();
  return namesystem.rollEditLog();
}
/** Manifest of edit-log segments starting at {@code sinceTxId}. Superuser-only. */
@Override // NamenodeProtocol
public RemoteEditLogManifest getEditLogManifest(long sinceTxId)
    throws IOException {
  checkNNStartup();
  namesystem.checkOperation(OperationCategory.READ);
  namesystem.checkSuperuserPrivilege();
  return namesystem.getEditLog().getEditLogManifest(sinceTxId);
}
/** @return true if the last layout upgrade was finalized. Superuser-only. */
@Override // NamenodeProtocol
public boolean isUpgradeFinalized() throws IOException {
  checkNNStartup();
  namesystem.checkSuperuserPrivilege();
  return namesystem.isUpgradeFinalized();
}
/** Finalize the pending layout upgrade, discarding the previous state. */
@Override // ClientProtocol
public void finalizeUpgrade() throws IOException {
  checkNNStartup();
  namesystem.finalizeUpgrade();
}
/**
 * Dispatch a rolling-upgrade action (QUERY, PREPARE, or FINALIZE) to
 * the namesystem and return the resulting upgrade info.
 *
 * @throws UnsupportedActionException for any unrecognized action
 */
@Override // ClientProtocol
public RollingUpgradeInfo rollingUpgrade(RollingUpgradeAction action) throws IOException {
  checkNNStartup();
  LOG.info("rollingUpgrade " + action);
  final RollingUpgradeInfo info;
  switch(action) {
  case QUERY:
    info = namesystem.queryRollingUpgrade();
    break;
  case PREPARE:
    info = namesystem.startRollingUpgrade();
    break;
  case FINALIZE:
    info = namesystem.finalizeRollingUpgrade();
    break;
  default:
    throw new UnsupportedActionException(action + " is not yet supported.");
  }
  return info;
}
/** Dump namenode primary data structures to the named file. */
@Override // ClientProtocol
public void metaSave(String filename) throws IOException {
  checkNNStartup();
  namesystem.metaSave(filename);
}
/**
 * List files with corrupt blocks under {@code path}, resuming from the
 * opaque {@code cookie}. The cookie is passed in and out through a
 * one-element array so the namesystem can advance it.
 */
@Override // ClientProtocol
public CorruptFileBlocks listCorruptFileBlocks(String path, String cookie)
    throws IOException {
  checkNNStartup();
  final String[] cookieTab = new String[] { cookie };
  final Collection<FSNamesystem.CorruptFileBlockInfo> corrupt =
      namesystem.listCorruptFileBlocks(path, cookieTab);
  final String[] files = new String[corrupt.size()];
  int idx = 0;
  for (FSNamesystem.CorruptFileBlockInfo info : corrupt) {
    files[idx] = info.path;
    idx++;
  }
  return new CorruptFileBlocks(files, cookieTab[0]);
}
/**
 * Tell all datanodes to use a new, non-persistent bandwidth value for
 * dfs.datanode.balance.bandwidthPerSec.
 * @param bandwidth Balancer bandwidth in bytes per second for all datanodes.
 * @throws IOException if the namenode has not finished starting up
 */
@Override // ClientProtocol
public void setBalancerBandwidth(long bandwidth) throws IOException {
  checkNNStartup();
  namesystem.setBalancerBandwidth(bandwidth);
}
/** Return space/quota usage summary for the subtree at {@code path}. */
@Override // ClientProtocol
public ContentSummary getContentSummary(String path) throws IOException {
  checkNNStartup();
  return namesystem.getContentSummary(path);
}
/** Set namespace/storagespace quotas (optionally per storage type). */
@Override // ClientProtocol
public void setQuota(String path, long namespaceQuota, long storagespaceQuota,
    StorageType type)
    throws IOException {
  checkNNStartup();
  namesystem.setQuota(path, namespaceQuota, storagespaceQuota, type);
}
/** Persist metadata for a file under construction (hflush/hsync path). */
@Override // ClientProtocol
public void fsync(String src, long fileId, String clientName,
    long lastBlockLength)
    throws IOException {
  checkNNStartup();
  namesystem.fsync(src, fileId, clientName, lastBlockLength);
}
/** Set modification and access times on a path. */
@Override // ClientProtocol
public void setTimes(String src, long mtime, long atime)
    throws IOException {
  checkNNStartup();
  namesystem.setTimes(src, mtime, atime);
}
/**
 * Create a symlink at {@code link} pointing at {@code target}.
 * Idempotent through the retry cache.
 */
@Override // ClientProtocol
public void createSymlink(String target, String link, FsPermission dirPerms,
    boolean createParent) throws IOException {
  checkNNStartup();
  namesystem.checkOperation(OperationCategory.WRITE);
  CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
  if (cacheEntry != null && cacheEntry.isSuccess()) {
    return; // Return previous response
  }
  /* We enforce the MAX_PATH_LENGTH limit even though a symlink target
   * URI may refer to a non-HDFS file system.
   */
  if (!checkPathLength(link)) {
    throw new IOException("Symlink path exceeds " + MAX_PATH_LENGTH +
        " character limit");
  }
  final UserGroupInformation ugi = getRemoteUser();
  boolean success = false;
  try {
    // The symlink is owned by the caller; group left null.
    PermissionStatus perm = new PermissionStatus(ugi.getShortUserName(),
        null, dirPerms);
    namesystem.createSymlink(target, link, perm, createParent,
        cacheEntry != null);
    success = true;
  } finally {
    RetryCache.setState(cacheEntry, success);
  }
}
/**
 * Return the target of the symlink at {@code path}.
 *
 * @throws FileNotFoundException if nothing exists at the path
 * @throws IOException if the path exists but is not a symlink
 */
@Override // ClientProtocol
public String getLinkTarget(String path) throws IOException {
  checkNNStartup();
  metrics.incrGetLinkTargetOps();
  HdfsFileStatus status = null;
  try {
    status = namesystem.getFileInfo(path, false);
  } catch (UnresolvedPathException e) {
    // An intermediate-component symlink resolves to its target path.
    return e.getResolvedPath().toString();
  } catch (UnresolvedLinkException e) {
    // The NameNode should only throw an UnresolvedPathException
    throw new AssertionError("UnresolvedLinkException thrown");
  }
  if (status == null) {
    throw new FileNotFoundException("File does not exist: " + path);
  }
  if (!status.isSymlink()) {
    throw new IOException("Path " + path + " is not a symbolic link");
  }
  return status.getSymlink();
}
/**
 * Register a datanode after checking its software version; the
 * (possibly updated) registration is echoed back to the caller.
 */
@Override // DatanodeProtocol
public DatanodeRegistration registerDatanode(DatanodeRegistration nodeReg)
    throws IOException {
  checkNNStartup();
  verifySoftwareVersion(nodeReg);
  namesystem.registerDatanode(nodeReg);
  return nodeReg;
}
/**
 * Process a datanode heartbeat and return any commands for the node.
 */
@Override // DatanodeProtocol
public HeartbeatResponse sendHeartbeat(DatanodeRegistration nodeReg,
    StorageReport[] report, long dnCacheCapacity, long dnCacheUsed,
    int xmitsInProgress, int xceiverCount,
    int failedVolumes, VolumeFailureSummary volumeFailureSummary,
    boolean requestFullBlockReportLease) throws IOException {
  checkNNStartup();
  verifyRequest(nodeReg);
  // NOTE(review): xceiverCount/xmitsInProgress are passed in the opposite
  // order from this method's parameter list — presumably matching
  // handleHeartbeat's own signature; verify against FSNamesystem.
  return namesystem.handleHeartbeat(nodeReg, report,
      dnCacheCapacity, dnCacheUsed, xceiverCount, xmitsInProgress,
      failedVolumes, volumeFailureSummary, requestFullBlockReportLease);
}
/**
 * Process a full block report (one entry per storage). Returns a
 * FinalizeCommand when the datanode may finalize its upgrade, else null.
 */
@Override // DatanodeProtocol
public DatanodeCommand blockReport(DatanodeRegistration nodeReg,
    String poolId, StorageBlockReport[] reports,
    BlockReportContext context) throws IOException {
  checkNNStartup();
  verifyRequest(nodeReg);
  if(blockStateChangeLog.isDebugEnabled()) {
    blockStateChangeLog.debug("*BLOCK* NameNode.blockReport: "
        + "from " + nodeReg + ", reports.length=" + reports.length);
  }
  final BlockManager bm = namesystem.getBlockManager();
  boolean noStaleStorages = false;
  for (int r = 0; r < reports.length; r++) {
    final BlockListAsLongs blocks = reports[r].getBlocks();
    //
    // BlockManager.processReport accumulates information of prior calls
    // for the same node and storage, so the value returned by the last
    // call of this loop is the final updated value for noStaleStorage.
    //
    noStaleStorages = bm.processReport(nodeReg, reports[r].getStorage(),
        blocks, context, (r == reports.length - 1));
    metrics.incrStorageBlockReportOps();
  }
  BlockManagerFaultInjector.getInstance().
      incomingBlockReportRpc(nodeReg, context);
  // Finalization is offered only when the upgrade is done, no rolling
  // upgrade is in progress, this NN is active, and no storage was stale.
  if (nn.getFSImage().isUpgradeFinalized() &&
      !namesystem.isRollingUpgrade() &&
      !nn.isStandbyState() &&
      noStaleStorages) {
    return new FinalizeCommand(poolId);
  }
  return null;
}
/**
 * Process a datanode's cache report (ids of blocks it has cached).
 * Always returns null: cache directives are sent via heartbeats.
 */
@Override
public DatanodeCommand cacheReport(DatanodeRegistration nodeReg,
    String poolId, List<Long> blockIds) throws IOException {
  checkNNStartup();
  verifyRequest(nodeReg);
  if (blockStateChangeLog.isDebugEnabled()) {
    blockStateChangeLog.debug("*BLOCK* NameNode.cacheReport: "
        + "from " + nodeReg + " " + blockIds.size() + " blocks");
  }
  namesystem.getCacheManager().processCacheReport(nodeReg, blockIds);
  return null;
}
/**
 * Process incremental block reports (received/receiving/deleted blocks),
 * one entry per storage, in the order the datanode sent them.
 */
@Override // DatanodeProtocol
public void blockReceivedAndDeleted(DatanodeRegistration nodeReg, String poolId,
    StorageReceivedDeletedBlocks[] receivedAndDeletedBlocks) throws IOException {
  checkNNStartup();
  verifyRequest(nodeReg);
  metrics.incrBlockReceivedAndDeletedOps();
  if (blockStateChangeLog.isDebugEnabled()) {
    blockStateChangeLog.debug("*BLOCK* NameNode.blockReceivedAndDeleted: "
        +"from "+nodeReg+" "+receivedAndDeletedBlocks.length
        +" blocks.");
  }
  for (StorageReceivedDeletedBlocks storageReport : receivedAndDeletedBlocks) {
    namesystem.processIncrementalBlockReport(nodeReg, storageReport);
  }
}
/**
 * Handle an error report from a datanode. NOTIFY is logged without
 * registration verification; DISK_ERROR is logged; FATAL_DISK_ERROR
 * additionally removes the datanode from the cluster.
 */
@Override // DatanodeProtocol
public void errorReport(DatanodeRegistration nodeReg,
    int errorCode, String msg) throws IOException {
  checkNNStartup();
  String dnName =
      (nodeReg == null) ? "Unknown DataNode" : nodeReg.toString();
  // NOTIFY is accepted even from unverified (possibly null) registrations.
  if (errorCode == DatanodeProtocol.NOTIFY) {
    LOG.info("Error report from " + dnName + ": " + msg);
    return;
  }
  verifyRequest(nodeReg);
  if (errorCode == DatanodeProtocol.DISK_ERROR) {
    LOG.warn("Disk error on " + dnName + ": " + msg);
  } else if (errorCode == DatanodeProtocol.FATAL_DISK_ERROR) {
    LOG.warn("Fatal disk error on " + dnName + ": " + msg);
    namesystem.getBlockManager().getDatanodeManager().removeDatanode(nodeReg);
  } else {
    LOG.info("Error report from " + dnName + ": " + msg);
  }
}
/** Return this namenode's namespace info (layout, cluster id, ...). */
@Override // DatanodeProtocol, NamenodeProtocol
public NamespaceInfo versionRequest() throws IOException {
  checkNNStartup();
  namesystem.checkSuperuserPrivilege();
  return namesystem.getNamespaceInfo();
}
/**
 * Verifies the given registration.
 *
 * @param nodeReg node registration
 * @throws UnregisteredNodeException if the registration is invalid
 */
private void verifyRequest(NodeRegistration nodeReg) throws IOException {
  // verify registration ID
  final String id = nodeReg.getRegistrationID();
  final String expectedID = namesystem.getRegistrationID();
  if (expectedID.equals(id)) {
    return;
  }
  LOG.warn("Registration IDs mismatched: the "
      + nodeReg.getClass().getSimpleName() + " ID is " + id
      + " but the expected ID is " + expectedID);
  throw new UnregisteredNodeException(nodeReg);
}
/**
 * Reload the service-level authorization policy on the client and
 * (if present) service RPC servers. Fails when service-level
 * authorization was never enabled at startup.
 */
@Override // RefreshAuthorizationPolicyProtocol
public void refreshServiceAcl() throws IOException {
  checkNNStartup();
  if (!serviceAuthEnabled) {
    throw new AuthorizationException("Service Level Authorization not enabled!");
  }
  // A fresh Configuration re-reads the policy from the config files.
  this.clientRpcServer.refreshServiceAcl(new Configuration(), new HDFSPolicyProvider());
  if (this.serviceRpcServer != null) {
    this.serviceRpcServer.refreshServiceAcl(new Configuration(), new HDFSPolicyProvider());
  }
}
/**
 * Flush the cached user-to-groups mappings, logging which user asked.
 */
@Override // RefreshAuthorizationPolicyProtocol
public void refreshUserToGroupsMappings() throws IOException {
  final String requester = getRemoteUser().getShortUserName();
  LOG.info("Refreshing all user-to-groups mappings. Requested by user: " +
      requester);
  Groups.getUserToGroupsMappingService().refresh();
}
/** Reloads the superuser proxy-group mapping from configuration. */
@Override // RefreshAuthorizationPolicyProtocol
public void refreshSuperUserGroupsConfiguration() {
LOG.info("Refreshing SuperUser proxy group mapping list ");
ProxyUsers.refreshSuperUserGroupsConfiguration();
}
/**
 * Rebuilds the RPC call queue from a freshly loaded configuration on the
 * client RPC server and, when present, the service RPC server.
 */
@Override // RefreshCallQueueProtocol
public void refreshCallQueue() {
LOG.info("Refreshing call queue.");
Configuration conf = new Configuration();
clientRpcServer.refreshCallQueue(conf);
if (this.serviceRpcServer != null) {
serviceRpcServer.refreshCallQueue(conf);
}
}
/**
 * Dispatches a generic refresh request to whichever handler registered
 * itself for {@code identifier}.
 */
@Override // GenericRefreshProtocol
public Collection<RefreshResponse> refresh(String identifier, String[] args) {
// Let the registry handle as needed
return RefreshRegistry.defaultRegistry().dispatch(identifier, args);
}
/**
 * Resolves the group names for the given user via the configured group
 * mapping service.
 */
@Override // GetUserMappingsProtocol
public String[] getGroupsForUser(String user) throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("Getting groups for user " + user);
}
return UserGroupInformation.createRemoteUser(user).getGroupNames();
}
/** HA health check; delegates to the NameNode after verifying startup. */
@Override // HAServiceProtocol
public synchronized void monitorHealth() throws HealthCheckFailedException,
AccessControlException, IOException {
checkNNStartup();
nn.monitorHealth();
}
/**
 * Transitions this NameNode to the active HA state after validating the
 * state-change request.
 */
@Override // HAServiceProtocol
public synchronized void transitionToActive(StateChangeRequestInfo req)
throws ServiceFailedException, AccessControlException, IOException {
checkNNStartup();
nn.checkHaStateChange(req);
nn.transitionToActive();
}
/**
 * Transitions this NameNode to the standby HA state after validating the
 * state-change request.
 */
@Override // HAServiceProtocol
public synchronized void transitionToStandby(StateChangeRequestInfo req)
throws ServiceFailedException, AccessControlException, IOException {
checkNNStartup();
nn.checkHaStateChange(req);
nn.transitionToStandby();
}
/** Returns the current HA service status of this NameNode. */
@Override // HAServiceProtocol
public synchronized HAServiceStatus getServiceStatus()
throws AccessControlException, ServiceFailedException, IOException {
checkNNStartup();
return nn.getServiceStatus();
}
/**
 * Verify version.
 *
 * @param version layout version reported by the caller
 * @throws IOException on layout version mismatch with
 *         {@code HdfsServerConstants.NAMENODE_LAYOUT_VERSION}
 */
void verifyLayoutVersion(int version) throws IOException {
  // Braces added around the conditional throw (standard Java style;
  // guards against accidental statement capture on later edits).
  if (version != HdfsServerConstants.NAMENODE_LAYOUT_VERSION) {
    throw new IncorrectVersionException(
        HdfsServerConstants.NAMENODE_LAYOUT_VERSION, version, "data node");
  }
}
/**
 * Verifies a registering DataNode's software version against this
 * NameNode. A version older than the configured minimum is rejected.
 * A version that merely differs is rejected only when the CTimes also
 * differ; otherwise it is logged as a normal rolling-upgrade condition.
 *
 * @param dnReg registration carrying the DataNode's software version
 * @throws IncorrectVersionException if the version is unacceptable
 */
private void verifySoftwareVersion(DatanodeRegistration dnReg)
throws IncorrectVersionException {
String dnVersion = dnReg.getSoftwareVersion();
if (VersionUtil.compareVersions(dnVersion, minimumDataNodeVersion) < 0) {
IncorrectVersionException ive = new IncorrectVersionException(
minimumDataNodeVersion, dnVersion, "DataNode", "NameNode");
LOG.warn(ive.getMessage() + " DN: " + dnReg);
throw ive;
}
String nnVersion = VersionInfo.getVersion();
if (!dnVersion.equals(nnVersion)) {
String messagePrefix = "Reported DataNode version '" + dnVersion +
"' of DN " + dnReg + " does not match NameNode version '" +
nnVersion + "'";
long nnCTime = nn.getFSImage().getStorage().getCTime();
long dnCTime = dnReg.getStorageInfo().getCTime();
if (nnCTime != dnCTime) {
// Mismatched version AND CTime: not a rolling upgrade; reject.
IncorrectVersionException ive = new IncorrectVersionException(
messagePrefix + " and CTime of DN ('" + dnCTime +
"') does not match CTime of NN ('" + nnCTime + "')");
LOG.warn(ive.toString(), ive);
throw ive;
} else {
LOG.info(messagePrefix +
". Note: This is normal during a rolling upgrade.");
}
}
}
/**
 * Best-effort identification of the calling client's address.
 *
 * @return the WebHDFS remote address if present, otherwise the RPC
 *         remote address, otherwise the empty string
 */
private static String getClientMachine() {
  String clientMachine = NamenodeWebHdfsMethods.getRemoteAddress();
  if (clientMachine == null) {
    // Not a web client; fall back to the RPC layer.
    clientMachine = Server.getRemoteAddress();
  }
  // Neither source knew the caller; report an empty address.
  return clientMachine == null ? "" : clientMachine;
}
/** Generates a new data-encryption key via the block manager. */
@Override
public DataEncryptionKey getDataEncryptionKey() throws IOException {
checkNNStartup();
return namesystem.getBlockManager().generateDataEncryptionKey();
}
/**
 * Creates a snapshot of {@code snapshotRoot}, returning the snapshot
 * path. Uses the RetryCache so a retried RPC returns the previously
 * computed path instead of creating a second snapshot.
 */
@Override
public String createSnapshot(String snapshotRoot, String snapshotName)
throws IOException {
checkNNStartup();
if (!checkPathLength(snapshotRoot)) {
throw new IOException("createSnapshot: Pathname too long. Limit "
+ MAX_PATH_LENGTH + " characters, " + MAX_PATH_DEPTH + " levels.");
}
namesystem.checkOperation(OperationCategory.WRITE);
CacheEntryWithPayload cacheEntry = RetryCache.waitForCompletion(retryCache,
null);
if (cacheEntry != null && cacheEntry.isSuccess()) {
// Retried RPC: replay the cached result.
return (String) cacheEntry.getPayload();
}
metrics.incrCreateSnapshotOps();
String ret = null;
try {
ret = namesystem.createSnapshot(snapshotRoot, snapshotName,
cacheEntry != null);
} finally {
// Record success (non-null ret) or failure in the retry cache.
RetryCache.setState(cacheEntry, ret != null, ret);
}
return ret;
}
/**
 * Deletes the named snapshot under {@code snapshotRoot}. Retried RPCs
 * that already succeeded are short-circuited via the RetryCache.
 */
@Override
public void deleteSnapshot(String snapshotRoot, String snapshotName)
throws IOException {
checkNNStartup();
namesystem.checkOperation(OperationCategory.WRITE);
metrics.incrDeleteSnapshotOps();
CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
if (cacheEntry != null && cacheEntry.isSuccess()) {
return; // Return previous response
}
boolean success = false;
try {
namesystem.deleteSnapshot(snapshotRoot, snapshotName, cacheEntry != null);
success = true;
} finally {
RetryCache.setState(cacheEntry, success);
}
}
/** Marks {@code snapshotRoot} as snapshottable. */
@Override
// Client Protocol
public void allowSnapshot(String snapshotRoot) throws IOException {
checkNNStartup();
metrics.incrAllowSnapshotOps();
namesystem.allowSnapshot(snapshotRoot);
}
/** Marks the given directory as no longer snapshottable. */
@Override
// Client Protocol
public void disallowSnapshot(String snapshot) throws IOException {
checkNNStartup();
metrics.incrDisAllowSnapshotOps();
namesystem.disallowSnapshot(snapshot);
}
/**
 * Renames a snapshot under {@code snapshotRoot}. Rejects a null/empty
 * new name; idempotent across RPC retries via the RetryCache.
 */
@Override
// ClientProtocol
public void renameSnapshot(String snapshotRoot, String snapshotOldName,
String snapshotNewName) throws IOException {
checkNNStartup();
if (snapshotNewName == null || snapshotNewName.isEmpty()) {
throw new IOException("The new snapshot name is null or empty.");
}
namesystem.checkOperation(OperationCategory.WRITE);
metrics.incrRenameSnapshotOps();
CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
if (cacheEntry != null && cacheEntry.isSuccess()) {
return; // Return previous response
}
boolean success = false;
try {
namesystem.renameSnapshot(snapshotRoot, snapshotOldName,
snapshotNewName, cacheEntry != null);
success = true;
} finally {
RetryCache.setState(cacheEntry, success);
}
}
/** Lists the snapshottable directories visible to the caller. */
@Override // Client Protocol
public SnapshottableDirectoryStatus[] getSnapshottableDirListing()
throws IOException {
checkNNStartup();
SnapshottableDirectoryStatus[] status = namesystem
.getSnapshottableDirListing();
metrics.incrListSnapshottableDirOps();
return status;
}
/**
 * Computes the difference between two snapshots of the same directory.
 */
@Override // ClientProtocol
public SnapshotDiffReport getSnapshotDiffReport(String snapshotRoot,
String earlierSnapshotName, String laterSnapshotName) throws IOException {
checkNNStartup();
SnapshotDiffReport report = namesystem.getSnapshotDiffReport(snapshotRoot,
earlierSnapshotName, laterSnapshotName);
metrics.incrSnapshotDiffReportOps();
return report;
}
/**
 * Adds a cache directive and returns its id. Retried RPCs replay the
 * cached id via the RetryCache payload.
 */
@Override // ClientProtocol
public long addCacheDirective(
CacheDirectiveInfo path, EnumSet<CacheFlag> flags) throws IOException {
checkNNStartup();
namesystem.checkOperation(OperationCategory.WRITE);
CacheEntryWithPayload cacheEntry = RetryCache.waitForCompletion
(retryCache, null);
if (cacheEntry != null && cacheEntry.isSuccess()) {
// Retried RPC: return the id computed on the first attempt.
return (Long) cacheEntry.getPayload();
}
boolean success = false;
long ret = 0;
try {
ret = namesystem.addCacheDirective(path, flags, cacheEntry != null);
success = true;
} finally {
RetryCache.setState(cacheEntry, success, ret);
}
return ret;
}
/**
 * Modifies an existing cache directive; idempotent across RPC retries.
 */
@Override // ClientProtocol
public void modifyCacheDirective(
CacheDirectiveInfo directive, EnumSet<CacheFlag> flags) throws IOException {
checkNNStartup();
namesystem.checkOperation(OperationCategory.WRITE);
CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
if (cacheEntry != null && cacheEntry.isSuccess()) {
return;
}
boolean success = false;
try {
namesystem.modifyCacheDirective(directive, flags, cacheEntry != null);
success = true;
} finally {
RetryCache.setState(cacheEntry, success);
}
}
/** Removes the cache directive with the given id; retry-idempotent. */
@Override // ClientProtocol
public void removeCacheDirective(long id) throws IOException {
checkNNStartup();
namesystem.checkOperation(OperationCategory.WRITE);
CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
if (cacheEntry != null && cacheEntry.isSuccess()) {
return;
}
boolean success = false;
try {
namesystem.removeCacheDirective(id, cacheEntry != null);
success = true;
} finally {
RetryCache.setState(cacheEntry, success);
}
}
/**
 * Lists cache directives after {@code prevId}, matching {@code filter}.
 * A null filter is replaced with an empty (match-all) filter.
 */
@Override // ClientProtocol
public BatchedEntries<CacheDirectiveEntry> listCacheDirectives(long prevId,
CacheDirectiveInfo filter) throws IOException {
checkNNStartup();
if (filter == null) {
filter = new CacheDirectiveInfo.Builder().build();
}
return namesystem.listCacheDirectives(prevId, filter);
}
/** Creates a new cache pool; idempotent across RPC retries. */
@Override //ClientProtocol
public void addCachePool(CachePoolInfo info) throws IOException {
checkNNStartup();
namesystem.checkOperation(OperationCategory.WRITE);
CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
if (cacheEntry != null && cacheEntry.isSuccess()) {
return; // Return previous response
}
boolean success = false;
try {
namesystem.addCachePool(info, cacheEntry != null);
success = true;
} finally {
RetryCache.setState(cacheEntry, success);
}
}
/** Modifies an existing cache pool; idempotent across RPC retries. */
@Override // ClientProtocol
public void modifyCachePool(CachePoolInfo info) throws IOException {
checkNNStartup();
namesystem.checkOperation(OperationCategory.WRITE);
CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
if (cacheEntry != null && cacheEntry.isSuccess()) {
return; // Return previous response
}
boolean success = false;
try {
namesystem.modifyCachePool(info, cacheEntry != null);
success = true;
} finally {
RetryCache.setState(cacheEntry, success);
}
}
/** Removes the named cache pool; idempotent across RPC retries. */
@Override // ClientProtocol
public void removeCachePool(String cachePoolName) throws IOException {
checkNNStartup();
namesystem.checkOperation(OperationCategory.WRITE);
CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
if (cacheEntry != null && cacheEntry.isSuccess()) {
return;
}
boolean success = false;
try {
namesystem.removeCachePool(cachePoolName, cacheEntry != null);
success = true;
} finally {
RetryCache.setState(cacheEntry, success);
}
}
/**
 * Lists cache pools starting after {@code prevKey}; a null key is
 * treated as the beginning of the listing.
 */
@Override // ClientProtocol
public BatchedEntries<CachePoolEntry> listCachePools(String prevKey)
throws IOException {
checkNNStartup();
return namesystem.listCachePools(prevKey != null ? prevKey : "");
}
/** Merges the given ACL entries into the ACL of {@code src}. */
@Override // ClientProtocol
public void modifyAclEntries(String src, List<AclEntry> aclSpec)
throws IOException {
checkNNStartup();
namesystem.modifyAclEntries(src, aclSpec);
}
/** Removes the given ACL entries from the ACL of {@code src}. */
@Override // ClienProtocol
public void removeAclEntries(String src, List<AclEntry> aclSpec)
throws IOException {
checkNNStartup();
namesystem.removeAclEntries(src, aclSpec);
}
/** Removes the default ACL of {@code src}. */
@Override // ClientProtocol
public void removeDefaultAcl(String src) throws IOException {
checkNNStartup();
namesystem.removeDefaultAcl(src);
}
/** Removes the entire ACL of {@code src}. */
@Override // ClientProtocol
public void removeAcl(String src) throws IOException {
checkNNStartup();
namesystem.removeAcl(src);
}
/** Replaces the ACL of {@code src} with the given entries. */
@Override // ClientProtocol
public void setAcl(String src, List<AclEntry> aclSpec) throws IOException {
checkNNStartup();
namesystem.setAcl(src, aclSpec);
}
/** Returns the ACL status of {@code src}. */
@Override // ClientProtocol
public AclStatus getAclStatus(String src) throws IOException {
checkNNStartup();
return namesystem.getAclStatus(src);
}
/**
 * Creates an encryption zone rooted at {@code src} using {@code keyName};
 * idempotent across RPC retries via the RetryCache.
 */
@Override // ClientProtocol
public void createEncryptionZone(String src, String keyName)
throws IOException {
checkNNStartup();
namesystem.checkOperation(OperationCategory.WRITE);
final CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
if (cacheEntry != null && cacheEntry.isSuccess()) {
return;
}
boolean success = false;
try {
namesystem.createEncryptionZone(src, keyName, cacheEntry != null);
success = true;
} finally {
RetryCache.setState(cacheEntry, success);
}
}
/** Returns the encryption zone containing {@code src}. */
@Override // ClientProtocol
public EncryptionZone getEZForPath(String src)
throws IOException {
checkNNStartup();
return namesystem.getEZForPath(src);
}
/** Lists encryption zones with ids greater than {@code prevId}. */
@Override // ClientProtocol
public BatchedEntries<EncryptionZone> listEncryptionZones(
long prevId) throws IOException {
checkNNStartup();
return namesystem.listEncryptionZones(prevId);
}
/**
 * Sets an extended attribute on {@code src}; idempotent across RPC
 * retries via the RetryCache.
 */
@Override // ClientProtocol
public void setXAttr(String src, XAttr xAttr, EnumSet<XAttrSetFlag> flag)
throws IOException {
checkNNStartup();
namesystem.checkOperation(OperationCategory.WRITE);
CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
if (cacheEntry != null && cacheEntry.isSuccess()) {
return; // Return previous response
}
boolean success = false;
try {
namesystem.setXAttr(src, xAttr, flag, cacheEntry != null);
success = true;
} finally {
RetryCache.setState(cacheEntry, success);
}
}
/** Returns the requested extended attributes of {@code src}. */
@Override // ClientProtocol
public List<XAttr> getXAttrs(String src, List<XAttr> xAttrs)
throws IOException {
checkNNStartup();
return namesystem.getXAttrs(src, xAttrs);
}
/** Lists the extended attributes of {@code src}. */
@Override // ClientProtocol
public List<XAttr> listXAttrs(String src) throws IOException {
checkNNStartup();
return namesystem.listXAttrs(src);
}
/**
 * Removes an extended attribute from {@code src}; idempotent across RPC
 * retries via the RetryCache.
 */
@Override // ClientProtocol
public void removeXAttr(String src, XAttr xAttr) throws IOException {
checkNNStartup();
namesystem.checkOperation(OperationCategory.WRITE);
CacheEntry cacheEntry = RetryCache.waitForCompletion(retryCache);
if (cacheEntry != null && cacheEntry.isSuccess()) {
return; // Return previous response
}
boolean success = false;
try {
namesystem.removeXAttr(src, xAttr, cacheEntry != null);
success = true;
} finally {
RetryCache.setState(cacheEntry, success);
}
}
/**
 * Throws a RetriableException while the NameNode is still starting up,
 * so clients retry instead of failing hard.
 */
private void checkNNStartup() throws IOException {
if (!this.nn.isStarted()) {
throw new RetriableException(this.nn.getRole() + " still not started");
}
}
/** Checks whether the caller has {@code mode} access to {@code path}. */
@Override // ClientProtocol
public void checkAccess(String path, FsAction mode) throws IOException {
checkNNStartup();
namesystem.checkAccess(path, mode);
}
/**
 * Returns the last transaction id written to the edit log, or -1 if the
 * log is not yet open for write. Requires superuser privilege.
 */
@Override // ClientProtocol
public long getCurrentEditLogTxid() throws IOException {
checkNNStartup();
namesystem.checkOperation(OperationCategory.READ); // only active
namesystem.checkSuperuserPrivilege();
// if it's not yet open for write, we may be in the process of transitioning
// from standby to active and may not yet know what the latest committed
// txid is
return namesystem.getEditLog().isOpenForWrite() ?
namesystem.getEditLog().getLastWrittenTxId() : -1;
}
/**
 * Reads the next op from an edit log stream, returning null (instead of
 * propagating) when the underlying segment has been deleted or moved out
 * from under the reader.
 */
private static FSEditLogOp readOp(EditLogInputStream elis)
throws IOException {
try {
return elis.readOp();
// we can get the below two exceptions if a segment is deleted
// (because we have accumulated too many edits) or (for the local journal/
// no-QJM case only) if a in-progress segment is finalized under us ...
// no need to throw an exception back to the client in this case
} catch (FileNotFoundException e) {
LOG.debug("Tried to read from deleted or moved edit log segment", e);
return null;
} catch (TransferFsImage.HttpGetFailedException e) {
LOG.debug("Tried to read from deleted edit log segment", e);
return null;
}
}
/**
 * Serves the inotify API: reads edit-log ops starting at {@code txid},
 * translates them to event batches, and returns up to the configured
 * per-RPC event limit. Never reads past the last synced txid, and only
 * reads in-progress segments once something has been synced.
 */
@Override // ClientProtocol
public EventBatchList getEditsFromTxid(long txid) throws IOException {
checkNNStartup();
namesystem.checkOperation(OperationCategory.READ); // only active
namesystem.checkSuperuserPrivilege();
int maxEventsPerRPC = nn.conf.getInt(
DFSConfigKeys.DFS_NAMENODE_INOTIFY_MAX_EVENTS_PER_RPC_KEY,
DFSConfigKeys.DFS_NAMENODE_INOTIFY_MAX_EVENTS_PER_RPC_DEFAULT);
FSEditLog log = namesystem.getFSImage().getEditLog();
long syncTxid = log.getSyncTxId();
// If we haven't synced anything yet, we can only read finalized
// segments since we can't reliably determine which txns in in-progress
// segments have actually been committed (e.g. written to a quorum of JNs).
// If we have synced txns, we can definitely read up to syncTxid since
// syncTxid is only updated after a transaction is committed to all
// journals. (In-progress segments written by old writers are already
// discarded for us, so if we read any in-progress segments they are
// guaranteed to have been written by this NameNode.)
boolean readInProgress = syncTxid > 0;
List<EventBatch> batches = Lists.newArrayList();
int totalEvents = 0;
long maxSeenTxid = -1;
long firstSeenTxid = -1;
if (syncTxid > 0 && txid > syncTxid) {
// we can't read past syncTxid, so there's no point in going any further
return new EventBatchList(batches, firstSeenTxid, maxSeenTxid, syncTxid);
}
Collection<EditLogInputStream> streams = null;
try {
streams = log.selectInputStreams(txid, 0, null, readInProgress);
} catch (IllegalStateException e) { // can happen if we have
// transitioned out of active and haven't yet transitioned to standby
// and are using QJM -- the edit log will be closed and this exception
// will result
LOG.info("NN is transitioning from active to standby and FSEditLog " +
"is closed -- could not read edits");
return new EventBatchList(batches, firstSeenTxid, maxSeenTxid, syncTxid);
}
boolean breakOuter = false;
for (EditLogInputStream elis : streams) {
// our assumption in this code is the EditLogInputStreams are ordered by
// starting txid
try {
FSEditLogOp op = null;
while ((op = readOp(elis)) != null) {
// break out of here in the unlikely event that syncTxid is so
// out of date that its segment has already been deleted, so the first
// txid we get is greater than syncTxid
if (syncTxid > 0 && op.getTransactionId() > syncTxid) {
breakOuter = true;
break;
}
EventBatch eventBatch = InotifyFSEditLogOpTranslator.translate(op);
if (eventBatch != null) {
batches.add(eventBatch);
totalEvents += eventBatch.getEvents().length;
}
if (op.getTransactionId() > maxSeenTxid) {
maxSeenTxid = op.getTransactionId();
}
if (firstSeenTxid == -1) {
firstSeenTxid = op.getTransactionId();
}
if (totalEvents >= maxEventsPerRPC || (syncTxid > 0 &&
op.getTransactionId() == syncTxid)) {
// we're done
breakOuter = true;
break;
}
}
} finally {
// Always release the stream, even on early exit.
elis.close();
}
if (breakOuter) {
break;
}
}
return new EventBatchList(batches, firstSeenTxid, maxSeenTxid, syncTxid);
}
/** Lists configured trace span receivers; requires superuser privilege. */
@Override // TraceAdminProtocol
public SpanReceiverInfo[] listSpanReceivers() throws IOException {
checkNNStartup();
namesystem.checkSuperuserPrivilege();
return nn.tracerConfigurationManager.listSpanReceivers();
}
/**
 * Adds a trace span receiver and returns its id; requires superuser
 * privilege.
 */
@Override // TraceAdminProtocol
public long addSpanReceiver(SpanReceiverInfo info) throws IOException {
checkNNStartup();
namesystem.checkSuperuserPrivilege();
return nn.tracerConfigurationManager.addSpanReceiver(info);
}
/** Removes the trace span receiver by id; requires superuser privilege. */
@Override // TraceAdminProtocol
public void removeSpanReceiver(long id) throws IOException {
checkNNStartup();
namesystem.checkSuperuserPrivilege();
nn.tracerConfigurationManager.removeSpanReceiver(id);
}
}
| {
"content_hash": "cc218e6265d4618d8874b6239e4c0bcf",
"timestamp": "",
"source": "github",
"line_count": 2093,
"max_line_length": 123,
"avg_line_length": 36.78499761108457,
"alnum_prop": 0.7215648582301828,
"repo_name": "Microsoft-CISL/hadoop-prototype",
"id": "783490167945ca84c7015d12c091577000d6f58c",
"size": "77797",
"binary": false,
"copies": "1",
"ref": "refs/heads/tier-2.8",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AspectJ",
"bytes": "1544"
},
{
"name": "Batchfile",
"bytes": "66323"
},
{
"name": "C",
"bytes": "1443935"
},
{
"name": "C++",
"bytes": "88380"
},
{
"name": "CMake",
"bytes": "44726"
},
{
"name": "CSS",
"bytes": "57441"
},
{
"name": "HTML",
"bytes": "2370613"
},
{
"name": "Java",
"bytes": "54626011"
},
{
"name": "JavaScript",
"bytes": "48514"
},
{
"name": "Perl",
"bytes": "9496"
},
{
"name": "Protocol Buffer",
"bytes": "245323"
},
{
"name": "Python",
"bytes": "30791"
},
{
"name": "Shell",
"bytes": "262167"
},
{
"name": "TLA",
"bytes": "14993"
},
{
"name": "TeX",
"bytes": "19322"
},
{
"name": "XSLT",
"bytes": "15460"
}
],
"symlink_target": ""
} |
'use strict';
module.exports = {
ExpressApp: require('./lib/express-app'),
ExpressRouteHandler: require('./services/express-route-handler')
}; | {
"content_hash": "7768b90cc53beea9e919fe9655e7060e",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 66,
"avg_line_length": 24.5,
"alnum_prop": 0.7142857142857143,
"repo_name": "DecentCMS/DecentCMS",
"id": "5f79c4ddc31897b07389bd195b3ed4118810dc34",
"size": "236",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/core/express/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3429"
},
{
"name": "CSS",
"bytes": "139731"
},
{
"name": "JavaScript",
"bytes": "564541"
},
{
"name": "Less",
"bytes": "195400"
}
],
"symlink_target": ""
} |
package com.haulmont.cuba.core.global;
import com.haulmont.cuba.core.global.queryconditions.Condition;
import javax.persistence.TemporalType;
import java.util.Date;
import java.util.Map;
/**
 * Query abstraction used by data load contexts: named parameters,
 * pagination (first result / max results), and an optional filter
 * {@link Condition}. Setters return {@code this}-style instances for
 * fluent chaining.
 */
public interface DataLoadContextQuery {
/** Sets a named parameter value; returns the query for chaining. */
DataLoadContextQuery setParameter(String name, Object value);
/** Sets a named date parameter with an explicit temporal type. */
DataLoadContextQuery setParameter(String name, Date value, TemporalType temporalType);
/** Returns the currently set named parameters. */
Map<String, Object> getParameters();
/** Replaces all named parameters at once. */
DataLoadContextQuery setParameters(Map<String, Object> parameters);
/** Returns the index of the first result to return. */
int getFirstResult();
/** Sets the index of the first result to return. */
DataLoadContextQuery setFirstResult(int firstResult);
/** Returns the maximum number of results to return. */
int getMaxResults();
/** Sets the maximum number of results to return. */
DataLoadContextQuery setMaxResults(int maxResults);
/** Returns the filter condition, if any. */
Condition getCondition();
/** Sets the filter condition. */
DataLoadContextQuery setCondition(Condition condition);
}
| {
"content_hash": "238302983c70b772a471ec41a4441372",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 90,
"avg_line_length": 27.321428571428573,
"alnum_prop": 0.788235294117647,
"repo_name": "dimone-kun/cuba",
"id": "e3c03d62aaf2bdc12ba9402f252b1fe79f37afcb",
"size": "1366",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "modules/global/src/com/haulmont/cuba/core/global/DataLoadContextQuery.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "77"
},
{
"name": "CSS",
"bytes": "262124"
},
{
"name": "FreeMarker",
"bytes": "3996"
},
{
"name": "GAP",
"bytes": "33866"
},
{
"name": "Groovy",
"bytes": "402320"
},
{
"name": "HTML",
"bytes": "6405"
},
{
"name": "Java",
"bytes": "18662263"
},
{
"name": "PLSQL",
"bytes": "30350"
},
{
"name": "PLpgSQL",
"bytes": "1723"
},
{
"name": "SQLPL",
"bytes": "93321"
},
{
"name": "Shell",
"bytes": "88"
},
{
"name": "XSLT",
"bytes": "63258"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>wings-portal</artifactId>
<name>Wings Portal</name>
<packaging>war</packaging>
<description>Wings portal web application</description>
<parent>
<groupId>edu.isi.wings</groupId>
<artifactId>wings-core</artifactId>
<version>4.2.7</version>
<relativePath>../core/pom.xml</relativePath>
</parent>
<properties>
<!-- rewrite.version>2.0.12.Final</rewrite.version -->
<wings-opmm.version>1.2.4</wings-opmm.version>
<servlet-api.version>2.5</servlet-api.version>
<httpclient.version>4.5.2</httpclient.version>
<jersey.version>2.27</jersey.version>
<jackson.version>2.8.6</jackson.version>
<javax.validation.version>1.1.0.Final</javax.validation.version>
<commons-fileupload.version>1.3.3</commons-fileupload.version>
</properties>
<repositories>
<repository>
<id>github</id>
<name>Github</name>
<url>https://maven.pkg.github.com/KnowledgeCaptureAndDiscovery/WINGS-OPMW-Mapper</url>
</repository>
</repositories>
<dependencies>
<!-- Wings Planner -->
<dependency>
<groupId>edu.isi.wings</groupId>
<artifactId>wings-planner</artifactId>
<version>${project.parent.version}</version>
</dependency>
<!-- Wings Pegasus Adapter -->
<!--dependency>
<groupId>edu.isi.wings</groupId>
<artifactId>wings-pegasus-adapter</artifactId>
<version>${project.parent.version}</version>
</dependency-->
<!-- Wings OODT Adapter -->
<!--dependency>
<groupId>edu.isi.wings</groupId>
<artifactId>wings-oodt-adapter</artifactId>
<version>${project.parent.version}</version>
</dependency>
<dependency>
<groupId>edu.isi.wings</groupId>
<artifactId>wings-oodt-wmservices</artifactId>
<version>${project.parent.version}</version>
<type>war</type>
</dependency-->
<!-- Wings OPM Mapper -->
<dependency>
<groupId>edu.isi.wings</groupId>
<artifactId>wings-opmm</artifactId>
<version>${wings-opmm.version}</version>
</dependency>
<!-- Apache Commons FileUpload -->
<dependency>
<groupId>commons-fileupload</groupId>
<artifactId>commons-fileupload</artifactId>
<version>${commons-fileupload.version}</version>
</dependency>
<!-- Jersey (REST API) -->
<dependency>
<groupId>org.glassfish.jersey.containers</groupId>
<artifactId>jersey-container-servlet</artifactId>
<version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.inject</groupId>
<artifactId>jersey-hk2</artifactId>
<version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>javax.activation</groupId>
<artifactId>activation</artifactId>
<version>1.1.1</version>
</dependency>
<!-- Bean Validation API support -->
<dependency>
<groupId>javax.validation</groupId>
<artifactId>validation-api</artifactId>
<version>${javax.validation.version}</version>
</dependency>
<!-- Jersey Multipart (for upload) -->
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-multipart</artifactId>
<version>${jersey.version}</version>
</dependency>
<!-- JSON Processing -->
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-json-jackson</artifactId>
<version>${jersey.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.jaxrs</groupId>
<artifactId>jackson-jaxrs-json-provider</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- Rewrite (url rewriter) -->
<!-- dependency>
<groupId>org.ocpsoft.rewrite</groupId>
<artifactId>rewrite-servlet</artifactId>
<version>${rewrite.version}</version>
</dependency-->
<!-- Mime type detection -->
<dependency>
<groupId>org.apache.tika</groupId>
<artifactId>tika-core</artifactId>
<version>1.22</version>
</dependency>
<!-- Servlet (only for compilation, not in WAR file) -->
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>${servlet-api.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.tomcat</groupId>
<artifactId>tomcat-catalina</artifactId>
<version>7.0.55</version>
<scope>provided</scope>
</dependency>
<!-- HTTP Client -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>${httpclient.version}</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpmime</artifactId>
<version>${httpclient.version}</version>
</dependency>
<dependency>
<groupId>org.asynchttpclient</groupId>
<artifactId>async-http-client</artifactId>
<version>2.7.0</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<!-- tomcat:run-war configuration -->
<groupId>org.codehaus.mojo</groupId>
<artifactId>tomcat-maven-plugin</artifactId>
<version>1.1</version>
<configuration>
<port>9090</port>
<path>/wings-portal</path>
</configuration>
</plugin>
<plugin>
<!-- ant plugin to download extjs, plupload libraries -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<version>1.7</version>
<executions>
<execution>
<id>generate-sources</id>
<phase>generate-sources</phase>
<configuration>
<target>
<ant antfile="build.xml" target="download" />
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
<execution>
<id>clean</id>
<phase>clean</phase>
<configuration>
<target>
<ant antfile="build.xml" target="clean" />
</target>
</configuration>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project> | {
"content_hash": "e7490c206ccfd3c9775a984bc0a079bf",
"timestamp": "",
"source": "github",
"line_count": 212,
"max_line_length": 158,
"avg_line_length": 28.995283018867923,
"alnum_prop": 0.6744753538311371,
"repo_name": "IKCAP/wings",
"id": "1962a2d1d20818fcb9fae134601c544438c08474",
"size": "6147",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "portal/pom.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "15768"
},
{
"name": "HTML",
"bytes": "28078"
},
{
"name": "Java",
"bytes": "1444134"
},
{
"name": "JavaScript",
"bytes": "697675"
},
{
"name": "Shell",
"bytes": "2702"
},
{
"name": "Web Ontology Language",
"bytes": "1249042"
}
],
"symlink_target": ""
} |
namespace dcpu { namespace emulator {
// Abstract base class for emulated hardware peripherals attached to a Dcpu.
// Concrete devices must implement tick() and interrupt().
class HardwareDevice {
protected:
// CPU this device is attached to (held by reference for the device's lifetime).
Dcpu &cpu;
// Identifiers reported to programs querying the hardware.
uint32_t manufacturerId;
uint32_t hardwareId;
uint16_t version;
public:
HardwareDevice(Dcpu &cpu, uint32_t manufacturerId, uint32_t hardwareId, uint16_t version);
virtual ~HardwareDevice();
// Advances the device by one emulation step.
virtual void tick()=0;
// Handles a hardware interrupt; returns a device-defined result code.
virtual uint16_t interrupt()=0;
uint32_t getHardwareId();
uint32_t getManufacturerId();
uint16_t getVersion();
};
}}
| {
"content_hash": "39f8079b0ce4585bc4d7bc00042e24c0",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 92,
"avg_line_length": 22.1,
"alnum_prop": 0.7194570135746606,
"repo_name": "storance/dcpu16",
"id": "b73324877166f93c87057306a67b19d7e68fffb8",
"size": "477",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "emulator/src/hardware.hpp",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "305492"
},
{
"name": "JavaScript",
"bytes": "2516"
}
],
"symlink_target": ""
} |
# Migration: adds a free-text +description+ column to the
# diploma_projects table.
class AddDescriptionToDiplomaProjects < ActiveRecord::Migration
def change
add_column :diploma_projects, :description, :text
end
end
| {
"content_hash": "e3f2350aebe467072f3dcefc552aef95",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 63,
"avg_line_length": 28.2,
"alnum_prop": 0.7872340425531915,
"repo_name": "adelamtuduce/thesisapp",
"id": "8bd3bcc409984f52cbdca506a79c5ad19e3e559a",
"size": "141",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/migrate/20160504073403_add_description_to_diploma_projects.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "443834"
},
{
"name": "Cucumber",
"bytes": "423"
},
{
"name": "HTML",
"bytes": "81527"
},
{
"name": "JavaScript",
"bytes": "1853188"
},
{
"name": "Ruby",
"bytes": "173687"
}
],
"symlink_target": ""
} |
package com.amazonaws.services.pinpointemail.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.pinpointemail.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * ListDomainDeliverabilityCampaignsRequest Marshaller
 *
 * Generated protocol marshaller: binds the request to the REST-JSON GET
 * operation at
 * /v1/email/deliverability-dashboard/domains/{SubscribedDomain}/campaigns
 * and delegates field marshalling to the companion request marshaller.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ListDomainDeliverabilityCampaignsRequestProtocolMarshaller implements
Marshaller<Request<ListDomainDeliverabilityCampaignsRequest>, ListDomainDeliverabilityCampaignsRequest> {
// Static binding of this operation: protocol, URI template, HTTP method,
// and payload characteristics (no explicit payload, no payload members).
private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.REST_JSON)
.requestUri("/v1/email/deliverability-dashboard/domains/{SubscribedDomain}/campaigns").httpMethodName(HttpMethodName.GET)
.hasExplicitPayloadMember(false).hasPayloadMembers(false).serviceName("AmazonPinpointEmail").build();
private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;
public ListDomainDeliverabilityCampaignsRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
this.protocolFactory = protocolFactory;
}
/**
 * Marshals the request into an HTTP request; rejects a null argument and
 * wraps any marshalling failure in an SdkClientException.
 */
public Request<ListDomainDeliverabilityCampaignsRequest> marshall(ListDomainDeliverabilityCampaignsRequest listDomainDeliverabilityCampaignsRequest) {
if (listDomainDeliverabilityCampaignsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
final ProtocolRequestMarshaller<ListDomainDeliverabilityCampaignsRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(
SDK_OPERATION_BINDING, listDomainDeliverabilityCampaignsRequest);
protocolMarshaller.startMarshalling();
ListDomainDeliverabilityCampaignsRequestMarshaller.getInstance().marshall(listDomainDeliverabilityCampaignsRequest, protocolMarshaller);
return protocolMarshaller.finishMarshalling();
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
}
| {
"content_hash": "bad01c820b41b11dad5dc933fd111863",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 154,
"avg_line_length": 45.132075471698116,
"alnum_prop": 0.7859531772575251,
"repo_name": "aws/aws-sdk-java",
"id": "f77295bb90b21a91bcedbabe0189f8a0078d0266",
"size": "2972",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-java-sdk-pinpointemail/src/main/java/com/amazonaws/services/pinpointemail/model/transform/ListDomainDeliverabilityCampaignsRequestProtocolMarshaller.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?php
namespace DesignPatterns\Behavioral\NullObject;
/**
* Class ConsoleLogger
* @package DesignPatterns\Behavioral\NullObject
*/
final class ConsoleLogger implements LoggerInterface {
    /**
     * Write the message to standard output.
     *
     * Note: the method-level `final` was removed - it is redundant because
     * the class itself is already final.
     *
     * @param string $message text to emit (echoed verbatim, no newline added)
     */
    public function log(string $message): void {
        echo $message;
    }
}
| {
"content_hash": "7d85865d0ad1a7986d24a544dc750851",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 54,
"avg_line_length": 20.058823529411764,
"alnum_prop": 0.6480938416422287,
"repo_name": "JShadowMan/PHPDesignPatterns",
"id": "ac9a724b845844ff0a1d63a1020dbeca1774bb3d",
"size": "585",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Behavioral/NullObject/ConsoleLogger.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "211033"
}
],
"symlink_target": ""
} |
<?php
/**
*
*/
class Admin extends CI_Controller
{
    /**
     * Access guard: only users whose session level is 'admin' may use this
     * controller. 'kasir' users are redirected to their own controller,
     * everyone else to the login page.
     *
     * Fix: the original `return true` was dropped - a PHP constructor has no
     * meaningful return value; an early bare `return` expresses "authorized".
     */
    public function __construct()
    {
        parent::__construct();
        $level = $this->session->userdata('level');
        if ($level == 'admin') {
            return; // authorized, nothing more to do
        }
        if ($level == 'kasir') {
            redirect('kasir');
        } else {
            redirect('login');
        }
    }

    /**
     * Render the admin dashboard (header, sidebar, footer views).
     */
    public function index()
    {
        $data['induk'] = 'Admin';
        $data['title'] = 'Dashboard';
        $this->load->view('admin/header', $data);
        $this->load->view('admin/sidebar');
        $this->load->view('admin/footer');
    }

    /**
     * Destroy the auth session keys and show the login form with a
     * success message.
     */
    public function logout()
    {
        $data['title'] = "Logout";
        #destroy session
        $array = array("username", "level");
        $this->session->unset_userdata($array);
        $data['berhasil'] = "Berhasil Logout";
        $this->load->view('login/header', $data);
        $this->load->view('login/alert-berhasil', $data);
        $this->load->view('login/form-login');
        $this->load->view('login/footer');
    }
}
"content_hash": "653bd127ef3073765907182549ef01b2",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 54,
"avg_line_length": 19.0625,
"alnum_prop": 0.5879781420765028,
"repo_name": "yogigr/reloadshop",
"id": "5f302f0b45494d8f490031691ea95c1f9f91fa56",
"size": "915",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "application/controllers/Admin.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "4336"
},
{
"name": "HTML",
"bytes": "38221"
},
{
"name": "JavaScript",
"bytes": "431"
},
{
"name": "PHP",
"bytes": "1857931"
}
],
"symlink_target": ""
} |
layout: post
status: publish
published: true
title: Using Quartz.NET, Spring.NET and NHibernate to run Scheduled Tasks in ASP.NET
author:
display_name: Geoff Lane
login: admin
email: geoff@zorched.net
url: http://www.zorched.net
author_login: admin
author_email: geoff@zorched.net
author_url: http://www.zorched.net
date: '2009-03-07 17:02:15 -0500'
date_gmt: '2009-03-07 23:02:15 -0500'
categories:
- ".NET"
- Web
tags:
- hibernate
- orm
- software-development
- csharp
- scheduled tasks
comments:
- id: 15078
author: Marko Lahma
author_email: marko.lahma@gmail.com
author_url: ''
date: '2009-05-12 21:45:08 -0400'
date_gmt: '2009-05-13 03:45:08 -0400'
content: "Actually the preferred way to configure the locking mechanism is to define
semaphore implementation:\r\n\r\nproperties[\"quartz.jobStore.lockHandler.type\"]
= \"Quartz.Impl.AdoJobStore.UpdateLockRowSemaphore, Quartz\";\r\n\r\nThis hides
the SQL details.\r\n\r\nYou have a good point there about not being able to run
out of the box with SQL Server. Maybe there should be at least a warning issued
about not having the correct row locking implementation. SQLite actually needs
this configuration also."
- id: 15888
author: nur kan
author_email: ilknurcapkan@gmail.com
author_url: ''
date: '2009-10-26 01:37:14 -0400'
date_gmt: '2009-10-26 07:37:14 -0400'
content: can we use this solution with sybase database ? do you have any idea ?
- id: 15889
author: Geoff Lane
author_email: geoff@zorched.net
author_url: http://www.zorched.net
date: '2009-10-26 08:44:32 -0400'
date_gmt: '2009-10-26 14:44:32 -0400'
content: "@nur kan,\r\nI haven't tried it with Sybase and it does not seem that
the Quartz.net distribution distributes Sybase SQL files. Behind the scenes Quartz
uses ADO.NET so, assuming you have a Sybase provider, you should be able to get
it to work.\r\n\r\nInternally they have some DB specific Delegates for dealing
with differences. I'm sure they would love for someone to contribute a Sybase
specific implementation..."
- id: 16030
author: Jérôme De Cuyper
author_email: jerome.decuyper@gmail.com
author_url: http://www.jdecuyper.com/
date: '2010-01-04 17:44:11 -0500'
date_gmt: '2010-01-04 23:44:11 -0500'
content: Excellent post, thanks a lot for sharing!
- id: 16039
author: Steve Edward
author_email: steve.repp@gmail.com
author_url: ''
date: '2010-01-26 12:12:33 -0500'
date_gmt: '2010-01-26 18:12:33 -0500'
content: Do you happen to have the working code example available for this. I am
new to Spring/NHibernate and that would really help me fill in some blanks.
- id: 16226
author: "Как реализовать
daemon-процеÑ\x81Ñ\x81Ñ‹
длÑ\x8F ASP.NET приложениÑ\x8F?
- CodeHelper"
author_email: ''
author_url: http://codehelper.ru/questions/100/new/find-answer-151
date: '2010-07-15 13:52:12 -0400'
date_gmt: '2010-07-15 19:52:12 -0400'
content: "[...] и триггеры
в контейнере.
Вот Ñ\x81татьÑ\x8F
по Ñ\x8Dтому
поводу:
Using Quartz.NET, Spring.NET and NHibernate to run Scheduled Tasks in ASP.NET
\ Комментировать
Ñ\x81Ñ\x81ылка Ответил
\ admax 262 дн., 4 чаÑ\x81.,
30 [...]"
- id: 17868
author: Robert Delahoz
author_email: robertdelahoz@yahoo.com
author_url: ''
date: '2011-03-11 09:29:46 -0500'
date_gmt: '2011-03-11 15:29:46 -0500'
content: Excellent post!. It saved my day.
- id: 17946
author: Ehe
author_email: eric.leihe@gmail.com
author_url: ''
date: '2011-09-26 23:59:59 -0400'
date_gmt: '2011-09-27 05:59:59 -0400'
content: "Excellent guide, thank you very much!\r\n\r\nBy the way, I met the problem
when try to change the cron table to a new setting.\r\n\r\nNot matter how many
times I restart the web application, the Cron setting is keeping as the same as
the first time setting, never change in the db table CRON_TRIGGER and the job
frequency also never change.\r\n\r\nSo I just think, the Spring framework did
not tell the Quartz.NET that there is a change in the crontable setting. Till
now I am still looking for a solution to this problem.\r\n\r\n\r\nThanks &
Best Regards!\r\nDo you have any advice or hint on dynamically change the Crontab?"
- id: 18111
author: Viv
author_email: v.farrell@gmail.com
author_url: ''
date: '2012-10-14 20:58:24 -0400'
date_gmt: '2012-10-15 02:58:24 -0400'
content: 5 star article. Gave me exactly what I was looking for in a clear and concise
way. Thanks.
---
<p>Running scheduled tasks in web applications is not normally a straightforward thing to do. Web applications are built to respond to requests from users and respond to that request. This request/response lifecycle doesn't always match well to a long running thread that wakes up to run a task every 10 minutes or at 2 AM every day.</p>
<h3>ASP.NET Scheduled Task Options</h3>
Using ASP.NET running on Windows, there are a number of different options that you could choose to implement this. Windows' built-in Scheduled Tasks can be run to periodically execute a program. A Windows Service could be constructed that used a Timer or a Thread to periodically do the work. Scheduled Tasks and Windows Service require you to write a standalone program. You can share DLLs from your Web application but in the end it is a separate app that needs to be maintained. Another option if you go this route is to turn the Scheduled Task or Service being run into a simple Web Service or REST client that can call your Web application but doesn't need any knowledge of the jobs themselves.</p>
<p>Another option is an Open Source tool called <a href="http://quartznet.sourceforge.net/">Quartz.NET</a>. Quartz.NET is based on the popular Java scheduled task runner called (not surprisingly) Quartz. Quartz.NET is a full-featured system that manages Jobs that do the work and Triggers that allow you to specify when you want those jobs run. It can run in your web application itself or as an external service.</p>
<p>The simplest approach to get started is to run directly in your Web application as a process in IIS. The downside to this is that IIS will periodically recycle it's processes and won't necessarily start a new one until a new web request is made. Assuming you can deal with this indeterministic behavior then in an IIS process will be fine. It also creates a relatively easy path that will allow you to migrate to the external service process at a later point if need be.</p>
<p>I'm an <a href="http://altdotnet.org">ALT.NET</a> kind of .NET developer, so I like to use tools like <a href="http://www.hibernate.org/343.html">NHibernate</a> for ORM and <a href="http://springframework.net/">Spring.NET</a> for Dependency Injection, AOP and generally wiring everything together. The good news is that Spring.NET supports Quartz.NET through its <a href="http://springframework.net/docs/1.2.0/reference/html/scheduling.html">Scheduling API</a>. Start with that for some basic information on using Quartz.NET with Spring. The bad news is that the documentation is a bit thin and the examples basic. I attempt to remedy that in part here.</p>
<h3>Using Quartz.NET, NHibernate and Spring.NET to run Scheduled Tasks</h3>
The goal is to integrate an existing Spring managed object like a Service or a DAL that uses NHibernate with a Quartz Job that will run on a periodic basis.</p>
<p>To start with you need to create an interface for your service and then implement that interface. The implementation I'll leave to you and your problem, but the example below you can image uses one or more NHibernate DALs to lookup Users, find their email preferences, etc.</p>
<h4>Implementing Services and Jobs</h4>
{% highlight csharp %}
public interface IEmailService
{
void SendEveryoneEmails();
}
{% endhighlight %}</p>
<p>When implementing your Job you need to know a few details about how Quartz works:</p>
<ol>
<li>The first thing to understand is that if you are going to use the <em>AdoJobScheduler</em> to store your Jobs and triggers in the database the Job needs to be <em>Serializable</em>. Generally speaking your DAL classes and NHibernate sessions and the like are not going to be serializable. To get around that, we make the properties <strong>set-only</strong> so that they will not be serialized when they are stored in the database.</li>
<li>The second thing to understand is that your Job will not be running in the context of the Web application or a request so anything you have to set up connections (such as an OpenSessionInView filter) will not apply to Jobs run by Quartz. This means that you will need to setup your own NHibernate session for all of the dependent objects to use. Luckily Spring provides some help with this in the <em>SessionScope</em> class. This is the same base class as is used by the OpenSessionInView filter.</li>
</ol></p>
<p>Using the Service interface you created, you then create a Job that Quartz.NET can run. Quartz.NET provides the <em>IJob</em> interface that you can implement. Spring.NET provides a base class that implements that interface called <em>QuartzJobObject</em> helps deal with injecting dependencies.</p>
<p>{% highlight csharp %}
using NHibernate;
using Quartz;
using Spring.Data.NHibernate.Support;
using Spring.Scheduling.Quartz;</p>
<p>public class CustomJob : QuartzJobObject
{
private ISessionFactory sessionFactory;
private IEmailService emailService;</p>
<p> // Set only so they don't get serialized
public ISessionFactory SessionFactory { set { sessionFactory = value; } }
public IEmailService EmailService { set { emailService = value; } }</p>
<p> protected override void ExecuteInternal(JobExecutionContext ctx)
{
// Session scope is the same thing as used by OpenSessionInView
using (var ss = new SessionScope(sessionFactory, true))
{
emailService.SendEveryoneEmails();
ss.Close();
}
}
}
{% endhighlight %}</p>
<h4>Wiring Services and Jobs Together with Spring</h4>
Now that you have your classes created you need to wire everything together using Spring.</p>
<p>First we have our DALs and Services wired in to Spring with something like the following:
{% highlight xml %}
<object id="UserDAL" type="MyApp.DAL.UserDAL, MyApp.Data"></p>
<property name="SessionFactory" ref="NHibernateSessionFactory" />
</object>
<object id="EmailService" type="MyApp.Service.EmailService, MyApp.Service"></p>
<property name="UserDAL" ref="UserDAL" />
</object>
{% endhighlight %}</p>
<p>Next you create a Job that references the <em>Type</em> of the Job that you just created. The type is referenced instead of the instance because the lifecycle of the Job is managed by Quartz itself. It deals with instantiation, serialization and deserialization of the object itself. This is a bit different than what you might expect from a Spring service normally.
{% highlight xml %}
<object id="CustomJob" type="Spring.Scheduling.Quartz.JobDetailObject, Spring.Scheduling.Quartz"></p>
<property name="JobType" value="MyApp.Jobs.CustomJob, MyApp.Jobs" />
</object>
{% endhighlight %}</p>
<p>Once your Job is created, you create a Trigger that will run the Job based on your rules. Quartz (and Spring) offer two types of Triggers: <em>SimpleTriggers</em> and <em>CronTriggers</em>. SimpleTriggers allow you to specify things like "Run this task every 30 minutes". CronTriggers follow a <a href="http://en.wikipedia.org/wiki/Cron">crontab</a> format for specifying when Jobs should run. The CronTrigger is very flexible but could be a little confusing if you aren't familiar with cron. It's worth getting to know for that flexibility though.</p>
<p>{% highlight xml %}
<object id="CustomJobTrigger" type="Spring.Scheduling.Quartz.CronTriggerObject, Spring.Scheduling.Quartz"></p>
<property name="JobDetail" ref="CustomJob"/>
<property name="CronExpressionString" value="0 0 2 * * ?" /> <!-- run every morning at 2 AM --></p>
<property name="MisfireInstructionName" value="FireOnceNow" />
</object>
{% endhighlight %}</p>
<p>The last piece that needs to be done is the integration of the <em>SchedulerFactory</em>. The SchedulerFactory brings together Jobs and Triggers with all of the other configuration needed to run Quartz.NET jobs.</p>
<p>A couple of things to understand about configuring the SchedulerFactory:</p>
<ol>
<li>Specifying <em>
<property name="DbProvider" ref="DbProvider"/></em> (where DbProvider is the db:provider setup used by your Nhibernate configuration) tells the SchedulerFactory to use the AdoJobProvider and store the Jobs and Trigger information in the database. The tables will need to exist already and Quartz provides a script for this task.</li></p>
<li>Running on SQL Server requires a slight change to Quartz. It uses a locking mechanism to prevent Jobs from running concurrently. For some reason the default configuration uses a <strong>FOR UPDATE</strong> query that is not supported by SQL Server. (I don't understand exactly why a .NET utility wouldn't work with SQL Server out of the box?)
To fix the locking a QuartzProperty needs to be set:
<em><entry key="quartz.jobStore.selectWithLockSQL" value="SELECT * FROM {0}LOCKS WHERE LOCK_NAME=@lockName"/></em></li></p>
<li>The JobFactory is set to the <em>SpringObjectJobFactory</em> because it handles the injection of dependencies into QuartzJobObject like the one we created above.</li>
<li>SchedulerContextAsMap is a property on the SchedulerFactory that allows you to set properties that will be passed to your Jobs when they are created by the SpringObjectJobFactory. This is where you set all of the Property names and the corresponding instance references to Spring configured objects. Those objects will be set into your Job instances whenever they are deserialized and run by Quartz.</li>
</ol></p>
<p>Here's the whole ScheduleFactory configuration put together:
{% highlight xml %}
<object id="SchedulerFactory" type="Spring.Scheduling.Quartz.SchedulerFactoryObject, Spring.Scheduling.Quartz"></p>
<property name="JobFactory">
<object type="Spring.Scheduling.Quartz.SpringObjectJobFactory, Spring.Scheduling.Quartz"/>
</property></p>
<property name="SchedulerContextAsMap">
<dictionary>
<entry key="EmailService" value-ref="EmailService" />
<entry key="SessionFactory" value-ref="NHibernateSessionFactory" />
</dictionary>
</property></p>
<property name="DbProvider" ref="DbProvider"/>
<property name="QuartzProperties">
<dictionary>
<entry key="quartz.jobStore.selectWithLockSQL" value="SELECT * FROM {0}LOCKS WHERE LOCK_NAME=@lockName"/>
</dictionary>
</property></p>
<property name="triggers">
<list>
<ref object="CustomJobTrigger" />
</list>
</property>
</object>
{% endhighlight %}</p>
<h3>Conclusion</h3>
Scheduled tasks in ASP.NET applications shouldn't be too much trouble anymore. Reusing existing Service and DAL classes allows you to easily create scheduled tasks using existing, tested code. Quartz.NET looks to be a good solution for these situations.</p>
| {
"content_hash": "546809c366ecfb4c4653a1426846f499",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 710,
"avg_line_length": 71.3391304347826,
"alnum_prop": 0.7400658215504632,
"repo_name": "geofflane/geofflane.github.io",
"id": "2f87af3bef0f77f20db71cb1197b58bc150004e7",
"size": "16413",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/2009-03-07-using-quartznet-springnet-and-nhibernate-to-run-scheduled-tasks-in-aspnet.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "37826"
},
{
"name": "HTML",
"bytes": "2032704"
},
{
"name": "JavaScript",
"bytes": "289186"
}
],
"symlink_target": ""
} |
package cmd
import (
"io"
"strconv"
"github.com/spf13/cobra"
)
// NewCmdLog creates the "kubectl log" command, which prints the logs of a
// container in a pod, optionally streaming them when -f/--follow is set.
func (f *Factory) NewCmdLog(out io.Writer) *cobra.Command {
	cmd := &cobra.Command{
		Use:   "log [-f] <pod> [<container>]",
		Short: "Print the logs for a container in a pod.",
		Long: `Print the logs for a container in a pod. If the pod has only one container, the container name is optional
Examples:
  $ kubectl log 123456-7890 ruby-container
  <returns snapshot of ruby-container logs from pod 123456-7890>
  $ kubectl log -f 123456-7890 ruby-container
  <starts streaming of ruby-container logs from pod 123456-7890>`,
		Run: func(cmd *cobra.Command, args []string) {
			// Validate positional arguments: pod is required, container optional.
			if len(args) == 0 {
				usageError(cmd, "<pod> is required for log")
			}
			if len(args) > 2 {
				usageError(cmd, "log <pod> [<container>]")
			}
			namespace := GetKubeNamespace(cmd)
			client, err := f.Client(cmd)
			checkErr(err)

			podID := args[0]
			pod, err := client.Pods(namespace).Get(podID)
			checkErr(err)

			// Resolve the container name; infer it only when unambiguous.
			var container string
			if len(args) == 1 {
				if len(pod.Spec.Containers) != 1 {
					usageError(cmd, "<container> is required for pods with multiple containers")
				}
				// Get logs for the only container in the pod
				container = pod.Spec.Containers[0].Name
			} else {
				container = args[1]
			}

			// Idiom fix: read the flag directly instead of declaring false
			// and conditionally re-assigning true.
			follow := GetFlagBool(cmd, "follow")

			// Proxy the log request through the apiserver to the pod's node.
			readCloser, err := client.RESTClient.Get().
				Prefix("proxy").
				Resource("minions").
				Name(pod.Status.Host).
				Suffix("containerLogs", namespace, podID, container).
				Param("follow", strconv.FormatBool(follow)).
				Stream()
			checkErr(err)
			defer readCloser.Close()

			_, err = io.Copy(out, readCloser)
			checkErr(err)
		},
	}
	cmd.Flags().BoolP("follow", "f", false, "Specify if the logs should be streamed.")
	return cmd
}
| {
"content_hash": "d796583a57f8290c95d5aaa84c60921e",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 115,
"avg_line_length": 24.45945945945946,
"alnum_prop": 0.6475138121546962,
"repo_name": "sosiouxme/origin",
"id": "1e88248e2f4c8fb24316710357641397ea3d2d01",
"size": "2388",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Godeps/_workspace/src/github.com/GoogleCloudPlatform/kubernetes/pkg/kubectl/cmd/log.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "12317"
},
{
"name": "Go",
"bytes": "7606473"
},
{
"name": "JavaScript",
"bytes": "57106"
},
{
"name": "Makefile",
"bytes": "1910"
},
{
"name": "Shell",
"bytes": "64261"
}
],
"symlink_target": ""
} |
// Step codes for the ISP (in-system programming) flash sequence.
// NOTE(review): exact wire semantics are not visible in this header -
// confirm against the code that sends/consumes these values.
const int ISP_SEND_FLASH_COMMAND = 0;
const int ISP_SEND_DHCP_COMMAND_NO_LANIP = 1;
const int ISP_SEND_DHCP_COMMAND_HAS_LANIP = 2;
const int ISP_Send_TFTP_PAKAGE = 3;
const int ISP_Send_TFTP_OVER = 4;
const int ISP_Flash_Done = 5;
/////////////////////////////////////////////////////////////////////////////
// CFanceSocket command target
// Async socket (MFC CAsyncSocket) used by the flashing/ISP flow.
// NOTE(review): the banner above says "CFanceSocket" but the class is
// MySocket - looks like copied boilerplate; consider fixing the banner.
class MySocket : public CAsyncSocket
{
// Attributes
public:
// Operations
public:
	MySocket();
	virtual ~MySocket();
// Overrides
public:
public:
	// CAsyncSocket event callbacks (accept / data ready / closed).
	virtual void OnAccept(int nErrorCode);
	virtual void OnReceive(int nErrorCode);
	virtual void OnClose(int nErrorCode);
	// Path of the hex/bin image - presumably the firmware to transfer;
	// verify against the call sites that assign it.
	CString m_hex_bin_filepath;
protected:
public:
	virtual void OnConnect(int nErrorCode);
private:
	char receive_buf[4096];  // buffer for received socket data
	int Receive_data_length; // byte count associated with receive_buf - TODO confirm exact meaning
};
//#endif
| {
"content_hash": "a72ecf67a381a826bd54288dd6174766",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 77,
"avg_line_length": 20.44736842105263,
"alnum_prop": 0.6640926640926641,
"repo_name": "temcocontrols/T3000_Building_Automation_System",
"id": "50f16c0b0616c1c5fd8528e5f59a6004349d1384",
"size": "1075",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "T3000/MySocket.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "555"
},
{
"name": "C",
"bytes": "4704332"
},
{
"name": "C#",
"bytes": "9337521"
},
{
"name": "C++",
"bytes": "15665753"
},
{
"name": "CMake",
"bytes": "169395"
},
{
"name": "CSS",
"bytes": "111688"
},
{
"name": "Dockerfile",
"bytes": "210"
},
{
"name": "HTML",
"bytes": "125316"
},
{
"name": "Inno Setup",
"bytes": "5879"
},
{
"name": "JavaScript",
"bytes": "1789138"
},
{
"name": "Makefile",
"bytes": "6851"
},
{
"name": "Meson",
"bytes": "7623"
},
{
"name": "NASL",
"bytes": "14427"
},
{
"name": "Objective-C",
"bytes": "7094"
},
{
"name": "Perl",
"bytes": "40922"
},
{
"name": "PowerShell",
"bytes": "4726"
},
{
"name": "Python",
"bytes": "1992"
},
{
"name": "Shell",
"bytes": "143"
}
],
"symlink_target": ""
} |
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/initrd.h>
#include <linux/irq.h>
#include <linux/ide.h>
#include <linux/ioport.h>
#include <linux/param.h> /* for HZ */
#include <linux/root_dev.h>
#include <linux/serial.h>
#include <linux/serial_core.h>
#include <asm/cpu.h>
#include <asm/bootinfo.h>
#include <asm/addrspace.h>
#include <asm/time.h>
#include <asm/bcache.h>
#include <asm/irq.h>
#include <asm/reboot.h>
#include <asm/gdb-stub.h>
#include <asm/traps.h>
#include <asm/debug.h>
#include <asm/emma2rh/emma2rh.h>
#define USE_CPU_COUNTER_TIMER /* whether we use cpu counter */
extern void markeins_led(const char *);
static int bus_frequency = 0;
/*
 * Restart hook.  A true warm restart is not implemented for this board
 * (hence the "cannot ... restart" message); instead jump back to the
 * boot PROM entry point at 0xbfc00000.  The command argument is ignored.
 */
static void markeins_machine_restart(char *command)
{
	static void (*back_to_prom) (void) = (void (*)(void))0xbfc00000;
	printk("cannot EMMA2RH Mark-eins restart.\n");
	markeins_led("restart.");
	back_to_prom();
}
/* Halt hook: announce the halt on console and LED, then spin forever. */
static void markeins_machine_halt(void)
{
	printk("EMMA2RH Mark-eins halted.\n");
	markeins_led("halted.");
	while (1) ;
}
/*
 * Power-off hook.  There is no software power control here; ask the
 * operator to switch the board off and spin forever.
 */
static void markeins_machine_power_off(void)
{
	printk("EMMA2RH Mark-eins halted. Please turn off the power.\n");
	markeins_led("poweroff.");
	while (1) ;
}
/* Candidate bus clock rates in Hz, indexed by the 2-bit strap setting. */
static unsigned long clock[4] = { 166500000, 187312500, 199800000, 210600000 };
/*
 * Read the boot-strap register and map its 2-bit clock-select field
 * (bits 5:4) to a bus frequency in Hz.  The rtc_base argument is unused.
 */
static unsigned int __init detect_bus_frequency(unsigned long rtc_base)
{
	u32 reg;
	/* detect from boot strap */
	reg = emma2rh_in32(EMMA2RH_BHIF_STRAP_0);
	reg = (reg >> 4) & 0x3;
	return clock[reg];
}
/*
 * Derive mips_hpt_frequency from the bus clock.  The 2-bit ratio field
 * comes either from the strap register (bits 7:6) or, depending on the
 * low strap bits, from the main control register (bits 5:4); the final
 * division by 2 reflects the counter ticking at half the CPU clock.
 */
static void __init emma2rh_time_init(void)
{
	u32 reg;
	/* probe the bus clock lazily on first use */
	if (bus_frequency == 0)
		bus_frequency = detect_bus_frequency(0);
	reg = emma2rh_in32(EMMA2RH_BHIF_STRAP_0);
	if ((reg & 0x3) == 0)
		reg = (reg >> 6) & 0x3;
	else {
		reg = emma2rh_in32(EMMA2RH_BHIF_MAIN_CTRL);
		reg = (reg >> 4) & 0x3;
	}
	/* counter rate = bus * (4 + ratio) / 4 / 2 */
	mips_hpt_frequency = (bus_frequency * (4 + reg)) / 4 / 2;
}
/* Arch hook: attach the timer irqaction to the CPU counter interrupt (IRQ 7). */
void __init plat_timer_setup(struct irqaction *irq)
{
	/* we are using the cpu counter for timer interrupts */
	setup_irq(CPU_IRQ_BASE + 7, irq);
}
/* Forward declaration: board-level register init, defined below. */
static void markeins_board_init(void);
/* IRQ controller setup, implemented elsewhere in this board port. */
extern void markeins_irq_setup(void);
/* Serial (SIO) setup hook; intentionally empty on this board. */
static void inline __init markeins_sio_setup(void)
{
}
/*
 * Arch hook: early platform setup.  Initializes board registers, installs
 * the time-init and restart/halt/power-off handlers, configures the PCI
 * I/O base and resource limits, and enables reboot-on-panic.
 */
void __init plat_mem_setup(void)
{
	/* initialize board - we don't trust the loader */
	markeins_board_init();
	set_io_port_base(KSEG1ADDR(EMMA2RH_PCI_IO_BASE));
	board_time_init = emma2rh_time_init;
	_machine_restart = markeins_machine_restart;
	_machine_halt = markeins_machine_halt;
	pm_power_off = markeins_machine_power_off;
	/* setup resource limits */
	ioport_resource.start = EMMA2RH_PCI_IO_BASE;
	ioport_resource.end = EMMA2RH_PCI_IO_BASE + EMMA2RH_PCI_IO_SIZE - 1;
	iomem_resource.start = EMMA2RH_IO_BASE;
	iomem_resource.end = EMMA2RH_ROM_BASE - 1;
	/* Reboot on panic (180 s after the panic message) */
	panic_timeout = 180;
	markeins_sio_setup();
}
/*
 * One-time board register setup: unmask serial interrupts, select the
 * 18MHz serial clock, clear PCI control, and show a banner on the LED.
 */
static void __init markeins_board_init(void)
{
	u32 val;
	val = emma2rh_in32(EMMA2RH_PBRD_INT_EN);	/* open serial interrupts. */
	emma2rh_out32(EMMA2RH_PBRD_INT_EN, val | 0xaa);
	val = emma2rh_in32(EMMA2RH_PBRD_CLKSEL);	/* set serial clocks. */
	emma2rh_out32(EMMA2RH_PBRD_CLKSEL, val | 0x5);	/* 18MHz */
	emma2rh_out32(EMMA2RH_PCI_CONTROL, 0);
	markeins_led("MVL E2RH");
}
| {
"content_hash": "4b4b97118613584e884bd31dfd6eb5e0",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 79,
"avg_line_length": 23.669172932330827,
"alnum_prop": 0.6893265565438373,
"repo_name": "ut-osa/laminar",
"id": "2f060e1ed36c674c0e9da4e0d11f281edc8d07a0",
"size": "4143",
"binary": false,
"copies": "40",
"ref": "refs/heads/master",
"path": "linux-2.6.22.6/arch/mips/emma2rh/markeins/setup.c",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ASP",
"bytes": "4526"
},
{
"name": "Assembly",
"bytes": "7753785"
},
{
"name": "Awk",
"bytes": "5239"
},
{
"name": "Bison",
"bytes": "75151"
},
{
"name": "C",
"bytes": "209779557"
},
{
"name": "C++",
"bytes": "5954668"
},
{
"name": "CSS",
"bytes": "11885"
},
{
"name": "Java",
"bytes": "12132154"
},
{
"name": "Makefile",
"bytes": "731243"
},
{
"name": "Objective-C",
"bytes": "564040"
},
{
"name": "Perl",
"bytes": "196100"
},
{
"name": "Python",
"bytes": "11786"
},
{
"name": "Ruby",
"bytes": "3219"
},
{
"name": "Scala",
"bytes": "12158"
},
{
"name": "Scilab",
"bytes": "22980"
},
{
"name": "Shell",
"bytes": "205177"
},
{
"name": "TeX",
"bytes": "62636"
},
{
"name": "UnrealScript",
"bytes": "20822"
},
{
"name": "XSLT",
"bytes": "6544"
}
],
"symlink_target": ""
} |
using System.Runtime.Serialization;
namespace MyParcelApi.Net.Models
{
    /// <summary>
    /// Invoice data-transfer object serialized with the "id", "number" and
    /// "status" JSON members (default values are omitted from the payload).
    /// </summary>
    [DataContract]
    public class Invoice
    {
        /// <summary>Invoice identifier.</summary>
        [DataMember(Name = "id", EmitDefaultValue = false)]
        public int Id { get; set; }

        /// <summary>Invoice number.</summary>
        [DataMember(Name = "number", EmitDefaultValue = false)]
        public string Number { get; set; }

        /// <summary>Invoice status code; value semantics are defined by the MyParcel API - confirm against the API reference.</summary>
        [DataMember(Name = "status", EmitDefaultValue = false)]
        public int Status { get; set; }
    }
}
| {
"content_hash": "f65e49f3031ef9d26e6bfabfb63b97df",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 63,
"avg_line_length": 26.88235294117647,
"alnum_prop": 0.5929978118161926,
"repo_name": "janssenr/MyParcelApi.Net",
"id": "22917f53d5a346b75fa56d0e09b2eab951a22635",
"size": "459",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/MyParcelApi.Net/Models/Invoice.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "132643"
}
],
"symlink_target": ""
} |
import numpy as np
from nose.tools import assert_equal, assert_raises
from mne import create_info
from mne.io import RawArray
from mne.utils import logger, catch_logging, slow_test, run_tests_if_main
def bad_1(x):
    """Intentionally broken transform: ignore ``x`` and return nothing (bad return type)."""
    return None
def bad_2(x):
    """Intentionally broken transform: drop the last element, changing the shape."""
    return x[:len(x) - 1]
def printer(x):
    """Identity transform with a side effect: log 'exec' at info level, return ``x`` unchanged."""
    logger.info('exec')
    return x
@slow_test
def test_apply_function_verbose():
    """Test apply function verbosity
    """
    n_chan = 2
    n_times = 3
    ch_names = [str(ii) for ii in range(n_chan)]
    # Minimal all-zero magnetometer recording: only the shape and the
    # logging behaviour matter for this test.
    raw = RawArray(np.zeros((n_chan, n_times)),
                   create_info(ch_names, 1., 'mag'))
    # test return types in both code paths (parallel / 1 job)
    assert_raises(TypeError, raw.apply_function, bad_1,
                  None, None, 1)
    assert_raises(ValueError, raw.apply_function, bad_2,
                  None, None, 1)
    assert_raises(TypeError, raw.apply_function, bad_1,
                  None, None, 2)
    assert_raises(ValueError, raw.apply_function, bad_2,
                  None, None, 2)
    # check our arguments
    with catch_logging() as sio:
        # verbose=False must suppress per-channel log output entirely
        raw.apply_function(printer, None, None, 1, verbose=False)
        assert_equal(len(sio.getvalue()), 0)
        # verbose=True logs one line per channel processed
        raw.apply_function(printer, None, None, 1, verbose=True)
        assert_equal(sio.getvalue().count('\n'), n_chan)
run_tests_if_main()
| {
"content_hash": "b67dcab1225c10f0b09e5317eb588949",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 73,
"avg_line_length": 28.270833333333332,
"alnum_prop": 0.6182756079587325,
"repo_name": "alexandrebarachant/mne-python",
"id": "1fb935b65cd4cd027172f25340b4d28a7c9e61e0",
"size": "1435",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "mne/io/tests/test_apply_function.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Csound Document",
"bytes": "69806"
},
{
"name": "Makefile",
"bytes": "3679"
},
{
"name": "Python",
"bytes": "5469295"
},
{
"name": "Shell",
"bytes": "936"
}
],
"symlink_target": ""
} |
layout: base
title: 'Statistics of AdvType in UD_Spanish-AnCora'
udver: '2'
---
## Treebank Statistics: UD_Spanish-AnCora: Features: `AdvType`
This feature is language-specific.
It occurs with 1 different value: `Tim`.
1898 tokens (0%) have a non-empty value of `AdvType`.
263 types (1%) occur at least once with a non-empty value of `AdvType`.
263 lemmas (1%) occur at least once with a non-empty value of `AdvType`.
The feature is used with 3 part-of-speech tags: <tt><a href="es_ancora-pos-NOUN.html">NOUN</a></tt> (1883; 0% instances), <tt><a href="es_ancora-pos-ADJ.html">ADJ</a></tt> (13; 0% instances), <tt><a href="es_ancora-pos-ADV.html">ADV</a></tt> (2; 0% instances).
### `NOUN`
1883 <tt><a href="es_ancora-pos-NOUN.html">NOUN</a></tt> tokens (2% of all `NOUN` tokens) have a non-empty value of `AdvType`.
The most frequent other feature values with which `NOUN` and `AdvType` co-occurred: <tt><a href="es_ancora-feat-Gender.html">Gender</a></tt><tt>=EMPTY</tt> (1882; 100%), <tt><a href="es_ancora-feat-Number.html">Number</a></tt><tt>=EMPTY</tt> (1882; 100%).
`NOUN` tokens may have the following values of `AdvType`:
* `Tim` (1883; 100% of non-empty `AdvType`): <em>1999, domingo, viernes, miércoles, sábado, lunes, martes, jueves, 1998, año</em>
`AdvType` seems to be **lexical feature** of `NOUN`. 100% lemmas (260) occur only with one value of `AdvType`.
### `ADJ`
13 <tt><a href="es_ancora-pos-ADJ.html">ADJ</a></tt> tokens (0% of all `ADJ` tokens) have a non-empty value of `AdvType`.
The most frequent other feature values with which `ADJ` and `AdvType` co-occurred: <tt><a href="es_ancora-feat-Gender.html">Gender</a></tt><tt>=EMPTY</tt> (13; 100%), <tt><a href="es_ancora-feat-Number.html">Number</a></tt><tt>=EMPTY</tt> (13; 100%), <tt><a href="es_ancora-feat-VerbForm.html">VerbForm</a></tt><tt>=EMPTY</tt> (13; 100%).
`ADJ` tokens may have the following values of `AdvType`:
* `Tim` (13; 100% of non-empty `AdvType`): <em>próximo</em>
### `ADV`
2 <tt><a href="es_ancora-pos-ADV.html">ADV</a></tt> tokens (0% of all `ADV` tokens) have a non-empty value of `AdvType`.
The most frequent other feature values with which `ADV` and `AdvType` co-occurred: <tt><a href="es_ancora-feat-Degree.html">Degree</a></tt><tt>=EMPTY</tt> (2; 100%), <tt><a href="es_ancora-feat-Polarity.html">Polarity</a></tt><tt>=EMPTY</tt> (2; 100%).
`ADV` tokens may have the following values of `AdvType`:
* `Tim` (2; 100% of non-empty `AdvType`): <em>después, tarde</em>
## Relations with Agreement in `AdvType`
The 10 most frequent relations where parent and child node agree in `AdvType`:
<tt>NOUN --[<tt><a href="es_ancora-dep-conj.html">conj</a></tt>]--> NOUN</tt> (59; 76%).
| {
"content_hash": "5cff8c1b86255b15de01485730b330e8",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 338,
"avg_line_length": 51.92307692307692,
"alnum_prop": 0.6737037037037037,
"repo_name": "UniversalDependencies/docs",
"id": "2737f62fcc3ae7b2ffe66ecbec87367266977ecb",
"size": "2709",
"binary": false,
"copies": "1",
"ref": "refs/heads/pages-source",
"path": "treebanks/es_ancora/es_ancora-feat-AdvType.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "64420"
},
{
"name": "HTML",
"bytes": "1458943"
},
{
"name": "JavaScript",
"bytes": "238859"
},
{
"name": "Perl",
"bytes": "7788"
},
{
"name": "Python",
"bytes": "21203"
},
{
"name": "Ruby",
"bytes": "578"
},
{
"name": "Shell",
"bytes": "7253"
}
],
"symlink_target": ""
} |
package bytes
import (
"errors"
"io"
"unicode/utf8"
)
// A Reader implements the io.Reader, io.ReaderAt, io.WriterTo, io.Seeker,
// io.ByteScanner, and io.RuneScanner interfaces by reading from
// a byte slice.
// Unlike a Buffer, a Reader is read-only and supports seeking.
// The zero value for Reader operates like a Reader of an empty slice.
type Reader struct {
	s        []byte
	i        int64 // current read position within s
	prevRune int   // byte index of the rune returned by the last ReadRune; < 0 otherwise
}

// Len returns the number of bytes of the unread portion of the
// slice.
func (r *Reader) Len() int {
	if rem := int64(len(r.s)) - r.i; rem > 0 {
		return int(rem)
	}
	return 0
}

// Size returns the original length of the underlying byte slice.
// Size is the number of bytes available for reading via ReadAt.
// The result is unaffected by any method calls except Reset.
func (r *Reader) Size() int64 {
	return int64(len(r.s))
}

// Read implements the io.Reader interface.
func (r *Reader) Read(b []byte) (n int, err error) {
	if r.i >= int64(len(r.s)) {
		return 0, io.EOF
	}
	r.prevRune = -1
	n = copy(b, r.s[r.i:])
	r.i += int64(n)
	return n, nil
}

// ReadAt implements the io.ReaderAt interface.
func (r *Reader) ReadAt(b []byte, off int64) (n int, err error) {
	// io.ReaderAt requires that the reader's state is left untouched.
	if off < 0 {
		return 0, errors.New("bytes.Reader.ReadAt: negative offset")
	}
	if off >= int64(len(r.s)) {
		return 0, io.EOF
	}
	n = copy(b, r.s[off:])
	if n < len(b) {
		err = io.EOF
	}
	return n, err
}

// ReadByte implements the io.ByteReader interface.
func (r *Reader) ReadByte() (byte, error) {
	r.prevRune = -1
	if r.i >= int64(len(r.s)) {
		return 0, io.EOF
	}
	c := r.s[r.i]
	r.i++
	return c, nil
}

// UnreadByte complements ReadByte in implementing the io.ByteScanner interface.
func (r *Reader) UnreadByte() error {
	if r.i <= 0 {
		return errors.New("bytes.Reader.UnreadByte: at beginning of slice")
	}
	r.i--
	r.prevRune = -1
	return nil
}

// ReadRune implements the io.RuneReader interface.
func (r *Reader) ReadRune() (ch rune, size int, err error) {
	if r.i >= int64(len(r.s)) {
		r.prevRune = -1
		return 0, 0, io.EOF
	}
	r.prevRune = int(r.i)
	if c := r.s[r.i]; c < utf8.RuneSelf {
		// Fast path: a single-byte (ASCII) rune.
		r.i++
		return rune(c), 1, nil
	}
	ch, size = utf8.DecodeRune(r.s[r.i:])
	r.i += int64(size)
	return ch, size, nil
}

// UnreadRune complements ReadRune in implementing the io.RuneScanner interface.
func (r *Reader) UnreadRune() error {
	if r.i <= 0 {
		return errors.New("bytes.Reader.UnreadRune: at beginning of slice")
	}
	if r.prevRune < 0 {
		return errors.New("bytes.Reader.UnreadRune: previous operation was not ReadRune")
	}
	r.i = int64(r.prevRune)
	r.prevRune = -1
	return nil
}

// Seek implements the io.Seeker interface.
func (r *Reader) Seek(offset int64, whence int) (int64, error) {
	r.prevRune = -1
	var pos int64
	switch whence {
	case io.SeekStart:
		pos = offset
	case io.SeekCurrent:
		pos = r.i + offset
	case io.SeekEnd:
		pos = int64(len(r.s)) + offset
	default:
		return 0, errors.New("bytes.Reader.Seek: invalid whence")
	}
	if pos < 0 {
		return 0, errors.New("bytes.Reader.Seek: negative position")
	}
	r.i = pos
	return pos, nil
}

// WriteTo implements the io.WriterTo interface.
func (r *Reader) WriteTo(w io.Writer) (n int64, err error) {
	r.prevRune = -1
	if r.i >= int64(len(r.s)) {
		return 0, nil
	}
	rest := r.s[r.i:]
	m, err := w.Write(rest)
	if m > len(rest) {
		panic("bytes.Reader.WriteTo: invalid Write count")
	}
	r.i += int64(m)
	n = int64(m)
	if m != len(rest) && err == nil {
		err = io.ErrShortWrite
	}
	return n, err
}

// Reset resets the Reader to be reading from b.
func (r *Reader) Reset(b []byte) {
	*r = Reader{s: b, i: 0, prevRune: -1}
}

// NewReader returns a new Reader reading from b.
func NewReader(b []byte) *Reader {
	return &Reader{s: b, i: 0, prevRune: -1}
}
| {
"content_hash": "5e6140c0fdb66bb3139c71eb55dca2d8",
"timestamp": "",
"source": "github",
"line_count": 155,
"max_line_length": 83,
"avg_line_length": 24.174193548387098,
"alnum_prop": 0.6533226581265013,
"repo_name": "golang/go",
"id": "81c22aa0295d6d04a08a63f47b38d4c75659044b",
"size": "3907",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "src/bytes/reader.go",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "2705689"
},
{
"name": "Awk",
"bytes": "450"
},
{
"name": "Batchfile",
"bytes": "8497"
},
{
"name": "C",
"bytes": "127970"
},
{
"name": "C++",
"bytes": "917"
},
{
"name": "Dockerfile",
"bytes": "2789"
},
{
"name": "Fortran",
"bytes": "100"
},
{
"name": "Go",
"bytes": "41103717"
},
{
"name": "HTML",
"bytes": "2621340"
},
{
"name": "JavaScript",
"bytes": "20492"
},
{
"name": "Makefile",
"bytes": "748"
},
{
"name": "Perl",
"bytes": "31365"
},
{
"name": "Python",
"bytes": "15738"
},
{
"name": "Shell",
"bytes": "62900"
}
],
"symlink_target": ""
} |
package info.gianlucacosta.osgitest.hotswap.gui
/**
 * Implicit conversion from a parameterless Unit function to a
 * java.lang.Runnable, so Scala function literals can be handed
 * to Java APIs that expect Runnables.
 */
object RunnableConverter {
  implicit def functionToRunnable(action: () => Unit): Runnable =
    new Runnable {
      override def run(): Unit = action()
    }
}
| {
"content_hash": "1f7e330f93c8e336cabfab0073223d4f",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 80,
"avg_line_length": 19.90909090909091,
"alnum_prop": 0.6712328767123288,
"repo_name": "giancosta86/OSGi-Test",
"id": "237211fb9bf74a2d3939f1dff05a0723dd1cc157",
"size": "1129",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hotswap-gui/src/main/scala/info/gianlucacosta/osgitest/hotswap/gui/RunnableConverter.scala",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "3479"
},
{
"name": "Scala",
"bytes": "13870"
}
],
"symlink_target": ""
} |
<?php
require __DIR__ . '/vendor/autoload.php';
require __DIR__ . '/includes/autoload.php';
require __DIR__ . '/includes/init.php';
$args = array();

if (count($argv) < 2) {
    // No command supplied: print the usage screen and stop.
    $help = new \Consh\Core\Commands\Help();
    $help->showDefaultHelp();
    exit;
}

if (count($argv) > 2) {
    // Everything after the command name becomes its argument list.
    $args = array_slice($argv, 2);
}

// $argv[1] is the command name the user typed.
\Consh\Core\CommandParser::run($argv[1], $args);
| {
"content_hash": "05eb3b1b5a2cc2d114b97d780eb97dee",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 52,
"avg_line_length": 23.941176470588236,
"alnum_prop": 0.6068796068796068,
"repo_name": "triplei/consh",
"id": "869176244917e6233ad30bf49027e83d50e0edeb",
"size": "407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "consh.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "27657"
},
{
"name": "Shell",
"bytes": "875"
}
],
"symlink_target": ""
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<title>Uses of Class org.apache.poi.hssf.usermodel.HSSFFormulaEvaluator (POI API Documentation)</title>
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.poi.hssf.usermodel.HSSFFormulaEvaluator (POI API Documentation)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFFormulaEvaluator.html" title="class in org.apache.poi.hssf.usermodel">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>PREV</li>
<li>NEXT</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/apache/poi/hssf/usermodel//class-useHSSFFormulaEvaluator.html" target="_top">FRAMES</a></li>
<li><a href="HSSFFormulaEvaluator.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.apache.poi.hssf.usermodel.HSSFFormulaEvaluator" class="title">Uses of Class<br>org.apache.poi.hssf.usermodel.HSSFFormulaEvaluator</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFFormulaEvaluator.html" title="class in org.apache.poi.hssf.usermodel">HSSFFormulaEvaluator</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.apache.poi.hssf.usermodel">org.apache.poi.hssf.usermodel</a></td>
<td class="colLast">
<div class="block">usermodel package maps HSSF low level strutures to familiar workbook/sheet model</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.apache.poi.hssf.usermodel">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFFormulaEvaluator.html" title="class in org.apache.poi.hssf.usermodel">HSSFFormulaEvaluator</a> in <a href="../../../../../../org/apache/poi/hssf/usermodel/package-summary.html">org.apache.poi.hssf.usermodel</a></h3>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../org/apache/poi/hssf/usermodel/package-summary.html">org.apache.poi.hssf.usermodel</a> that return <a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFFormulaEvaluator.html" title="class in org.apache.poi.hssf.usermodel">HSSFFormulaEvaluator</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>static <a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFFormulaEvaluator.html" title="class in org.apache.poi.hssf.usermodel">HSSFFormulaEvaluator</a></code></td>
<td class="colLast"><span class="strong">HSSFFormulaEvaluator.</span><code><strong><a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFFormulaEvaluator.html#create(org.apache.poi.hssf.usermodel.HSSFWorkbook, org.apache.poi.ss.formula.IStabilityClassifier, org.apache.poi.ss.formula.udf.UDFFinder)">create</a></strong>(<a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFWorkbook.html" title="class in org.apache.poi.hssf.usermodel">HSSFWorkbook</a> workbook,
<a href="../../../../../../org/apache/poi/ss/formula/IStabilityClassifier.html" title="interface in org.apache.poi.ss.formula">IStabilityClassifier</a> stabilityClassifier,
<a href="../../../../../../org/apache/poi/ss/formula/udf/UDFFinder.html" title="interface in org.apache.poi.ss.formula.udf">UDFFinder</a> udfFinder)</code> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code><a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFFormulaEvaluator.html" title="class in org.apache.poi.hssf.usermodel">HSSFFormulaEvaluator</a></code></td>
<td class="colLast"><span class="strong">HSSFCreationHelper.</span><code><strong><a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFCreationHelper.html#createFormulaEvaluator()">createFormulaEvaluator</a></strong>()</code>
<div class="block">Creates a HSSFFormulaEvaluator, the object that evaluates formula cells.</div>
</td>
</tr>
</tbody>
</table>
<table border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../org/apache/poi/hssf/usermodel/package-summary.html">org.apache.poi.hssf.usermodel</a> with parameters of type <a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFFormulaEvaluator.html" title="class in org.apache.poi.hssf.usermodel">HSSFFormulaEvaluator</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>static void</code></td>
<td class="colLast"><span class="strong">HSSFFormulaEvaluator.</span><code><strong><a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFFormulaEvaluator.html#setupEnvironment(java.lang.String[], org.apache.poi.hssf.usermodel.HSSFFormulaEvaluator[])">setupEnvironment</a></strong>(java.lang.String[] workbookNames,
<a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFFormulaEvaluator.html" title="class in org.apache.poi.hssf.usermodel">HSSFFormulaEvaluator</a>[] evaluators)</code>
<div class="block">Coordinates several formula evaluators together so that formulas that involve external
references can be evaluated.</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../org/apache/poi/hssf/usermodel/HSSFFormulaEvaluator.html" title="class in org.apache.poi.hssf.usermodel">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>PREV</li>
<li>NEXT</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/apache/poi/hssf/usermodel//class-useHSSFFormulaEvaluator.html" target="_top">FRAMES</a></li>
<li><a href="HSSFFormulaEvaluator.html" target="_top">NO FRAMES</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>
<i>Copyright 2016 The Apache Software Foundation or
its licensors, as applicable.</i>
</small></p>
</body>
</html>
| {
"content_hash": "12cf8e90eb6cba2b570772075c73ac84",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 481,
"avg_line_length": 50.10326086956522,
"alnum_prop": 0.6611346132986224,
"repo_name": "Aarhus-BSS/Aarhus-Research-Rebuilt",
"id": "1caf60c3dde5f7e26751e7c2ca395e40ddcecc8a",
"size": "9219",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/poi-3.16-beta1/docs/apidocs/org/apache/poi/hssf/usermodel/class-use/HSSFFormulaEvaluator.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25053"
},
{
"name": "HTML",
"bytes": "99455840"
},
{
"name": "Java",
"bytes": "12711046"
},
{
"name": "Lua",
"bytes": "152042"
},
{
"name": "Python",
"bytes": "8613"
},
{
"name": "R",
"bytes": "20655"
},
{
"name": "Shell",
"bytes": "913"
}
],
"symlink_target": ""
} |
package com.klinker.android.link_builder_example;
import android.content.Intent;
import android.graphics.Color;
import android.graphics.Typeface;
import android.net.Uri;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.widget.TextView;
import android.widget.Toast;
import com.klinker.android.link_builder.Link;
import com.klinker.android.link_builder.LinkBuilder;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
public class JavaMainActivity extends AppCompatActivity {

    private static final String GITHUB_LINK = "https://github.com/klinker24";
    private static final String TWITTER_PROFILE = "https://twitter.com/";
    private static final String PLAY_STORE = "https://play.google.com/store/apps/developer?id=Klinker+Apps&hl=en";

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Inflate the layout: a scroll view wrapping the demo text view.
        setContentView(R.layout.activity_main);

        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        toolbar.setTitle(R.string.app_name);

        // Attach every demo link to the text view and make them tappable.
        TextView linkText = (TextView) findViewById(R.id.test_text);
        LinkBuilder.on(linkText)
                .addLinks(getExampleLinks())
                .build();
    }

    /**
     * Builds the list of demo links shown in the text view. Each Link pairs
     * a literal string or regex with styling and a click/long-click action.
     */
    private List<Link> getExampleLinks() {
        List<Link> links = new ArrayList<>();

        // Single-tap link that opens the project's GitHub page.
        Link githubLink = new Link("TextView-LinkBuilder");
        githubLink.setTypeface(Typeface.DEFAULT_BOLD)
                .setOnClickListener(new Link.OnClickListener() {
                    @Override
                    public void onClick(String clickedText) {
                        openLink(GITHUB_LINK);
                    }
                });
        links.add(githubLink);

        // Regex link: any @mention opens that Twitter profile.
        Link mentionLink = new Link(Pattern.compile("@\\w{1,15}"));
        mentionLink.setTextColor(Color.parseColor("#00BCD4"));
        mentionLink.setHighlightAlpha(.4f);
        mentionLink.setOnClickListener(new Link.OnClickListener() {
            @Override
            public void onClick(String clickedText) {
                openLink(TWITTER_PROFILE + clickedText.replace("@", ""));
            }
        });
        links.add(mentionLink);

        // Regex link matching runs of digits; shows the tapped number.
        Link numberLink = new Link(Pattern.compile("[0-9]+"));
        numberLink.setTextColor(Color.parseColor("#FF9800"));
        numberLink.setOnClickListener(new Link.OnClickListener() {
            @Override
            public void onClick(String clickedText) {
                showToast("Clicked: " + clickedText);
            }
        });
        links.add(numberLink);

        // Demonstrates a long-click action instead of a tap.
        Link longPressLink = new Link("here");
        longPressLink.setTextColor(Color.parseColor("#259B24"));
        longPressLink.setOnLongClickListener(new Link.OnLongClickListener() {
            @Override
            public void onLongClick(String clickedText) {
                showToast("You long clicked. Nice job.");
            }
        });
        links.add(longPressLink);

        // Underline styling on ...
        Link underlinedLink = new Link("Yes");
        underlinedLink.setUnderlined(true);
        underlinedLink.setTextColor(Color.parseColor("#FFEB3B"));
        links.add(underlinedLink);

        // ... and off.
        Link plainLink = new Link("No");
        plainLink.setUnderlined(false);
        plainLink.setTextColor(Color.parseColor("#FFEB3B"));
        links.add(plainLink);

        // Bold styling.
        Link boldLink = new Link("bold");
        boldLink.setBold(true);
        boldLink.setTextColor(Color.parseColor("#FF0000"));
        links.add(boldLink);

        // Text automatically added before / after the matched word.
        Link prependLink = new Link("prepended");
        prependLink.setPrependedText("(!)");
        links.add(prependLink);

        Link appendLink = new Link("appended");
        appendLink.setAppendedText("(!)");
        links.add(appendLink);

        // Link to the developer's Play Store page, with a custom
        // highlighted-text color instead of a background highlight.
        Link storeLink = new Link("Play Store");
        storeLink.setTextColor(Color.parseColor("#FF9800"));
        storeLink.setTextColorOfHighlightedLink(Color.parseColor("#FF6600"));
        storeLink.setHighlightAlpha(0f);
        storeLink.setOnClickListener(new Link.OnClickListener() {
            @Override
            public void onClick(String clickedText) {
                openLink(PLAY_STORE);
            }
        });
        links.add(storeLink);

        return links;
    }

    // Opens a URL in the device browser.
    private void openLink(String url) {
        startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(url)));
    }

    // Shows a short toast with the given message.
    private void showToast(String message) {
        Toast.makeText(this, message, Toast.LENGTH_SHORT).show();
    }
} | {
"content_hash": "6d0c97cd8981b677fc4fac953379f9cd",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 114,
"avg_line_length": 34.17567567567568,
"alnum_prop": 0.6213918544879399,
"repo_name": "klinker24/Android-TextView-LinkBuilder",
"id": "d6ce46a5740b699a551b60630b2b00309152262a",
"size": "5058",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example/src/main/java/com/klinker/android/link_builder_example/JavaMainActivity.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "5058"
},
{
"name": "Kotlin",
"bytes": "38303"
}
],
"symlink_target": ""
} |
package com.mark.embedding
import com.mark.similarity._
/**
 * A point represents an n-dimensional coordinate in a Euclidean space.
 *
 * Along with the coordinate values across each dimension, this class
 * contains methods to compute Euclidean distances between Point objects.
 */
class Point[N : Numeric](args: N*) {
  private val coordinates = args.toList
  private val dim = args.size
  private val num = implicitly[Numeric[N]]

  /**
   * Returns the value of the ith dimension of the point.
   */
  def apply(i: Int): N = coordinates(i)

  /**
   * Returns a function that yields the Euclidean distance between this
   * point and its argument.
   */
  def distanceTo = {
    def distance = new Distance[Point[N], Double] {
      /**
       * The distance between two points x = (x1,...,xn) and
       * y = (y1,...,yn) is the magnitude of the vector x - y:
       *
       * d(x,y) = |x - y|
       *        = sqrt[(x1 - y1)^2 + (x2 - y2)^2 + ... + (xn - yn)^2]
       */
      def apply(origin: Point[N], dest: Point[N]): Double = {
        if (origin == null || dest == null)
          throw new IllegalArgumentException(
            "distance() requires two non-null points")
        if (origin.dim != dest.dim)
          throw new IllegalArgumentException(
            "distance() requires two points of equal dimensions")

        math.sqrt(
          origin.coordinates.zip(dest.coordinates)
            .map { case (x, y) =>
              // Squaring directly is cheaper and more precise than math.pow(_, 2).
              val d = num.toDouble(num.minus(x, y))
              d * d
            }
            .sum)
      }
    }
    distance(Point.this, _: Point[N])
  }

  override def toString: String =
    "Point(" + coordinates.mkString(", ") + ")"
}
| {
"content_hash": "d5c758bde7c12290a9f73c0cb5e61b7d",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 78,
"avg_line_length": 26.029850746268657,
"alnum_prop": 0.5682339449541285,
"repo_name": "joseamuniz/mark",
"id": "7e6bdbec472e7cd9ddd99d7c0e33c4ef163f8261",
"size": "1744",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/mark/embedding/Point.scala",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Scala",
"bytes": "43947"
}
],
"symlink_target": ""
} |
// Copyright (c) 2017 ASMlover. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list ofconditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in
// the documentation and/or other materialsprovided with the
// distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
#pragma once
#if defined(_WINDOWS_) || defined(_MSC_VER)
# include <Windows.h>
#else
# include <sys/time.h>
#endif

#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <string>
namespace KcpNet {
#if defined(_WINDOWS_) || defined(_MSC_VER)
// Windows shim for POSIX gettimeofday(2); the timezone argument is ignored.
inline int gettimeofday(struct timeval* tv, struct timezone* /*tz*/) {
  if (tv) {
    SYSTEMTIME st;
    GetSystemTime(&st);

    FILETIME ft;
    SystemTimeToFileTime(&st, &ft);

    ULARGE_INTEGER ticks;
    ticks.LowPart = ft.dwLowDateTime;
    ticks.HighPart = ft.dwHighDateTime;

    // 116444736000000000 is the count of 100ns ticks between the Windows
    // epoch (1601-01-01) and the Unix epoch (1970-01-01).
    tv->tv_sec = static_cast<long>(
        (ticks.QuadPart - 116444736000000000ULL) / 10000000L);
    tv->tv_usec = static_cast<long>(st.wMilliseconds * 1000);
  }
  return 0;
}
#endif
// Wall-clock time in milliseconds since the Unix epoch.
inline std::uint64_t get_clock64(void) {
  struct timeval now;
  gettimeofday(&now, nullptr);
  return static_cast<std::uint64_t>(now.tv_sec) * 1000 + now.tv_usec / 1000;
}
// Low 32 bits of the millisecond clock (truncating view of get_clock64).
inline std::uint32_t get_clock32(void) {
  return static_cast<std::uint32_t>(get_clock64() & 0xFFFFFFFF);
}
// Sentinel payloads used by the KcpNet connect handshake.
#define KCPNET_CONNECT_PACKET "kcpnet-connect-packet"
#define KCPNET_SENDBACK_PACKET "kcpnet-sendback-packet"
#define KCPNET_CONNECT_FAILED "kcpnet-connect-failed"
#define KCPNET_CONNECT_TIMEOUT "kcpnet-connect-timeout"

// Builds the client's connect request. Note the packet length is
// sizeof(...), i.e. it deliberately includes the trailing NUL — this must
// stay in sync with the length check in is_connect_packet below.
inline std::string make_connect_packet(void) {
  return std::string(KCPNET_CONNECT_PACKET, sizeof(KCPNET_CONNECT_PACKET));
}

// True if buf/len is exactly a connect request produced by
// make_connect_packet (tag bytes compared without the NUL).
inline bool is_connect_packet(const char* buf, std::size_t len) {
  return (len == sizeof(KCPNET_CONNECT_PACKET) &&
      std::memcmp(buf,
        KCPNET_CONNECT_PACKET, sizeof(KCPNET_CONNECT_PACKET) - 1) == 0);
}

// True if buf/len starts with the sendback tag and carries extra payload
// (the conv id appended by make_sendback_packet).
inline bool is_connect_sendback_packet(const char* buf, std::size_t len) {
  return (len > sizeof(KCPNET_SENDBACK_PACKET) &&
      std::memcmp(buf,
        KCPNET_SENDBACK_PACKET, sizeof(KCPNET_SENDBACK_PACKET) - 1) == 0);
}

// Builds the server's reply "<tag> <conv>" carrying the session's conv id.
inline std::string make_sendback_packet(std::uint32_t conv) {
  char buf[256]{};
  // std::snprintf returns int (negative on encoding error); clamp before
  // converting to a length so a failure cannot wrap into a huge size_t.
  int n = std::snprintf(buf,
      sizeof(buf), "%s %u", KCPNET_SENDBACK_PACKET, conv);
  if (n < 0)
    n = 0;
  return std::string(buf, static_cast<std::size_t>(n));
}

// Extracts the conv id from a sendback packet. The digits start right
// after "<tag> ": sizeof includes the tag's NUL, which accounts for the
// separating space. strtoul (not atol) so the full uint32 range parses
// correctly even where long is 32-bit.
inline std::uint32_t get_conv_from_sendback_packet(const char* buf) {
  return static_cast<std::uint32_t>(
      std::strtoul(buf + sizeof(KCPNET_SENDBACK_PACKET), nullptr, 10));
}
}
| {
"content_hash": "0f9c722e19ffd727a57def14741950de",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 75,
"avg_line_length": 33.90196078431372,
"alnum_prop": 0.7154424522845575,
"repo_name": "ASMlover/study",
"id": "185429cdd50c005d15c5578ba2beec57f9213375",
"size": "3458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "network/KcpNet/KcpNet/Utility.h",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "3055440"
},
{
"name": "Batchfile",
"bytes": "4662"
},
{
"name": "Brainfuck",
"bytes": "571"
},
{
"name": "C",
"bytes": "13569580"
},
{
"name": "C#",
"bytes": "3959"
},
{
"name": "C++",
"bytes": "14741264"
},
{
"name": "CMake",
"bytes": "543917"
},
{
"name": "CSS",
"bytes": "11505"
},
{
"name": "Common Lisp",
"bytes": "114"
},
{
"name": "Emacs Lisp",
"bytes": "6042"
},
{
"name": "Go",
"bytes": "105203"
},
{
"name": "Groovy",
"bytes": "2907"
},
{
"name": "HTML",
"bytes": "911945"
},
{
"name": "Lex",
"bytes": "9370"
},
{
"name": "Lua",
"bytes": "32829"
},
{
"name": "Makefile",
"bytes": "1000611"
},
{
"name": "NASL",
"bytes": "3609"
},
{
"name": "NewLisp",
"bytes": "5805"
},
{
"name": "Perl",
"bytes": "594"
},
{
"name": "Python",
"bytes": "2752752"
},
{
"name": "SWIG",
"bytes": "91"
},
{
"name": "Shell",
"bytes": "9993"
},
{
"name": "Vim script",
"bytes": "92204"
},
{
"name": "Yacc",
"bytes": "6278"
}
],
"symlink_target": ""
} |
'use strict';
angular.module('s3UploadApp')
  .controller('MainCtrl', function ($scope, $http, $location, $upload, $rootScope) {
    // Successfully uploaded objects ({location, bucket, key, etag}), newest last.
    $scope.imageUploads = [];

    // Cancel the in-flight upload at `index` and release its handle.
    $scope.abort = function(index) {
      $scope.upload[index].abort();
      $scope.upload[index] = null;
    };

    // Starts one S3 POST per selected file: first fetch a signed policy
    // from the backend, then stream the file directly to the bucket.
    $scope.onFileSelect = function ($files) {
      $scope.files = $files;
      $scope.upload = [];
      for (var i = 0; i < $files.length; i++) {
        var file = $files[i];
        // Plain number, not parseInt(0): parseInt on a number round-trips
        // through a string and misparses exponent notation.
        file.progress = 0;
        // IIFE pins `file` and `i` for the async callbacks below (pre-ES6 loop).
        (function (file, i) {
          $http.get('/api/s3Policy?mimeType='+ file.type).success(function(response) {
            var s3Params = response;
            $scope.upload[i] = $upload.upload({
              url: 'https://' + $rootScope.config.awsConfig.bucket + '.s3.amazonaws.com/',
              method: 'POST',
              transformRequest: function (data, headersGetter) {
                // Drop the app's Authorization header so it is not sent to S3.
                var headers = headersGetter();
                delete headers['Authorization'];
                return data;
              },
              data: {
                'key' : 's3UploadExample/'+ Math.round(Math.random()*10000) + '$$' + file.name,
                'acl' : 'public-read',
                'Content-Type' : file.type,
                'AWSAccessKeyId': s3Params.AWSAccessKeyId,
                'success_action_status' : '201',
                'Policy' : s3Params.s3Policy,
                'Signature' : s3Params.s3Signature
              },
              file: file,
            });
            $scope.upload[i]
              .then(function(response) {
                file.progress = 100;
                if (response.status === 201) {
                  // S3 answers 201 with an XML body describing the new object.
                  var data = xml2json.parser(response.data);
                  $scope.imageUploads.push({
                    location: data.postresponse.location,
                    bucket: data.postresponse.bucket,
                    key: data.postresponse.key,
                    etag: data.postresponse.etag
                  });
                } else {
                  alert('Upload Failed');
                }
              }, null, function(evt) {
                // Progress notification: integer percent, guarded against a
                // zero total (parseInt-on-float was the old, fragile form).
                file.progress = evt.total ? Math.floor(100 * evt.loaded / evt.total) : 0;
              });
          });
        }(file, i));
      }
    };
  });
| {
"content_hash": "338022ffc040509f9a14525a4b3ac047",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 111,
"avg_line_length": 47.578125,
"alnum_prop": 0.36880131362889984,
"repo_name": "nukulb/s3-angular-file-upload",
"id": "5aeaac97eb129699048387ad845a6d2744e3976d",
"size": "3045",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "app/scripts/controllers/main.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1411"
},
{
"name": "JavaScript",
"bytes": "38197"
}
],
"symlink_target": ""
} |
layout: page
title: Archive Systems Seminar
date: 2016-05-24
author: Harry Haynes
tags: weekly links, java
status: published
summary: Donec tempor ligula sit amet pharetra.
banner: images/banner/people.jpg
booking:
startDate: 11/01/2017
endDate: 11/05/2017
ctyhocn: SCECLHX
groupCode: ASS
published: true
---
Aenean at orci laoreet, dapibus dui in, iaculis nunc. Fusce pellentesque quis leo finibus iaculis. Donec ultrices mi nec ornare viverra. In fringilla nulla diam, ut imperdiet orci suscipit ac. Class aptent taciti sociosqu ad litora torquent per conubia nostra, per inceptos himenaeos. Maecenas eleifend diam a nisi consequat posuere. Integer fringilla magna vel nisl venenatis, nec convallis nunc cursus. Nulla placerat dolor sit amet urna fringilla, vitae cursus nisi elementum. Duis tempus augue vitae nulla viverra porttitor. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Morbi malesuada lectus eget lacus gravida, non fringilla purus malesuada. Sed vitae orci massa. Fusce vel feugiat massa. Morbi sed ligula nibh.
* Praesent ut enim consectetur, sollicitudin ex sit amet, consequat eros.
Sed nec metus aliquam, imperdiet tellus aliquet, dapibus risus. Sed vel facilisis dolor, a pulvinar leo. Proin cursus pellentesque lacus. Donec quis blandit ex. Nam non sapien sodales, faucibus mauris in, iaculis erat. Cras quis maximus elit, et cursus arcu. Nunc sed quam posuere, aliquet nisl nec, posuere purus. Ut tempor sapien nec mi posuere ornare. Aliquam at sollicitudin tortor, non accumsan velit. Maecenas eget nulla vitae metus suscipit lacinia eu vel tortor. Aenean dui arcu, rhoncus vitae placerat vitae, pharetra id nunc. Mauris leo urna, varius id mi vel, dapibus rutrum est. Morbi ac sem convallis, maximus arcu ut, convallis lectus.
| {
"content_hash": "01df0ef0164fc23458a50b2fb7c825d9",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 729,
"avg_line_length": 88.65,
"alnum_prop": 0.8020304568527918,
"repo_name": "KlishGroup/prose-pogs",
"id": "f0dc6e200d40fde092007e3b3b3ffb538144f5ad",
"size": "1777",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "pogs/S/SCECLHX/ASS/index.md",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
<?php
/**
 * Model for the border of an embedded object in a spreadsheet, exposing
 * its color either as a legacy Color value or as a ColorStyle wrapper.
 *
 * NOTE(review): this appears to be generated client-library code; the
 * protected *Type/*DataType fields look like (de)serialization metadata
 * consumed by the Google_Model base class — confirm before editing.
 */
class Google_Service_Sheets_EmbeddedObjectBorder extends Google_Model
{
  // Serialization metadata for the `color` property.
  protected $colorType = 'Google_Service_Sheets_Color';
  protected $colorDataType = '';
  // Serialization metadata for the `colorStyle` property.
  protected $colorStyleType = 'Google_Service_Sheets_ColorStyle';
  protected $colorStyleDataType = '';
  /**
   * Sets the border color.
   *
   * @param Google_Service_Sheets_Color $color
   */
  public function setColor(Google_Service_Sheets_Color $color)
  {
    $this->color = $color;
  }
  /**
   * Returns the border color previously set with setColor().
   *
   * @return Google_Service_Sheets_Color
   */
  public function getColor()
  {
    return $this->color;
  }
  /**
   * Sets the border color as a ColorStyle.
   *
   * @param Google_Service_Sheets_ColorStyle $colorStyle
   */
  public function setColorStyle(Google_Service_Sheets_ColorStyle $colorStyle)
  {
    $this->colorStyle = $colorStyle;
  }
  /**
   * Returns the border color style previously set with setColorStyle().
   *
   * @return Google_Service_Sheets_ColorStyle
   */
  public function getColorStyle()
  {
    return $this->colorStyle;
  }
}
| {
"content_hash": "2e49d6beb4026752e5fc8b5556f5fb73",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 77,
"avg_line_length": 21.82051282051282,
"alnum_prop": 0.6756756756756757,
"repo_name": "bshaffer/google-api-php-client-services",
"id": "8478ee7c83005520bb771f083d9376a4894cb2e8",
"size": "1441",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/Google/Service/Sheets/EmbeddedObjectBorder.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PHP",
"bytes": "9540154"
}
],
"symlink_target": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.