text
stringlengths
2
99k
meta
dict
/*- * Copyright (c) 2004 Lev Walkin <vlm@lionet.info>. All rights reserved. * Redistribution and modifications are permitted subject to BSD license. */ /* * Read the NativeInteger.h for the explanation wrt. differences between * INTEGER and NativeInteger. * Basically, both are decoders and encoders of ASN.1 INTEGER type, but this * implementation deals with the standard (machine-specific) representation * of them instead of using the platform-independent buffer. */ #include <asn_internal.h> #include <NativeEnumerated.h> /* * NativeEnumerated basic type description. */ static ber_tlv_tag_t asn_DEF_NativeEnumerated_tags[] = { (ASN_TAG_CLASS_UNIVERSAL | (10 << 2)) }; asn_TYPE_descriptor_t asn_DEF_NativeEnumerated = { "ENUMERATED", /* The ASN.1 type is still ENUMERATED */ "ENUMERATED", NativeInteger_free, NULL, asn_generic_no_constraint, NativeInteger_decode_ber, NativeInteger_encode_der, NULL, NULL, /* NativeEnumerated_encode_xer, */ NativeEnumerated_decode_uper, NativeEnumerated_encode_uper, 0, /* Use generic outmost tag fetcher */ asn_DEF_NativeEnumerated_tags, sizeof(asn_DEF_NativeEnumerated_tags) / sizeof(asn_DEF_NativeEnumerated_tags[0]), asn_DEF_NativeEnumerated_tags, /* Same as above */ sizeof(asn_DEF_NativeEnumerated_tags) / sizeof(asn_DEF_NativeEnumerated_tags[0]), 0, /* No PER visible constraints */ 0, 0, /* No members */ 0 /* No specifics */ }; #if 0 asn_enc_rval_t NativeEnumerated_encode_xer(asn_TYPE_descriptor_t *td, void *sptr, int ilevel, enum xer_encoder_flags_e flags, asn_app_consume_bytes_f *cb, void *app_key) { asn_INTEGER_specifics_t *specs=(asn_INTEGER_specifics_t *)td->specifics; asn_enc_rval_t er; const long *native = (const long *)sptr; const asn_INTEGER_enum_map_t *el; (void)ilevel; (void)flags; if(!native) _ASN_ENCODE_FAILED; el = INTEGER_map_value2enum(specs, *native); if(el) { size_t srcsize = el->enum_len + 5; char *src = (char *)alloca(srcsize); er.encoded = snprintf(src, srcsize, "<%s/>", el->enum_name); assert(er.encoded > 0 && 
(size_t)er.encoded < srcsize); if(cb(src, er.encoded, app_key) < 0) _ASN_ENCODE_FAILED; _ASN_ENCODED_OK(er); } else { ASN_DEBUG("ASN.1 forbids dealing with " "unknown value of ENUMERATED type"); _ASN_ENCODE_FAILED; } } #endif asn_dec_rval_t NativeEnumerated_decode_uper(asn_codec_ctx_t *opt_codec_ctx, asn_TYPE_descriptor_t *td, asn_per_constraints_t *constraints, void **sptr, asn_per_data_t *pd) { asn_INTEGER_specifics_t *specs = (asn_INTEGER_specifics_t *)td->specifics; asn_dec_rval_t rval = { RC_OK, 0 }; long *native = (long *)*sptr; asn_per_constraint_t *ct; long value; (void)opt_codec_ctx; if(constraints) ct = &constraints->value; else if(td->per_constraints) ct = &td->per_constraints->value; else _ASN_DECODE_FAILED; /* Mandatory! */ if(!specs) _ASN_DECODE_FAILED; if(!native) { native = (long *)(*sptr = CALLOC(1, sizeof(*native))); if(!native) _ASN_DECODE_FAILED; } ASN_DEBUG("Decoding %s as NativeEnumerated", td->name); if(ct->flags & APC_EXTENSIBLE) { int inext = per_get_few_bits(pd, 1); if(inext < 0) _ASN_DECODE_STARVED; if(inext) ct = 0; } if(ct && ct->range_bits >= 0) { value = per_get_few_bits(pd, ct->range_bits); if(value < 0) _ASN_DECODE_STARVED; if(value >= (specs->extension ? 
specs->extension - 1 : specs->map_count)) _ASN_DECODE_FAILED; } else { if(!specs->extension) _ASN_DECODE_FAILED; /* * X.691, #10.6: normally small non-negative whole number; */ value = uper_get_nsnnwn(pd); if(value < 0) _ASN_DECODE_STARVED; value += specs->extension - 1; if(value >= specs->map_count) _ASN_DECODE_FAILED; } *native = specs->value2enum[value].nat_value; ASN_DEBUG("Decoded %s = %ld", td->name, *native); return rval; } static int NativeEnumerated__compar_value2enum(const void *ap, const void *bp) { const asn_INTEGER_enum_map_t *a = ap; const asn_INTEGER_enum_map_t *b = bp; if(a->nat_value == b->nat_value) return 0; if(a->nat_value < b->nat_value) return -1; return 1; } asn_enc_rval_t NativeEnumerated_encode_uper(asn_TYPE_descriptor_t *td, asn_per_constraints_t *constraints, void *sptr, asn_per_outp_t *po) { asn_INTEGER_specifics_t *specs = (asn_INTEGER_specifics_t *)td->specifics; asn_enc_rval_t er; long native, value; asn_per_constraint_t *ct; int inext = 0; asn_INTEGER_enum_map_t key; asn_INTEGER_enum_map_t *kf; if(!sptr) _ASN_ENCODE_FAILED; if(!specs) _ASN_ENCODE_FAILED; if(constraints) ct = &constraints->value; else if(td->per_constraints) ct = &td->per_constraints->value; else _ASN_ENCODE_FAILED; /* Mandatory! */ ASN_DEBUG("Encoding %s as NativeEnumerated", td->name); er.encoded = 0; native = *(long *)sptr; if(native < 0) _ASN_ENCODE_FAILED; key.nat_value = native; kf = bsearch(&key, specs->value2enum, specs->map_count, sizeof(key), NativeEnumerated__compar_value2enum); if(!kf) { ASN_DEBUG("No element corresponds to %ld", native); _ASN_ENCODE_FAILED; } value = kf - specs->value2enum; if(ct->range_bits >= 0) { int cmpWith = specs->extension ? 
specs->extension - 1 : specs->map_count; if(value >= cmpWith) inext = 1; } if(ct->flags & APC_EXTENSIBLE) { if(per_put_few_bits(po, inext, 0)) _ASN_ENCODE_FAILED; ct = 0; } else if(inext) { _ASN_ENCODE_FAILED; } if(ct && ct->range_bits >= 0) { if(per_put_few_bits(po, value, ct->range_bits)) _ASN_ENCODE_FAILED; _ASN_ENCODED_OK(er); } if(!specs->extension) _ASN_ENCODE_FAILED; /* * X.691, #10.6: normally small non-negative whole number; */ if(uper_put_nsnnwn(po, value - (specs->extension - 1))) _ASN_ENCODE_FAILED; _ASN_ENCODED_OK(er); }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> <ItemGroup> <Filter Include="源文件"> <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier> <Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions> </Filter> <Filter Include="头文件"> <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier> <Extensions>h;hh;hpp;hxx;hm;inl;inc;xsd</Extensions> </Filter> <Filter Include="资源文件"> <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier> <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions> </Filter> </ItemGroup> <ItemGroup> <ClCompile Include="win32_main.c"> <Filter>源文件</Filter> </ClCompile> </ItemGroup> </Project>
{ "pile_set_name": "Github" }
<HTML> <HEAD> <meta charset="UTF-8"> <title>MediaFile.name - tock</title> <link rel="stylesheet" href="../../../style.css"> </HEAD> <BODY> <a href="../../index.html">tock</a>&nbsp;/&nbsp;<a href="../index.html">ai.tock.bot.connector.media</a>&nbsp;/&nbsp;<a href="index.html">MediaFile</a>&nbsp;/&nbsp;<a href="./name.html">name</a><br/> <br/> <h1>name</h1> <a name="ai.tock.bot.connector.media.MediaFile$name"></a> <code><span class="keyword">val </span><span class="identifier">name</span><span class="symbol">: </span><a href="https://kotlinlang.org/api/latest/jvm/stdlib/kotlin/-string/index.html"><span class="identifier">String</span></a></code> <a href="https://github.com/theopenconversationkit/tock/blob/master/bot/engine/src/main/kotlin/connector/media/MediaFile.kt#L28">(source)</a> </BODY> </HTML>
{ "pile_set_name": "Github" }
import {Feature} from '../feature'; import {createElm, createText, elm, removeElm} from '../dom'; import {addEvt, targetEvt, removeEvt} from '../event'; import {NONE} from '../const'; import {root} from '../root'; import {isEmpty, isNull} from '../types'; import {defaultsStr, defaultsNb} from '../settings'; import {RIGHT} from './toolbar'; const WIKI_URL = 'https://github.com/koalyptus/TableFilter/wiki/' + '4.-Filter-operators'; const WEBSITE_URL = 'https://www.tablefilter.com/'; /** * Help UI component */ export class Help extends Feature { /** * Creates an instance of Help * @param {TableFilter} tf TableFilter instance */ constructor(tf) { super(tf, Help); let f = this.config.help_instructions || {}; /** * ID of main custom container element * @type {String} */ this.tgtId = defaultsStr(f.target_id, null); /** * ID of custom container element for instructions * @type {String} */ this.contTgtId = defaultsStr(f.container_target_id, null); /** * Instructions text (accepts HTML) * @type {String} */ this.instrText = !isEmpty(f.text) ? f.text : 'Use the filters above each column to filter and limit table ' + 'data. 
Advanced searches can be performed by using the following ' + 'operators: <br /><b>&lt;</b>, <b>&lt;=</b>, <b>&gt;</b>, ' + '<b>&gt;=</b>, <b>=</b>, <b>*</b>, <b>!</b>, <b>{</b>, <b>}</b>, ' + '<b>||</b>,<b>&amp;&amp;</b>, <b>[empty]</b>, <b>[nonempty]</b>, ' + '<b>rgx:</b><br/><a href="' + WIKI_URL + '" target="_blank">' + 'Learn more</a><hr/>'; /** * Instructions HTML * @type {String} */ this.instrHtml = defaultsStr(f.html, null); /** * Help button text ('?') * @type {String} */ this.btnText = defaultsStr(f.btn_text, '?'); /** * Custom help button HTML * @type {String} */ this.btnHtml = defaultsStr(f.btn_html, null); /** * Css class for help button * @type {String} */ this.btnCssClass = defaultsStr(f.btn_css_class, 'helpBtn'); /** * Css class for help container element * @type {String} */ this.contCssClass = defaultsStr(f.container_css_class, 'helpCont'); /** * Button DOM element * @type {DOMElement} */ this.btn = null; /** * Help container DOM element * @type {DOMElement} */ this.cont = null; /** * Adjust container left position when table's horizontal scroll is * on, typically when `responsive` option is enabled. 
* @type {Number} * @defaultValue 25 */ this.contAdjustLeftPosition = defaultsNb(f.container_adjust_left_position, 25); /** * Bound mouseup wrapper * @private */ this.boundMouseup = null; /** * Default HTML appended to instructions text * @type {String} */ this.defaultHtml = '<div class="helpFooter"><h4>TableFilter ' + 'v' + tf.version + '</h4>' + '<a href="' + WEBSITE_URL + '" target="_blank">' + WEBSITE_URL + '</a>' + '<br/><span>&copy;2015-' + tf.year + ' {AUTHOR}</span>' + '<div align="center" style="margin-top:8px;">' + '<a href="javascript:void(0);" class="close">Close</a></div></div>'; /** * Default position in toolbar ('left'|'center'|'right') * @type {String} */ this.toolbarPosition = defaultsStr(f.toolbar_position, RIGHT); this.emitter.on(['init-help'], () => this.init()); } /** * Mouse-up event handler handling popup auto-close behaviour * @private */ onMouseup(evt) { let targetElm = targetEvt(evt); while (targetElm && targetElm !== this.cont && targetElm !== this.btn) { targetElm = targetElm.parentNode; } if (targetElm !== this.cont && targetElm !== this.btn) { this.toggle(); } return; } /** * Initialise Help instance */ init() { if (this.initialized) { return; } this.emitter.emit('initializing-feature', this, !isNull(this.tgtId)); let tf = this.tf; let btn = createElm('span'); let cont = createElm('div'); this.boundMouseup = this.onMouseup.bind(this); //help button is added to defined element let targetEl = !this.tgtId ? tf.feature('toolbar').container(this.toolbarPosition) : elm(this.tgtId); targetEl.appendChild(btn); let divContainer = !this.contTgtId ? 
btn : elm(this.contTgtId); if (!this.btnHtml) { divContainer.appendChild(cont); let helplink = createElm('a', ['href', 'javascript:void(0);']); helplink.className = this.btnCssClass; helplink.appendChild(createText(this.btnText)); btn.appendChild(helplink); addEvt(helplink, 'click', () => this.toggle()); } else { btn.innerHTML = this.btnHtml; let helpEl = btn.firstChild; addEvt(helpEl, 'click', () => this.toggle()); divContainer.appendChild(cont); } if (!this.instrHtml) { cont.innerHTML = this.instrText; cont.className = this.contCssClass; } else { if (this.contTgtId) { divContainer.appendChild(cont); } cont.innerHTML = this.instrHtml; if (!this.contTgtId) { cont.className = this.contCssClass; } } cont.innerHTML += this.defaultHtml; addEvt(cont, 'click', () => this.toggle()); this.cont = cont; this.btn = btn; /** @inherited */ this.initialized = true; this.emitter.emit('feature-initialized', this); } /** * Toggle help pop-up */ toggle() { // check only if explicitily disabled as in this case undefined // signifies the help feature is enabled by default if (!this.isEnabled()) { return; } // ensure mouseup event handler is removed removeEvt(root, 'mouseup', this.boundMouseup); let divDisplay = this.cont.style.display; if (divDisplay === '' || divDisplay === NONE) { this.cont.style.display = 'inline'; // if table element has an horizontal scrollbar adjust container // left position accordingly if (this.tf.dom().scrollLeft > 0) { this.cont.style.left = `${ this.btn.offsetLeft - this.tf.dom().scrollLeft + this.contAdjustLeftPosition }px`; } addEvt(root, 'mouseup', this.boundMouseup); } else { this.cont.style.display = NONE; this.cont.style.left = ''; } } /** * Remove help UI */ destroy() { if (!this.initialized) { return; } removeElm(this.btn); this.btn = null; removeElm(this.cont); this.cont = null; this.boundMouseup = null; this.initialized = false; } } // TODO: remove as soon as feature name is fixed Help.meta = {alwaysInstantiate: true};
{ "pile_set_name": "Github" }
package utils.string { /** * Specifies whether the specified string is either non-null, or contains * characters (i.e. length is greater that 0) * * @param s The string which is being checked for a value * * @langversion ActionScript 3.0 * @playerversion Flash 9.0 * @tiptext */ public function stringHasValue(s:String):Boolean { return (s != null && s.length > 0); } }
{ "pile_set_name": "Github" }
### Example 1: {{ Add title here }} ```powershell PS C:\> {{ Add code here }} {{ Add output here }} ``` {{ Add description here }} ### Example 2: {{ Add title here }} ```powershell PS C:\> {{ Add code here }} {{ Add output here }} ``` {{ Add description here }}
{ "pile_set_name": "Github" }
/****************************************************************************** * $Id: tif_ovrcache.c,v 1.9 2010-06-08 18:55:15 bfriesen Exp $ * * Project: TIFF Overview Builder * Purpose: Library functions to maintain two rows of tiles or two strips * of data for output overviews as an output cache. * Author: Frank Warmerdam, warmerdam@pobox.com * ****************************************************************************** * Copyright (c) 2000, Frank Warmerdam * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. ****************************************************************************** */ #include "tiffiop.h" #include "tif_ovrcache.h" #include <assert.h> /************************************************************************/ /* TIFFCreateOvrCache() */ /* */ /* Create an overview cache to hold two rows of blocks from an */ /* existing TIFF directory. 
*/ /************************************************************************/ TIFFOvrCache *TIFFCreateOvrCache( TIFF *hTIFF, toff_t nDirOffset ) { TIFFOvrCache *psCache; toff_t nBaseDirOffset; psCache = (TIFFOvrCache *) _TIFFmalloc(sizeof(TIFFOvrCache)); psCache->nDirOffset = nDirOffset; psCache->hTIFF = hTIFF; /* -------------------------------------------------------------------- */ /* Get definition of this raster from the TIFF file itself. */ /* -------------------------------------------------------------------- */ nBaseDirOffset = TIFFCurrentDirOffset( psCache->hTIFF ); TIFFSetSubDirectory( hTIFF, nDirOffset ); TIFFGetField( hTIFF, TIFFTAG_IMAGEWIDTH, &(psCache->nXSize) ); TIFFGetField( hTIFF, TIFFTAG_IMAGELENGTH, &(psCache->nYSize) ); TIFFGetField( hTIFF, TIFFTAG_BITSPERSAMPLE, &(psCache->nBitsPerPixel) ); TIFFGetField( hTIFF, TIFFTAG_SAMPLESPERPIXEL, &(psCache->nSamples) ); TIFFGetField( hTIFF, TIFFTAG_PLANARCONFIG, &(psCache->nPlanarConfig) ); if( !TIFFIsTiled( hTIFF ) ) { TIFFGetField( hTIFF, TIFFTAG_ROWSPERSTRIP, &(psCache->nBlockYSize) ); psCache->nBlockXSize = psCache->nXSize; psCache->nBytesPerBlock = TIFFStripSize(hTIFF); psCache->bTiled = FALSE; } else { TIFFGetField( hTIFF, TIFFTAG_TILEWIDTH, &(psCache->nBlockXSize) ); TIFFGetField( hTIFF, TIFFTAG_TILELENGTH, &(psCache->nBlockYSize) ); psCache->nBytesPerBlock = TIFFTileSize(hTIFF); psCache->bTiled = TRUE; } /* -------------------------------------------------------------------- */ /* Compute some values from this. 
*/ /* -------------------------------------------------------------------- */ psCache->nBlocksPerRow = (psCache->nXSize + psCache->nBlockXSize - 1) / psCache->nBlockXSize; psCache->nBlocksPerColumn = (psCache->nYSize + psCache->nBlockYSize - 1) / psCache->nBlockYSize; if (psCache->nPlanarConfig == PLANARCONFIG_SEPARATE) psCache->nBytesPerRow = psCache->nBytesPerBlock * psCache->nBlocksPerRow * psCache->nSamples; else psCache->nBytesPerRow = psCache->nBytesPerBlock * psCache->nBlocksPerRow; /* -------------------------------------------------------------------- */ /* Allocate and initialize the data buffers. */ /* -------------------------------------------------------------------- */ psCache->pabyRow1Blocks = (unsigned char *) _TIFFmalloc(psCache->nBytesPerRow); psCache->pabyRow2Blocks = (unsigned char *) _TIFFmalloc(psCache->nBytesPerRow); if( psCache->pabyRow1Blocks == NULL || psCache->pabyRow2Blocks == NULL ) { TIFFErrorExt( hTIFF->tif_clientdata, hTIFF->tif_name, "Can't allocate memory for overview cache." ); /* TODO: use of TIFFError is inconsistent with use of fprintf in addtiffo.c, sort out */ return NULL; } _TIFFmemset( psCache->pabyRow1Blocks, 0, psCache->nBytesPerRow ); _TIFFmemset( psCache->pabyRow2Blocks, 0, psCache->nBytesPerRow ); psCache->nBlockOffset = 0; TIFFSetSubDirectory( psCache->hTIFF, nBaseDirOffset ); return psCache; } /************************************************************************/ /* TIFFWriteOvrRow() */ /* */ /* Write one entire row of blocks (row 1) to the tiff file, and */ /* then rotate the block buffers, essentially moving things */ /* down by one block. 
*/ /************************************************************************/ static void TIFFWriteOvrRow( TIFFOvrCache * psCache ) { int nRet, iTileX, iTileY = psCache->nBlockOffset; unsigned char *pabyData; toff_t nBaseDirOffset; uint32 RowsInStrip; /* -------------------------------------------------------------------- */ /* If the output cache is multi-byte per sample, and the file */ /* being written to is of a different byte order than the current */ /* platform, we will need to byte swap the data. */ /* -------------------------------------------------------------------- */ if( TIFFIsByteSwapped(psCache->hTIFF) ) { if( psCache->nBitsPerPixel == 16 ) TIFFSwabArrayOfShort( (uint16 *) psCache->pabyRow1Blocks, (psCache->nBytesPerBlock * psCache->nSamples) / 2 ); else if( psCache->nBitsPerPixel == 32 ) TIFFSwabArrayOfLong( (uint32 *) psCache->pabyRow1Blocks, (psCache->nBytesPerBlock * psCache->nSamples) / 4 ); else if( psCache->nBitsPerPixel == 64 ) TIFFSwabArrayOfDouble( (double *) psCache->pabyRow1Blocks, (psCache->nBytesPerBlock * psCache->nSamples) / 8 ); } /* -------------------------------------------------------------------- */ /* Record original directory position, so we can restore it at */ /* end. */ /* -------------------------------------------------------------------- */ nBaseDirOffset = TIFFCurrentDirOffset( psCache->hTIFF ); nRet = TIFFSetSubDirectory( psCache->hTIFF, psCache->nDirOffset ); assert( nRet == 1 ); /* -------------------------------------------------------------------- */ /* Write blocks to TIFF file. 
*/ /* -------------------------------------------------------------------- */ for( iTileX = 0; iTileX < psCache->nBlocksPerRow; iTileX++ ) { int nTileID; if (psCache->nPlanarConfig == PLANARCONFIG_SEPARATE) { int iSample; for( iSample = 0; iSample < psCache->nSamples; iSample++ ) { pabyData = TIFFGetOvrBlock( psCache, iTileX, iTileY, iSample ); if( psCache->bTiled ) { nTileID = TIFFComputeTile( psCache->hTIFF, iTileX * psCache->nBlockXSize, iTileY * psCache->nBlockYSize, 0, (tsample_t) iSample ); TIFFWriteEncodedTile( psCache->hTIFF, nTileID, pabyData, TIFFTileSize(psCache->hTIFF) ); } else { nTileID = TIFFComputeStrip( psCache->hTIFF, iTileY * psCache->nBlockYSize, (tsample_t) iSample ); RowsInStrip=psCache->nBlockYSize; if ((iTileY+1)*psCache->nBlockYSize>psCache->nYSize) RowsInStrip=psCache->nYSize-iTileY*psCache->nBlockYSize; TIFFWriteEncodedStrip( psCache->hTIFF, nTileID, pabyData, TIFFVStripSize(psCache->hTIFF,RowsInStrip) ); } } } else { pabyData = TIFFGetOvrBlock( psCache, iTileX, iTileY, 0 ); if( psCache->bTiled ) { nTileID = TIFFComputeTile( psCache->hTIFF, iTileX * psCache->nBlockXSize, iTileY * psCache->nBlockYSize, 0, 0 ); TIFFWriteEncodedTile( psCache->hTIFF, nTileID, pabyData, TIFFTileSize(psCache->hTIFF) ); } else { nTileID = TIFFComputeStrip( psCache->hTIFF, iTileY * psCache->nBlockYSize, 0 ); RowsInStrip=psCache->nBlockYSize; if ((iTileY+1)*psCache->nBlockYSize>psCache->nYSize) RowsInStrip=psCache->nYSize-iTileY*psCache->nBlockYSize; TIFFWriteEncodedStrip( psCache->hTIFF, nTileID, pabyData, TIFFVStripSize(psCache->hTIFF,RowsInStrip) ); } } } /* TODO: add checks on error status return of TIFFWriteEncodedTile and TIFFWriteEncodedStrip */ /* -------------------------------------------------------------------- */ /* Rotate buffers. 
*/ /* -------------------------------------------------------------------- */ pabyData = psCache->pabyRow1Blocks; psCache->pabyRow1Blocks = psCache->pabyRow2Blocks; psCache->pabyRow2Blocks = pabyData; _TIFFmemset( pabyData, 0, psCache->nBytesPerRow ); psCache->nBlockOffset++; /* -------------------------------------------------------------------- */ /* Restore access to original directory. */ /* -------------------------------------------------------------------- */ TIFFFlush( psCache->hTIFF ); /* TODO: add checks on error status return of TIFFFlush */ TIFFSetSubDirectory( psCache->hTIFF, nBaseDirOffset ); /* TODO: add checks on error status return of TIFFSetSubDirectory */ } /************************************************************************/ /* TIFFGetOvrBlock() */ /************************************************************************/ /* TODO: make TIFF_Downsample handle iSample offset, so that we can * do with a single TIFFGetOvrBlock and no longer need TIFFGetOvrBlock_Subsampled */ unsigned char *TIFFGetOvrBlock( TIFFOvrCache *psCache, int iTileX, int iTileY, int iSample ) { int nRowOffset; if( iTileY > psCache->nBlockOffset + 1 ) TIFFWriteOvrRow( psCache ); assert( iTileX >= 0 && iTileX < psCache->nBlocksPerRow ); assert( iTileY >= 0 && iTileY < psCache->nBlocksPerColumn ); assert( iTileY >= psCache->nBlockOffset && iTileY < psCache->nBlockOffset+2 ); assert( iSample >= 0 && iSample < psCache->nSamples ); if (psCache->nPlanarConfig == PLANARCONFIG_SEPARATE) nRowOffset = ((iTileX * psCache->nSamples) + iSample) * psCache->nBytesPerBlock; else nRowOffset = iTileX * psCache->nBytesPerBlock + (psCache->nBitsPerPixel + 7) / 8 * iSample; if( iTileY == psCache->nBlockOffset ) return psCache->pabyRow1Blocks + nRowOffset; else return psCache->pabyRow2Blocks + nRowOffset; } /************************************************************************/ /* TIFFGetOvrBlock_Subsampled() */ /************************************************************************/ 
unsigned char *TIFFGetOvrBlock_Subsampled( TIFFOvrCache *psCache, int iTileX, int iTileY ) { int nRowOffset; if( iTileY > psCache->nBlockOffset + 1 ) TIFFWriteOvrRow( psCache ); assert( iTileX >= 0 && iTileX < psCache->nBlocksPerRow ); assert( iTileY >= 0 && iTileY < psCache->nBlocksPerColumn ); assert( iTileY >= psCache->nBlockOffset && iTileY < psCache->nBlockOffset+2 ); assert( psCache->nPlanarConfig != PLANARCONFIG_SEPARATE ); nRowOffset = iTileX * psCache->nBytesPerBlock; if( iTileY == psCache->nBlockOffset ) return psCache->pabyRow1Blocks + nRowOffset; else return psCache->pabyRow2Blocks + nRowOffset; } /************************************************************************/ /* TIFFDestroyOvrCache() */ /************************************************************************/ void TIFFDestroyOvrCache( TIFFOvrCache * psCache ) { while( psCache->nBlockOffset < psCache->nBlocksPerColumn ) TIFFWriteOvrRow( psCache ); _TIFFfree( psCache->pabyRow1Blocks ); _TIFFfree( psCache->pabyRow2Blocks ); _TIFFfree( psCache ); } /* * Local Variables: * mode: c * c-basic-offset: 8 * fill-column: 78 * End: */
{ "pile_set_name": "Github" }
# 注意,slice 和数组是不同的类型,但它们通过 `fmt.Println` 打印的输出结果是类似的。 $ go run slices.go emp: [ ] set: [a b c] get: c len: 3 apd: [a b c d e f] cpy: [a b c d e f] sl1: [c d e] sl2: [a b c d e] sl3: [c d e f] dcl: [g h i] 2d: [[0] [1 2] [2 3 4]] # 看看这个由 Go 团队撰写的一篇[很棒的博文](http://blog.golang.org/2011/01/go-slices-usage-and-internals.html),了解更多关于 Go 中 slice 的设计和实现细节。 # 现在,我们已经学习了数组和 slice,接下来我们将学习 Go 中的另一个重要的内建数据类型:map。
{ "pile_set_name": "Github" }
// (C) Copyright Tobias Schwinger // // Use modification and distribution are subject to the boost Software License, // Version 1.0. (See http://www.boost.org/LICENSE_1_0.txt). //------------------------------------------------------------------------------ #ifndef BOOST_FT_DETAIL_CLASSIFIER_HPP_INCLUDED #define BOOST_FT_DETAIL_CLASSIFIER_HPP_INCLUDED #include <boost/type.hpp> #include <boost/config.hpp> #include <boost/type_traits/config.hpp> #include <boost/type_traits/is_reference.hpp> #include <boost/type_traits/add_reference.hpp> #include <boost/function_types/config/config.hpp> #include <boost/function_types/property_tags.hpp> namespace boost { namespace function_types { namespace detail { template<typename T> struct classifier; template<std::size_t S> struct char_array { typedef char (&type)[S]; }; template<bits_t Flags, bits_t CCID, std::size_t Arity> struct encode_charr { typedef typename char_array< ::boost::function_types::detail::encode_charr_impl<Flags,CCID,Arity>::value >::type type; }; char BOOST_TT_DECL classifier_impl(...); #define BOOST_FT_variations BOOST_FT_function|BOOST_FT_pointer|\ BOOST_FT_member_pointer #define BOOST_FT_type_function(cc,name) BOOST_FT_SYNTAX( \ R BOOST_PP_EMPTY,BOOST_PP_LPAREN,cc,* BOOST_PP_EMPTY,name,BOOST_PP_RPAREN) #define BOOST_FT_type_function_pointer(cc,name) BOOST_FT_SYNTAX( \ R BOOST_PP_EMPTY,BOOST_PP_LPAREN,cc,** BOOST_PP_EMPTY,name,BOOST_PP_RPAREN) #define BOOST_FT_type_member_function_pointer(cc,name) BOOST_FT_SYNTAX( \ R BOOST_PP_EMPTY,BOOST_PP_LPAREN,cc,T0::** BOOST_PP_EMPTY,name,BOOST_PP_RPAREN) #define BOOST_FT_al_path boost/function_types/detail/classifier_impl #include <boost/function_types/detail/pp_loop.hpp> template<typename T> struct classifier_bits { static typename boost::add_reference<T>::type tester; BOOST_STATIC_CONSTANT(bits_t,value = (bits_t)sizeof( boost::function_types::detail::classifier_impl(& tester) )-1); }; template<typename T> struct classifier { typedef detail::constant< 
::boost::function_types::detail::decode_bits< ::boost::function_types::detail::classifier_bits<T>::value >::tag_bits > bits; typedef detail::full_mask mask; typedef detail::constant< ::boost::function_types::detail::decode_bits< ::boost::function_types::detail::classifier_bits<T>::value >::arity > function_arity; }; } } } // namespace ::boost::function_types::detail #endif
{ "pile_set_name": "Github" }
MORSE execution loop ==================== The following figure depicts Morse's general behaviour: .. image:: ../../media/simulation_main_loop_overview.svg :class: full_image :width: 600 :align: center After the initialisation phase described :doc:`here <entry_point>`, the simulator goes in this big loop, including the execution of each sensor, each actuator (depending their frequencies), and the handling of services. It is important to understand that, during each loop execution, every sensor and actuator is called with the same graphical and physical context (robot positions, sensor position, etc.). .. warning:: The execution order between a scene's various sensors and actuators is not defined. So, do not rely on any particular order when implementing the behaviour of your component. .. warning:: If the behaviour of one component takes too much time, it is the whole simulation loop which will slow down (including the physics). Make sure your components are fast enough. It is possible to rewrite the logic in C if the Python version is too slow. Behaviour of a sensor --------------------- When Blender calls the :py:meth:`morse.core.sensor.Sensor.action` method, the following things happen: #. the sensor's position is updated #. the sensor's overridden ``default_action`` is called #. each of the ``output_modifiers`` functions is applied in order (to modify the sensor's content) #. each of the ``output_functions`` functions is applied in order (to output the sensor's content to different clients) Actuator Behaviour ------------------ When Blender calls the :py:meth:`morse.core.actuator.Actuator.action` method, the following things happen: #. each of the ``input_functions`` functions is applied in order (to receive input from different clients) #. each of the ``input_modifiers`` functions is applied in order (if needed) #. the actuator's overridden ``default_action`` method is called .. 
warning:: Although it is possible for an actuator to have multiples client, in practice, the behaviour is such cases is not well defined, so it is better to make sure that you have only one client for each actuator. Service handling ---------------- This part is explained in detail :doc:`here <services_internal>`.
{ "pile_set_name": "Github" }
/* * Copyright (c) 2003, 2005, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ #ifndef BlittingIncludesDefined #define BlittingIncludesDefined #include "jni.h" #include "GlyphImageRef.h" #include "SurfaceData.h" #ifdef __cplusplus extern "C" { #endif typedef struct { int numGlyphs; ImageRef *glyphs; } GlyphBlitVector; extern jint RefineBounds(GlyphBlitVector *gbv, SurfaceDataBounds *bounds); extern GlyphBlitVector* setupBlitVector(JNIEnv *env, jobject glyphlist, jint fromGlyph, jint toGlyph); extern GlyphBlitVector* setupLCDBlitVector(JNIEnv *env, jobject glyphlist, jint fromGlyph, jint toGlyph); #ifdef __cplusplus } #endif #endif
{ "pile_set_name": "Github" }
{ "session": "lupppTest", "version_major": 1, "version_minor": 0, "version_patch": 0, "master": { "fader": 0.780000, "bpm": 120, "sceneNames": ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10"] }, "tracks": [{ "ID": 0, "name": "Track 1", "fader": 0.780000, "side": 0, "post": 0, "reverb": 0, "clips": ["", "", "", "", "", "", "", "", "", ""] }, { "ID": 1, "name": "Track 2", "fader": 0.780000, "side": 0, "post": 0, "reverb": 0, "clips": ["", "", "", "", "", "", "", "", "", ""] }, { "ID": 2, "name": "Track 3", "fader": 0.780000, "side": 0, "post": 0, "reverb": 0, "clips": ["", "", "", "", "", "", "", "", "", ""] }, { "ID": 3, "name": "Track 4", "fader": 0.780000, "side": 0, "post": 0, "reverb": 0, "clips": ["", "", "", "", "", "", "", "", "", ""] }, { "ID": 4, "name": "Track 5", "fader": 0.780000, "side": 0, "post": 0, "reverb": 0, "clips": ["", "", "", "", "", "", "", "", "", ""] }, { "ID": 5, "name": "Track 6", "fader": 0.780000, "side": 0, "post": 0, "reverb": 0, "clips": ["", "", "", "", "", "", "", "", "", ""] }, { "ID": 6, "name": "Track 7", "fader": 0.780000, "side": 0, "post": 0, "reverb": 0, "clips": ["", "", "", "", "", "", "", "", "", ""] }, { "ID": 7, "name": "Track 8", "fader": 0.780000, "side": 0, "post": 0, "reverb": 0, "clips": ["", "", "", "", "", "", "", "", "", ""] }] }
{ "pile_set_name": "Github" }
//-----------------------------------------------------------------------------
// Copyright (c) 2013 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------

// Script-visible console methods for NetObject. The ConsoleMethod* macros
// generate the TorqueScript bindings; argv[0]/argv[1] are the method and
// object, so the first user argument is argv[2].
ConsoleMethodGroupBeginWithDocs(NetObject, SimObject)

/*! Use the scopeToClient method to force this object to be SCOPE_ALWAYS on client.
    When an object is SCOPE_ALWAYS it is always ghosted. Therefore, if you have an
    object that should always be ghosted to a client, use this method.
    @param client The ID of the client to force this object to be SCOPE_ALWAYS for.
    @return No return value.
    @sa clearScopeToClient, setScopeAlways
*/
ConsoleMethodWithDocs(NetObject,scopeToClient, ConsoleVoid,3,3, ( client ))
{
   // Resolve the connection SimObject by id/name; bail with an error if absent.
   NetConnection *conn;
   if(!Sim::findObject(argv[2], conn))
   {
      Con::errorf(ConsoleLogEntry::General, "NetObject::scopeToClient: Couldn't find connection %s", argv[2]);
      return;
   }
   conn->objectLocalScopeAlways(object);
}

/*! Use the clearScopeToClient method to undo the effects of a previous call to
    scopeToClient.
    @param client The ID of the client to stop forcing scoping this object for.
    @return No return value.
    @sa scopeToClient
*/
ConsoleMethodWithDocs(NetObject,clearScopeToClient, ConsoleVoid,3,3, ( client ))
{
   NetConnection *conn;
   if(!Sim::findObject(argv[2], conn))
   {
      Con::errorf(ConsoleLogEntry::General, "NetObject::clearScopeToClient: Couldn't find connection %s", argv[2]);
      return;
   }
   conn->objectLocalClearAlways(object);
}

/*! Use the setScopeAlways method to force an object to be SCOPE_ALWAYS for all
    clients. When an object is SCOPE_ALWAYS it is always ghosted. Therefore, if
    you have an object that should always be ghosted to all clients, use this
    method.
    @return No return value.
    @sa scopeToClient
*/
ConsoleMethodWithDocs(NetObject,setScopeAlways, ConsoleVoid,2,2, ())
{
   object->setScopeAlways();
}

/*! @return Returns the ghost ID of the object
*/
ConsoleMethodWithDocs( NetObject, getGhostID, ConsoleInt, 2, 2, ())
{
   return object->getNetIndex();
}

ConsoleMethodGroupEndWithDocs(NetObject)

// Flat C ABI exports for external (non-script) bindings; these mirror the
// console methods above but take resolved object pointers directly.
extern "C" {
   DLL_PUBLIC NetObject* NetObjectCreateInstance()
   {
      return new NetObject();
   }

   DLL_PUBLIC void NetObjectScopeToClient(NetObject* netObj, NetConnection* client)
   {
      client->objectLocalScopeAlways(netObj);
   }

   DLL_PUBLIC void NetObjectClearScopeToClient(NetObject* netObj, NetConnection* client)
   {
      client->objectLocalClearAlways(netObj);
   }

   DLL_PUBLIC void NetObjectSetScopeAlways(NetObject* netObj)
   {
      netObj->setScopeAlways();
   }

   DLL_PUBLIC int NetObjectGetGhostID(NetObject* netObj)
   {
      return netObj->getNetIndex();
   }
}
{ "pile_set_name": "Github" }
<?php
/**
 * Copyright © Magento, Inc. All rights reserved.
 * See COPYING.txt for license details.
 */
namespace Magento\Framework\Indexer;

use Magento\Framework\Indexer\Handler\DefaultHandler;
use Magento\Framework\ObjectManagerInterface;

/**
 * @api Instantiate save handler when implementing custom Indexer\Action
 */
class HandlerPool
{
    /**
     * Object manager used to resolve handler class names into instances.
     *
     * @var ObjectManagerInterface
     */
    protected $objectManager;

    /**
     * Handler served when no explicit class is requested.
     *
     * @var HandlerInterface
     */
    protected $defaultHandler;

    /**
     * @param ObjectManagerInterface $objectManager
     * @param DefaultHandler $defaultHandler
     */
    public function __construct(
        ObjectManagerInterface $objectManager,
        DefaultHandler $defaultHandler
    ) {
        $this->objectManager = $objectManager;
        $this->defaultHandler = $defaultHandler;
    }

    /**
     * Resolve a handler instance for the given class name.
     *
     * Falls back to the default handler when no class is given; rejects
     * classes that do not implement HandlerInterface.
     *
     * @param string $handlerClass
     * @throws \InvalidArgumentException
     * @return HandlerInterface
     */
    public function get($handlerClass = null)
    {
        if ($handlerClass === null) {
            return $this->defaultHandler;
        }

        $instance = $this->objectManager->get($handlerClass);
        if ($instance instanceof HandlerInterface) {
            return $instance;
        }

        throw new \InvalidArgumentException(
            $handlerClass . ' doesn\'t implement \Magento\Framework\Indexer\HandlerInterface'
        );
    }
}
{ "pile_set_name": "Github" }
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package mergepatch import ( "fmt" "reflect" "github.com/davecgh/go-spew/spew" "sigs.k8s.io/yaml" ) // PreconditionFunc asserts that an incompatible change is not present within a patch. type PreconditionFunc func(interface{}) bool // RequireKeyUnchanged returns a precondition function that fails if the provided key // is present in the patch (indicating that its value has changed). func RequireKeyUnchanged(key string) PreconditionFunc { return func(patch interface{}) bool { patchMap, ok := patch.(map[string]interface{}) if !ok { return true } // The presence of key means that its value has been changed, so the test fails. _, ok = patchMap[key] return !ok } } // RequireMetadataKeyUnchanged creates a precondition function that fails // if the metadata.key is present in the patch (indicating its value // has changed). 
func RequireMetadataKeyUnchanged(key string) PreconditionFunc { return func(patch interface{}) bool { patchMap, ok := patch.(map[string]interface{}) if !ok { return true } patchMap1, ok := patchMap["metadata"] if !ok { return true } patchMap2, ok := patchMap1.(map[string]interface{}) if !ok { return true } _, ok = patchMap2[key] return !ok } } func ToYAMLOrError(v interface{}) string { y, err := toYAML(v) if err != nil { return err.Error() } return y } func toYAML(v interface{}) (string, error) { y, err := yaml.Marshal(v) if err != nil { return "", fmt.Errorf("yaml marshal failed:%v\n%v\n", err, spew.Sdump(v)) } return string(y), nil } // HasConflicts returns true if the left and right JSON interface objects overlap with // different values in any key. All keys are required to be strings. Since patches of the // same Type have congruent keys, this is valid for multiple patch types. This method // supports JSON merge patch semantics. // // NOTE: Numbers with different types (e.g. int(0) vs int64(0)) will be detected as conflicts. // Make sure the unmarshaling of left and right are consistent (e.g. use the same library). 
func HasConflicts(left, right interface{}) (bool, error) {
	switch typedLeft := left.(type) {
	case map[string]interface{}:
		switch typedRight := right.(type) {
		case map[string]interface{}:
			// Both maps: only keys present on BOTH sides can conflict;
			// recurse into each shared key.
			for key, leftValue := range typedLeft {
				rightValue, ok := typedRight[key]
				if !ok {
					continue
				}
				if conflict, err := HasConflicts(leftValue, rightValue); err != nil || conflict {
					return conflict, err
				}
			}

			return false, nil
		default:
			// Map vs non-map is a structural conflict.
			return true, nil
		}
	case []interface{}:
		switch typedRight := right.(type) {
		case []interface{}:
			// Slices conflict unless lengths match and every element
			// pair is conflict-free (element order is significant).
			if len(typedLeft) != len(typedRight) {
				return true, nil
			}

			for i := range typedLeft {
				if conflict, err := HasConflicts(typedLeft[i], typedRight[i]); err != nil || conflict {
					return conflict, err
				}
			}

			return false, nil
		default:
			return true, nil
		}
	case string, float64, bool, int64, nil:
		// Scalars (the types json.Unmarshal produces, plus int64/nil):
		// conflict when the values -- including their dynamic types --
		// differ; see the NOTE above about mixed unmarshalers.
		return !reflect.DeepEqual(left, right), nil
	default:
		// Any other dynamic type is outside the JSON data model.
		return true, fmt.Errorf("unknown type: %v", reflect.TypeOf(left))
	}
}
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: fc84197643ebf584cb9871671f22e99b NativeFormatImporter: externalObjects: {} mainObjectFileID: 0 userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
// RUN: %clang_cc1 -fsyntax-only -verify -pedantic %s // expected-no-diagnostics // PR4287 #include <stdarg.h> char *foo = "test"; int test(char*,...); int test(fmt) char*fmt; { va_list ap; char*a; int x; va_start(ap,fmt); a=va_arg(ap,char*); x=(a!=foo); va_end(ap); return x; } void exit(); int main(argc,argv) int argc;char**argv; { exit(test("",foo)); }
{ "pile_set_name": "Github" }
/*
Copyright (c) 2015 VMware, Inc. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package mo

import (
	"context"
	"fmt"

	"github.com/vmware/govmomi/vim25/soap"
	"github.com/vmware/govmomi/vim25/types"
)

// Ancestors returns the entire ancestry tree of a specified managed object.
// The return value includes the root node and the specified object itself.
func Ancestors(ctx context.Context, rt soap.RoundTripper, pc, obj types.ManagedObjectReference) ([]ManagedEntity, error) {
	// Traverse 'parent' links recursively from obj, plus the special
	// 'parentVApp' link for VMs that live inside a VirtualApp.
	ospec := types.ObjectSpec{
		Obj: obj,
		SelectSet: []types.BaseSelectionSpec{
			&types.TraversalSpec{
				SelectionSpec: types.SelectionSpec{Name: "traverseParent"},
				Type:          "ManagedEntity",
				Path:          "parent",
				Skip:          types.NewBool(false),
				SelectSet: []types.BaseSelectionSpec{
					&types.SelectionSpec{Name: "traverseParent"},
				},
			},
			&types.TraversalSpec{
				SelectionSpec: types.SelectionSpec{},
				Type:          "VirtualMachine",
				Path:          "parentVApp",
				Skip:          types.NewBool(false),
				SelectSet: []types.BaseSelectionSpec{
					&types.SelectionSpec{Name: "traverseParent"},
				},
			},
		},
		Skip: types.NewBool(false),
	}

	// Collect only the properties needed to name and link each ancestor.
	pspec := []types.PropertySpec{
		{
			Type:    "ManagedEntity",
			PathSet: []string{"name", "parent"},
		},
		{
			Type:    "VirtualMachine",
			PathSet: []string{"parentVApp"},
		},
	}

	req := types.RetrieveProperties{
		This: pc,
		SpecSet: []types.PropertyFilterSpec{
			{
				ObjectSet: []types.ObjectSpec{ospec},
				PropSet:   pspec,
			},
		},
	}

	var ifaces []interface{}

	err := RetrievePropertiesForRequest(ctx, rt, req, &ifaces)
	if err != nil {
		return nil, err
	}

	var out []ManagedEntity

	// Build ancestry tree by iteratively finding a new child.
	// The server returns entities in no particular order, so each pass
	// scans for the entity whose parent is the last one appended.
	for len(out) < len(ifaces) {
		var find types.ManagedObjectReference

		if len(out) > 0 {
			find = out[len(out)-1].Self
		}

		// Find entity we're looking for given the last entity in the current tree.
		for _, iface := range ifaces {
			me := iface.(IsManagedEntity).GetManagedEntity()

			if me.Name == "" {
				// The types below have their own 'Name' field, so ManagedEntity.Name (me.Name) is empty.
				// We only hit this case when the 'obj' param is one of these types.
				// In most cases, 'obj' is a Folder so Name isn't collected in this call.
				switch x := iface.(type) {
				case Network:
					me.Name = x.Name
				case DistributedVirtualSwitch:
					me.Name = x.Name
				case DistributedVirtualPortgroup:
					me.Name = x.Name
				case OpaqueNetwork:
					me.Name = x.Name
				default:
					// ManagedEntity always has a Name, if we hit this point we missed a case above.
					panic(fmt.Sprintf("%#v Name is empty", me.Reference()))
				}
			}

			if me.Parent == nil {
				// Special case for VirtualMachine within VirtualApp,
				// unlikely to hit this other than via Finder.Element()
				switch x := iface.(type) {
				case VirtualMachine:
					me.Parent = x.ParentVApp
				}
			}

			if me.Parent == nil {
				// Root of the inventory: starts the output tree.
				out = append(out, me)
				break
			}

			if *me.Parent == find {
				out = append(out, me)
				break
			}
		}
	}

	return out, nil
}
{ "pile_set_name": "Github" }
// Load the Uint16Array polyfill module for its side effects, then
// re-export the constructor from the shared core-js registry.
require('../../modules/es6.typed.uint16-array');
var core = require('../../modules/_core');
module.exports = core.Uint16Array;
{ "pile_set_name": "Github" }
//
//     Generated by class-dump 3.5 (64 bit) (Debug version compiled Jun 9 2015 22:53:21).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2014 by Steve Nygard.
//

#import "TViewController.h"

@class NSString, TAirDropDiscoveryController;

// Reverse-engineered (class-dump) Objective-C++ interface for the AirDrop
// "legacy mode" button controller. NOTE(review): all semantics below are
// inferred from declaration names only -- confirm against the binary.
@interface TAirDropLegacyModeButtonController : TViewController
{
    // C++ smart-reference ivars (TNSRef) and a notification observer token.
    struct TNSRef<TAirDropDiscoveryController, void> _discoveryController;
    struct TNSRef<TAirDropLegacyModePopoverViewController, void> _popoverViewContoller;
    struct TNotificationCenterObserver _popoverWillCloseObserver;
}

// KVO dependency declaration for the buttonTitle property.
+ (id)keyPathsForValuesAffectingButtonTitle;
// C++ ivar construction/destruction hooks emitted by clang.
- (id).cxx_construct;
- (void).cxx_destruct;
- (void)popoverWillClose;
- (void)buttonPressed:(id)arg1;
@property(readonly, retain, nonatomic) NSString *buttonTitle; // @dynamic buttonTitle;
@property(readonly) TAirDropDiscoveryController *discoveryController;
- (void)initCommon;
@end
{ "pile_set_name": "Github" }
#!/bin/bash
# tester for other programs in pmu-tools
# PERF=... override perf binary
# NORES=1 don't check measurement results
# NOTE(review): $WRAP and $DCPU are expected from the caller's environment
# (or cpumap.sh) -- confirm; cpumap.sh also defines the ${cpus[...]} map
# used by the EVENTMAP tests below.

. ./cpumap.sh

set -e

PERF=${PERF:-perf}

failed() {
	echo FAILED
}

PATH=$PATH:.
# Any failing command (set -e + ERR/EXIT trap) prints FAILED.
trap failed ERR 0

set -x

# XXX cgroup untested

# Exercise the CSV post-processors against several perf-stat aggregation modes.
for args in "" "-A" "--per-socket" "--per-core" "-r2" ; do

# interval-normalize.py
${PERF} stat -I100 $args -a -x, -o x$$.csv sleep 1
$WRAP interval-normalize.py --error-exit < x$$.csv
$WRAP interval-normalize.py --error-exit < x$$.csv > y$$.csv
grep -vq PARSE-ERROR y$$.csv
if [ -z "$NORES" ] ; then
# Sanity-check that the standard default events survived normalization.
for i in branch-misses branches context-switches cycles instructions page-faults ; do
grep -q $i y$$.csv
done
fi
grep -vq PARSE-ERROR x$$.csv

# plot-normalized.py
$WRAP plot-normalized.py -o x$$-2.png y$$.csv

# interval-plot.py
$WRAP interval-plot.py x$$.csv -o x$$.png

done

DYGRAPHS=""
# original url http://dygraphs.com/1.0.1/dygraph-combined.js disappeared
if [ ! -r dygraph-combined.js ] && wget https://cdnjs.cloudflare.com/ajax/libs/dygraph/1.0.1/dygraph-combined.js ; then
DYGRAPHS=1
fi

# Exercise toplev.py output through the same post-processors.
for args in "-l2" "--all -v" "-l3 --single-thread" "--all -a -A"; do

FORCEHT=1 $WRAP toplev.py -v --force-cpu ${DCPU:-hsw} --node +CPU_Utilization -I 100 $args -o x$$.csv -x, ./workloads/BC1s
$WRAP toplev.py -v --force-cpu ${DCPU:-hsw} --node +CPU_Utilization -I 100 $args -o xn$$.csv -x, ./workloads/BC1s
$WRAP interval-normalize.py --error-exit < x$$.csv
$WRAP interval-normalize.py --error-exit < x$$.csv > y$$.csv
grep -vq PARSE-ERROR y$$.csv
[ -z "$NORES" ] && grep Frontend y$$.csv
$WRAP interval-normalize.py --normalize-cpu --error-exit < x$$.csv > yc$$.csv
[ -z "$NORES" ] && grep Frontend yc$$.csv
$WRAP interval-normalize.py --normalize-cpu --error-exit < xn$$.csv > yc$$.csv
[ -z "$NORES" ] && grep Frontend yc$$.csv
if grep -q CPUs x$$.csv ; then
$WRAP utilized.py x$$.csv -o y$$.csv
[ -z "$NORES" ] && grep Frontend y$$.csv
fi
if grep -q CPUs xn$$.csv ; then
$WRAP utilized.py xn$$.csv -o y$$.csv
[ -z "$NORES" ] && grep Frontend y$$.csv
fi

$WRAP interval-plot.py x$$.csv -o x$$.png

# plot-normalized.py
$WRAP plot-normalized.py -o x$$-2.png y$$.csv

# tl-serve.py
if [ -n "$DYGRAPHS" ] ; then
$WRAP tl-serve.py --gen tls$$ x$$.csv
rm -rf tls$$
fi

# tl-barplot.py
$WRAP tl-barplot.py x$$.csv -o x$$.png

rm x$$.png x$$-2.png

done

# Smoke-test the live server: background it, fetch once, then tear down.
$WRAP tl-serve.py x$$.csv &
sleep 1
unset http_proxy
curl http://localhost:9001 > /dev/null
kill %1
sleep 1
wait %1
rm x$$.csv xn$$.csv

# cputop.py
$WRAP cputop.py "socket == 0"
$WRAP cputop.py "thread == 0 and socket == 0"
$WRAP cputop.py "thread == 1" offline
$WRAP cputop.py offline online
# "True" must match every CPU: line count == online processor count.
[ $($WRAP cputop.py True | wc -l | cut -d ' ' -f 1) -eq $(getconf _NPROCESSORS_ONLN) ]

# list-events.py
EVENTMAP=${cpus[hsw]} $WRAP list-events.py > x$$.lst
[ $(wc -l x$$.lst | cut -d ' ' -f 1) -gt 20 ]
grep -qi rtm_retired.aborted x$$.lst
rm x$$.lst

# event-translate.py
EVENTMAP=${cpus[hsw]} $WRAP event-translate.py r4c9 | grep -q rtm_retired.aborted

$WRAP gen-dot.py simple > /dev/null
$WRAP gen-dot.py ivb_client_ratios > /dev/null

# untested: counterdiff.py
# may need network:
# untested: event_download.py
# need root:
# untested: msr.py
# untested: pci.py
# untested: event-rmap.py

# Disarm the failure trap before the normal exit.
trap "" ERR 0

echo SUCCEEDED
{ "pile_set_name": "Github" }
package com.springsource.petclinic.http.converter.json; import java.beans.PropertyEditor; import java.util.List; import java.util.Map; import org.springframework.beans.PropertyEditorRegistry; import org.springframework.http.HttpStatus; import org.springframework.validation.BindingResult; import org.springframework.validation.Errors; import org.springframework.validation.FieldError; import org.springframework.validation.ObjectError; import org.springframework.web.bind.annotation.ResponseStatus; import org.springframework.roo.addon.web.mvc.controller.annotations.http.converters.json.RooJSONBindingErrorException; @ResponseStatus(HttpStatus.UNPROCESSABLE_ENTITY) @RooJSONBindingErrorException public class BindingErrorException extends RuntimeException implements BindingResult { private static final long serialVersionUID = 3173335735776325694L; private final BindingResult bindingResult; public BindingResult getBindingResult() { return bindingResult; } public BindingErrorException(BindingResult bindingResult) { super(); this.bindingResult = bindingResult; } @Override public void reject(String errorCode, String defaultMessage) { // TODO Auto-generated method stub } @Override public void reject(String errorCode, Object[] errorArgs, String defaultMessage) { // TODO Auto-generated method stub } @Override public void rejectValue(String field, String errorCode) { // TODO Auto-generated method stub } @Override public void rejectValue(String field, String errorCode, String defaultMessage) { // TODO Auto-generated method stub } @Override public void rejectValue(String field, String errorCode, Object[] errorArgs, String defaultMessage) { // TODO Auto-generated method stub } @Override public void addAllErrors(Errors errors) { // TODO Auto-generated method stub } @Override public boolean hasErrors() { return this.bindingResult.hasErrors(); } @Override public int getErrorCount() { return this.bindingResult.getErrorCount(); } @Override public List<ObjectError> getAllErrors() { return 
this.bindingResult.getAllErrors(); } @Override public boolean hasGlobalErrors() { return this.bindingResult.hasGlobalErrors(); } @Override public int getGlobalErrorCount() { return this.bindingResult.getGlobalErrorCount(); } @Override public List<ObjectError> getGlobalErrors() { return this.bindingResult.getGlobalErrors(); } @Override public ObjectError getGlobalError() { return this.bindingResult.getGlobalError(); } @Override public boolean hasFieldErrors() { return this.bindingResult.hasFieldErrors(); } @Override public int getFieldErrorCount() { return this.bindingResult.getFieldErrorCount(); } @Override public List<FieldError> getFieldErrors() { return this.bindingResult.getFieldErrors(); } @Override public FieldError getFieldError() { return this.bindingResult.getFieldError(); } @Override public boolean hasFieldErrors(String field) { return this.bindingResult.hasFieldErrors(field); } @Override public int getFieldErrorCount(String field) { return this.bindingResult.getFieldErrorCount(); } @Override public List<FieldError> getFieldErrors(String field) { return this.bindingResult.getFieldErrors(field); } @Override public FieldError getFieldError(String field) { return this.bindingResult.getFieldError(field); } @Override public Object getFieldValue(String field) { return this.bindingResult.getFieldValue(field); } @Override public Class<?> getFieldType(String field) { return this.bindingResult.getFieldType(field); } @Override public Object getTarget() { return this.bindingResult.getTarget(); } @Override public Map<String, Object> getModel() { return this.bindingResult.getModel(); } @Override public Object getRawFieldValue(String field) { // TODO Auto-generated method stub return null; } @Override public PropertyEditor findEditor(String field, Class<?> valueType) { // TODO Auto-generated method stub return null; } @Override public PropertyEditorRegistry getPropertyEditorRegistry() { // TODO Auto-generated method stub return null; } @Override public void 
addError(ObjectError error) { // TODO Auto-generated method stub } @Override public String[] resolveMessageCodes(String errorCode) { return this.bindingResult.resolveMessageCodes(errorCode); } @Override public String[] resolveMessageCodes(String errorCode, String field) { return this.bindingResult.resolveMessageCodes(errorCode, field); } @Override public void recordSuppressedField(String field) { // TODO Auto-generated method stub } @Override public String[] getSuppressedFields() { return this.bindingResult.getSuppressedFields(); } @Override public String getObjectName() { // TODO Auto-generated method stub return null; } @Override public void setNestedPath(String nestedPath) { // TODO Auto-generated method stub } @Override public String getNestedPath() { // TODO Auto-generated method stub return null; } @Override public void pushNestedPath(String subPath) { // TODO Auto-generated method stub } @Override public void popNestedPath() throws IllegalStateException { // TODO Auto-generated method stub } @Override public void reject(String errorCode) { // TODO Auto-generated method stub } }
{ "pile_set_name": "Github" }
// +build leveldb package leveldb // #include "leveldb/c.h" import "C" import ( "fmt" "reflect" "unsafe" ) func boolToUchar(b bool) C.uchar { uc := C.uchar(0) if b { uc = C.uchar(1) } return uc } func ucharToBool(uc C.uchar) bool { if uc == C.uchar(0) { return false } return true } func saveError(errStr *C.char) error { if errStr != nil { gs := C.GoString(errStr) C.leveldb_free(unsafe.Pointer(errStr)) return fmt.Errorf(gs) } return nil } func slice(p unsafe.Pointer, n int) []byte { var b []byte pbyte := (*reflect.SliceHeader)(unsafe.Pointer(&b)) pbyte.Data = uintptr(p) pbyte.Len = n pbyte.Cap = n return b }
{ "pile_set_name": "Github" }
.nh .TH "X86-VFMADDSUB132PD-VFMADDSUB213PD-VFMADDSUB231PD" "7" "May 2019" "TTMO" "Intel x86-64 ISA Manual" .SH NAME VFMADDSUB132PD-VFMADDSUB213PD-VFMADDSUB231PD - FUSED MULTIPLY-ALTERNATING ADD-SUBTRACT OF PACKED DOUBLE-PRECISION FLOATING-POINT VALUES .TS allbox; l l l l l l l l l l . \fB\fCOpcode/Instruction\fR \fB\fCOp/En\fR \fB\fC64/32 bit Mode Support\fR \fB\fCCPUID Feature Flag\fR \fB\fCDescription\fR T{ VEX.128.66.0F38.W1 96 /r VFMADDSUB132PD xmm1, xmm2, xmm3/m128 T} A V/V FMA T{ Multiply packed double\-precision floating\-point values from xmm1 and xmm3/mem, add/subtract elements in xmm2 and put result in xmm1. T} T{ VEX.128.66.0F38.W1 A6 /r VFMADDSUB213PD xmm1, xmm2, xmm3/m128 T} A V/V FMA T{ Multiply packed double\-precision floating\-point values from xmm1 and xmm2, add/subtract elements in xmm3/mem and put result in xmm1. T} T{ VEX.128.66.0F38.W1 B6 /r VFMADDSUB231PD xmm1, xmm2, xmm3/m128 T} A V/V FMA T{ Multiply packed double\-precision floating\-point values from xmm2 and xmm3/mem, add/subtract elements in xmm1 and put result in xmm1. T} T{ VEX.256.66.0F38.W1 96 /r VFMADDSUB132PD ymm1, ymm2, ymm3/m256 T} A V/V FMA T{ Multiply packed double\-precision floating\-point values from ymm1 and ymm3/mem, add/subtract elements in ymm2 and put result in ymm1. T} T{ VEX.256.66.0F38.W1 A6 /r VFMADDSUB213PD ymm1, ymm2, ymm3/m256 T} A V/V FMA T{ Multiply packed double\-precision floating\-point values from ymm1 and ymm2, add/subtract elements in ymm3/mem and put result in ymm1. T} T{ VEX.256.66.0F38.W1 B6 /r VFMADDSUB231PD ymm1, ymm2, ymm3/m256 T} A V/V FMA T{ Multiply packed double\-precision floating\-point values from ymm2 and ymm3/mem, add/subtract elements in ymm1 and put result in ymm1. 
T} T{ EVEX.128.66.0F38.W1 A6 /r VFMADDSUB213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst T} B V/V AVX512VL AVX512F T{ Multiply packed double\-precision floating\-point values from xmm1 and xmm2, add/subtract elements in xmm3/m128/m64bcst and put result in xmm1 subject to writemask k1. T} T{ EVEX.128.66.0F38.W1 B6 /r VFMADDSUB231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst T} B V/V AVX512VL AVX512F T{ Multiply packed double\-precision floating\-point values from xmm2 and xmm3/m128/m64bcst, add/subtract elements in xmm1 and put result in xmm1 subject to writemask k1. T} T{ EVEX.128.66.0F38.W1 96 /r VFMADDSUB132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst T} B V/V AVX512VL AVX512F T{ Multiply packed double\-precision floating\-point values from xmm1 and xmm3/m128/m64bcst, add/subtract elements in xmm2 and put result in xmm1 subject to writemask k1. T} T{ EVEX.256.66.0F38.W1 A6 /r VFMADDSUB213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst T} B V/V AVX512VL AVX512F T{ Multiply packed double\-precision floating\-point values from ymm1 and ymm2, add/subtract elements in ymm3/m256/m64bcst and put result in ymm1 subject to writemask k1. T} T{ EVEX.256.66.0F38.W1 B6 /r VFMADDSUB231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst T} B V/V AVX512VL AVX512F T{ Multiply packed double\-precision floating\-point values from ymm2 and ymm3/m256/m64bcst, add/subtract elements in ymm1 and put result in ymm1 subject to writemask k1. T} T{ EVEX.256.66.0F38.W1 96 /r VFMADDSUB132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst T} B V/V AVX512VL AVX512F T{ Multiply packed double\-precision floating\-point values from ymm1 and ymm3/m256/m64bcst, add/subtract elements in ymm2 and put result in ymm1 subject to writemask k1. T} T{ EVEX.512.66.0F38.W1 A6 /r VFMADDSUB213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er} T} B V/V AVX512F T{ Multiply packed double\-precision floating\-point values from zmm1and zmm2, add/subtract elements in zmm3/m512/m64bcst and put result in zmm1 subject to writemask k1. 
T} T{ EVEX.512.66.0F38.W1 B6 /r VFMADDSUB231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er} T} B V/V AVX512F T{ Multiply packed double\-precision floating\-point values from zmm2 and zmm3/m512/m64bcst, add/subtract elements in zmm1 and put result in zmm1 subject to writemask k1. T} T{ EVEX.512.66.0F38.W1 96 /r VFMADDSUB132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er} T} B V/V AVX512F T{ Multiply packed double\-precision floating\-point values from zmm1 and zmm3/m512/m64bcst, add/subtract elements in zmm2 and put result in zmm1 subject to writemask k1. T} .TE .SH INSTRUCTION OPERAND ENCODING .TS allbox; l l l l l l l l l l l l . Op/En Tuple Type Operand 1 Operand 2 Operand 3 Operand 4 A NA ModRM:reg (r, w) VEX.vvvv (r) ModRM:r/m (r) NA B Full ModRM:reg (r, w) EVEX.vvvv (r) ModRM:r/m (r) NA .TE .SS Description .PP VFMADDSUB132PD: Multiplies the two, four, or eight packed double\-precision floating\-point values from the first source operand to the two or four packed double\-precision floating\-point values in the third source operand. From the infinite precision intermediate result, adds the odd double\-precision floating\-point elements and subtracts the even double\-precision floating\-point values in the second source operand, performs rounding and stores the resulting two or four packed double\-precision floating\-point values to the destination operand (first source operand). .PP VFMADDSUB213PD: Multiplies the two, four, or eight packed double\-precision floating\-point values from the second source operand to the two or four packed double\-precision floating\-point values in the first source operand. From the infinite precision intermediate result, adds the odd double\-precision floating\-point elements and subtracts the even double\-precision floating\-point values in the third source operand, performs rounding and stores the resulting two or four packed double\-precision floating\-point values to the destination operand (first source operand). 
.PP VFMADDSUB231PD: Multiplies the two, four, or eight packed double\-precision floating\-point values from the second source operand to the two or four packed double\-precision floating\-point values in the third source operand. From the infinite precision intermediate result, adds the odd double\-precision floating\-point elements and subtracts the even double\-precision floating\-point values in the first source operand, performs rounding and stores the resulting two or four packed double\-precision floating\-point values to the destination operand (first source operand). .PP EVEX encoded versions: The destination operand (also first source operand) and the second source operand are ZMM/YMM/XMM register. The third source operand is a ZMM/YMM/XMM register, a 512/256/128\-bit memory location or a 512/256/128\-bit vector broadcasted from a 64\-bit memory location. The destination operand is conditionally updated with write mask k1. .PP VEX.256 encoded version: The destination operand (also first source operand) is a YMM register and encoded in reg\_field. The second source operand is a YMM register and encoded in VEX.vvvv. The third source operand is a YMM register or a 256\-bit memory location and encoded in rm\_field. .PP VEX.128 encoded version: The destination operand (also first source operand) is a XMM register and encoded in reg\_field. The second source operand is a XMM register and encoded in VEX.vvvv. The third source operand is a XMM register or a 128\-bit memory location and encoded in rm\_field. The upper 128 bits of the YMM destination register are zeroed. .PP Compiler tools may optionally support a complementary mnemonic for each instruction mnemonic listed in the opcode/instruction column of the summary table. The behavior of the complementary mnemonic in situations involving NANs are governed by the definition of the instruction mnemonic defined in the opcode/instruction column. 
.SS Operation .PP .RS .nf In the operations below, “*” and “\-” symbols represent multiplication and subtraction with infinite precision inputs and outputs (no rounding). .fi .RE .SS VFMADDSUB132PD DEST, SRC2, SRC3 .PP .RS .nf IF (VEX.128) THEN DEST[63:0]←RoundFPControl\_MXCSR(DEST[63:0]*SRC3[63:0] \- SRC2[63:0]) DEST[127:64]←RoundFPControl\_MXCSR(DEST[127:64]*SRC3[127:64] + SRC2[127:64]) DEST[MAXVL\-1:128] ←0 ELSEIF (VEX.256) DEST[63:0]←RoundFPControl\_MXCSR(DEST[63:0]*SRC3[63:0] \- SRC2[63:0]) DEST[127:64]←RoundFPControl\_MXCSR(DEST[127:64]*SRC3[127:64] + SRC2[127:64]) DEST[191:128]←RoundFPControl\_MXCSR(DEST[191:128]*SRC3[191:128] \- SRC2[191:128]) DEST[255:192]←RoundFPControl\_MXCSR(DEST[255:192]*SRC3[255:192] + SRC2[255:192] FI .fi .RE .SS VFMADDSUB213PD DEST, SRC2, SRC3 .PP .RS .nf IF (VEX.128) THEN DEST[63:0]←RoundFPControl\_MXCSR(SRC2[63:0]*DEST[63:0] \- SRC3[63:0]) DEST[127:64]←RoundFPControl\_MXCSR(SRC2[127:64]*DEST[127:64] + SRC3[127:64]) DEST[MAXVL\-1:128] ←0 ELSEIF (VEX.256) DEST[63:0]←RoundFPControl\_MXCSR(SRC2[63:0]*DEST[63:0] \- SRC3[63:0]) DEST[127:64]←RoundFPControl\_MXCSR(SRC2[127:64]*DEST[127:64] + SRC3[127:64]) DEST[191:128]←RoundFPControl\_MXCSR(SRC2[191:128]*DEST[191:128] \- SRC3[191:128]) DEST[255:192]←RoundFPControl\_MXCSR(SRC2[255:192]*DEST[255:192] + SRC3[255:192] FI .fi .RE .SS VFMADDSUB231PD DEST, SRC2, SRC3 .PP .RS .nf IF (VEX.128) THEN DEST[63:0]←RoundFPControl\_MXCSR(SRC2[63:0]*SRC3[63:0] \- DEST[63:0]) DEST[127:64]←RoundFPControl\_MXCSR(SRC2[127:64]*SRC3[127:64] + DEST[127:64]) DEST[MAXVL\-1:128] ←0 ELSEIF (VEX.256) DEST[63:0]←RoundFPControl\_MXCSR(SRC2[63:0]*SRC3[63:0] \- DEST[63:0]) DEST[127:64]←RoundFPControl\_MXCSR(SRC2[127:64]*SRC3[127:64] + DEST[127:64]) DEST[191:128]←RoundFPControl\_MXCSR(SRC2[191:128]*SRC3[191:128] \- DEST[191:128]) DEST[255:192]←RoundFPControl\_MXCSR(SRC2[255:192]*SRC3[255:192] + DEST[255:192] FI .fi .RE .SS VFMADDSUB132PD DEST, SRC2, SRC3 (EVEX encoded version, when src3 operand is a register) .PP .RS .nf 
(KL, VL) = (2, 128), (4, 256), (8, 512) IF (VL = 512) AND (EVEX.b = 1) THEN SET\_RM(EVEX.RC); ELSE SET\_RM(MXCSR.RM); FI; FOR j←0 TO KL\-1 i←j * 64 IF k1[j] OR *no writemask* THEN IF j *is even* THEN DEST[i+63:i]← RoundFPControl(DEST[i+63:i]*SRC3[i+63:i] \- SRC2[i+63:i]) ELSE DEST[i+63:i]← RoundFPControl(DEST[i+63:i]*SRC3[i+63:i] + SRC2[i+63:i]) FI ELSE IF *merging\-masking* ; merging\-masking THEN *DEST[i+63:i] remains unchanged* ELSE ; zeroing\-masking DEST[i+63:i] ← 0 FI FI; ENDFOR DEST[MAXVL\-1:VL] ← 0 .fi .RE .SS VFMADDSUB132PD DEST, SRC2, SRC3 (EVEX encoded version, when src3 operand is a memory source) .PP .RS .nf (KL, VL) = (2, 128), (4, 256), (8, 512) FOR j←0 TO KL\-1 i←j * 64 IF k1[j] OR *no writemask* THEN IF j *is even* THEN IF (EVEX.b = 1) THEN DEST[i+63:i] ← RoundFPControl\_MXCSR(DEST[i+63:i]*SRC3[63:0] \- SRC2[i+63:i]) ELSE DEST[i+63:i] ← RoundFPControl\_MXCSR(DEST[i+63:i]*SRC3[i+63:i] \- SRC2[i+63:i]) FI; ELSE IF (EVEX.b = 1) THEN DEST[i+63:i] ← RoundFPControl\_MXCSR(DEST[i+63:i]*SRC3[63:0] + SRC2[i+63:i]) ELSE DEST[i+63:i] ← RoundFPControl\_MXCSR(DEST[i+63:i]*SRC3[i+63:i] + SRC2[i+63:i]) FI; FI ELSE IF *merging\-masking* ; merging\-masking THEN *DEST[i+63:i] remains unchanged* ELSE ; zeroing\-masking DEST[i+63:i] ← 0 FI FI; ENDFOR DEST[MAXVL\-1:VL] ← 0 .fi .RE .SS VFMADDSUB213PD DEST, SRC2, SRC3 (EVEX encoded version, when src3 operand is a register) .PP .RS .nf (KL, VL) = (2, 128), (4, 256), (8, 512) IF (VL = 512) AND (EVEX.b = 1) THEN SET\_RM(EVEX.RC); ELSE SET\_RM(MXCSR.RM); FI; FOR j←0 TO KL\-1 i←j * 64 IF k1[j] OR *no writemask* THEN IF j *is even* THEN DEST[i+63:i]← RoundFPControl(SRC2[i+63:i]*DEST[i+63:i] \- SRC3[i+63:i]) ELSE DEST[i+63:i]← RoundFPControl(SRC2[i+63:i]*DEST[i+63:i] + SRC3[i+63:i]) FI ELSE IF *merging\-masking* ; merging\-masking THEN *DEST[i+63:i] remains unchanged* ELSE ; zeroing\-masking DEST[i+63:i] ← 0 FI FI; ENDFOR DEST[MAXVL\-1:VL] ← 0 .fi .RE .SS VFMADDSUB213PD DEST, SRC2, SRC3 (EVEX encoded version, when src3 operand 
is a memory source) .PP .RS .nf (KL, VL) = (2, 128), (4, 256), (8, 512) FOR j←0 TO KL\-1 i←j * 64 IF k1[j] OR *no writemask* THEN IF j *is even* THEN IF (EVEX.b = 1) THEN DEST[i+63:i] ← RoundFPControl\_MXCSR(SRC2[i+63:i]*DEST[i+63:i] \- SRC3[63:0]) ELSE DEST[i+63:i] ← RoundFPControl\_MXCSR(SRC2[i+63:i]*DEST[i+63:i] \- SRC3[i+63:i]) FI; ELSE IF (EVEX.b = 1) THEN DEST[i+63:i] ← RoundFPControl\_MXCSR(SRC2[i+63:i]*DEST[i+63:i] + SRC3[63:0]) ELSE DEST[i+63:i] ← RoundFPControl\_MXCSR(SRC2[i+63:i]*DEST[i+63:i] + SRC3[i+63:i]) FI; FI ELSE IF *merging\-masking* ; merging\-masking THEN *DEST[i+63:i] remains unchanged* ELSE ; zeroing\-masking DEST[i+63:i] ← 0 FI FI; ENDFOR DEST[MAXVL\-1:VL] ← 0 .fi .RE .SS VFMADDSUB231PD DEST, SRC2, SRC3 (EVEX encoded version, when src3 operand is a register) .PP .RS .nf (KL, VL) = (2, 128), (4, 256), (8, 512) IF (VL = 512) AND (EVEX.b = 1) THEN SET\_RM(EVEX.RC); ELSE SET\_RM(MXCSR.RM); FI; FOR j←0 TO KL\-1 i←j * 64 IF k1[j] OR *no writemask* THEN IF j *is even* THEN DEST[i+63:i]← RoundFPControl(SRC2[i+63:i]*SRC3[i+63:i] \- DEST[i+63:i]) ELSE DEST[i+63:i]← RoundFPControl(SRC2[i+63:i]*SRC3[i+63:i] + DEST[i+63:i]) FI ELSE IF *merging\-masking* ; merging\-masking THEN *DEST[i+63:i] remains unchanged* ELSE ; zeroing\-masking DEST[i+63:i] ← 0 FI FI; ENDFOR DEST[MAXVL\-1:VL] ← 0 .fi .RE .SS VFMADDSUB231PD DEST, SRC2, SRC3 (EVEX encoded version, when src3 operand is a memory source) .PP .RS .nf (KL, VL) = (2, 128), (4, 256), (8, 512) FOR j←0 TO KL\-1 i←j * 64 IF k1[j] OR *no writemask* THEN IF j *is even* THEN IF (EVEX.b = 1) THEN DEST[i+63:i] ← RoundFPControl\_MXCSR(SRC2[i+63:i]*SRC3[63:0] \- DEST[i+63:i]) ELSE DEST[i+63:i] ← RoundFPControl\_MXCSR(SRC2[i+63:i]*SRC3[i+63:i] \- DEST[i+63:i]) FI; ELSE IF (EVEX.b = 1) THEN DEST[i+63:i] ← RoundFPControl\_MXCSR(SRC2[i+63:i]*SRC3[63:0] + DEST[i+63:i]) ELSE DEST[i+63:i] ← RoundFPControl\_MXCSR(SRC2[i+63:i]*SRC3[i+63:i] + DEST[i+63:i]) FI; FI ELSE IF *merging\-masking* ; merging\-masking THEN *DEST[i+63:i] 
remains unchanged* ELSE ; zeroing\-masking DEST[i+63:i] ← 0 FI FI; ENDFOR DEST[MAXVL\-1:VL] ← 0 .fi .RE .SS Intel C/C++ Compiler Intrinsic Equivalent .PP .RS .nf VFMADDSUBxxxPD \_\_m512d \_mm512\_fmaddsub\_pd(\_\_m512d a, \_\_m512d b, \_\_m512d c); VFMADDSUBxxxPD \_\_m512d \_mm512\_fmaddsub\_round\_pd(\_\_m512d a, \_\_m512d b, \_\_m512d c, int r); VFMADDSUBxxxPD \_\_m512d \_mm512\_mask\_fmaddsub\_pd(\_\_m512d a, \_\_mmask8 k, \_\_m512d b, \_\_m512d c); VFMADDSUBxxxPD \_\_m512d \_mm512\_maskz\_fmaddsub\_pd(\_\_mmask8 k, \_\_m512d a, \_\_m512d b, \_\_m512d c); VFMADDSUBxxxPD \_\_m512d \_mm512\_mask3\_fmaddsub\_pd(\_\_m512d a, \_\_m512d b, \_\_m512d c, \_\_mmask8 k); VFMADDSUBxxxPD \_\_m512d \_mm512\_mask\_fmaddsub\_round\_pd(\_\_m512d a, \_\_mmask8 k, \_\_m512d b, \_\_m512d c, int r); VFMADDSUBxxxPD \_\_m512d \_mm512\_maskz\_fmaddsub\_round\_pd(\_\_mmask8 k, \_\_m512d a, \_\_m512d b, \_\_m512d c, int r); VFMADDSUBxxxPD \_\_m512d \_mm512\_mask3\_fmaddsub\_round\_pd(\_\_m512d a, \_\_m512d b, \_\_m512d c, \_\_mmask8 k, int r); VFMADDSUBxxxPD \_\_m256d \_mm256\_mask\_fmaddsub\_pd(\_\_m256d a, \_\_mmask8 k, \_\_m256d b, \_\_m256d c); VFMADDSUBxxxPD \_\_m256d \_mm256\_maskz\_fmaddsub\_pd(\_\_mmask8 k, \_\_m256d a, \_\_m256d b, \_\_m256d c); VFMADDSUBxxxPD \_\_m256d \_mm256\_mask3\_fmaddsub\_pd(\_\_m256d a, \_\_m256d b, \_\_m256d c, \_\_mmask8 k); VFMADDSUBxxxPD \_\_m128d \_mm\_mask\_fmaddsub\_pd(\_\_m128d a, \_\_mmask8 k, \_\_m128d b, \_\_m128d c); VFMADDSUBxxxPD \_\_m128d \_mm\_maskz\_fmaddsub\_pd(\_\_mmask8 k, \_\_m128d a, \_\_m128d b, \_\_m128d c); VFMADDSUBxxxPD \_\_m128d \_mm\_mask3\_fmaddsub\_pd(\_\_m128d a, \_\_m128d b, \_\_m128d c, \_\_mmask8 k); VFMADDSUBxxxPD \_\_m128d \_mm\_fmaddsub\_pd (\_\_m128d a, \_\_m128d b, \_\_m128d c); VFMADDSUBxxxPD \_\_m256d \_mm256\_fmaddsub\_pd (\_\_m256d a, \_\_m256d b, \_\_m256d c); .fi .RE .SS SIMD Floating\-Point Exceptions .PP Overflow, Underflow, Invalid, Precision, Denormal .SS Other Exceptions .PP VEX\-encoded instructions, 
see Exceptions Type 2. .PP EVEX\-encoded instructions, see Exceptions Type E2. .SH SEE ALSO .PP x86\-manpages(7) for a list of other x86\-64 man pages. .SH COLOPHON .PP This UNOFFICIAL, mechanically\-separated, non\-verified reference is provided for convenience, but it may be incomplete or broken in various obvious or non\-obvious ways. Refer to Intel® 64 and IA\-32 Architectures Software Developer’s Manual for anything serious. .br This page is generated by scripts; therefore it may contain visual or semantic bugs. Please report them (or better, fix them) on https://github.com/ttmo-O/x86-manpages. .br MIT licensed by TTMO 2020 (Turkish Unofficial Chamber of Reverse Engineers - https://ttmo.re).
{ "pile_set_name": "Github" }
/*
 * AltiVec-enhanced yuv2yuvX
 *
 * Copyright (C) 2004 Romain Dolbeau <romain@dolbeau.org>
 * based on the equivalent C code in swscale.c
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <inttypes.h>

#include "config.h"
#include "libswscale/swscale.h"
#include "libswscale/swscale_internal.h"
#include "libavutil/attributes.h"
#include "libavutil/cpu.h"
#include "yuv2rgb_altivec.h"

#if HAVE_ALTIVEC
/* A vector of four signed 32-bit zeros, used as accumulator seed and merge filler. */
#define vzero vec_splat_s32(0)

/*
 * Multiply-accumulate one filter tap over 8 pixels of one source line.
 * Loads the next 16 bytes of src, realigns via perm (from vec_lvsl), widens
 * the 16-bit products to 32 bits with mule/mulo + mergeh/mergel, and adds
 * them into the two 4x32-bit accumulators d1/d2.  l1 is updated to the newly
 * loaded vector so consecutive invocations stream through src.
 */
#define yuv2planeX_8(d1, d2, l1, src, x, perm, filter) do {     \
        vector signed short l2  = vec_ld(((x) << 1) + 16, src); \
        vector signed short ls  = vec_perm(l1, l2, perm);       \
        vector signed int   i1  = vec_mule(filter, ls);         \
        vector signed int   i2  = vec_mulo(filter, ls);         \
        vector signed int   vf1 = vec_mergeh(i1, i2);           \
        vector signed int   vf2 = vec_mergel(i1, i2);           \
        d1 = vec_add(d1, vf1);                                  \
        d2 = vec_add(d2, vf2);                                  \
        l1 = l2;                                                \
    } while (0)

/*
 * Vertical scale of one 16-pixel-wide column strip: for each of 16 output
 * pixels, sum filter[j] * src[j][x..x+15] over all filterSize lines, then
 * shift right by 19, saturate-pack to unsigned bytes and store to dest.
 * dest must be 16-byte aligned (guaranteed by the caller yuv2planeX_altivec).
 */
static void yuv2planeX_16_altivec(const int16_t *filter, int filterSize,
                                  const int16_t **src, uint8_t *dest,
                                  const uint8_t *dither, int offset, int x)
{
    register int i, j;
    DECLARE_ALIGNED(16, int, val)[16];
    vector signed int vo1, vo2, vo3, vo4;
    vector unsigned short vs1, vs2;
    vector unsigned char vf;
    /* 10 + 9 = 19: vec_splat_u32 only takes 5-bit immediates, so build 19 by addition. */
    vector unsigned int altivec_vectorShiftInt19 =
        vec_add(vec_splat_u32(10), vec_splat_u32(9));

    /* Seed the 32-bit accumulators with the (repeating, 8-entry) dither pattern,
     * pre-shifted by 12 to sit above the filter sum's fractional bits. */
    for (i = 0; i < 16; i++)
        val[i] = dither[(x + i + offset) & 7] << 12;

    vo1 = vec_ld(0,  val);
    vo2 = vec_ld(16, val);
    vo3 = vec_ld(32, val);
    vo4 = vec_ld(48, val);

    for (j = 0; j < filterSize; j++) {
        vector signed short l1, vLumFilter = vec_ld(j << 1, filter);
        vector unsigned char perm, perm0 = vec_lvsl(j << 1, filter);
        vLumFilter = vec_perm(vLumFilter, vLumFilter, perm0);
        vLumFilter = vec_splat(vLumFilter, 0); // lumFilter[j] is loaded 8 times in vLumFilter

        /* src[j] may be unaligned; perm + streaming loads handle realignment. */
        perm = vec_lvsl(x << 1, src[j]);
        l1   = vec_ld(x << 1, src[j]);

        yuv2planeX_8(vo1, vo2, l1, src[j], x,     perm, vLumFilter);
        yuv2planeX_8(vo3, vo4, l1, src[j], x + 8, perm, vLumFilter);
    }

    /* Drop the 19 fractional bits, then pack 32->16->8 with unsigned saturation. */
    vo1 = vec_sra(vo1, altivec_vectorShiftInt19);
    vo2 = vec_sra(vo2, altivec_vectorShiftInt19);
    vo3 = vec_sra(vo3, altivec_vectorShiftInt19);
    vo4 = vec_sra(vo4, altivec_vectorShiftInt19);
    vs1 = vec_packsu(vo1, vo2);
    vs2 = vec_packsu(vo3, vo4);
    vf  = vec_packsu(vs1, vs2);
    vec_st(vf, 0, dest);
}

/*
 * Scalar fallback for the unaligned head and the < 16-pixel tail of a row:
 * plain C version of the same dither + multiply-accumulate + clip pipeline,
 * covering output pixels [x, dstW).
 */
static inline void yuv2planeX_u(const int16_t *filter, int filterSize,
                                const int16_t **src, uint8_t *dest, int dstW,
                                const uint8_t *dither, int offset, int x)
{
    int i, j;

    for (i = x; i < dstW; i++) {
        int t = dither[(i + offset) & 7] << 12;
        for (j = 0; j < filterSize; j++)
            t += src[j][i] * filter[j];
        dest[i] = av_clip_uint8(t >> 19);
    }
}

/*
 * Full-row vertical scaler: scalar head until dest is 16-byte aligned,
 * vectorized 16-pixel blocks in the middle, scalar tail for the remainder.
 */
static void yuv2planeX_altivec(const int16_t *filter, int filterSize,
                               const int16_t **src, uint8_t *dest, int dstW,
                               const uint8_t *dither, int offset)
{
    /* Number of leading pixels to process scalar so dest + dst_u is 16-byte aligned. */
    int dst_u = -(uintptr_t)dest & 15;
    int i;

    yuv2planeX_u(filter, filterSize, src, dest, dst_u, dither, offset, 0);

    for (i = dst_u; i < dstW - 15; i += 16)
        yuv2planeX_16_altivec(filter, filterSize, src, dest + i, dither,
                              offset, i);

    yuv2planeX_u(filter, filterSize, src, dest, dstW, dither, offset, i);
}

/*
 * Horizontal scaler: for each output pixel i, dst[i] = clip(sum over j of
 * src[filterPos[i] + j] * filter[i * filterSize + j] >> 7).  Odd filter sizes
 * fall back to scalar code; sizes 4, 8, 16 and multiples of 8 get dedicated
 * AltiVec paths.
 */
static void hScale_altivec_real(SwsContext *c, int16_t *dst, int dstW,
                                const uint8_t *src, const int16_t *filter,
                                const int32_t *filterPos, int filterSize)
{
    register int i;
    DECLARE_ALIGNED(16, int, tempo)[4];

    if (filterSize % 4) {
        /* Scalar path for filter sizes that are not a multiple of 4. */
        for (i = 0; i < dstW; i++) {
            register int j;
            register int srcPos = filterPos[i];
            register int val    = 0;
            for (j = 0; j < filterSize; j++)
                val += ((int)src[srcPos + j]) * filter[filterSize * i + j];
            dst[i] = FFMIN(val >> 7, (1 << 15) - 1);
        }
    } else
        switch (filterSize) {
        case 4:
            for (i = 0; i < dstW; i++) {
                register int srcPos = filterPos[i];

                vector unsigned char src_v0 = vec_ld(srcPos, src);
                vector unsigned char src_v1, src_vF;
                vector signed short src_v, filter_v;
                vector signed int val_vEven, val_s;
                /* NOTE(review): src_v1 is only loaded when the 4 source bytes
                 * straddle a 16-byte boundary; otherwise it stays uninitialized
                 * but vec_perm's lvsl mask presumably never selects its bytes
                 * in that case — confirm against the AltiVec realignment idiom. */
                if ((((uintptr_t)src + srcPos) % 16) > 12) {
                    src_v1 = vec_ld(srcPos + 16, src);
                }
                src_vF = vec_perm(src_v0, src_v1, vec_lvsl(srcPos, src));

                src_v = // vec_unpackh sign-extends...
                        (vector signed short)(vec_mergeh((vector unsigned char)vzero, src_vF));
                // now put our elements in the even slots
                src_v = vec_mergeh(src_v, (vector signed short)vzero);

                filter_v = vec_ld(i << 3, filter);
                // The 3 above is 2 (filterSize == 4) + 1 (sizeof(short) == 2).

                // The neat trick: We only care for half the elements,
                // high or low depending on (i<<3)%16 (it's 0 or 8 here),
                // and we're going to use vec_mule, so we choose
                // carefully how to "unpack" the elements into the even slots.
                if ((i << 3) % 16)
                    filter_v = vec_mergel(filter_v, (vector signed short)vzero);
                else
                    filter_v = vec_mergeh(filter_v, (vector signed short)vzero);

                val_vEven = vec_mule(src_v, filter_v);
                val_s     = vec_sums(val_vEven, vzero);
                vec_st(val_s, 0, tempo);
                /* vec_sums leaves the total in the last 32-bit element. */
                dst[i] = FFMIN(tempo[3] >> 7, (1 << 15) - 1);
            }
            break;
        case 8:
            for (i = 0; i < dstW; i++) {
                register int srcPos = filterPos[i];

                vector unsigned char src_v0 = vec_ld(srcPos, src);
                vector unsigned char src_v1, src_vF;
                vector signed short src_v, filter_v;
                vector signed int val_v, val_s;
                /* Same conditional-realignment idiom as case 4, 8-byte span. */
                if ((((uintptr_t)src + srcPos) % 16) > 8) {
                    src_v1 = vec_ld(srcPos + 16, src);
                }
                src_vF = vec_perm(src_v0, src_v1, vec_lvsl(srcPos, src));

                src_v = // vec_unpackh sign-extends...
                        (vector signed short)(vec_mergeh((vector unsigned char)vzero, src_vF));
                filter_v = vec_ld(i << 4, filter);
                // the 4 above is 3 (filterSize == 8) + 1 (sizeof(short) == 2)

                val_v = vec_msums(src_v, filter_v, (vector signed int)vzero);
                val_s = vec_sums(val_v, vzero);
                vec_st(val_s, 0, tempo);
                dst[i] = FFMIN(tempo[3] >> 7, (1 << 15) - 1);
            }
            break;
        case 16:
            for (i = 0; i < dstW; i++) {
                register int srcPos = filterPos[i];

                /* 16 source bytes always need two loads; perm stitches them. */
                vector unsigned char src_v0 = vec_ld(srcPos, src);
                vector unsigned char src_v1 = vec_ld(srcPos + 16, src);
                vector unsigned char src_vF = vec_perm(src_v0, src_v1, vec_lvsl(srcPos, src));

                vector signed short src_vA = // vec_unpackh sign-extends...
                                             (vector signed short)(vec_mergeh((vector unsigned char)vzero, src_vF));
                vector signed short src_vB = // vec_unpackh sign-extends...
                                             (vector signed short)(vec_mergel((vector unsigned char)vzero, src_vF));

                vector signed short filter_v0 = vec_ld(i << 5, filter);
                vector signed short filter_v1 = vec_ld((i << 5) + 16, filter);
                // the 5 above are 4 (filterSize == 16) + 1 (sizeof(short) == 2)

                vector signed int val_acc = vec_msums(src_vA, filter_v0, (vector signed int)vzero);
                vector signed int val_v   = vec_msums(src_vB, filter_v1, val_acc);

                vector signed int val_s = vec_sums(val_v, vzero);

                vec_st(val_s, 0, tempo);
                dst[i] = FFMIN(tempo[3] >> 7, (1 << 15) - 1);
            }
            break;
        default:
            /* Generic path for filterSize that is a multiple of 8 (and 4):
             * 16-tap chunks in the inner loop, one optional 8-tap remainder. */
            for (i = 0; i < dstW; i++) {
                register int j;
                register int srcPos = filterPos[i];

                vector signed int val_s, val_v = (vector signed int)vzero;
                vector signed short filter_v0R = vec_ld(i * 2 * filterSize, filter);
                vector unsigned char permF     = vec_lvsl((i * 2 * filterSize), filter);

                vector unsigned char src_v0 = vec_ld(srcPos, src);
                vector unsigned char permS  = vec_lvsl(srcPos, src);

                for (j = 0; j < filterSize - 15; j += 16) {
                    vector unsigned char src_v1 = vec_ld(srcPos + j + 16, src);
                    vector unsigned char src_vF = vec_perm(src_v0, src_v1, permS);

                    vector signed short src_vA = // vec_unpackh sign-extends...
                                                 (vector signed short)(vec_mergeh((vector unsigned char)vzero, src_vF));
                    vector signed short src_vB = // vec_unpackh sign-extends...
                                                 (vector signed short)(vec_mergel((vector unsigned char)vzero, src_vF));

                    vector signed short filter_v1R = vec_ld((i * 2 * filterSize) + (j * 2) + 16, filter);
                    vector signed short filter_v2R = vec_ld((i * 2 * filterSize) + (j * 2) + 32, filter);
                    vector signed short filter_v0  = vec_perm(filter_v0R, filter_v1R, permF);
                    vector signed short filter_v1  = vec_perm(filter_v1R, filter_v2R, permF);

                    vector signed int val_acc = vec_msums(src_vA, filter_v0, val_v);
                    val_v = vec_msums(src_vB, filter_v1, val_acc);

                    /* Carry the trailing raw vectors into the next iteration
                     * so each 16-byte chunk is loaded only once. */
                    filter_v0R = filter_v2R;
                    src_v0     = src_v1;
                }

                if (j < filterSize - 7) {
                    // loading src_v0 is useless, it's already done above
                    // vector unsigned char src_v0 = vec_ld(srcPos + j, src);
                    vector unsigned char src_v1, src_vF;
                    vector signed short src_v, filter_v1R, filter_v;
                    /* Same conditional-realignment idiom as above (8-byte span). */
                    if ((((uintptr_t)src + srcPos) % 16) > 8) {
                        src_v1 = vec_ld(srcPos + j + 16, src);
                    }
                    src_vF = vec_perm(src_v0, src_v1, permS);

                    src_v = // vec_unpackh sign-extends...
                            (vector signed short)(vec_mergeh((vector unsigned char)vzero, src_vF));
                    // loading filter_v0R is useless, it's already done above
                    // vector signed short filter_v0R = vec_ld((i * 2 * filterSize) + j, filter);
                    filter_v1R = vec_ld((i * 2 * filterSize) + (j * 2) + 16, filter);
                    filter_v   = vec_perm(filter_v0R, filter_v1R, permF);
                    val_v      = vec_msums(src_v, filter_v, val_v);
                }

                val_s = vec_sums(val_v, vzero);

                vec_st(val_s, 0, tempo);
                dst[i] = FFMIN(tempo[3] >> 7, (1 << 15) - 1);
            }
        }
}
#endif /* HAVE_ALTIVEC */

/*
 * Install the AltiVec scaler implementations into the SwsContext when the
 * CPU supports AltiVec and the conversion parameters fit the constraints of
 * the vector paths (8-bit source, <= 14-bit dest for hScale; 8-bit packed
 * non-NV dest without an alpha plane for yuv2planeX / yuv2packedX).
 * Compiles to a no-op when HAVE_ALTIVEC is not set.
 */
av_cold void ff_sws_init_swscale_ppc(SwsContext *c)
{
#if HAVE_ALTIVEC
    enum AVPixelFormat dstFormat = c->dstFormat;

    if (!(av_get_cpu_flags() & AV_CPU_FLAG_ALTIVEC))
        return;

    if (c->srcBpc == 8 && c->dstBpc <= 14) {
        c->hyScale = c->hcScale = hScale_altivec_real;
    }

    if (!is16BPS(dstFormat) && !is9_OR_10BPS(dstFormat) &&
        dstFormat != AV_PIX_FMT_NV12 && dstFormat != AV_PIX_FMT_NV21 &&
        !c->alpPixBuf) {
        c->yuv2planeX = yuv2planeX_altivec;
    }

    /* The following list of supported dstFormat values should
     * match what's found in the body of ff_yuv2packedX_altivec() */
    if (!(c->flags & (SWS_BITEXACT | SWS_FULL_CHR_H_INT)) && !c->alpPixBuf) {
        switch (c->dstFormat) {
        case AV_PIX_FMT_ABGR:
            c->yuv2packedX = ff_yuv2abgr_X_altivec;
            break;
        case AV_PIX_FMT_BGRA:
            c->yuv2packedX = ff_yuv2bgra_X_altivec;
            break;
        case AV_PIX_FMT_ARGB:
            c->yuv2packedX = ff_yuv2argb_X_altivec;
            break;
        case AV_PIX_FMT_RGBA:
            c->yuv2packedX = ff_yuv2rgba_X_altivec;
            break;
        case AV_PIX_FMT_BGR24:
            c->yuv2packedX = ff_yuv2bgr24_X_altivec;
            break;
        case AV_PIX_FMT_RGB24:
            c->yuv2packedX = ff_yuv2rgb24_X_altivec;
            break;
        }
    }
#endif /* HAVE_ALTIVEC */
}
{ "pile_set_name": "Github" }
{ "name" : "494.pdf", "metadata" : { "source" : "CRF", "title" : "Morph-fitting: Fine-Tuning Word Vector Spaces with Simple Language-Specific Rules", "authors" : [ ], "emails" : [ ], "sections" : [ { "heading" : null, "text" : "1 000\n011\n012\n013\n014\n015\n016\n017\n018\n019\n020\n021\n022\n023\n024\n025\n026\n027\n028\n029\n030\n031\n032\n033\n034\n035\n036\n037\n038\n039\n040\n041\n042\n043\n044\n045\n046\n047\n048\n049\n061\n062\n063\n064\n065\n066\n067\n068\n069\n070\n071\n072\n073\n074\n075\n076\n077\n078\n079\n080\n081\n082\n083\n084\n085\n086\n087\n088\n089\n090\n091\n092\n093\n094\n095\n096\n097\n098\n099" }, { "heading" : "1 Introduction", "text" : "Word representation learning has become a research area of central importance in natural language processing (NLP), with its usefulness demonstrated across many application areas such as parsing (Chen and Manning, 2014), machine translation (Zou et al., 2013), and many others (Turian et al., 2010; Collobert et al., 2011). Most promi-\nnent word representation techniques are grounded in the distributional hypothesis, relying on word co-occurrence information in large textual corpora (Curran, 2004; Turney and Pantel, 2010; Mikolov et al., 2013; Mnih and Kavukcuoglu, 2013; Levy and Goldberg, 2014; Schwartz et al., 2015, i.a.).\nMorphologically rich languages, in which “substantial grammatical information. . . is expressed at word level” (Tsarfaty et al., 2010), pose specific challenges for NLP. This is not always considered when techniques are evaluated on languages such as English or Chinese, which do not have rich morphology. In the case of distributional vector space models, morphological complexity brings two challenges to the fore:\n1. Estimating Rare Words: A single lemma can have many different surface realisations. Naively treating each realisation as a separate word leads to sparsity problems and a failure to exploit their shared semantics. 
On the other hand, lemmatising the entire corpus can obfuscate the differences that exist between different word forms even though they share some aspects of meaning.\n2. Embedded Semantics: Morphology can encode semantic relations such as antonymy (e.g. literate and illiterate, expensive and inexpensive) or synonymy (north, northern, northerly).\nIn this work, we tackle the two challenges jointly by introducing a resource-light vector space finetuning procedure termed morph-fitting. The proposed method does not require curated knowledge bases or gold lexicons. Instead, it makes use of the observation that morphology implicitly encodes semantic signals pertaining to synonymy (e.g., German word inflections katalanisch, katalanischem, katalanischer denote the same semantic concept in different grammatical roles), and antonymy (e.g., mature vs. immature), capitalising on the proliferation of word forms in morphologically\n2\n101\n102\n103\n104\n105\n106\n107\n108\n109\n110\n111\n112\n113\n114\n115\n116\n117\n118\n119\n120\n121\n122\n123\n124\n125\n126\n127\n128\n129\n130\n131\n132\n133\n134\n135\n136\n137\n138\n139\n140\n141\n142\n143\n144\n145\n146\n147\n148\n149\n150\n151\n152\n153\n154\n155\n156\n157\n165\n166\n167\n168\n169\n170\n171\n172\n173\n174\n175\n176\n177\n178\n179\n180\n181\n182\n183\n184\n185\n186\n187\n188\n189\n190\n191\n192\n193\n194\n195\n196\n197\n198\n199\nen_expensive de_teure it_costoso en_slow de_langsam it_lento en_book de_buch it_libro costly teuren dispendioso fast allmählich lentissimo books sachbuch romanzo\ncostlier kostspielige remunerativo slower rasch lenta memoir buches racconto cheaper aufwändige redditizio slower gemächlich inesorabile novel romandebüt volumetto prohibitively kostenintensive rischioso slowed schnell rapidissimo storybooks büchlein saggio pricey aufwendige costosa slowing explosionsartig graduale blurb pamphlet ecclesiaste\nexpensiveness teures costosa slow langsamer lenti booked bücher libri costly teuren costose 
slowing langsames lente rebook büch libra\ncostlier teurem costosi slowed langsame lenta booking büche librare ruinously teurer dispendioso slowness langsamem veloce rebooked büches libre unaffordable teurerer dispendiose slows langsamen rapido books büchen librano\nTable 1: The nearest neighbours of three example words (expensive, slow and book) in English, German and Italian before (top) and after (bottom) morph-fitting.\nrich languages. Formalised as an instance of the post-processing semantic specialisation paradigm (Faruqui et al., 2015; Mrkšić et al., 2016), morphfitting is steered by a set of linguistic constraints derived from simple language-specific rules which describe (a subset of) morphological processes in a language. The constraints emphasise similarity on one side (e.g., by extracting morphological synonyms), and antonymy on the other (by extracting morphological antonyms), see Fig. 1 and Tab. 2.\nThe key idea of the fine-tuning process is to pull synonymous examples described by the constraints closer together in a transformed vector space, while at the same time pushing antonymous examples away from each other. 
The explicit post-hoc injection of morphological constraints enables: a) estimating more accurate vectors for low-frequency words if they are described by the constraints containing their relation with high-frequency words,1 thus tackling the data sparsity problem; and b) specialising the distributional space to distinguish between similarity and association, thus supporting language understanding applications such as dialogue state tracking (DST).\nAs a post-processor, morph-fitting allows the integration of morphological rules with any distributional vector space in any language: it treats an input distributional word vector space as a black box and fine-tunes it so that the transformed space reflects the knowledge coded in the input morphological constraints (e.g., Italian words rispettoso and irrispetosa should be far apart in the transformed vector space, see Fig. 1). Tab. 1 illustrates the effects of morph-fitting by qualitative examples in three languages: the vast majority of nearest neighbours are “morphological” synonyms.\nWe demonstrate the efficacy of morph-fitting in four languages (English, German, Italian, Rus-\n1For instance, the vector for the word katalanischem which occurs only 9 times in the German Wikipedia will be pulled closer to the more reliable vectors for katalanisch and katalanischer, with frequencies of 2097 and 1383 respectively.\nsian), yielding large and consistent improvements on benchmarking word similarity evaluation sets such as SimLex-999 (Hill et al., 2015), its multilingual extension (Leviant and Reichart, 2015), and SimVerb-3500 (Gerz et al., 2016). The improvements are reported for all four languages, and with a variety of input distributional spaces, verifying the robustness of the approach.\nWe then show that incorporating morph-fitted vectors into a state-of-the-art neural-network DST model results in improved tracking performance, especially for morphologically rich languages. 
We report an improvement of 4% on Italian, and 6% on German when using morph-fitted vectors instead of the distributional ones, setting a new state-of-theart DST performance for the two datasets.2" }, { "heading" : "2 Morph-fitting: Methodology", "text" : "Preliminaries In this work, we focus on four languages with varying levels of morphological complexity: English (EN), German (DE), Italian (IT), and Russian (RU). These correspond to languages in the Multilingual SimLex-999 dataset. Vocabularies Wen, Wde, Wit, Wru are compiled by retaining all word forms from the four Wikipedias with\n2There are no readily available DST datasets for Russian.\n3\n201\n202\n203\n204\n205\n206\n207\n208\n209\n210\n211\n212\n213\n214\n215\n216\n217\n218\n219\n220\n221\n222\n223\n224\n225\n226\n227\n228\n229\n230\n231\n232\n233\n234\n235\n236\n237\n238\n239\n240\n241\n242\n243\n244\n245\n246\n247\n248\n249\n250\n251\n252\n253\n254\n255\n256\n257\n263\n264\n265\n266\n267\n268\n269\n270\n271\n272\n273\n274\n275\n276\n277\n278\n279\n280\n281\n282\n283\n284\n285\n286\n287\n288\n289\n290\n291\n292\n293\n294\n295\n296\n297\n298\n299\nword frequency over 10, see Tab. 3. We then query these (large) vocabularies using a set of simple language-specific if-then-else rules to extract sets of linguistic constraints, see Tab. 2.3 These constraints (Sect. 2.2) are used as input for the vector space post-processing ATTRACT-REPEL algorithm (outlined in Sect. 2.1)." }, { "heading" : "2.1 The ATTRACT-REPEL Model", "text" : "The ATTRACT-REPEL model is an extension of PARAGRAM, proposed by Wieting et al. (2015). It provides a generic framework for incorporating similarity (e.g. successful and accomplished) and antonymy constraints (e.g. nimble and clumsy) into pre-trained word vectors. Given the initial vector space and collections of ATTRACT and REPEL constraints A and R, the model gradually modifies the space to bring the designated word vectors closer together or further apart. 
The method’s cost function consists of three terms. The first term pulls the ATTRACT examples (xl, xr) ∈ A closer together. If BA denotes the current mini-batch of ATTRACT examples, this term can be expressed as:\nA(BA) = ∑\n(xl,xr)∈BA\n(ReLU (δatt + xltl − xlxr)\n+ ReLU (δatt + xrtr − xlxr))\nwhere δatt is the similarity margin which determines how much closer synonymous vectors should be to each other than to each of their respective negative examples. ReLU(x) = max(0, x) is the standard rectified linear unit (Nair and Hinton, 2010). The ‘negative’ example ti for each word xi in any ATTRACT pair is the word vector closest to xi among the examples in the current minibatch (distinct from its target synonym and xi itself). This means that this term forces synonymous words from the in-batch ATTRACT constraints to be closer to one another than to any other word in the current mini-batch.\nThe second term pushes antonyms away from each other. If (xl, xr) ∈ BR is the current minibatch of REPEL constraints, this term is:\nR(BR) = ∑\n(xl,xr)∈BR\n(ReLU (δrpl + xlxr − xltr)\n+ ReLU (δrpl + xlxr − xrtr)) 3A native speaker is able to easily come up with these sets of morphological rules (or at least with a reasonable subset of rules) without any linguistic training. 
What is more, the rules for DE, IT, and RU were created by non-native, non-fluent speakers with a limited knowledge of the three languages, exemplifying the simplicity and portability of the approach.\nEnglish German Italian\n(discuss, discussed) (schottisch, schottischem) (golfo, golfi) (laugh, laughing) (damalige, damaligen) (minato, minata) (pacifist, pacifists) (kombiniere, kombinierte) (mettere, metto) (evacuate, evacuated) (schweigt, schweigst) (crescono, cresci) (evaluate, evaluates) (hacken, gehackt) (crediti, credite)\n(dressed, undressed) (stabil, unstabil) (abitata, inabitato) (similar, dissimilar) (geformtes, ungeformt) (realtà, irrealtà) (formality, informality) (relevant, irrelevant) (attuato, inattuato)\nIn this case, each word’s ‘negative’ example is the (in-batch) word vector furthest away from it (and distinct from the word’s target antonym). The intuition is that we want antonymous words from the input REPEL constraints to be further away from each other than from any other word in the current mini-batch; δrpl is now the repel margin.\nThe final term of the cost function serves to retain the abundance of semantic information encoded in the starting distributional space. If xiniti is the initial distributional vector and V (B) is the set of all vectors present in the given mini-batch, this term (per mini-batch) is expressed as:\nR(BA,BR) = ∑\nxi∈V (BA∪BR)\nλreg ∥∥∥xiniti − xi∥∥∥ 2\nwhere λreg is the L2 regularisation constant.4 This term effectively pulls word vectors towards their initial (distributional) values, ensuring that relations encoded in initial vectors persist as long as they do not contradict the newly injected ones." }, { "heading" : "2.2 Language-Specific Rules and Constraints", "text" : "Semantic Specialisation with Constraints The fine-tuning ATTRACT-REPEL procedure is entirely driven by the input ATTRACT and REPEL sets of constraints. 
These can be extracted from a variety of semantic databases such as WordNet (Fellbaum, 1998), the Paraphrase Database (Ganitkevitch et al., 2013; Pavlick et al., 2015), or BabelNet (Navigli and Ponzetto, 2012; Ehrmann et al., 2014) as done in prior work (Faruqui et al., 2015; Wieting et al.,\n4We use hyperparameter values δatt = 0.6, δrpl = 0.0, λreg = 10\n−9 from prior work without fine-tuning. We train all models for 10 epochs with AdaGrad (Duchi et al., 2011).\n4\n301\n302\n303\n304\n305\n306\n307\n308\n309\n310\n311\n312\n313\n314\n315\n316\n317\n318\n319\n320\n321\n322\n323\n324\n325\n326\n327\n328\n329\n330\n331\n332\n333\n334\n335\n336\n337\n338\n339\n340\n341\n342\n343\n344\n345\n346\n347\n348\n349\n350\n351\n352\n353\n354\n355\n356\n357\n358\n359\n360\n361\n362\n363\n364\n365\n366\n367\n368\n369\n370\n371\n372\n373\n374\n375\n376\n377\n378\n379\n380\n381\n382\n383\n384\n385\n386\n387\n388\n389\n390\n391\n392\n393\n394\n395\n396\n397\n398\n399\n2015; Mrkšić et al., 2016, i.a.). In this work, we investigate another option: extracting constraints without curated knowledge bases in a spectrum of languages by exploiting inherent language-specific properties related to linguistic morphology. This relaxation ensures a wider portability of ATTRACTREPEL to languages and domains without readily available or adequate resources.\nExtracting ATTRACT Pairs For the ATTRACT constraints, we focus on inflectional rather than on derivational morphology rules as the former preserve the full meaning of a word, modifying it only to reflect grammatical roles (e.g., verb tense, case markers; (read, reads)).5 This choice is guided by our intent to fine-tune the original vector space to improve the embedded semantic relations.\nWe define two rules for English, widely recognised as morphologically simple (Avramidis and Koehn, 2008; Cotterell et al., 2016). These are: (R1) if w1, w2 ∈Wen, where w2 = w1 + ing/ed/s, then add (w1, w2) and (w2, w1) to the set of ATTRACT constraints A. 
This rule yields pairs such as (look, looks), (look, looking), (look, looked).\nIf w[: −1] is a function which strips the last character from word w, the second rule is: (R2) if w1 ends with the letter e and w1 ∈Wen and w2 ∈ Wen, where w2 = w1[: −1] + ing/ed/s, then add (w1, w2) and (w2, w1) toA. This creates pairs such as (create, creates), (create, creating) and (create, created). Naturally, introducing more sophisticated rules is possible in order to cover for other special cases and morphological irregularities (e.g., sweep / swept), but in all our EN experiments, A is based on the two simple EN rules R1 and R2.\nThe other three languages, with more complicated morphology, yield a larger number of rules. In Italian, we rely on the sets of rules spanning: (1) regular formation of plural (libro / libri); (2) regular verb conjugation (aspettare / aspettiamo); (3) regular formation of past participle (aspettare / aspettato); and (4) rules regarding grammatical gender (bianco / bianca). Besides these, another set of rules is used for German and Russian: (5) regular declension (e.g., asiatisch / asiatischem).\n5The core difference between inflectional and derivational morphology may be summarised in a few lines as follows: the former refers to a set of processes through which the word form expresses meaningful syntactic information, e.g., verb tense, without any change to the semantics of the word. On the other hand, the latter refers to the formation of new words with semantic shifts in meaning (Schone and Jurafsky, 2001; Haspelmath and Sims, 2013; Lazaridou et al., 2013; Zeller et al., 2013; Cotterell and Schütze, 2017).\nExtracting REPEL Pairs As another source of implicit semantic signals, W also contains words which represent derivational antonyms: e.g., two words that denote concepts with opposite meanings, generated through a derivational process. 
We use a standard set of EN “antonymy” prefixes: APen = {dis, il, un, in, im, ir, mis, non, anti} (Fromkin et al., 2013). If w1, w2 ∈ Wen, where w2 is generated by adding a prefix from APen to w1, then (w1, w2) and (w2, w1) are added to the set of REPEL constraints R. This rule generates pairs such as (advantage, disadvantage) and (regular, irregular). An additional rule replaces the suffix -ful with -less, extracting antonyms such as (careful, careless).\nFollowing the same principle, we use APde = {un, nicht, anti, ir, in, miss}, APit = {in, ir, im, anti}, and APru = {не, анти}. For instance, this generates an IT pair (rispettoso, irrispettoso) (see Fig. 1). For DE, we use another rule targeting suffix replacement: -voll is replaced by -los.\nWe further expand the set of REPEL constraints by transitively combining antonymy pairs from the previous step with inflectional ATTRACT pairs. This step yields additional constraints such as (rispettosa, irrispettosi) (see Fig. 1). The final A andR constraint counts are given in Tab. 3. The full sets of rules are available as supplemental material." }, { "heading" : "3 Experimental Setup", "text" : "Training Data and Setup For each of the four languages we train the skip-gram with negative sampling (SGNS) model (Mikolov et al., 2013) on the latest Wikipedia dump of each language. We induce 300-dimensional word vectors, with the frequency cut-off set to 10. The vocabulary sizes |W | for each language are provided in Tab. 3.6 We label these collections of vectors SGNS-LARGE.\nOther Starting Distributional Vectors We also analyse the impact of morph-fitting on other collections of well-known EN word vectors. These vectors have varying vocabulary coverage and are trained with different architectures. 
We test standard distributional models: Common-Crawl GloVe (Pennington et al., 2014), SGNS vectors (Mikolov et al., 2013) with various contexts (BOW = bag-ofwords; DEPS = dependency contexts), and training data (PW = Polyglot Wikipedia from Al-Rfou\n6Other SGNS parameters were set to standard values (Baroni et al., 2014; Vulić and Korhonen, 2016b): 15 epochs, 15 negative samples, global learning rate: .025, subsampling rate: 1e− 4. Similar trends in results persist with d = 100, 500.\n5\n401\n402\n403\n404\n405\n406\n407\n408\n409\n410\n411\n412\n413\n414\n415\n416\n417\n418\n419\n420\n421\n422\n423\n424\n425\n426\n427\n428\n429\n430\n431\n432\n433\n434\n435\n436\n437\n438\n439\n440\n441\n442\n443\n444\n445\n446\n447\n448\n449\n450\n451\n452\n453\n454\n455\n456\n457\n458\n459\n460\n461\n462\n463\n464\n465\n466\n467\n468\n469\n470\n471\n472\n473\n474\n475\n476\n477\n478\n479\n480\n481\n482\n483\n484\n485\n486\n487\n488\n489\n490\n491\n492\n493\n494\n495\n496\n497\n498\n499\net al. (2013); 8B = 8 billion token word2vec corpus), following (Levy and Goldberg, 2014) and (Schwartz et al., 2015). We also test the symmetricpattern based vectors of Schwartz et al. (2016) (SymPat-Emb), count-based PMI-weighted vectors reduced by SVD (Baroni et al., 2014) (Count-SVD), a model which replaces the context modelling function from CBOW with bidirectional LSTMs (Melamud et al., 2016) (Context2Vec), and two sets of EN vectors trained by injecting multilingual information: BiSkip (Luong et al., 2015) and MultiCCA (Faruqui and Dyer, 2014). 
We also experiment with a selection of standard distributional spaces in other languages from prior work (Dinu et al., 2015; Luong et al., 2015; Vulić and Korhonen, 2016a).\nMorph-fixed Vectors A baseline which utilises an equal amount of knowledge as morph-fitting, termed morph-fixing, fixes the vector of each word to the distributional vector of its most frequent inflectional synonym, tying the vectors of low-frequency words to their more frequent inflections. For each word w1, we construct a set of M + 1 words Ww1 = {w1, w′1, . . . , w′M} consisting of the word w1 itself and all M words which co-occur with w1 in the ATTRACT constraints. We then choose the word w′max from the set Ww1 with the maximum frequency in the training data, and fix all other word vectors in Ww1 to its word vector. The morph-fixed vectors (MFIX) serve as our primary baseline, as they outperformed another straightforward baseline based on stemming across all of our intrinsic and extrinsic experiments.\nMorph-fitting Variants We analyse two variants of morph-fitting: (1) using ATTRACT constraints only (MFIT-A), and (2) using both ATTRACT and REPEL constraints (MFIT-AR).7" }, { "heading" : "4 Intrinsic Evaluation: Word Similarity", "text" : "Evaluation Setup and Datasets The first set of experiments intrinsically evaluates morph-fitted vector spaces on word similarity benchmarks, using Spearman’s rank correlation as the evaluation metric. First, we use the SimLex-999 dataset, as well as SimVerb-3500, a recent EN verb pair similarity dataset providing similarity ratings for 3,500 verb\n7 We also tried using another post-processing model (Mrkšić et al., 2016) in lieu of ATTRACT-REPEL. However, this model was computationally intractable with SGNS-LARGE vectors. 
Moreover, it was consistently outperformed by ATTRACT-REPEL on vector spaces with smaller vocabularies.\npairs.8 SimLex-999 was translated to DE, IT, and RU by Leviant and Reichart (2015), and they crowdsourced similarity scores from native speakers. We use this dataset for our multilingual evaluation.9\nMorph-SimLex We also introduce a synthetic dataset based on multilingual SimLex, termed Morph-SimLex. Since the original sets contain only word lemmas, they are unable to evaluate whether a representation model improves vectors for all synonymous word inflections. Therefore, we enrich the sets of pairs using the same set of ATTRACT rules from Sect. 2.2. In short, given a word pair (w1, w2) with a SimLex score sl1,2, we again construct sets Ww1 = {w1, w′1, . . . , w′M} and Ww2 = {w2, w′′1 , . . . , w′′N}, where Ww1 consists of w1 and all words which co-occur with w1 in the A constraints; the same holds for Ww2 . Morph-SimLex pairs are then generated by taking the Cartesian product between Ww1 and Ww2 , and assigning the same score sl1,2 to each such pair. The final dataset is constructed by repeating the procedure for each of the 999 SimLex pairs, yielding 13,213 EN pairs, 17,021 DE pairs, 18,281 IT pairs, and 10,289 RU pairs. We make this dataset available in the hope that it can aid further research on improving morphological relations in vector spaces.\nMorph-fitting EN Word Vectors As the first experiment, we morph-fit a wide spectrum of EN distributional vectors induced by various architectures (see Sect. 3). The results on SimLex and SimVerb are summarised in Tab. 4. The results with EN SGNS-LARGE vectors are shown in Fig. 2a. Morph-fitted vectors bring consistent improvement across all experiments, regardless of the quality of the initial distributional space. This finding confirms that the method is robust: its effectiveness does not depend on the architecture used to construct the initial space. 
To illustrate the improvements, note that the best score on SimVerb for a model trained on running text is achieved by Context2vec (ρ = 0.388); injecting morphological constraints into this vector space results in a gain of 7.1 ρ points.\nExperiments on Other Languages We next extend our experiments to other languages, testing both morph-fitting variants. The results are sum-\n8Unlike other gold standard resources such as WordSim353 (Finkelstein et al., 2002) or MEN (Bruni et al., 2014), SimLex and SimVerb provided explicit guidelines to discern between semantic similarity and association, so that related but non-similar words (e.g. cup and coffee) have a low rating.\n9Since Leviant and Reichart (2015) re-scored the original EN SimLex, we use their EN SimLex version for consistency.\n6\n501\n502\n503\n504\n505\n506\n507\n508\n509\n510\n511\n512\n513\n514\n515\n516\n517\n518\n519\n520\n521\n522\n523\n524\n525\n526\n527\n528\n529\n530\n531\n532\n533\n534\n535\n536\n537\n538\n539\n540\n541\n542\n543\n544\n545\n546\n547\n548\n549\n550\n551\n552\n553\n554\n555\n556\n557\n558\n559\n560\n561\n562\n563\n564\n565\n566\n567\n568\n569\n570\n571\n572\n573\n574\n575\n576\n577\n578\n579\n580\n581\n582\n583\n584\n585\n586\n587\n588\n589\n590\n591\n592\n593\n594\n595\n596\n597\n598\n599\nEvaluation Vectors SimLex-999 SimVerb-3500 1. SG-BOW2-PW (300) (Mikolov et al., 2013) .339→ .439 .277→ .381 2. GloVe-6B (300) (Pennington et al., 2014) .324→ .438 .286→ .405 3. Count-SVD (500) (Baroni et al., 2014) .267→ .360 .199→ .301 4. SG-DEPS-PW (300) (Levy and Goldberg, 2014) .376→ .434 .313→ .418 5. SG-DEPS-8B (500) (Bansal et al., 2014) .373→ .441 .356→ .473 6. MultiCCA-EN (512) (Faruqui and Dyer, 2014) .314→ .391 .296→ .354 7. BiSkip-EN (256) (Luong et al., 2015) .276→ .356 .260→ .333 8. SG-BOW2-8B (500) (Schwartz et al., 2015) .373→ .440 .348→ .441 9. SymPat-Emb (500) (Schwartz et al., 2016) .381→ .442 .284→ .373 10. 
Context2Vec (600) (Melamud et al., 2016) .371→ .440 .388→ .459\nTable 4: The impact of morph-fitting (MFIT-AR used) on a representative set of EN vector space models. All results show the Spearman’s ρ correlation before and after morph-fitting. The numbers in parentheses refer to the vector dimensionality.\nVectors Distrib. MFIT-A MFIT-AR EN: GloVe-6B (300) .324 .376 .438 EN: SG-BOW2-PW (300) .339 .385 .439 DE: SG-DEPS-PW (300) (Vulić and Korhonen, 2016a) .267 .318 .325 DE: BiSkip-DE (256) (Luong et al., 2015) .354 .414 .421 IT: SG-DEPS-PW (300) (Vulić and Korhonen, 2016a) .237 .351 .391 IT: CBOW5-Wacky (300) (Dinu et al., 2015) .363 .417 .446\nTable 5: Results on multilingual SimLex-999 (EN, DE, and IT) with two morph-fitting variants.\nmarised in Tab. 5, while Fig. 2a-2d show results for the morph-fitted SGNS-LARGE vectors. These scores confirm the effectiveness and robustness of morph-fitting across languages, suggesting that the idea of fitting to morphological constraints is indeed language-agnostic, given the set of languagespecific rule-based constraints. Fig. 2 also demonstrates that the morph-fitted vector spaces consistently outperform the morph-fixed ones.\nMorph-SimLex performance across all languages shows even stronger relative gains over distributional and morph-fixed vectors. The original SimLex dataset only contains word lemmas. Consequently, it fails to penalise word vector collections with bad estimates of less-frequent word forms. The comparison between MFIT-A and MFIT-AR indicates that both sets of constraints are important\nfor the fine-tuning process: while MFIT-A already yields consistent gains over the initial spaces, a further refinement can be achieved by also incorporating the antonymous REPEL constraints." }, { "heading" : "5 Downstream Task: Dialogue State Tracking (DST)", "text" : "Goal-oriented dialogue systems provide conversational interfaces for tasks such as booking flights or finding restaurants. 
In slot-based systems, application domains are specified using ontologies that define the search constraints which users can express. An ontology consists of a number of slots and their assorted slot values. In a restaurant search domain, sets of slot-values could include PRICE = [cheap, expensive] or FOOD = [Thai, Indian, ...]. The DST model is the first component of modern dialogue pipelines (Young, 2010). It serves to capture the intents expressed by the user at each dialogue turn and update the belief state. This is the system’s internal estimate of the user’s goals, used by the downstream dialogue manager to choose the system response. The following example shows the true dialogue state in a multi-turn dialogue:\nUser: What’s good in the southern part of town? inform(area=south) System: Vedanta is the top-rated Indian place. User: How about something cheaper? inform(area=south, price=cheap) System: Seven Days is very popular. Great hot pot. User: What’s the address? inform(area=south, price=cheap); request(address) System: Seven Days is at 66 Regent Street.\nThe Dialogue State Tracking Challenge (DSTC) shared task series formalised the evaluation and provided labelled DST datasets (Henderson et al., 2014a,b; Williams et al., 2016). While a plethora of DST models are available based on, e.g., handcrafted rules (Wang et al., 2014) or conditional random fields (Lee and Eskenazi, 2013), the recent DST methodology has seen a shift towards neuralnetwork architectures (Henderson et al., 2014c; Mrkšić et al., 2015; Liu and Perez, 2017, i.a.)\nModel: Neural Belief Tracker To detect intents in user utterances, most existing models rely on either (or both): 1) Spoken Language Understanding models which require large amounts of annotated training data; or 2) hand-crafted, domain-specific lexicons which try to capture lexical and morphological variation. 
The Neural Belief Tracker (NBT) is a novel DST model which overcomes both issues\n7\n601\n602\n603\n604\n605\n606\n607\n608\n609\n610\n611\n612\n613\n614\n615\n616\n617\n618\n619\n620\n621\n622\n623\n624\n625\n626\n627\n628\n629\n630\n631\n632\n633\n634\n635\n636\n637\n638\n639\n640\n641\n642\n643\n644\n645\n646\n647\n648\n649\n650\n651\n652\n653\n654\n655\n656\n657\n658\n659\n660\n661\n662\n663\n664\n665\n666\n667\n668\n669\n670\n671\n672\n673\n674\n675\n676\n677\n678\n679\n680\n681\n682\n683\n684\n685\n686\n687\n688\n689\n690\n691\n692\n693\n694\n695\n696\n697\n698\n699\n0.1\n0.15\n0.2\n0.25\n0.3\n0.35\n0.4\n0.45\nDistributional MFix MFit-A MFit-AR 0.6\n0.65\n0.7\n0.75\n0.8\nS im\nL ex\n(S p ea rm\nan ’s ρ )\nEN Vector Collections\nD S T P erform ace (Joint)\nSimLex Morph-SimLex DST\n(a) English\n0.1\n0.15\n0.2\n0.25\n0.3\n0.35\n0.4\n0.45\nDistributional MFix MFit-A MFit-AR 0.6\n0.65\n0.7\n0.75\n0.8\nS im\nL ex\n(S p ea rm\nan ’s ρ )\nDE Vector Collections\nD S T P erform ace (Joint) SimLex Morph-SimLex DST\n(b) German\n0.1\n0.15\n0.2\n0.25\n0.3\n0.35\n0.4\n0.45\nDistributional MFix MFit-A MFit-AR 0.6\n0.65\n0.7\n0.75\n0.8\nS im\nL ex\n(S p ea rm\nan ’s ρ )\nIT Vector Collections\nD S T P erform ace (Joint) SimLex Morph-SimLex DST\n(c) Italian\n0.1\n0.15\n0.2\n0.25\n0.3\n0.35\n0.4\n0.45\nDistributional MFix MFit-A MFit-AR S im\nL ex\n(S p ea rm\nan ’s ρ )\nRU Vector Collections\nSimLex Morph-SimLex\n(d) Russian\nFigure 2: An overview of the results (Spearman’s ρ correlation) for four languages on SimLex-999 (blue squares), Morph-SimLex-999 (red triangles), and the downstream DST performance (black diamonds) using SGNS-LARGE vectors (d = 300), see Tab. 3 and Sect. 3. The left y axis measures the intrinsic word similarity performance, while the right y axis provides the scale for the DST performance.\nby reasoning purely over pre-trained word vectors (Mrkšić et al., 2016). 
The NBT learns to compose these vectors into intermediate utterance and context representations. These are then used to decide which of the ontology-defined intents (goals) have been expressed by the user. The NBT model keeps word vectors fixed during training, so that unseen, yet related words can be mapped to the right intent at test time (e.g. northern to north).\nData: Multilingual WOZ 2.0 Dataset Our DST evaluation is based on the WOZ dataset, released by Wen et al. (2017). In this Wizard-of-Oz setup, two Amazon Mechanical Turk workers assumed the role of the user and the system asking/providing restaurant information. Users typed instead of speaking, removing the need to deal with noisy speech recognition. In DSTC datasets, users would quickly adapt to the system’s inability to deal with complex queries. Conversely, the WOZ setup allowed them to use sophisticated language. The WOZ 2.0 release expanded the dataset to 1,200 dialogues (Mrkšić et al., 2016). In this work, we use translations of this dataset to Italian and German, provided by the authors of the original dataset.\nEvaluation Setup The principal metric we use to\nmeasure DST performance is joint goal accuracy, which represents the proportion of test set dialogue turns where all user goals expressed up to that point of the dialogue were decoded correctly (Henderson et al., 2014a). The NBT models for EN, DE and IT are trained using four variants of the SGNSLARGE vectors: 1) the initial distributional vectors; 2) morph-fixed; 3) and 4) the two variants of morphfitted vectors (see Sect. 3). Results and Discussion The diamond-dashed lines (against the right axes) in Fig. 2 show the DST performance of NBT models making use of the four vector collections. IT and DE benefit from both kinds of morph-fitting: IT performance increases 74.1→ 78.1 (MFIT-A) and DE performance rises even more: 60.6 → 66.3 (MFIT-AR), setting a new state-of-the-art score for both languages. 
The morph-fixed vectors do not enhance DST performance, probably because fixing word vectors to their highest frequency inflectional form eliminates useful semantic content encoded in the original vectors. On the other hand, morph-fitting makes use of this information, supplementing it with semantic relations between different morphological forms. These conclusions are in line with the SimLex and Morph-SimLex gains, where morph-fitting outper-\n8\n701\n702\n703\n704\n705\n706\n707\n708\n709\n710\n711\n712\n713\n714\n715\n716\n717\n718\n719\n720\n721\n722\n723\n724\n725\n726\n727\n728\n729\n730\n731\n732\n733\n734\n735\n736\n737\n738\n739\n740\n741\n742\n743\n744\n745\n746\n747\n748\n749\n750\n751\n752\n753\n754\n755\n756\n757\n758\n759\n760\n761\n762\n763\n764\n765\n766\n767\n768\n769\n770\n771\n772\n773\n774\n775\n776\n777\n778\n779\n780\n781\n782\n783\n784\n785\n786\n787\n788\n789\n790\n791\n792\n793\n794\n795\n796\n797\n798\n799\nforms distributional and morph-fixed vectors. English performance shows little variation across the four word vector collections investigated here. This corroborates our intuition that, as a morphologically simpler language, English stands to gain less from fine-tuning the morphological variation for downstream applications. This result again points at the discrepancy between intrinsic and extrinsic evaluation: the considerable gains in SimLex performance do not necessarily induce similar gains in downstream performance." }, { "heading" : "6 Related Work", "text" : "Semantic Specialisation A standard approach to incorporating external information into vector spaces is to pull the representations of similar words closer together. Some models integrate such constraints into the training procedure, modifying the prior or the regularisation (Yu and Dredze, 2014; Xu et al., 2014; Bian et al., 2014; Kiela et al., 2015), or using a variant of the SGNS-style objective (Liu et al., 2015; Osborne et al., 2016). 
Another class of models, popularly termed retrofitting, injects lexical knowledge from available semantic databases (e.g., WordNet, PPDB) into pre-trained word vectors (Faruqui et al., 2015; Jauhar et al., 2015; Wieting et al., 2015; Nguyen et al., 2016; Mrkšić et al., 2016). Morph-fitting falls into the latter category. However, instead of resorting to curated knowledge bases, and experimenting solely with English, we show that the morphological richness of any language can be exploited as a source of inexpensive supervision for fine-tuning vector spaces, at the same time specialising them to better reflect true semantic similarity.\nWord Vectors and Morphology The use of morphological resources to improve the representations of morphemes and words is an active area of research. The majority of proposed architectures encode morphological information, provided either as gold standard morphological resources (SylakGlassman et al., 2015) such as CELEX (Baayen et al., 1995) or as an external analyser such as Morfessor (Creutz and Lagus, 2007), along with distributional information jointly at training time in the language modelling (LM) objective (Luong et al., 2013; Botha and Blunsom, 2014; Qiu et al., 2014; Cotterell and Schütze, 2015; Bhatia et al., 2016, i.a.). The key idea is to learn a morphological composition function (Lazaridou et al., 2013; Cotterell and Schütze, 2017) which synthesises the\nrepresentation of a word given the representations of its constituent morphemes. 
Contrary to our work, these models typically coalesce all lexical relations.\nAnother class of models, operating at the character level, shares a similar methodology: such models compose token-level representations from subcomponent embeddings (subwords, morphemes, or characters) (dos Santos and Zadrozny, 2014; Ling et al., 2015; Cao and Rei, 2016; Kim et al., 2016; Wieting et al., 2016; Verwimp et al., 2017, i.a.).\nIn contrast to prior work, our model decouples the use of morphological information, now provided in the form of inflectional and derivational rules transformed into linguistic constraints, from the actual training. This pipelined approach results in a simpler, more portable model. In spirit, our work is similar to Cotterell et al. (2016), who formulate the idea of post-training specialisation in a generative Bayesian framework. Their work uses gold morphological lexicons; we show that competitive performance can be achieved using a nonexhaustive set of simple rules. Our framework facilitates the inclusion of antonyms at no extra cost and naturally extends to constraints from other sources (e.g., WordNet) in future work. Another practical difference is that we focus on similarity and evaluate morph-fitting in a well-defined downstream task where the artefacts of the distributional hypothesis are known to prompt statistical system failures." }, { "heading" : "7 Conclusion and Future Work", "text" : "We have presented a novel morph-fitting method which injects morphological knowledge in the form of linguistic constraints into word vector spaces. The method makes use of implicit semantic signals encoded in inflectional and derivational rules which describe the morphological processes in a language. The results in intrinsic word similarity tasks show that morph-fitting improves vector spaces induced by distributional models across four languages. 
Finally, we have shown that the use of morph-fitted vectors boosts the performance of downstream language understanding models which rely on word representations as features, especially for morphologically rich languages such as German.\nFuture work will focus on other potential sources of morphological knowledge (Soricut and Och, 2015), porting the framework to other morphologically rich languages and downstream tasks, and on further refinements of the post-processing algorithm and constraints selection.\n9\n801\n802\n803\n804\n805\n806\n807\n808\n809\n810\n811\n812\n813\n814\n815\n816\n817\n818\n819\n820\n821\n822\n823\n824\n825\n826\n827\n828\n829\n830\n831\n832\n833\n834\n835\n836\n837\n838\n839\n840\n841\n842\n843\n844\n845\n846\n847\n848\n849\n850\n851\n852\n853\n854\n855\n856\n857\n858\n859\n860\n861\n862\n863\n864\n865\n866\n867\n868\n869\n870\n871\n872\n873\n874\n875\n876\n877\n878\n879\n880\n881\n882\n883\n884\n885\n886\n887\n888\n889\n890\n891\n892\n893\n894\n895\n896\n897\n898\n899" } ], "references" : [ { "title" : "Polyglot: Distributed word representations for multilingual NLP", "author" : [ "Rami Al-Rfou", "Bryan Perozzi", "Steven Skiena." ], "venue" : "Proceedings of CoNLL. pages 183–192. http://www.aclweb.org/anthology/W133520.", "citeRegEx" : "Al.Rfou et al\\.,? 2013", "shortCiteRegEx" : "Al.Rfou et al\\.", "year" : 2013 }, { "title" : "Enriching morphologically poor languages for statistical machine translation", "author" : [ "Eleftherios Avramidis", "Philipp Koehn." ], "venue" : "Proceedings of ACL. pages 763–770. http://www.aclweb.org/anthology/P/P08/P08-1087.", "citeRegEx" : "Avramidis and Koehn.,? 2008", "shortCiteRegEx" : "Avramidis and Koehn.", "year" : 2008 }, { "title" : "The CELEX lexical data base on CD-ROM", "author" : [ "Harald R. Baayen", "Richard Piepenbrock", "Hedderik van Rijn" ], "venue" : null, "citeRegEx" : "Baayen et al\\.,? 
\\Q1995\\E", "shortCiteRegEx" : "Baayen et al\\.", "year" : 1995 }, { "title" : "Tailoring continuous word representations for dependency parsing", "author" : [ "Mohit Bansal", "Kevin Gimpel", "Karen Livescu." ], "venue" : "Proceedings of ACL. pages 809– 815. http://www.aclweb.org/anthology/P14-2131.", "citeRegEx" : "Bansal et al\\.,? 2014", "shortCiteRegEx" : "Bansal et al\\.", "year" : 2014 }, { "title" : "Don’t count, predict! A systematic comparison of contextcounting vs", "author" : [ "Marco Baroni", "Georgiana Dinu", "Germán Kruszewski." ], "venue" : "context-predicting semantic vectors. In Proceedings of ACL. pages 238–247.", "citeRegEx" : "Baroni et al\\.,? 2014", "shortCiteRegEx" : "Baroni et al\\.", "year" : 2014 }, { "title" : "Morphological priors for probabilistic neural word embeddings", "author" : [ "Parminder Bhatia", "Robert Guthrie", "Jacob Eisenstein." ], "venue" : "Proceedings of EMNLP. pages 490–500. https://aclweb.org/anthology/D16-1047.", "citeRegEx" : "Bhatia et al\\.,? 2016", "shortCiteRegEx" : "Bhatia et al\\.", "year" : 2016 }, { "title" : "Knowledge-powered deep learning for word embedding", "author" : [ "Jiang Bian", "Bin Gao", "Tie-Yan Liu." ], "venue" : "Proceedings of ECML-PKDD. pages 132– 148. https://doi.org/10.1007/978-3-662-44848-9_9.", "citeRegEx" : "Bian et al\\.,? 2014", "shortCiteRegEx" : "Bian et al\\.", "year" : 2014 }, { "title" : "Compositional morphology for word representations and language modelling", "author" : [ "Jan A. Botha", "Phil Blunsom." ], "venue" : "Proceedings of ICML. pages 1899–1907. http://jmlr.org/proceedings/papers/v32/botha14.html.", "citeRegEx" : "Botha and Blunsom.,? 2014", "shortCiteRegEx" : "Botha and Blunsom.", "year" : 2014 }, { "title" : "Multimodal distributional semantics", "author" : [ "Elia Bruni", "Nam-Khanh Tran", "Marco Baroni." ], "venue" : "Journal of Artificial Intelligence Research 49:1–47. https://doi.org/10.1613/jair.4135.", "citeRegEx" : "Bruni et al\\.,? 
2014", "shortCiteRegEx" : "Bruni et al\\.", "year" : 2014 }, { "title" : "A joint model for word embedding and word morphology", "author" : [ "Kris Cao", "Marek Rei." ], "venue" : "Proceedings of the 1st Workshop on Representation Learning for NLP. pages 18–26. http://aclweb.org/anthology/W/W16/W16-1603.", "citeRegEx" : "Cao and Rei.,? 2016", "shortCiteRegEx" : "Cao and Rei.", "year" : 2016 }, { "title" : "A fast and accurate dependency parser using neural networks", "author" : [ "Danqi Chen", "Christopher D. Manning." ], "venue" : "Proceedings of EMNLP. pages 740–750. http://www.aclweb.org/anthology/D14-1082.", "citeRegEx" : "Chen and Manning.,? 2014", "shortCiteRegEx" : "Chen and Manning.", "year" : 2014 }, { "title" : "Natural language processing (almost) from scratch", "author" : [ "Pavel P. Kuksa." ], "venue" : "Journal of Machine Learning Research 12:2493–2537. http://dl.acm.org/citation.cfm?id=1953048.2078186.", "citeRegEx" : "Kuksa.,? 2011", "shortCiteRegEx" : "Kuksa.", "year" : 2011 }, { "title" : "Morphological word-embeddings", "author" : [ "Ryan Cotterell", "Hinrich Schütze." ], "venue" : "Proceedings of NAACL-HLT . pages 1287–1292. http://www.aclweb.org/anthology/N15-1140.", "citeRegEx" : "Cotterell and Schütze.,? 2015", "shortCiteRegEx" : "Cotterell and Schütze.", "year" : 2015 }, { "title" : "Joint semantic synthesis and morphological analysis of the derived word", "author" : [ "Ryan Cotterell", "Hinrich Schütze." ], "venue" : "Transactions of the ACL (to appear) https://arxiv.org/abs/1701.00946.", "citeRegEx" : "Cotterell and Schütze.,? 2017", "shortCiteRegEx" : "Cotterell and Schütze.", "year" : 2017 }, { "title" : "Morphological smoothing and extrapolation of word embeddings", "author" : [ "Ryan Cotterell", "Hinrich Schütze", "Jason Eisner." ], "venue" : "Proceedings of ACL. pages 1651–1660. http://www.aclweb.org/anthology/P161156.", "citeRegEx" : "Cotterell et al\\.,? 
2016", "shortCiteRegEx" : "Cotterell et al\\.", "year" : 2016 }, { "title" : "Unsupervised models for morpheme segmentation and morphology learning", "author" : [ "Mathias Creutz", "Krista Lagus." ], "venue" : "TSLP 4(1):3:1–3:34. http://doi.acm.org/10.1145/1217098.1217101.", "citeRegEx" : "Creutz and Lagus.,? 2007", "shortCiteRegEx" : "Creutz and Lagus.", "year" : 2007 }, { "title" : "From Distributional to Semantic Similarity", "author" : [ "James Curran." ], "venue" : "Ph.D. thesis, School of Informatics, University of Edinburgh. http://hdl.handle.net/1842/563.", "citeRegEx" : "Curran.,? 2004", "shortCiteRegEx" : "Curran.", "year" : 2004 }, { "title" : "Improving zero-shot learning by mitigating the hubness problem", "author" : [ "Georgiana Dinu", "Angeliki Lazaridou", "Marco Baroni." ], "venue" : "Proceedings of ICLR (Workshop Papers). http://arxiv.org/abs/1412.6568.", "citeRegEx" : "Dinu et al\\.,? 2015", "shortCiteRegEx" : "Dinu et al\\.", "year" : 2015 }, { "title" : "Learning character-level representations for part-of-speech tagging", "author" : [ "Cícero Nogueira dos Santos", "Bianca Zadrozny." ], "venue" : "Proceedings of ICML. pages 1818–1826. http://jmlr.org/proceedings/papers/v32/santos14.html.", "citeRegEx" : "Santos and Zadrozny.,? 2014", "shortCiteRegEx" : "Santos and Zadrozny.", "year" : 2014 }, { "title" : "Adaptive subgradient methods for online learning and stochastic optimization", "author" : [ "John C. Duchi", "Elad Hazan", "Yoram Singer." ], "venue" : "Journal of Machine Learning Research 12:2121–2159. http://dl.acm.org/citation.cfm?id=2021068.", "citeRegEx" : "Duchi et al\\.,? 2011", "shortCiteRegEx" : "Duchi et al\\.", "year" : 2011 }, { "title" : "Representing multilingual data as linked data: The case of BabelNet 2.0", "author" : [ "Maud Ehrmann", "Francesco Cecconi", "Daniele Vannella", "John Philip Mccrae", "Philipp Cimiano", "Roberto Navigli" ], "venue" : "In Proceedings of LREC", "citeRegEx" : "Ehrmann et al\\.,? 
\\Q2014\\E", "shortCiteRegEx" : "Ehrmann et al\\.", "year" : 2014 }, { "title" : "Retrofitting word vectors to semantic lexicons", "author" : [ "Manaal Faruqui", "Jesse Dodge", "Sujay Kumar Jauhar", "Chris Dyer", "Eduard Hovy", "Noah A. Smith." ], "venue" : "Proceedings of NAACL-HLT . pages 1606– 1615. http://www.aclweb.org/anthology/N15-1184.", "citeRegEx" : "Faruqui et al\\.,? 2015", "shortCiteRegEx" : "Faruqui et al\\.", "year" : 2015 }, { "title" : "Improving vector space word representations using multilingual correlation", "author" : [ "Manaal Faruqui", "Chris Dyer." ], "venue" : "Proceedings of EACL. pages 462– 471. http://www.aclweb.org/anthology/E14-1049.", "citeRegEx" : "Faruqui and Dyer.,? 2014", "shortCiteRegEx" : "Faruqui and Dyer.", "year" : 2014 }, { "title" : "Placing search in context: The concept revisited", "author" : [ "Lev Finkelstein", "Evgeniy Gabrilovich", "Yossi Matias", "Ehud Rivlin", "Zach Solan", "Gadi Wolfman", "Eytan Ruppin." ], "venue" : "ACM Transactions on Information Systems 20(1):116–131.", "citeRegEx" : "Finkelstein et al\\.,? 2002", "shortCiteRegEx" : "Finkelstein et al\\.", "year" : 2002 }, { "title" : "An Introduction to Language, 10th Edition", "author" : [ "Victoria Fromkin", "Robert Rodman", "Nina Hyams" ], "venue" : null, "citeRegEx" : "Fromkin et al\\.,? \\Q2013\\E", "shortCiteRegEx" : "Fromkin et al\\.", "year" : 2013 }, { "title" : "PPDB: The Paraphrase Database", "author" : [ "Juri Ganitkevitch", "Benjamin Van Durme", "Chris Callison-Burch." ], "venue" : "Proceedings of NAACL-HLT . pages 758–764. http://www.aclweb.org/anthology/N131092.", "citeRegEx" : "Ganitkevitch et al\\.,? 2013", "shortCiteRegEx" : "Ganitkevitch et al\\.", "year" : 2013 }, { "title" : "SimVerb3500: A large-scale evaluation set of verb similarity", "author" : [ "Daniela Gerz", "Ivan Vulić", "Felix Hill", "Roi Reichart", "Anna Korhonen." ], "venue" : "Proceedings of EMNLP. pages 2173–2182. 
https://aclweb.org/anthology/D16-1235.", "citeRegEx" : "Gerz et al\\.,? 2016", "shortCiteRegEx" : "Gerz et al\\.", "year" : 2016 }, { "title" : "Understanding morphology", "author" : [ "Martin Haspelmath", "Andrea Sims" ], "venue" : null, "citeRegEx" : "Haspelmath and Sims.,? \\Q2013\\E", "shortCiteRegEx" : "Haspelmath and Sims.", "year" : 2013 }, { "title" : "The Second Dialog State Tracking Challenge", "author" : [ "Matthew Henderson", "Blaise Thomson", "Jason D. Wiliams." ], "venue" : "Proceedings of SIGDIAL. pages 263– 272. http://aclweb.org/anthology/W/W14/W144337.pdf.", "citeRegEx" : "Henderson et al\\.,? 2014a", "shortCiteRegEx" : "Henderson et al\\.", "year" : 2014 }, { "title" : "The Third Dialog State Tracking Challenge", "author" : [ "Matthew Henderson", "Blaise Thomson", "Jason D. Wiliams." ], "venue" : "Proceedings of IEEE SLT . pages 324– 329. https://doi.org/10.1109/SLT.2014.7078595.", "citeRegEx" : "Henderson et al\\.,? 2014b", "shortCiteRegEx" : "Henderson et al\\.", "year" : 2014 }, { "title" : "Word-based dialog state tracking with recurrent neural networks", "author" : [ "Matthew Henderson", "Blaise Thomson", "Steve Young." ], "venue" : "Proceedings of SIGDIAL. pages 292–299. http://aclweb.org/anthology/W/W14/W14-", "citeRegEx" : "Henderson et al\\.,? 2014c", "shortCiteRegEx" : "Henderson et al\\.", "year" : 2014 }, { "title" : "SimLex-999: Evaluating semantic models with (genuine) similarity estimation", "author" : [ "Felix Hill", "Roi Reichart", "Anna Korhonen." ], "venue" : "Computational Linguistics 41(4):665–695. https://doi.org/10.1162/COLI_a_00237.", "citeRegEx" : "Hill et al\\.,? 2015", "shortCiteRegEx" : "Hill et al\\.", "year" : 2015 }, { "title" : "Ontologically grounded multi-sense representation learning for semantic vector space models", "author" : [ "Sujay Kumar Jauhar", "Chris Dyer", "Eduard H. Hovy." ], "venue" : "Proceedings of NAACL. pages 683–693. http://www.aclweb.org/anthology/N15-1070.", "citeRegEx" : "Jauhar et al\\.,? 
2015", "shortCiteRegEx" : "Jauhar et al\\.", "year" : 2015 }, { "title" : "Specializing word embeddings for similarity or relatedness", "author" : [ "Douwe Kiela", "Felix Hill", "Stephen Clark." ], "venue" : "Proceedings of EMNLP. pages 2044– 2048. http://aclweb.org/anthology/D15-1242.", "citeRegEx" : "Kiela et al\\.,? 2015", "shortCiteRegEx" : "Kiela et al\\.", "year" : 2015 }, { "title" : "Character-aware neural language models", "author" : [ "Yoon Kim", "Yacine Jernite", "David Sontag", "Alexander M. Rush." ], "venue" : "Proceedings of AAAI. pages 2741– 2749.", "citeRegEx" : "Kim et al\\.,? 2016", "shortCiteRegEx" : "Kim et al\\.", "year" : 2016 }, { "title" : "Compositionally derived representations of morphologically complex words in distributional semantics", "author" : [ "Angeliki Lazaridou", "Marco Marelli", "Roberto Zamparelli", "Marco Baroni." ], "venue" : "Proceedings of ACL. pages 1517–1526.", "citeRegEx" : "Lazaridou et al\\.,? 2013", "shortCiteRegEx" : "Lazaridou et al\\.", "year" : 2013 }, { "title" : "Recipe for building robust spoken dialog state trackers: Dialog State Tracking Challenge system description", "author" : [ "Sungjin Lee", "Maxine Eskenazi." ], "venue" : "Proceedings of SIGDIAL. pages 414– 422. http://aclweb.org/anthology/W/W13/W13-", "citeRegEx" : "Lee and Eskenazi.,? 2013", "shortCiteRegEx" : "Lee and Eskenazi.", "year" : 2013 }, { "title" : "Separated by an un-common language: Towards judgment language informed vector space modeling", "author" : [ "Ira Leviant", "Roi Reichart." ], "venue" : "CoRR abs/1508.00106. http://arxiv.org/abs/1508.00106.", "citeRegEx" : "Leviant and Reichart.,? 2015", "shortCiteRegEx" : "Leviant and Reichart.", "year" : 2015 }, { "title" : "Dependency-based word embeddings", "author" : [ "Omer Levy", "Yoav Goldberg." ], "venue" : "Proceedings of ACL. pages 302–308. http://www.aclweb.org/anthology/P14-2050.", "citeRegEx" : "Levy and Goldberg.,? 
2014", "shortCiteRegEx" : "Levy and Goldberg.", "year" : 2014 }, { "title" : "Finding function in form: Compositional character models for open vocabulary word representation", "author" : [ "Wang Ling", "Chris Dyer", "Alan W. Black", "Isabel Trancoso", "Ramon Fermandez", "Silvio Amir", "Luis Marujo", "Tiago Luis." ], "venue" : "Proceedings of EMNLP.", "citeRegEx" : "Ling et al\\.,? 2015", "shortCiteRegEx" : "Ling et al\\.", "year" : 2015 }, { "title" : "Gated end-to-end memory networks", "author" : [ "Fei Liu", "Julien Perez." ], "venue" : "Proceedings of EACL (to appear). http://arxiv.org/abs/1610.04211.", "citeRegEx" : "Liu and Perez.,? 2017", "shortCiteRegEx" : "Liu and Perez.", "year" : 2017 }, { "title" : "Learning semantic word embeddings based on ordinal knowledge constraints", "author" : [ "Quan Liu", "Hui Jiang", "Si Wei", "Zhen-Hua Ling", "Yu Hu." ], "venue" : "Proceedings of ACL. pages 1501–1511. http://www.aclweb.org/anthology/P15-1145.", "citeRegEx" : "Liu et al\\.,? 2015", "shortCiteRegEx" : "Liu et al\\.", "year" : 2015 }, { "title" : "Bilingual word representations with monolingual quality in mind", "author" : [ "Thang Luong", "Hieu Pham", "Christopher D. Manning." ], "venue" : "Proceedings of the 1st Workshop on Vector Space Modeling for Natural Language Processing. pages 151–159.", "citeRegEx" : "Luong et al\\.,? 2015", "shortCiteRegEx" : "Luong et al\\.", "year" : 2015 }, { "title" : "Better word representations with recursive neural networks for morphology", "author" : [ "Thang Luong", "Richard Socher", "Christopher Manning." ], "venue" : "Proceedings of CoNLL. pages 104–113. http://www.aclweb.org/anthology/W13-3512.", "citeRegEx" : "Luong et al\\.,? 2013", "shortCiteRegEx" : "Luong et al\\.", "year" : 2013 }, { "title" : "Context2vec: Learning generic context embedding with bidirectional LSTM", "author" : [ "Oren Melamud", "Jacob Goldberger", "Ido Dagan." ], "venue" : "Proceedings of CoNLL. pages 51–61. 
http://aclweb.org/anthology/K/K16/K16-1006.pdf.", "citeRegEx" : "Melamud et al\\.,? 2016", "shortCiteRegEx" : "Melamud et al\\.", "year" : 2016 }, { "title" : "Distributed representations of words and phrases and their compositionality", "author" : [ "Tomas Mikolov", "Ilya Sutskever", "Kai Chen", "Gregory S. Corrado", "Jeffrey Dean." ], "venue" : "Proceedings of NIPS. pages 3111–3119.", "citeRegEx" : "Mikolov et al\\.,? 2013", "shortCiteRegEx" : "Mikolov et al\\.", "year" : 2013 }, { "title" : "Learning word embeddings efficiently with noise-contrastive estimation", "author" : [ "Andriy Mnih", "Koray Kavukcuoglu." ], "venue" : "Proceedings of NIPS. pages 2265– 2273.", "citeRegEx" : "Mnih and Kavukcuoglu.,? 2013", "shortCiteRegEx" : "Mnih and Kavukcuoglu.", "year" : 2013 }, { "title" : "Multidomain dialog state tracking using recurrent neural networks", "author" : [ "Nikola Mrkšić", "Diarmuid Ó Séaghdha", "Blaise Thomson", "Milica Gašić", "Pei-Hao Su", "David Vandyke", "Tsung-Hsien Wen", "Steve Young." ], "venue" : "Proceedings of ACL. pages 794–799.", "citeRegEx" : "Mrkšić et al\\.,? 2015", "shortCiteRegEx" : "Mrkšić et al\\.", "year" : 2015 }, { "title" : "Neural Belief Tracker: Data-driven dialogue state tracking", "author" : [ "Nikola Mrkšić", "Diarmuid Ó Séaghdha", "Blaise Thomson", "Tsung-Hsien Wen", "Steve Young." ], "venue" : "arXiv preprint: 1606.03777. http://arxiv.org/abs/1606.03777.", "citeRegEx" : "Mrkšić et al\\.,? 2016", "shortCiteRegEx" : "Mrkšić et al\\.", "year" : 2016 }, { "title" : "Counter-fitting word vectors to linguistic constraints", "author" : [ "Nikola Mrkšić", "Diarmuid Ó Séaghdha", "Blaise Thomson", "Milica Gašić", "Lina Maria Rojas-Barahona", "Pei-Hao Su", "David Vandyke", "Tsung-Hsien Wen", "Steve Young." ], "venue" : "Proceedings of NAACL-", "citeRegEx" : "Mrkšić et al\\.,? 
2016", "shortCiteRegEx" : "Mrkšić et al\\.", "year" : 2016 }, { "title" : "Rectified linear units improve restricted Boltzmann machines", "author" : [ "Vinod Nair", "Geoffrey E. Hinton." ], "venue" : "Proceedings of ICML. pages 807–814. http://www.icml2010.org/papers/432.pdf.", "citeRegEx" : "Nair and Hinton.,? 2010", "shortCiteRegEx" : "Nair and Hinton.", "year" : 2010 }, { "title" : "BabelNet: The automatic construction, evaluation and application of a wide-coverage multilingual semantic network", "author" : [ "Roberto Navigli", "Simone Paolo Ponzetto." ], "venue" : "Artificial Intelligence 193:217–250. https://doi.org/10.1016/j.artint.2012.07.001.", "citeRegEx" : "Navigli and Ponzetto.,? 2012", "shortCiteRegEx" : "Navigli and Ponzetto.", "year" : 2012 }, { "title" : "Integrating distributional lexical contrast into word embeddings for antonymsynonym distinction", "author" : [ "Kim Anh Nguyen", "Sabine Schulte im Walde", "Ngoc Thang Vu." ], "venue" : "Proceedings of ACL. pages 454–459. http://anthology.aclweb.org/P16-2074.", "citeRegEx" : "Nguyen et al\\.,? 2016", "shortCiteRegEx" : "Nguyen et al\\.", "year" : 2016 }, { "title" : "Encoding prior knowledge with eigenword embeddings", "author" : [ "Dominique Osborne", "Shashi Narayan", "Shay Cohen." ], "venue" : "Transactions of the ACL 4:417–430.", "citeRegEx" : "Osborne et al\\.,? 2016", "shortCiteRegEx" : "Osborne et al\\.", "year" : 2016 }, { "title" : "PPDB 2.0: Better paraphrase ranking, finegrained entailment relations, word embeddings, and style classification", "author" : [ "Ellie Pavlick", "Pushpendre Rastogi", "Juri Ganitkevitch", "Benjamin Van Durme", "Chris Callison-Burch" ], "venue" : "In Proceedings of ACL", "citeRegEx" : "Pavlick et al\\.,? \\Q2015\\E", "shortCiteRegEx" : "Pavlick et al\\.", "year" : 2015 }, { "title" : "Glove: Global vectors for word representation", "author" : [ "Jeffrey Pennington", "Richard Socher", "Christopher Manning." ], "venue" : "Proceedings of EMNLP. pages 1532– 1543. 
http://www.aclweb.org/anthology/D14-1162.", "citeRegEx" : "Pennington et al\\.,? 2014", "shortCiteRegEx" : "Pennington et al\\.", "year" : 2014 }, { "title" : "Co-learning of word representations and morpheme representations", "author" : [ "Siyu Qiu", "Qing Cui", "Jiang Bian", "Bin Gao", "Tie-Yan Liu." ], "venue" : "Proceedings of COLING. pages 141–150. http://www.aclweb.org/anthology/C14-1015.", "citeRegEx" : "Qiu et al\\.,? 2014", "shortCiteRegEx" : "Qiu et al\\.", "year" : 2014 }, { "title" : "Knowledge-free induction of inflectional morphologies", "author" : [ "Patrick Schone", "Daniel Jurafsky." ], "venue" : "Proceedings of NAACL. http://aclweb.org/anthology/N/N01/N01-1024.", "citeRegEx" : "Schone and Jurafsky.,? 2001", "shortCiteRegEx" : "Schone and Jurafsky.", "year" : 2001 }, { "title" : "Symmetric pattern based word embeddings for improved word similarity prediction", "author" : [ "Roy Schwartz", "Roi Reichart", "Ari Rappoport." ], "venue" : "Proceedings of CoNLL. pages 258–267. http://www.aclweb.org/anthology/K15-1026.", "citeRegEx" : "Schwartz et al\\.,? 2015", "shortCiteRegEx" : "Schwartz et al\\.", "year" : 2015 }, { "title" : "Symmetric patterns and coordinations: Fast and enhanced representations of verbs and adjectives", "author" : [ "Roy Schwartz", "Roi Reichart", "Ari Rappoport." ], "venue" : "Proceedings of NAACL-HLT . pages 499–505. http://www.aclweb.org/anthology/N16-1060.", "citeRegEx" : "Schwartz et al\\.,? 2016", "shortCiteRegEx" : "Schwartz et al\\.", "year" : 2016 }, { "title" : "Unsupervised morphology induction using word embeddings", "author" : [ "Radu Soricut", "Franz Och." ], "venue" : "Proceedings of NAACL-HLT . pages 1627–1637. http://www.aclweb.org/anthology/N15-1186.", "citeRegEx" : "Soricut and Och.,? 2015", "shortCiteRegEx" : "Soricut and Och.", "year" : 2015 }, { "title" : "A languageindependent feature schema for inflectional morphology", "author" : [ "John Sylak-Glassman", "Christo Kirov", "David Yarowsky", "Roger Que." 
], "venue" : "Proceedings of ACL. pages 674–680. http://www.aclweb.org/anthology/P15-2111.", "citeRegEx" : "Sylak.Glassman et al\\.,? 2015", "shortCiteRegEx" : "Sylak.Glassman et al\\.", "year" : 2015 }, { "title" : "Statistical parsing of morphologically rich languages (SPMRL) What, how and whither", "author" : [ "Reut Tsarfaty", "Djamé Seddah", "Yoav Goldberg", "Sandra Kuebler", "Yannick Versley", "Marie Candito", "Jennifer Foster", "Ines Rehbein", "Lamia Tounsi." ], "venue" : "Proceed-", "citeRegEx" : "Tsarfaty et al\\.,? 2010", "shortCiteRegEx" : "Tsarfaty et al\\.", "year" : 2010 }, { "title" : "Word representations: A simple and general method for semi-supervised learning", "author" : [ "Joseph P. Turian", "Lev-Arie Ratinov", "Yoshua Bengio." ], "venue" : "Proceedings of ACL. pages 384–394. http://www.aclweb.org/anthology/P10-1040.", "citeRegEx" : "Turian et al\\.,? 2010", "shortCiteRegEx" : "Turian et al\\.", "year" : 2010 }, { "title" : "From frequency to meaning: vector space models of semantics", "author" : [ "Peter D. Turney", "Patrick Pantel." ], "venue" : "Journal of Artifical Intelligence Research 37(1):141–188. https://doi.org/10.1613/jair.2934.", "citeRegEx" : "Turney and Pantel.,? 2010", "shortCiteRegEx" : "Turney and Pantel.", "year" : 2010 }, { "title" : "Character-word LSTM language models", "author" : [ "Lyan Verwimp", "Joris Pelemans", "Hugo Van hamme", "Patrick Wambacq." ], "venue" : "Proceedings of EACL (to appear).", "citeRegEx" : "Verwimp et al\\.,? 2017", "shortCiteRegEx" : "Verwimp et al\\.", "year" : 2017 }, { "title" : "Is \"universal syntax\" universally useful for learning distributed word representations? In Proceedings of ACL", "author" : [ "Ivan Vulić", "Anna Korhonen." ], "venue" : "pages 518–524. http://anthology.aclweb.org/P16-2084.", "citeRegEx" : "Vulić and Korhonen.,? 
2016a", "shortCiteRegEx" : "Vulić and Korhonen.", "year" : 2016 }, { "title" : "On the role of seed lexicons in learning bilingual word embeddings", "author" : [ "Ivan Vulić", "Anna Korhonen." ], "venue" : "Proceedings of ACL. pages 247–257. http://www.aclweb.org/anthology/P16-1024.", "citeRegEx" : "Vulić and Korhonen.,? 2016b", "shortCiteRegEx" : "Vulić and Korhonen.", "year" : 2016 }, { "title" : "Knowledge graph embedding by translating on hyperplanes", "author" : [ "Zhen Wang", "Jianwen Zhang", "Jianlin Feng", "Zheng Chen." ], "venue" : "Proceedings of AAAI. pages 1112–1119.", "citeRegEx" : "Wang et al\\.,? 2014", "shortCiteRegEx" : "Wang et al\\.", "year" : 2014 }, { "title" : "A networkbased end-to-end trainable task-oriented dialogue system", "author" : [ "Tsung-Hsien Wen", "David Vandyke", "Nikola Mrkšić", "Milica Gašić", "Lina M. Rojas-Barahona", "Pei-Hao Su", "Stefan Ultes", "Steve Young." ], "venue" : "Proceedings of EACL (to appear).", "citeRegEx" : "Wen et al\\.,? 2017", "shortCiteRegEx" : "Wen et al\\.", "year" : 2017 }, { "title" : "From paraphrase database to compositional paraphrase model and back", "author" : [ "John Wieting", "Mohit Bansal", "Kevin Gimpel", "Karen Livescu." ], "venue" : "Transactions of the ACL 3:345–358.", "citeRegEx" : "Wieting et al\\.,? 2015", "shortCiteRegEx" : "Wieting et al\\.", "year" : 2015 }, { "title" : "Charagram: Embedding words and sentences via character n-grams", "author" : [ "John Wieting", "Mohit Bansal", "Kevin Gimpel", "Karen Livescu." ], "venue" : "Proceedings of EMNLP. pages 1504–1515. https://aclweb.org/anthology/D16-1157.", "citeRegEx" : "Wieting et al\\.,? 2016", "shortCiteRegEx" : "Wieting et al\\.", "year" : 2016 }, { "title" : "The Dialog State Tracking Challenge series: A review", "author" : [ "Jason D. Williams", "Antoine Raux", "Matthew Henderson." ], "venue" : "Dialogue & Discourse 7(3):4–33.", "citeRegEx" : "Williams et al\\.,? 
2016", "shortCiteRegEx" : "Williams et al\\.", "year" : 2016 }, { "title" : "RC-NET: A general framework for incorporating knowledge into word representations", "author" : [ "Chang Xu", "Yalong Bai", "Jiang Bian", "Bin Gao", "Gang Wang", "Xiaoguang Liu", "Tie-Yan Liu." ], "venue" : "Proceedings of CIKM. pages 1219–1228.", "citeRegEx" : "Xu et al\\.,? 2014", "shortCiteRegEx" : "Xu et al\\.", "year" : 2014 }, { "title" : "Cognitive User Interfaces", "author" : [ "Steve Young." ], "venue" : "IEEE Signal Processing Magazine .", "citeRegEx" : "Young.,? 2010", "shortCiteRegEx" : "Young.", "year" : 2010 }, { "title" : "Improving lexical embeddings with semantic knowledge", "author" : [ "Mo Yu", "Mark Dredze." ], "venue" : "Proceedings of ACL. pages 545–550. http://www.aclweb.org/anthology/P14-2089.", "citeRegEx" : "Yu and Dredze.,? 2014", "shortCiteRegEx" : "Yu and Dredze.", "year" : 2014 }, { "title" : "DErivBase: Inducing and evaluating a derivational morphology resource for German", "author" : [ "Britta Zeller", "Jan Šnajder", "Sebastian Padó." ], "venue" : "Proceedings of ACL. pages 1201–1211. http://www.aclweb.org/anthology/P13-1118.", "citeRegEx" : "Zeller et al\\.,? 2013", "shortCiteRegEx" : "Zeller et al\\.", "year" : 2013 }, { "title" : "Bilingual word embeddings for phrase-based machine translation", "author" : [ "Will Y. Zou", "Richard Socher", "Daniel Cer", "Christopher D. Manning." ], "venue" : "Proceedings of EMNLP. pages 1393–1398. http://www.aclweb.org/anthology/D13-1141.", "citeRegEx" : "Zou et al\\.,? 
2013", "shortCiteRegEx" : "Zou et al\\.", "year" : 2013 } ], "referenceMentions" : [ { "referenceID" : 10, "context" : "Word representation learning has become a research area of central importance in natural language processing (NLP), with its usefulness demonstrated across many application areas such as parsing (Chen and Manning, 2014), machine translation (Zou et al.", "startOffset" : 195, "endOffset" : 219 }, { "referenceID" : 77, "context" : "Word representation learning has become a research area of central importance in natural language processing (NLP), with its usefulness demonstrated across many application areas such as parsing (Chen and Manning, 2014), machine translation (Zou et al., 2013), and many others (Turian et al.", "startOffset" : 241, "endOffset" : 259 }, { "referenceID" : 63, "context" : ", 2013), and many others (Turian et al., 2010; Collobert et al., 2011).", "startOffset" : 25, "endOffset" : 70 }, { "referenceID" : 62, "context" : "is expressed at word level” (Tsarfaty et al., 2010), pose specific challenges for NLP.", "startOffset" : 28, "endOffset" : 51 }, { "referenceID" : 21, "context" : "Formalised as an instance of the post-processing semantic specialisation paradigm (Faruqui et al., 2015; Mrkšić et al., 2016), morphfitting is steered by a set of linguistic constraints derived from simple language-specific rules which describe (a subset of) morphological processes in a language.", "startOffset" : 82, "endOffset" : 125 }, { "referenceID" : 48, "context" : "Formalised as an instance of the post-processing semantic specialisation paradigm (Faruqui et al., 2015; Mrkšić et al., 2016), morphfitting is steered by a set of linguistic constraints derived from simple language-specific rules which describe (a subset of) morphological processes in a language.", "startOffset" : 82, "endOffset" : 125 }, { "referenceID" : 31, "context" : "sian), yielding large and consistent improvements on benchmarking word similarity evaluation sets such as 
SimLex-999 (Hill et al., 2015), its multilingual extension (Leviant and Reichart, 2015), and SimVerb-3500 (Gerz et al.", "startOffset" : 117, "endOffset" : 136 }, { "referenceID" : 37, "context" : ", 2015), its multilingual extension (Leviant and Reichart, 2015), and SimVerb-3500 (Gerz et al.", "startOffset" : 36, "endOffset" : 64 }, { "referenceID" : 26, "context" : ", 2015), its multilingual extension (Leviant and Reichart, 2015), and SimVerb-3500 (Gerz et al., 2016).", "startOffset" : 83, "endOffset" : 102 }, { "referenceID" : 70, "context" : "1 The ATTRACT-REPEL Model The ATTRACT-REPEL model is an extension of PARAGRAM, proposed by Wieting et al. (2015). It provides a generic framework for incorporating similarity (e.", "startOffset" : 91, "endOffset" : 113 }, { "referenceID" : 50, "context" : "ReLU(x) = max(0, x) is the standard rectified linear unit (Nair and Hinton, 2010).", "startOffset" : 58, "endOffset" : 81 }, { "referenceID" : 25, "context" : "These can be extracted from a variety of semantic databases such as WordNet (Fellbaum, 1998), the Paraphrase Database (Ganitkevitch et al., 2013; Pavlick et al., 2015), or BabelNet (Navigli and Ponzetto, 2012; Ehrmann et al.", "startOffset" : 118, "endOffset" : 167 }, { "referenceID" : 54, "context" : "These can be extracted from a variety of semantic databases such as WordNet (Fellbaum, 1998), the Paraphrase Database (Ganitkevitch et al., 2013; Pavlick et al., 2015), or BabelNet (Navigli and Ponzetto, 2012; Ehrmann et al.", "startOffset" : 118, "endOffset" : 167 }, { "referenceID" : 51, "context" : ", 2015), or BabelNet (Navigli and Ponzetto, 2012; Ehrmann et al., 2014) as done in prior work (Faruqui et al.", "startOffset" : 21, "endOffset" : 71 }, { "referenceID" : 20, "context" : ", 2015), or BabelNet (Navigli and Ponzetto, 2012; Ehrmann et al., 2014) as done in prior work (Faruqui et al.", "startOffset" : 21, "endOffset" : 71 }, { "referenceID" : 19, "context" : "We train all models for 10 epochs with AdaGrad 
(Duchi et al., 2011).", "startOffset" : 47, "endOffset" : 67 }, { "referenceID" : 1, "context" : "We define two rules for English, widely recognised as morphologically simple (Avramidis and Koehn, 2008; Cotterell et al., 2016).", "startOffset" : 77, "endOffset" : 128 }, { "referenceID" : 14, "context" : "We define two rules for English, widely recognised as morphologically simple (Avramidis and Koehn, 2008; Cotterell et al., 2016).", "startOffset" : 77, "endOffset" : 128 }, { "referenceID" : 57, "context" : "On the other hand, the latter refers to the formation of new words with semantic shifts in meaning (Schone and Jurafsky, 2001; Haspelmath and Sims, 2013; Lazaridou et al., 2013; Zeller et al., 2013; Cotterell and Schütze, 2017).", "startOffset" : 99, "endOffset" : 227 }, { "referenceID" : 27, "context" : "On the other hand, the latter refers to the formation of new words with semantic shifts in meaning (Schone and Jurafsky, 2001; Haspelmath and Sims, 2013; Lazaridou et al., 2013; Zeller et al., 2013; Cotterell and Schütze, 2017).", "startOffset" : 99, "endOffset" : 227 }, { "referenceID" : 35, "context" : "On the other hand, the latter refers to the formation of new words with semantic shifts in meaning (Schone and Jurafsky, 2001; Haspelmath and Sims, 2013; Lazaridou et al., 2013; Zeller et al., 2013; Cotterell and Schütze, 2017).", "startOffset" : 99, "endOffset" : 227 }, { "referenceID" : 76, "context" : "On the other hand, the latter refers to the formation of new words with semantic shifts in meaning (Schone and Jurafsky, 2001; Haspelmath and Sims, 2013; Lazaridou et al., 2013; Zeller et al., 2013; Cotterell and Schütze, 2017).", "startOffset" : 99, "endOffset" : 227 }, { "referenceID" : 13, "context" : "On the other hand, the latter refers to the formation of new words with semantic shifts in meaning (Schone and Jurafsky, 2001; Haspelmath and Sims, 2013; Lazaridou et al., 2013; Zeller et al., 2013; Cotterell and Schütze, 2017).", "startOffset" : 99, 
"endOffset" : 227 }, { "referenceID" : 24, "context" : "We use a standard set of EN “antonymy” prefixes: APen = {dis, il, un, in, im, ir, mis, non, anti} (Fromkin et al., 2013).", "startOffset" : 98, "endOffset" : 120 }, { "referenceID" : 45, "context" : "Training Data and Setup For each of the four languages we train the skip-gram with negative sampling (SGNS) model (Mikolov et al., 2013) on the latest Wikipedia dump of each language.", "startOffset" : 114, "endOffset" : 136 }, { "referenceID" : 55, "context" : "We test standard distributional models: Common-Crawl GloVe (Pennington et al., 2014), SGNS vectors (Mikolov et al.", "startOffset" : 59, "endOffset" : 84 }, { "referenceID" : 45, "context" : ", 2014), SGNS vectors (Mikolov et al., 2013) with various contexts (BOW = bag-ofwords; DEPS = dependency contexts), and training data (PW = Polyglot Wikipedia from Al-Rfou", "startOffset" : 22, "endOffset" : 44 }, { "referenceID" : 4, "context" : "Other SGNS parameters were set to standard values (Baroni et al., 2014; Vulić and Korhonen, 2016b): 15 epochs, 15 negative samples, global learning rate: .", "startOffset" : 50, "endOffset" : 98 }, { "referenceID" : 67, "context" : "Other SGNS parameters were set to standard values (Baroni et al., 2014; Vulić and Korhonen, 2016b): 15 epochs, 15 negative samples, global learning rate: .", "startOffset" : 50, "endOffset" : 98 }, { "referenceID" : 38, "context" : "(2013); 8B = 8 billion token word2vec corpus), following (Levy and Goldberg, 2014) and (Schwartz et al.", "startOffset" : 57, "endOffset" : 82 }, { "referenceID" : 58, "context" : "(2013); 8B = 8 billion token word2vec corpus), following (Levy and Goldberg, 2014) and (Schwartz et al., 2015).", "startOffset" : 87, "endOffset" : 110 }, { "referenceID" : 4, "context" : "(2016) (SymPat-Emb), count-based PMI-weighted vectors reduced by SVD (Baroni et al., 2014) (Count-SVD), a model which replaces the context modelling function from CBOW with bidirectional LSTMs (Melamud et 
al.", "startOffset" : 69, "endOffset" : 90 }, { "referenceID" : 44, "context" : ", 2014) (Count-SVD), a model which replaces the context modelling function from CBOW with bidirectional LSTMs (Melamud et al., 2016) (Context2Vec), and two sets of EN vectors trained by injecting multilingual information: BiSkip (Luong et al.", "startOffset" : 110, "endOffset" : 132 }, { "referenceID" : 42, "context" : ", 2016) (Context2Vec), and two sets of EN vectors trained by injecting multilingual information: BiSkip (Luong et al., 2015) and MultiCCA (Faruqui and Dyer, 2014).", "startOffset" : 104, "endOffset" : 124 }, { "referenceID" : 22, "context" : ", 2015) and MultiCCA (Faruqui and Dyer, 2014).", "startOffset" : 21, "endOffset" : 45 }, { "referenceID" : 17, "context" : "We also experiment with a selection of standard distributional spaces in other languages from prior work (Dinu et al., 2015; Luong et al., 2015; Vulić and Korhonen, 2016a).", "startOffset" : 105, "endOffset" : 171 }, { "referenceID" : 42, "context" : "We also experiment with a selection of standard distributional spaces in other languages from prior work (Dinu et al., 2015; Luong et al., 2015; Vulić and Korhonen, 2016a).", "startOffset" : 105, "endOffset" : 171 }, { "referenceID" : 66, "context" : "We also experiment with a selection of standard distributional spaces in other languages from prior work (Dinu et al., 2015; Luong et al., 2015; Vulić and Korhonen, 2016a).", "startOffset" : 105, "endOffset" : 171 }, { "referenceID" : 35, "context" : "(2013); 8B = 8 billion token word2vec corpus), following (Levy and Goldberg, 2014) and (Schwartz et al., 2015). We also test the symmetricpattern based vectors of Schwartz et al. 
(2016) (SymPat-Emb), count-based PMI-weighted vectors reduced by SVD (Baroni et al.", "startOffset" : 58, "endOffset" : 186 }, { "referenceID" : 48, "context" : "We also tried using another post-processing model (Mrkšić et al., 2016) in lieu of ATTRACT-REPEL.", "startOffset" : 50, "endOffset" : 71 }, { "referenceID" : 37, "context" : "8 SimLex-999 was translated to DE, IT, and RU by Leviant and Reichart (2015), and they crowdsourced similarity scores from native speakers.", "startOffset" : 49, "endOffset" : 77 }, { "referenceID" : 23, "context" : "Unlike other gold standard resources such as WordSim353 (Finkelstein et al., 2002) or MEN (Bruni et al.", "startOffset" : 56, "endOffset" : 82 }, { "referenceID" : 8, "context" : ", 2002) or MEN (Bruni et al., 2014), SimLex and SimVerb provided explicit guidelines to discern between semantic similarity and association, so that related but non-similar words (e.", "startOffset" : 15, "endOffset" : 35 }, { "referenceID" : 8, "context" : ", 2002) or MEN (Bruni et al., 2014), SimLex and SimVerb provided explicit guidelines to discern between semantic similarity and association, so that related but non-similar words (e.g. cup and coffee) have a low rating. 
Since Leviant and Reichart (2015) re-scored the original EN SimLex, we use their EN SimLex version for consistency.", "startOffset" : 16, "endOffset" : 254 }, { "referenceID" : 45, "context" : "SG-BOW2-PW (300) (Mikolov et al., 2013) .", "startOffset" : 17, "endOffset" : 39 }, { "referenceID" : 55, "context" : "GloVe-6B (300) (Pennington et al., 2014) .", "startOffset" : 15, "endOffset" : 40 }, { "referenceID" : 4, "context" : "Count-SVD (500) (Baroni et al., 2014) .", "startOffset" : 16, "endOffset" : 37 }, { "referenceID" : 38, "context" : "SG-DEPS-PW (300) (Levy and Goldberg, 2014) .", "startOffset" : 17, "endOffset" : 42 }, { "referenceID" : 3, "context" : "SG-DEPS-8B (500) (Bansal et al., 2014) .", "startOffset" : 17, "endOffset" : 38 }, { "referenceID" : 22, "context" : "MultiCCA-EN (512) (Faruqui and Dyer, 2014) .", "startOffset" : 18, "endOffset" : 42 }, { "referenceID" : 42, "context" : "BiSkip-EN (256) (Luong et al., 2015) .", "startOffset" : 16, "endOffset" : 36 }, { "referenceID" : 58, "context" : "SG-BOW2-8B (500) (Schwartz et al., 2015) .", "startOffset" : 17, "endOffset" : 40 }, { "referenceID" : 59, "context" : "SymPat-Emb (500) (Schwartz et al., 2016) .", "startOffset" : 17, "endOffset" : 40 }, { "referenceID" : 44, "context" : "Context2Vec (600) (Melamud et al., 2016) .", "startOffset" : 18, "endOffset" : 40 }, { "referenceID" : 66, "context" : "439 DE: SG-DEPS-PW (300) (Vulić and Korhonen, 2016a) .", "startOffset" : 25, "endOffset" : 52 }, { "referenceID" : 42, "context" : "325 DE: BiSkip-DE (256) (Luong et al., 2015) .", "startOffset" : 24, "endOffset" : 44 }, { "referenceID" : 66, "context" : "421 IT: SG-DEPS-PW (300) (Vulić and Korhonen, 2016a) .", "startOffset" : 25, "endOffset" : 52 }, { "referenceID" : 17, "context" : "391 IT: CBOW5-Wacky (300) (Dinu et al., 2015) .", "startOffset" : 26, "endOffset" : 45 }, { "referenceID" : 74, "context" : "The DST model is the first component of modern dialogue pipelines (Young, 2010).", "startOffset" : 
66, "endOffset" : 79 }, { "referenceID" : 72, "context" : "The Dialogue State Tracking Challenge (DSTC) shared task series formalised the evaluation and provided labelled DST datasets (Henderson et al., 2014a,b; Williams et al., 2016).", "startOffset" : 125, "endOffset" : 175 }, { "referenceID" : 68, "context" : ", handcrafted rules (Wang et al., 2014) or conditional random fields (Lee and Eskenazi, 2013), the recent DST methodology has seen a shift towards neuralnetwork architectures (Henderson et al.", "startOffset" : 20, "endOffset" : 39 }, { "referenceID" : 36, "context" : ", 2014) or conditional random fields (Lee and Eskenazi, 2013), the recent DST methodology has seen a shift towards neuralnetwork architectures (Henderson et al.", "startOffset" : 37, "endOffset" : 61 }, { "referenceID" : 48, "context" : "by reasoning purely over pre-trained word vectors (Mrkšić et al., 2016).", "startOffset" : 50, "endOffset" : 71 }, { "referenceID" : 48, "context" : "0 release expanded the dataset to 1,200 dialogues (Mrkšić et al., 2016).", "startOffset" : 50, "endOffset" : 71 }, { "referenceID" : 66, "context" : "0 Dataset Our DST evaluation is based on the WOZ dataset, released by Wen et al. (2017). 
In this Wizard-of-Oz setup, two Amazon Mechanical Turk workers assumed the role of the user and the system asking/providing restaurant information.", "startOffset" : 70, "endOffset" : 88 }, { "referenceID" : 28, "context" : "Evaluation Setup The principal metric we use to measure DST performance is joint goal accuracy, which represents the proportion of test set dialogue turns where all user goals expressed up to that point of the dialogue were decoded correctly (Henderson et al., 2014a).", "startOffset" : 242, "endOffset" : 267 }, { "referenceID" : 75, "context" : "Some models integrate such constraints into the training procedure, modifying the prior or the regularisation (Yu and Dredze, 2014; Xu et al., 2014; Bian et al., 2014; Kiela et al., 2015), or using a variant of the SGNS-style objective (Liu et al.", "startOffset" : 110, "endOffset" : 187 }, { "referenceID" : 73, "context" : "Some models integrate such constraints into the training procedure, modifying the prior or the regularisation (Yu and Dredze, 2014; Xu et al., 2014; Bian et al., 2014; Kiela et al., 2015), or using a variant of the SGNS-style objective (Liu et al.", "startOffset" : 110, "endOffset" : 187 }, { "referenceID" : 6, "context" : "Some models integrate such constraints into the training procedure, modifying the prior or the regularisation (Yu and Dredze, 2014; Xu et al., 2014; Bian et al., 2014; Kiela et al., 2015), or using a variant of the SGNS-style objective (Liu et al.", "startOffset" : 110, "endOffset" : 187 }, { "referenceID" : 33, "context" : "Some models integrate such constraints into the training procedure, modifying the prior or the regularisation (Yu and Dredze, 2014; Xu et al., 2014; Bian et al., 2014; Kiela et al., 2015), or using a variant of the SGNS-style objective (Liu et al.", "startOffset" : 110, "endOffset" : 187 }, { "referenceID" : 41, "context" : ", 2015), or using a variant of the SGNS-style objective (Liu et al., 2015; Osborne et al., 2016).", "startOffset" : 56, 
"endOffset" : 96 }, { "referenceID" : 53, "context" : ", 2015), or using a variant of the SGNS-style objective (Liu et al., 2015; Osborne et al., 2016).", "startOffset" : 56, "endOffset" : 96 }, { "referenceID" : 21, "context" : ", WordNet, PPDB) into pre-trained word vectors (Faruqui et al., 2015; Jauhar et al., 2015; Wieting et al., 2015; Nguyen et al., 2016; Mrkšić et al., 2016).", "startOffset" : 47, "endOffset" : 154 }, { "referenceID" : 32, "context" : ", WordNet, PPDB) into pre-trained word vectors (Faruqui et al., 2015; Jauhar et al., 2015; Wieting et al., 2015; Nguyen et al., 2016; Mrkšić et al., 2016).", "startOffset" : 47, "endOffset" : 154 }, { "referenceID" : 70, "context" : ", WordNet, PPDB) into pre-trained word vectors (Faruqui et al., 2015; Jauhar et al., 2015; Wieting et al., 2015; Nguyen et al., 2016; Mrkšić et al., 2016).", "startOffset" : 47, "endOffset" : 154 }, { "referenceID" : 52, "context" : ", WordNet, PPDB) into pre-trained word vectors (Faruqui et al., 2015; Jauhar et al., 2015; Wieting et al., 2015; Nguyen et al., 2016; Mrkšić et al., 2016).", "startOffset" : 47, "endOffset" : 154 }, { "referenceID" : 48, "context" : ", WordNet, PPDB) into pre-trained word vectors (Faruqui et al., 2015; Jauhar et al., 2015; Wieting et al., 2015; Nguyen et al., 2016; Mrkšić et al., 2016).", "startOffset" : 47, "endOffset" : 154 }, { "referenceID" : 2, "context" : ", 2015) such as CELEX (Baayen et al., 1995) or as an external analyser such as Morfessor (Creutz and Lagus, 2007), along with distributional information jointly at training time in the language modelling (LM) objective (Luong et al.", "startOffset" : 22, "endOffset" : 43 }, { "referenceID" : 15, "context" : ", 1995) or as an external analyser such as Morfessor (Creutz and Lagus, 2007), along with distributional information jointly at training time in the language modelling (LM) objective (Luong et al.", "startOffset" : 53, "endOffset" : 77 }, { "referenceID" : 35, "context" : "The key idea is 
to learn a morphological composition function (Lazaridou et al., 2013; Cotterell and Schütze, 2017) which synthesises the representation of a word given the representations of its constituent morphemes.", "startOffset" : 62, "endOffset" : 115 }, { "referenceID" : 13, "context" : "The key idea is to learn a morphological composition function (Lazaridou et al., 2013; Cotterell and Schütze, 2017) which synthesises the representation of a word given the representations of its constituent morphemes.", "startOffset" : 62, "endOffset" : 115 }, { "referenceID" : 2, "context" : ", 2015) such as CELEX (Baayen et al., 1995) or as an external analyser such as Morfessor (Creutz and Lagus, 2007), along with distributional information jointly at training time in the language modelling (LM) objective (Luong et al., 2013; Botha and Blunsom, 2014; Qiu et al., 2014; Cotterell and Schütze, 2015; Bhatia et al., 2016, i.a.). The key idea is to learn a morphological composition function (Lazaridou et al., 2013; Cotterell and Schütze, 2017) which synthesises the representation of a word given the representations of its constituent morphemes. Contrary to our work, these models typically coalesce all lexical relations. Another class of models, operating at the character level, shares a similar methodology: such models compose token-level representations from subcomponent embeddings (subwords, morphemes, or characters) (dos Santos and Zadrozny, 2014; Ling et al., 2015; Cao and Rei, 2016; Kim et al., 2016; Wieting et al., 2016; Verwimp et al., 2017, i.a.). In contrast to prior work, our model decouples the use of morphological information, now provided in the form of inflectional and derivational rules transformed into linguistic constraints, from the actual training. This pipelined approach results in a simpler, more portable model. In spirit, our work is similar to Cotterell et al. 
(2016), who formulate the idea of post-training specialisation in a generative Bayesian framework.", "startOffset" : 23, "endOffset" : 1319 }, { "referenceID" : 60, "context" : "Future work will focus on other potential sources of morphological knowledge (Soricut and Och, 2015), porting the framework to other morphologically rich languages and downstream tasks, and on further refinements of the post-processing algorithm and constraints selection.", "startOffset" : 77, "endOffset" : 100 } ], "year" : 2017, "abstractText" : "Morphologically rich languages accentuate two properties of distributional vector space models: 1) the difficulty of inducing accurate representations for lowfrequency word forms; and 2) insensitivity to distinct lexical relations that have similar distributional signatures. These effects are detrimental for language understanding systems, which may infer that inexpensive is a rephrasing for expensive or may not associate acquire with acquires. In this work, we propose a novel morph-fitting procedure which moves past the use of curated semantic lexicons for improving distributional vector spaces. Instead, our method injects morphological constraints generated using simple language-specific rules, pulling inflectional forms of the same word close together and pushing derivational antonyms far apart. In intrinsic evaluation over four languages, we show that our approach: 1) improves low-frequency word estimates; and 2) boosts the semantic quality of the entire word vector collection. Finally, we show that morph-fitted vectors yield large gains in the downstream task of dialogue state tracking, highlighting the importance of morphology for tackling long-tail phenomena in language understanding tasks.", "creator" : "LaTeX with hyperref package" } }
{ "pile_set_name": "Github" }
/* Copyright The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by set-gen. DO NOT EDIT. package sets import ( "reflect" "sort" ) // sets.Byte is a set of bytes, implemented via map[byte]struct{} for minimal memory consumption. type Byte map[byte]Empty // NewByte creates a Byte from a list of values. func NewByte(items ...byte) Byte { ss := Byte{} ss.Insert(items...) return ss } // ByteKeySet creates a Byte from a keys of a map[byte](? extends interface{}). // If the value passed in is not actually a map, this will panic. func ByteKeySet(theMap interface{}) Byte { v := reflect.ValueOf(theMap) ret := Byte{} for _, keyValue := range v.MapKeys() { ret.Insert(keyValue.Interface().(byte)) } return ret } // Insert adds items to the set. func (s Byte) Insert(items ...byte) Byte { for _, item := range items { s[item] = Empty{} } return s } // Delete removes all items from the set. func (s Byte) Delete(items ...byte) Byte { for _, item := range items { delete(s, item) } return s } // Has returns true if and only if item is contained in the set. func (s Byte) Has(item byte) bool { _, contained := s[item] return contained } // HasAll returns true if and only if all items are contained in the set. func (s Byte) HasAll(items ...byte) bool { for _, item := range items { if !s.Has(item) { return false } } return true } // HasAny returns true if any items are contained in the set. 
func (s Byte) HasAny(items ...byte) bool { for _, item := range items { if s.Has(item) { return true } } return false } // Difference returns a set of objects that are not in s2 // For example: // s1 = {a1, a2, a3} // s2 = {a1, a2, a4, a5} // s1.Difference(s2) = {a3} // s2.Difference(s1) = {a4, a5} func (s Byte) Difference(s2 Byte) Byte { result := NewByte() for key := range s { if !s2.Has(key) { result.Insert(key) } } return result } // Union returns a new set which includes items in either s1 or s2. // For example: // s1 = {a1, a2} // s2 = {a3, a4} // s1.Union(s2) = {a1, a2, a3, a4} // s2.Union(s1) = {a1, a2, a3, a4} func (s1 Byte) Union(s2 Byte) Byte { result := NewByte() for key := range s1 { result.Insert(key) } for key := range s2 { result.Insert(key) } return result } // Intersection returns a new set which includes the item in BOTH s1 and s2 // For example: // s1 = {a1, a2} // s2 = {a2, a3} // s1.Intersection(s2) = {a2} func (s1 Byte) Intersection(s2 Byte) Byte { var walk, other Byte result := NewByte() if s1.Len() < s2.Len() { walk = s1 other = s2 } else { walk = s2 other = s1 } for key := range walk { if other.Has(key) { result.Insert(key) } } return result } // IsSuperset returns true if and only if s1 is a superset of s2. func (s1 Byte) IsSuperset(s2 Byte) bool { for item := range s2 { if !s1.Has(item) { return false } } return true } // Equal returns true if and only if s1 is equal (as a set) to s2. // Two sets are equal if their membership is identical. // (In practice, this means same elements, order doesn't matter) func (s1 Byte) Equal(s2 Byte) bool { return len(s1) == len(s2) && s1.IsSuperset(s2) } type sortableSliceOfByte []byte func (s sortableSliceOfByte) Len() int { return len(s) } func (s sortableSliceOfByte) Less(i, j int) bool { return lessByte(s[i], s[j]) } func (s sortableSliceOfByte) Swap(i, j int) { s[i], s[j] = s[j], s[i] } // List returns the contents as a sorted byte slice. 
func (s Byte) List() []byte { res := make(sortableSliceOfByte, 0, len(s)) for key := range s { res = append(res, key) } sort.Sort(res) return []byte(res) } // UnsortedList returns the slice with contents in random order. func (s Byte) UnsortedList() []byte { res := make([]byte, 0, len(s)) for key := range s { res = append(res, key) } return res } // Returns a single element from the set. func (s Byte) PopAny() (byte, bool) { for key := range s { s.Delete(key) return key, true } var zeroValue byte return zeroValue, false } // Len returns the size of the set. func (s Byte) Len() int { return len(s) } func lessByte(lhs, rhs byte) bool { return lhs < rhs }
{ "pile_set_name": "Github" }
<?php namespace Laravel\Cashier\Coupon; use Illuminate\Database\Eloquent\Builder; use Illuminate\Database\Eloquent\Model; use Laravel\Cashier\Coupon\Contracts\AcceptsCoupons; use Laravel\Cashier\Coupon\Contracts\CouponRepository; use Laravel\Cashier\Order\OrderItemCollection; /** * @method static create(array $array) * @method static whereModel($modelType, $modelId) * @property mixed id * @property string model_type * @property mixed model_id * @property string name * @property int times_left */ class RedeemedCoupon extends Model { /** * The attributes that aren't mass assignable. * * @var array */ protected $guarded = []; /** * @param \Laravel\Cashier\Coupon\Coupon $coupon * @param \Laravel\Cashier\Coupon\Contracts\AcceptsCoupons $model * @return \Illuminate\Database\Eloquent\Model|\Laravel\Cashier\Coupon\RedeemedCoupon */ public static function record(Coupon $coupon, AcceptsCoupons $model) { return $model->redeemedCoupons()->create([ 'name' => $coupon->name(), 'times_left' => $coupon->times(), 'owner_type' => $model->ownerType(), 'owner_id' => $model->ownerId(), ]); } /** * Retrieve the underlying Coupon object. * * @return Coupon */ public function coupon() { /** @var CouponRepository $repository */ $repository = app()->make(CouponRepository::class); return $repository->findOrFail($this->name); } /** * @return \Laravel\Cashier\Coupon\Contracts\CouponHandler */ public function handler() { return $this->coupon()->handler(); } /** * Get the model relation the coupon was redeemed for. 
* * @return \Illuminate\Database\Eloquent\Relations\MorphTo */ public function model() { return $this->morphTo(); } /** * @param \Laravel\Cashier\Order\OrderItemCollection $items * @return \Laravel\Cashier\Order\OrderItemCollection */ public function applyTo(OrderItemCollection $items) { return $this->coupon()->applyTo($this, $items); } /** * @return $this */ public function markApplied() { $this->decrement('times_left'); return $this; } /** * @return $this */ public function markRollback() { $this->increment('times_left'); return $this; } /** * Scope a query to only include coupons which are being processed * * @param \Illuminate\Database\Eloquent\Builder $query * @return \Illuminate\Database\Eloquent\Builder */ public function scopeActive(Builder $query) { return $query->where('times_left', '>', 0); } /** * @param \Illuminate\Database\Eloquent\Builder $query * @param string $modelType * @param $modelId * @return mixed */ public function scopeWhereModel(Builder $query, string $modelType, $modelId) { return $query->whereModelType($modelType)->whereModelId($modelId); } /** * @return bool */ public function alreadyApplied() { return $this->times_left < 1; } /** * Create a new Eloquent Collection instance. * * @param array $models * @return \Illuminate\Database\Eloquent\Collection */ public function newCollection(array $models = []) { return new RedeemedCouponCollection($models); } /** * Revoke the redeemed coupon. It will no longer be applied. * * @return self */ public function revoke() { return tap($this, function () { $this->times_left = 0; $this->save(); }); } /** * Check whether the RedeemedCoupon applies to the next Order. * * @return bool */ public function isActive() { return $this->times_left > 0; } }
{ "pile_set_name": "Github" }
/** * @license * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 */ CLASS({ package: 'foam.apps.builder', name: 'TrackLaunchCloseTrait', requires: [ ], imports: [ 'onWindowClosed', 'performance', ], exports: [ ], properties: [ { type: 'Int', name: 'launchTime', lazyFactory: function() { return this.getCurrentTime(); }, }, ], methods: [ function init() { this.SUPER(); this.metricsDAO && this.metricsDAO.put(this.Metric.create({ name: 'launchApp', value: this.launchTime, }, this.Y)); this.onWindowClosed && this.onWindowClosed(this.onAppWindowClosed); }, function getCurrentTime() { return (this.performance && this.performance.now) ? Math.round(this.performance.now() / 1000) : 0; }, ], listeners: [ { name: 'onAppWindowClosed', code: function() { var closeTime = this.getCurrentTime(); this.metricsDAO.put(this.Metric.create({ name: 'openTime', value: closeTime - this.launchTime, }, this.Y)); this.metricsDAO.put(this.Metric.create({ name: 'closeApp', value: closeTime, }, this.Y)); } }, ], });
{ "pile_set_name": "Github" }
<StackLayout sdkExampleTitle sdkToggleNavButton> <!-- >> create-sample-ui-html --> <Label text="Tap the button" class="title" id="labelStyle"></Label> <Button text="TAP" (tap)="onTap()"></Button> <Label [text]="message" class="message" textWrap="true"></Label> <!-- << create-sample-ui-html --> </StackLayout>
{ "pile_set_name": "Github" }
# node.js 第四次测验题和答案 ## 一、事件:玩转 EventEmitter 1. node.js 提供的四种异步编程的机制分别是什么? [相关链接](http://www.ruanyifeng.com/blog/2012/12/asynchronous%EF%BC%BFjavascript.html) ``` 回调函数 事件 订阅者模式 promises 对象 ``` 2. 用回调函数实现异步操作的两种状态是什么? ``` 开始和结束 ``` 3. 事件的三要素是什么? ``` 事件名称 on 绑定 事件处理函数 ``` 4. EventEmitter 引自哪个模块? ``` events ``` 5. 如何引用某模块的某个接口(可用 EventEmitter 举例)? ``` var EventEmitter = require('events').EventEmitter; ``` 6. EventEmitter 对象用来触发事件的方法是什么? ``` emit(eventName[,arguments]) ``` 7. 使自己的类有事件机制的两种方法分别是什么? ``` 1. 让自己的类继承 EventEmitter 类 2. 把 EventEmitter 类中的方法复制到自己的类中(又叫混合方式) ``` 8. 类的继承有哪两种方法? ``` JavaScript 原型继承方式 使用 node.js 的 util 模块中的 inherits 方法 ``` 9. util 的 inherits 方法的使用格式是怎样的? ``` util.inherits(继承者,被继承者); ``` 10. 查看某个类的所有事件的方法是什么? ``` className.eventName(); ``` 11. 查看某个类的某个事件的所有监听个数的方法是什么? ``` className.listenerCount(eventName); ``` 12. 查看某个类的某个事件的所有监听的方法是什么? ``` className.listeners(eventName); ``` 13. 复制 EventEmitter 中的方法,如何实现? ``` 用 for...in 循环 ``` 14. JavaScript 提供什么方式来进行错误处理? ``` try catch ``` 15. 在 node.js 中如何进行错误处理? ``` 发生错误时,node.js 会产生 error 对象,在回调函数中对 error 进行捕获和处理即可 ``` 16. 我们如何获得 error 对象? ``` 在事件处理函数中传入一个形参获取 error 对象 ``` 17. 在代码中,如何对事件的名称进行有效的管理,避免发生名称引用错误? ``` 给自定义的类增加一个 events 属性,events 是一个键值对,引用事件名称时其实是引用一个属性,这样在引用属性时就可以自动补全了 ``` 18. 复制 EventEmitter 中的方法的语句是什么? ``` this[methodName] = EventEmitter.prototype[methodName]; ```
{ "pile_set_name": "Github" }
/* * Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tencentcloudapi.cdn.v20180606.models; import com.tencentcloudapi.common.AbstractModel; import com.google.gson.annotations.SerializedName; import com.google.gson.annotations.Expose; import java.util.HashMap; public class BandwidthAlert extends AbstractModel{ /** * 带宽封顶配置开关 on:开启 off:关闭 */ @SerializedName("Switch") @Expose private String Switch; /** * 带宽封顶阈值,单位为bps 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("BpsThreshold") @Expose private Long BpsThreshold; /** * 达到阈值后的操作 RESOLVE_DNS_TO_ORIGIN:直接回源,仅自有源站域名支持 RETURN_404:全部请求返回 404 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("CounterMeasure") @Expose private String CounterMeasure; /** * 上次触发带宽封顶阈值的时间 注意:此字段可能返回 null,表示取不到有效值。 */ @SerializedName("LastTriggerTime") @Expose private String LastTriggerTime; /** * Get 带宽封顶配置开关 on:开启 off:关闭 * @return Switch 带宽封顶配置开关 on:开启 off:关闭 */ public String getSwitch() { return this.Switch; } /** * Set 带宽封顶配置开关 on:开启 off:关闭 * @param Switch 带宽封顶配置开关 on:开启 off:关闭 */ public void setSwitch(String Switch) { this.Switch = Switch; } /** * Get 带宽封顶阈值,单位为bps 注意:此字段可能返回 null,表示取不到有效值。 * @return BpsThreshold 带宽封顶阈值,单位为bps 注意:此字段可能返回 null,表示取不到有效值。 */ public Long getBpsThreshold() { return this.BpsThreshold; } /** * Set 带宽封顶阈值,单位为bps 注意:此字段可能返回 null,表示取不到有效值。 * @param BpsThreshold 带宽封顶阈值,单位为bps 注意:此字段可能返回 null,表示取不到有效值。 */ public void setBpsThreshold(Long 
BpsThreshold) { this.BpsThreshold = BpsThreshold; } /** * Get 达到阈值后的操作 RESOLVE_DNS_TO_ORIGIN:直接回源,仅自有源站域名支持 RETURN_404:全部请求返回 404 注意:此字段可能返回 null,表示取不到有效值。 * @return CounterMeasure 达到阈值后的操作 RESOLVE_DNS_TO_ORIGIN:直接回源,仅自有源站域名支持 RETURN_404:全部请求返回 404 注意:此字段可能返回 null,表示取不到有效值。 */ public String getCounterMeasure() { return this.CounterMeasure; } /** * Set 达到阈值后的操作 RESOLVE_DNS_TO_ORIGIN:直接回源,仅自有源站域名支持 RETURN_404:全部请求返回 404 注意:此字段可能返回 null,表示取不到有效值。 * @param CounterMeasure 达到阈值后的操作 RESOLVE_DNS_TO_ORIGIN:直接回源,仅自有源站域名支持 RETURN_404:全部请求返回 404 注意:此字段可能返回 null,表示取不到有效值。 */ public void setCounterMeasure(String CounterMeasure) { this.CounterMeasure = CounterMeasure; } /** * Get 上次触发带宽封顶阈值的时间 注意:此字段可能返回 null,表示取不到有效值。 * @return LastTriggerTime 上次触发带宽封顶阈值的时间 注意:此字段可能返回 null,表示取不到有效值。 */ public String getLastTriggerTime() { return this.LastTriggerTime; } /** * Set 上次触发带宽封顶阈值的时间 注意:此字段可能返回 null,表示取不到有效值。 * @param LastTriggerTime 上次触发带宽封顶阈值的时间 注意:此字段可能返回 null,表示取不到有效值。 */ public void setLastTriggerTime(String LastTriggerTime) { this.LastTriggerTime = LastTriggerTime; } /** * Internal implementation, normal users should not use it. */ public void toMap(HashMap<String, String> map, String prefix) { this.setParamSimple(map, prefix + "Switch", this.Switch); this.setParamSimple(map, prefix + "BpsThreshold", this.BpsThreshold); this.setParamSimple(map, prefix + "CounterMeasure", this.CounterMeasure); this.setParamSimple(map, prefix + "LastTriggerTime", this.LastTriggerTime); } }
{ "pile_set_name": "Github" }
using System; using System.Threading.Tasks; namespace Dopamine.Services.Cache { public interface ICacheService { string CoverArtCacheFolderPath { get; } string TemporaryCacheFolderPath { get; } Task<string> CacheArtworkAsync(byte[] artwork); Task<string> CacheArtworkAsync(string uriString); string GetCachedArtworkPath(string artworkID); Task<string> DownloadFileToTemporaryCacheAsync(string uriString); } }
{ "pile_set_name": "Github" }
<?php $this->extend('_templates/default-nav-table'); ?> <div class="tbl-ctrls"> <?=form_open($form_url)?> <fieldset class="tbl-search right"> <a class="btn tn action" href="<?=$new?>"><?= lang('create_new') ?></a> </fieldset> <h1> <?php echo isset($cp_heading) ? $cp_heading : $cp_page_title?></br> <i><?=lang('bookmarklet_instructions')?></i> </h1> <?php if (isset($filters)) echo $filters; ?> <?php $this->embed('_shared/table', $table); ?> <?php if ( ! empty($pagination)) $this->embed('_shared/pagination', $pagination); ?> <?php if ( ! empty($table['data'])): ?> <fieldset class="tbl-bulk-act hidden"> <select name="bulk_action"> <option value="">-- <?=lang('with_selected')?> --</option> <option value="remove" data-confirm-trigger="selected" rel="modal-confirm-remove"><?=lang('remove')?></option> </select> <button class="btn submit" data-conditional-modal="confirm-trigger"><?=lang('submit')?></button> </fieldset> <?php endif; ?> <?=form_close()?> </div> <?php $modal_vars = array( 'name' => 'modal-confirm-remove', 'form_url' => $form_url, 'hidden' => array( 'bulk_action' => 'remove' ) ); $modal = $this->make('ee:_shared/modal_confirm_remove')->render($modal_vars); ee('CP/Modal')->addModal('remove', $modal); ?>
{ "pile_set_name": "Github" }
/* neo theme for codemirror */ /* Color scheme */ .cm-s-neo.CodeMirror { background-color:#ffffff; color:#2e383c; line-height:1.4375; } .cm-s-neo .cm-comment {color:#75787b} .cm-s-neo .cm-keyword, .cm-s-neo .cm-property {color:#1d75b3} .cm-s-neo .cm-atom,.cm-s-neo .cm-number {color:#75438a} .cm-s-neo .cm-node,.cm-s-neo .cm-tag {color:#9c3328} .cm-s-neo .cm-string {color:#b35e14} .cm-s-neo .cm-variable,.cm-s-neo .cm-qualifier {color:#047d65} /* Editor styling */ .cm-s-neo pre { padding:0; } .cm-s-neo .CodeMirror-gutters { border:none; border-right:10px solid transparent; background-color:transparent; } .cm-s-neo .CodeMirror-linenumber { padding:0; color:#e0e2e5; } .cm-s-neo .CodeMirror-guttermarker { color: #1d75b3; } .cm-s-neo .CodeMirror-guttermarker-subtle { color: #e0e2e5; }
{ "pile_set_name": "Github" }
// Suggested tribes list .signup-tribe-suggestions { h4 { margin-top: 50px; margin-bottom: 10px; padding: 0; text-align: center; } .tribe-content { display: flex; justify-content: space-between; align-items: center; flex-direction: row; color: #fff; padding: 0 20px; min-height: 120px; margin: 0; background: linear-gradient( to right, rgba(0, 0, 0, 0.65) 0%, rgba(0, 0, 0, 0) 100% ); } .tribe-label { margin: 0; padding: 0; font-size: 30px; line-height: 1; } }
{ "pile_set_name": "Github" }
9
{ "pile_set_name": "Github" }
/* * Copyright (C) 2008 Apple Inc. All Rights Reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ WebInspector.DataGrid = function(columns, editCallback, deleteCallback) { this.element = document.createElement("div"); this.element.className = "data-grid"; this.element.tabIndex = 0; this.element.addEventListener("keydown", this._keyDown.bind(this), false); this._headerTable = document.createElement("table"); this._headerTable.className = "header"; this._headerTableHeaders = {}; this._dataTable = document.createElement("table"); this._dataTable.className = "data"; this._dataTable.addEventListener("mousedown", this._mouseDownInDataTable.bind(this), true); this._dataTable.addEventListener("click", this._clickInDataTable.bind(this), true); this._dataTable.addEventListener("contextmenu", this._contextMenuInDataTable.bind(this), true); // FIXME: Add a createCallback which is different from editCallback and has different // behavior when creating a new node. if (editCallback) { this._dataTable.addEventListener("dblclick", this._ondblclick.bind(this), false); this._editCallback = editCallback; } if (deleteCallback) this._deleteCallback = deleteCallback; this.aligned = {}; this._scrollContainer = document.createElement("div"); this._scrollContainer.className = "data-container"; this._scrollContainer.appendChild(this._dataTable); this.element.appendChild(this._headerTable); this.element.appendChild(this._scrollContainer); var headerRow = document.createElement("tr"); var columnGroup = document.createElement("colgroup"); this._columnCount = 0; for (var columnIdentifier in columns) { var column = columns[columnIdentifier]; if (column.disclosure) this.disclosureColumnIdentifier = columnIdentifier; var col = document.createElement("col"); if (column.width) col.style.width = column.width; column.element = col; columnGroup.appendChild(col); var cell = document.createElement("th"); cell.className = columnIdentifier + "-column"; cell.columnIdentifier = columnIdentifier; this._headerTableHeaders[columnIdentifier] = cell; var div = document.createElement("div"); if 
(column.titleDOMFragment) div.appendChild(column.titleDOMFragment); else div.textContent = column.title; cell.appendChild(div); if (column.sort) { cell.addStyleClass("sort-" + column.sort); this._sortColumnCell = cell; } if (column.sortable) { cell.addEventListener("click", this._clickInHeaderCell.bind(this), false); cell.addStyleClass("sortable"); } if (column.aligned) this.aligned[columnIdentifier] = column.aligned; headerRow.appendChild(cell); ++this._columnCount; } columnGroup.span = this._columnCount; var cell = document.createElement("th"); cell.className = "corner"; headerRow.appendChild(cell); this._headerTableColumnGroup = columnGroup; this._headerTable.appendChild(this._headerTableColumnGroup); this.headerTableBody.appendChild(headerRow); var fillerRow = document.createElement("tr"); fillerRow.className = "filler"; for (var columnIdentifier in columns) { var column = columns[columnIdentifier]; var cell = document.createElement("td"); cell.className = columnIdentifier + "-column"; fillerRow.appendChild(cell); } this._dataTableColumnGroup = columnGroup.cloneNode(true); this._dataTable.appendChild(this._dataTableColumnGroup); this.dataTableBody.appendChild(fillerRow); this.columns = columns || {}; this._columnsArray = []; for (var columnIdentifier in columns) { columns[columnIdentifier].ordinal = this._columnsArray.length; columns[columnIdentifier].identifier = columnIdentifier; this._columnsArray.push(columns[columnIdentifier]); } for (var i = 0; i < this._columnsArray.length; ++i) this._columnsArray[i].bodyElement = this._dataTableColumnGroup.children[i]; this.children = []; this.selectedNode = null; this.expandNodesWhenArrowing = false; this.root = true; this.hasChildren = false; this.expanded = true; this.revealed = true; this.selected = false; this.dataGrid = this; this.indentWidth = 15; this.resizers = []; this._columnWidthsInitialized = false; } WebInspector.DataGrid.prototype = { _ondblclick: function(event) { if (this._editing || this._editingNode) 
return; this._startEditing(event.target); }, _startEditingColumnOfDataGridNode: function(node, column) { this._editing = true; this._editingNode = node; this._editingNode.select(); var element = this._editingNode._element.children[column]; WebInspector.startEditing(element, { context: element.textContent, commitHandler: this._editingCommitted.bind(this), cancelHandler: this._editingCancelled.bind(this) }); window.getSelection().setBaseAndExtent(element, 0, element, 1); }, _startEditing: function(target) { var element = target.enclosingNodeOrSelfWithNodeName("td"); if (!element) return; this._editingNode = this.dataGridNodeFromNode(target); if (!this._editingNode) { if (!this.creationNode) return; this._editingNode = this.creationNode; } // Force editing the 1st column when editing the creation node if (this._editingNode.isCreationNode) return this._startEditingColumnOfDataGridNode(this._editingNode, 0); this._editing = true; WebInspector.startEditing(element, { context: element.textContent, commitHandler: this._editingCommitted.bind(this), cancelHandler: this._editingCancelled.bind(this) }); window.getSelection().setBaseAndExtent(element, 0, element, 1); }, _editingCommitted: function(element, newText, oldText, context, moveDirection) { // FIXME: We need more column identifiers here throughout this function. // Not needed yet since only editable DataGrid is DOM Storage, which is Key - Value. // FIXME: Better way to do this than regular expressions? 
var columnIdentifier = parseInt(element.className.match(/\b(\d+)-column\b/)[1]); var textBeforeEditing = this._editingNode.data[columnIdentifier]; var currentEditingNode = this._editingNode; function moveToNextIfNeeded(wasChange) { if (!moveDirection) return; if (moveDirection === "forward") { if (currentEditingNode.isCreationNode && columnIdentifier === 0 && !wasChange) return; if (columnIdentifier === 0) return this._startEditingColumnOfDataGridNode(currentEditingNode, 1); var nextDataGridNode = currentEditingNode.traverseNextNode(true, null, true); if (nextDataGridNode) return this._startEditingColumnOfDataGridNode(nextDataGridNode, 0); if (currentEditingNode.isCreationNode && wasChange) { addCreationNode(false); return this._startEditingColumnOfDataGridNode(this.creationNode, 0); } return; } if (moveDirection === "backward") { if (columnIdentifier === 1) return this._startEditingColumnOfDataGridNode(currentEditingNode, 0); var nextDataGridNode = currentEditingNode.traversePreviousNode(true, null, true); if (nextDataGridNode) return this._startEditingColumnOfDataGridNode(nextDataGridNode, 1); return; } } if (textBeforeEditing == newText) { this._editingCancelled(element); moveToNextIfNeeded.call(this, false); return; } // Update the text in the datagrid that we typed this._editingNode.data[columnIdentifier] = newText; // Make the callback - expects an editing node (table row), the column number that is being edited, // the text that used to be there, and the new text. 
this._editCallback(this._editingNode, columnIdentifier, textBeforeEditing, newText); if (this._editingNode.isCreationNode) this.addCreationNode(false); this._editingCancelled(element); moveToNextIfNeeded.call(this, true); }, _editingCancelled: function(element, context) { delete this._editing; this._editingNode = null; }, get sortColumnIdentifier() { if (!this._sortColumnCell) return null; return this._sortColumnCell.columnIdentifier; }, get sortOrder() { if (!this._sortColumnCell || this._sortColumnCell.hasStyleClass("sort-ascending")) return "ascending"; if (this._sortColumnCell.hasStyleClass("sort-descending")) return "descending"; return null; }, get headerTableBody() { if ("_headerTableBody" in this) return this._headerTableBody; this._headerTableBody = this._headerTable.getElementsByTagName("tbody")[0]; if (!this._headerTableBody) { this._headerTableBody = this.element.ownerDocument.createElement("tbody"); this._headerTable.insertBefore(this._headerTableBody, this._headerTable.tFoot); } return this._headerTableBody; }, get dataTableBody() { if ("_dataTableBody" in this) return this._dataTableBody; this._dataTableBody = this._dataTable.getElementsByTagName("tbody")[0]; if (!this._dataTableBody) { this._dataTableBody = this.element.ownerDocument.createElement("tbody"); this._dataTable.insertBefore(this._dataTableBody, this._dataTable.tFoot); } return this._dataTableBody; }, autoSizeColumns: function(minPercent, maxPercent, maxDescentLevel) { if (minPercent) minPercent = Math.min(minPercent, Math.floor(100 / this._columnCount)); var widths = {}; var columns = this.columns; for (var columnIdentifier in columns) widths[columnIdentifier] = (columns[columnIdentifier].title || "").length; var children = maxDescentLevel ? 
this._enumerateChildren(this, [], maxDescentLevel + 1) : this.children; for (var i = 0; i < children.length; ++i) { var node = children[i]; for (var columnIdentifier in columns) { var text = node.data[columnIdentifier] || ""; if (text.length > widths[columnIdentifier]) widths[columnIdentifier] = text.length; } } var totalColumnWidths = 0; for (var columnIdentifier in columns) totalColumnWidths += widths[columnIdentifier]; var recoupPercent = 0; for (var columnIdentifier in columns) { var width = Math.round(100 * widths[columnIdentifier] / totalColumnWidths); if (minPercent && width < minPercent) { recoupPercent += (minPercent - width); width = minPercent; } else if (maxPercent && width > maxPercent) { recoupPercent -= (width - maxPercent); width = maxPercent; } widths[columnIdentifier] = width; } while (minPercent && recoupPercent > 0) { for (var columnIdentifier in columns) { if (widths[columnIdentifier] > minPercent) { --widths[columnIdentifier]; --recoupPercent; if (!recoupPercent) break; } } } while (maxPercent && recoupPercent < 0) { for (var columnIdentifier in columns) { if (widths[columnIdentifier] < maxPercent) { ++widths[columnIdentifier]; ++recoupPercent; if (!recoupPercent) break; } } } for (var columnIdentifier in columns) columns[columnIdentifier].element.style.width = widths[columnIdentifier] + "%"; this._columnWidthsInitialized = false; this.updateWidths(); }, _enumerateChildren: function(rootNode, result, maxLevel) { if (!rootNode.root) result.push(rootNode); if (!maxLevel) return; for (var i = 0; i < rootNode.children.length; ++i) this._enumerateChildren(rootNode.children[i], result, maxLevel - 1); return result; }, // Updates the widths of the table, including the positions of the column // resizers. // // IMPORTANT: This function MUST be called once after the element of the // DataGrid is attached to its parent element and every subsequent time the // width of the parent element is changed in order to make it possible to // resize the columns. 
// // If this function is not called after the DataGrid is attached to its // parent element, then the DataGrid's columns will not be resizable. updateWidths: function() { var headerTableColumns = this._headerTableColumnGroup.children; var tableWidth = this._dataTable.offsetWidth; var numColumns = headerTableColumns.length; // Do not attempt to use offsetes if we're not attached to the document tree yet. if (!this._columnWidthsInitialized && this.element.offsetWidth) { // Give all the columns initial widths now so that during a resize, // when the two columns that get resized get a percent value for // their widths, all the other columns already have percent values // for their widths. for (var i = 0; i < numColumns; i++) { var columnWidth = this.headerTableBody.rows[0].cells[i].offsetWidth; var percentWidth = ((columnWidth / tableWidth) * 100) + "%"; this._headerTableColumnGroup.children[i].style.width = percentWidth; this._dataTableColumnGroup.children[i].style.width = percentWidth; } this._columnWidthsInitialized = true; } this._positionResizers(); this.dispatchEventToListeners("width changed"); }, columnWidthsMap: function() { var result = {}; for (var i = 0; i < this._columnsArray.length; ++i) { var width = this._headerTableColumnGroup.children[i].style.width; result[this._columnsArray[i].columnIdentifier] = parseFloat(width); } return result; }, applyColumnWidthsMap: function(columnWidthsMap) { for (var columnIdentifier in this.columns) { var column = this.columns[columnIdentifier]; var width = (columnWidthsMap[columnIdentifier] || 0) + "%"; this._headerTableColumnGroup.children[column.ordinal].style.width = width; this._dataTableColumnGroup.children[column.ordinal].style.width = width; } // Normalize widths delete this._columnWidthsInitialized; this.updateWidths(); }, isColumnVisible: function(columnIdentifier) { var column = this.columns[columnIdentifier]; var columnElement = column.element; return !columnElement.hidden; }, showColumn: 
function(columnIdentifier) { var column = this.columns[columnIdentifier]; var columnElement = column.element; if (!columnElement.hidden) return; columnElement.hidden = false; columnElement.removeStyleClass("hidden"); var columnBodyElement = column.bodyElement; columnBodyElement.hidden = false; columnBodyElement.removeStyleClass("hidden"); }, hideColumn: function(columnIdentifier) { var column = this.columns[columnIdentifier]; var columnElement = column.element; if (columnElement.hidden) return; var oldWidth = parseFloat(columnElement.style.width); columnElement.hidden = true; columnElement.addStyleClass("hidden"); columnElement.style.width = 0; var columnBodyElement = column.bodyElement; columnBodyElement.hidden = true; columnBodyElement.addStyleClass("hidden"); columnBodyElement.style.width = 0; this._columnWidthsInitialized = false; }, get scrollContainer() { return this._scrollContainer; }, isScrolledToLastRow: function() { return this._scrollContainer.isScrolledToBottom(); }, scrollToLastRow: function() { this._scrollContainer.scrollTop = this._scrollContainer.scrollHeight - this._scrollContainer.offsetHeight; }, _positionResizers: function() { var headerTableColumns = this._headerTableColumnGroup.children; var numColumns = headerTableColumns.length; var left = 0; var previousResizer = null; // Make n - 1 resizers for n columns. for (var i = 0; i < numColumns - 1; i++) { var resizer = this.resizers[i]; if (!resizer) { // This is the first call to updateWidth, so the resizers need // to be created. resizer = document.createElement("div"); resizer.addStyleClass("data-grid-resizer"); // This resizer is associated with the column to its right. 
resizer.addEventListener("mousedown", this._startResizerDragging.bind(this), false); this.element.appendChild(resizer); this.resizers[i] = resizer; } // Get the width of the cell in the first (and only) row of the // header table in order to determine the width of the column, since // it is not possible to query a column for its width. left += this.headerTableBody.rows[0].cells[i].offsetWidth; var columnIsVisible = !this._headerTableColumnGroup.children[i].hidden; if (columnIsVisible) { resizer.style.removeProperty("display"); resizer.style.left = left + "px"; resizer.leftNeighboringColumnID = i; if (previousResizer) previousResizer.rightNeighboringColumnID = i; previousResizer = resizer; } else { resizer.style.setProperty("display", "none"); resizer.leftNeighboringColumnID = 0; resizer.rightNeighboringColumnID = 0; } } if (previousResizer) previousResizer.rightNeighboringColumnID = numColumns - 1; }, addCreationNode: function(hasChildren) { if (this.creationNode) this.creationNode.makeNormal(); var emptyData = {}; for (var column in this.columns) emptyData[column] = ''; this.creationNode = new WebInspector.CreationDataGridNode(emptyData, hasChildren); this.appendChild(this.creationNode); }, appendChild: function(child) { this.insertChild(child, this.children.length); }, insertChild: function(child, index) { if (!child) throw("insertChild: Node can't be undefined or null."); if (child.parent === this) throw("insertChild: Node is already a child of this node."); if (child.parent) child.parent.removeChild(child); this.children.splice(index, 0, child); this.hasChildren = true; child.parent = this; child.dataGrid = this.dataGrid; child._recalculateSiblings(index); delete child._depth; delete child._revealed; delete child._attached; child._shouldRefreshChildren = true; var current = child.children[0]; while (current) { current.dataGrid = this.dataGrid; delete current._depth; delete current._revealed; delete current._attached; current._shouldRefreshChildren = true; 
current = current.traverseNextNode(false, child, true); } if (this.expanded) child._attach(); }, removeChild: function(child) { if (!child) throw("removeChild: Node can't be undefined or null."); if (child.parent !== this) throw("removeChild: Node is not a child of this node."); child.deselect(); child._detach(); this.children.remove(child, true); if (child.previousSibling) child.previousSibling.nextSibling = child.nextSibling; if (child.nextSibling) child.nextSibling.previousSibling = child.previousSibling; child.dataGrid = null; child.parent = null; child.nextSibling = null; child.previousSibling = null; if (this.children.length <= 0) this.hasChildren = false; }, removeChildren: function() { for (var i = 0; i < this.children.length; ++i) { var child = this.children[i]; child.deselect(); child._detach(); child.dataGrid = null; child.parent = null; child.nextSibling = null; child.previousSibling = null; } this.children = []; this.hasChildren = false; }, removeChildrenRecursive: function() { var childrenToRemove = this.children; var child = this.children[0]; while (child) { if (child.children.length) childrenToRemove = childrenToRemove.concat(child.children); child = child.traverseNextNode(false, this, true); } for (var i = 0; i < childrenToRemove.length; ++i) { var child = childrenToRemove[i]; child.deselect(); child._detach(); child.children = []; child.dataGrid = null; child.parent = null; child.nextSibling = null; child.previousSibling = null; } this.children = []; }, sortNodes: function(comparator, reverseMode) { function comparatorWrapper(a, b) { if (a._dataGridNode._data.summaryRow) return 1; if (b._dataGridNode._data.summaryRow) return -1; var aDataGirdNode = a._dataGridNode; var bDataGirdNode = b._dataGridNode; return reverseMode ? 
comparator(bDataGirdNode, aDataGirdNode) : comparator(aDataGirdNode, bDataGirdNode); } var tbody = this.dataTableBody; var tbodyParent = tbody.parentElement; tbodyParent.removeChild(tbody); var childNodes = tbody.childNodes; var fillerRow = childNodes[childNodes.length - 1]; var sortedRows = Array.prototype.slice.call(childNodes, 0, childNodes.length - 1); sortedRows.sort(comparatorWrapper); var sortedRowsLength = sortedRows.length; tbody.removeChildren(); var previousSiblingNode = null; for (var i = 0; i < sortedRowsLength; ++i) { var row = sortedRows[i]; var node = row._dataGridNode; node.previousSibling = previousSiblingNode; if (previousSiblingNode) previousSiblingNode.nextSibling = node; tbody.appendChild(row); previousSiblingNode = node; } if (previousSiblingNode) previousSiblingNode.nextSibling = null; tbody.appendChild(fillerRow); tbodyParent.appendChild(tbody); }, _keyDown: function(event) { if (!this.selectedNode || event.shiftKey || event.metaKey || event.ctrlKey || this._editing) return; var handled = false; var nextSelectedNode; if (event.keyIdentifier === "Up" && !event.altKey) { nextSelectedNode = this.selectedNode.traversePreviousNode(true); while (nextSelectedNode && !nextSelectedNode.selectable) nextSelectedNode = nextSelectedNode.traversePreviousNode(!this.expandTreeNodesWhenArrowing); handled = nextSelectedNode ? true : false; } else if (event.keyIdentifier === "Down" && !event.altKey) { nextSelectedNode = this.selectedNode.traverseNextNode(true); while (nextSelectedNode && !nextSelectedNode.selectable) nextSelectedNode = nextSelectedNode.traverseNextNode(!this.expandTreeNodesWhenArrowing); handled = nextSelectedNode ? 
true : false; } else if (event.keyIdentifier === "Left") { if (this.selectedNode.expanded) { if (event.altKey) this.selectedNode.collapseRecursively(); else this.selectedNode.collapse(); handled = true; } else if (this.selectedNode.parent && !this.selectedNode.parent.root) { handled = true; if (this.selectedNode.parent.selectable) { nextSelectedNode = this.selectedNode.parent; handled = nextSelectedNode ? true : false; } else if (this.selectedNode.parent) this.selectedNode.parent.collapse(); } } else if (event.keyIdentifier === "Right") { if (!this.selectedNode.revealed) { this.selectedNode.reveal(); handled = true; } else if (this.selectedNode.hasChildren) { handled = true; if (this.selectedNode.expanded) { nextSelectedNode = this.selectedNode.children[0]; handled = nextSelectedNode ? true : false; } else { if (event.altKey) this.selectedNode.expandRecursively(); else this.selectedNode.expand(); } } } else if (event.keyCode === 8 || event.keyCode === 46) { if (this._deleteCallback) { handled = true; this._deleteCallback(this.selectedNode); } } else if (isEnterKey(event)) { if (this._editCallback) { handled = true; // The first child of the selected element is the <td class="0-column">, // and that's what we want to edit. this._startEditing(this.selectedNode._element.children[0]); } } if (nextSelectedNode) { nextSelectedNode.reveal(); nextSelectedNode.select(); } if (handled) { event.preventDefault(); event.stopPropagation(); } }, expand: function() { // This is the root, do nothing. }, collapse: function() { // This is the root, do nothing. }, reveal: function() { // This is the root, do nothing. 
}, dataGridNodeFromNode: function(target) { var rowElement = target.enclosingNodeOrSelfWithNodeName("tr"); return rowElement && rowElement._dataGridNode; }, dataGridNodeFromPoint: function(x, y) { var node = this._dataTable.ownerDocument.elementFromPoint(x, y); var rowElement = node.enclosingNodeOrSelfWithNodeName("tr"); return rowElement && rowElement._dataGridNode; }, _clickInHeaderCell: function(event) { var cell = event.target.enclosingNodeOrSelfWithNodeName("th"); if (!cell || !cell.columnIdentifier || !cell.hasStyleClass("sortable")) return; var sortOrder = this.sortOrder; if (this._sortColumnCell) this._sortColumnCell.removeMatchingStyleClasses("sort-\\w+"); if (cell == this._sortColumnCell) { if (sortOrder === "ascending") sortOrder = "descending"; else sortOrder = "ascending"; } this._sortColumnCell = cell; cell.addStyleClass("sort-" + sortOrder); this.dispatchEventToListeners("sorting changed"); }, markColumnAsSortedBy: function(columnIdentifier, sortOrder) { if (this._sortColumnCell) this._sortColumnCell.removeMatchingStyleClasses("sort-\\w+"); this._sortColumnCell = this._headerTableHeaders[columnIdentifier]; this._sortColumnCell.addStyleClass("sort-" + sortOrder); }, headerTableHeader: function(columnIdentifier) { return this._headerTableHeaders[columnIdentifier]; }, _mouseDownInDataTable: function(event) { var gridNode = this.dataGridNodeFromNode(event.target); if (!gridNode || !gridNode.selectable) return; if (gridNode.isEventWithinDisclosureTriangle(event)) return; if (event.metaKey) { if (gridNode.selected) gridNode.deselect(); else gridNode.select(); } else gridNode.select(); }, _contextMenuInDataTable: function(event) { var gridNode = this.dataGridNodeFromNode(event.target); if (!gridNode || !gridNode.selectable) return; if (gridNode.isEventWithinDisclosureTriangle(event)) return; var contextMenu = new WebInspector.ContextMenu(); // FIXME: Use the column names for Editing, instead of just "Edit". 
if (this.dataGrid._editCallback) { if (gridNode === this.creationNode) contextMenu.appendItem(WebInspector.UIString("Add New"), this._startEditing.bind(this, event.target)); else contextMenu.appendItem(WebInspector.UIString("Edit"), this._startEditing.bind(this, event.target)); } if (this.dataGrid._deleteCallback && gridNode !== this.creationNode) contextMenu.appendItem(WebInspector.UIString("Delete"), this._deleteCallback.bind(this, gridNode)); contextMenu.show(event); }, _clickInDataTable: function(event) { var gridNode = this.dataGridNodeFromNode(event.target); if (!gridNode || !gridNode.hasChildren) return; if (!gridNode.isEventWithinDisclosureTriangle(event)) return; if (gridNode.expanded) { if (event.altKey) gridNode.collapseRecursively(); else gridNode.collapse(); } else { if (event.altKey) gridNode.expandRecursively(); else gridNode.expand(); } }, _startResizerDragging: function(event) { this.currentResizer = event.target; if (!this.currentResizer.rightNeighboringColumnID) return; WebInspector.elementDragStart(this.lastResizer, this._resizerDragging.bind(this), this._endResizerDragging.bind(this), event, "col-resize"); }, _resizerDragging: function(event) { var resizer = this.currentResizer; if (!resizer) return; // Constrain the dragpoint to be within the containing div of the // datagrid. var dragPoint = event.clientX - this.element.totalOffsetLeft; // Constrain the dragpoint to be within the space made up by the // column directly to the left and the column directly to the right. var leftEdgeOfPreviousColumn = 0; var firstRowCells = this.headerTableBody.rows[0].cells; for (var i = 0; i < resizer.leftNeighboringColumnID; i++) leftEdgeOfPreviousColumn += firstRowCells[i].offsetWidth; var rightEdgeOfNextColumn = leftEdgeOfPreviousColumn + firstRowCells[resizer.leftNeighboringColumnID].offsetWidth + firstRowCells[resizer.rightNeighboringColumnID].offsetWidth; // Give each column some padding so that they don't disappear. 
var leftMinimum = leftEdgeOfPreviousColumn + this.ColumnResizePadding; var rightMaximum = rightEdgeOfNextColumn - this.ColumnResizePadding; dragPoint = Number.constrain(dragPoint, leftMinimum, rightMaximum); resizer.style.left = (dragPoint - this.CenterResizerOverBorderAdjustment) + "px"; var percentLeftColumn = (((dragPoint - leftEdgeOfPreviousColumn) / this._dataTable.offsetWidth) * 100) + "%"; this._headerTableColumnGroup.children[resizer.leftNeighboringColumnID].style.width = percentLeftColumn; this._dataTableColumnGroup.children[resizer.leftNeighboringColumnID].style.width = percentLeftColumn; var percentRightColumn = (((rightEdgeOfNextColumn - dragPoint) / this._dataTable.offsetWidth) * 100) + "%"; this._headerTableColumnGroup.children[resizer.rightNeighboringColumnID].style.width = percentRightColumn; this._dataTableColumnGroup.children[resizer.rightNeighboringColumnID].style.width = percentRightColumn; this._positionResizers(); event.preventDefault(); this.dispatchEventToListeners("width changed"); }, _endResizerDragging: function(event) { WebInspector.elementDragEnd(event); this.currentResizer = null; this.dispatchEventToListeners("width changed"); }, ColumnResizePadding: 10, CenterResizerOverBorderAdjustment: 3, } WebInspector.DataGrid.prototype.__proto__ = WebInspector.Object.prototype; WebInspector.DataGridNode = function(data, hasChildren) { this._expanded = false; this._selected = false; this._shouldRefreshChildren = true; this._data = data || {}; this.hasChildren = hasChildren || false; this.children = []; this.dataGrid = null; this.parent = null; this.previousSibling = null; this.nextSibling = null; this.disclosureToggleWidth = 10; } WebInspector.DataGridNode.prototype = { selectable: true, get element() { if (this._element) return this._element; if (!this.dataGrid) return null; this._element = document.createElement("tr"); this._element._dataGridNode = this; if (this.hasChildren) this._element.addStyleClass("parent"); if (this.expanded) 
this._element.addStyleClass("expanded"); if (this.selected) this._element.addStyleClass("selected"); if (this.revealed) this._element.addStyleClass("revealed"); this.createCells(); return this._element; }, createCells: function() { for (var columnIdentifier in this.dataGrid.columns) { var cell = this.createCell(columnIdentifier); this._element.appendChild(cell); } }, get data() { return this._data; }, set data(x) { this._data = x || {}; this.refresh(); }, get revealed() { if ("_revealed" in this) return this._revealed; var currentAncestor = this.parent; while (currentAncestor && !currentAncestor.root) { if (!currentAncestor.expanded) { this._revealed = false; return false; } currentAncestor = currentAncestor.parent; } this._revealed = true; return true; }, set hasChildren(x) { if (this._hasChildren === x) return; this._hasChildren = x; if (!this._element) return; if (this._hasChildren) { this._element.addStyleClass("parent"); if (this.expanded) this._element.addStyleClass("expanded"); } else { this._element.removeStyleClass("parent"); this._element.removeStyleClass("expanded"); } }, get hasChildren() { return this._hasChildren; }, set revealed(x) { if (this._revealed === x) return; this._revealed = x; if (this._element) { if (this._revealed) this._element.addStyleClass("revealed"); else this._element.removeStyleClass("revealed"); } for (var i = 0; i < this.children.length; ++i) this.children[i].revealed = x && this.expanded; }, get depth() { if ("_depth" in this) return this._depth; if (this.parent && !this.parent.root) this._depth = this.parent.depth + 1; else this._depth = 0; return this._depth; }, get shouldRefreshChildren() { return this._shouldRefreshChildren; }, set shouldRefreshChildren(x) { this._shouldRefreshChildren = x; if (x && this.expanded) this.expand(); }, get selected() { return this._selected; }, set selected(x) { if (x) this.select(); else this.deselect(); }, get expanded() { return this._expanded; }, set expanded(x) { if (x) this.expand(); else 
this.collapse(); }, refresh: function() { if (!this._element || !this.dataGrid) return; this._element.removeChildren(); this.createCells(); }, createCell: function(columnIdentifier) { var cell = document.createElement("td"); cell.className = columnIdentifier + "-column"; var alignment = this.dataGrid.aligned[columnIdentifier]; if (alignment) cell.addStyleClass(alignment); var div = document.createElement("div"); div.textContent = this.data[columnIdentifier]; cell.appendChild(div); if (columnIdentifier === this.dataGrid.disclosureColumnIdentifier) { cell.addStyleClass("disclosure"); if (this.depth) cell.style.setProperty("padding-left", (this.depth * this.dataGrid.indentWidth) + "px"); } return cell; }, // Share these functions with DataGrid. They are written to work with a DataGridNode this object. appendChild: WebInspector.DataGrid.prototype.appendChild, insertChild: WebInspector.DataGrid.prototype.insertChild, removeChild: WebInspector.DataGrid.prototype.removeChild, removeChildren: WebInspector.DataGrid.prototype.removeChildren, removeChildrenRecursive: WebInspector.DataGrid.prototype.removeChildrenRecursive, _recalculateSiblings: function(myIndex) { if (!this.parent) return; var previousChild = (myIndex > 0 ? 
this.parent.children[myIndex - 1] : null); if (previousChild) { previousChild.nextSibling = this; this.previousSibling = previousChild; } else this.previousSibling = null; var nextChild = this.parent.children[myIndex + 1]; if (nextChild) { nextChild.previousSibling = this; this.nextSibling = nextChild; } else this.nextSibling = null; }, collapse: function() { if (this._element) this._element.removeStyleClass("expanded"); this._expanded = false; for (var i = 0; i < this.children.length; ++i) this.children[i].revealed = false; this.dispatchEventToListeners("collapsed"); }, collapseRecursively: function() { var item = this; while (item) { if (item.expanded) item.collapse(); item = item.traverseNextNode(false, this, true); } }, expand: function() { if (!this.hasChildren || this.expanded) return; if (this.revealed && !this._shouldRefreshChildren) for (var i = 0; i < this.children.length; ++i) this.children[i].revealed = true; if (this._shouldRefreshChildren) { for (var i = 0; i < this.children.length; ++i) this.children[i]._detach(); this.dispatchEventToListeners("populate"); if (this._attached) { for (var i = 0; i < this.children.length; ++i) { var child = this.children[i]; if (this.revealed) child.revealed = true; child._attach(); } } delete this._shouldRefreshChildren; } if (this._element) this._element.addStyleClass("expanded"); this._expanded = true; this.dispatchEventToListeners("expanded"); }, expandRecursively: function() { var item = this; while (item) { item.expand(); item = item.traverseNextNode(false, this); } }, reveal: function() { var currentAncestor = this.parent; while (currentAncestor && !currentAncestor.root) { if (!currentAncestor.expanded) currentAncestor.expand(); currentAncestor = currentAncestor.parent; } this.element.scrollIntoViewIfNeeded(false); this.dispatchEventToListeners("revealed"); }, select: function(supressSelectedEvent) { if (!this.dataGrid || !this.selectable || this.selected) return; if (this.dataGrid.selectedNode) 
this.dataGrid.selectedNode.deselect(); this._selected = true; this.dataGrid.selectedNode = this; if (this._element) this._element.addStyleClass("selected"); if (!supressSelectedEvent) this.dispatchEventToListeners("selected"); }, deselect: function(supressDeselectedEvent) { if (!this.dataGrid || this.dataGrid.selectedNode !== this || !this.selected) return; this._selected = false; this.dataGrid.selectedNode = null; if (this._element) this._element.removeStyleClass("selected"); if (!supressDeselectedEvent) this.dispatchEventToListeners("deselected"); }, traverseNextNode: function(skipHidden, stayWithin, dontPopulate, info) { if (!dontPopulate && this.hasChildren) this.dispatchEventToListeners("populate"); if (info) info.depthChange = 0; var node = (!skipHidden || this.revealed) ? this.children[0] : null; if (node && (!skipHidden || this.expanded)) { if (info) info.depthChange = 1; return node; } if (this === stayWithin) return null; node = (!skipHidden || this.revealed) ? this.nextSibling : null; if (node) return node; node = this; while (node && !node.root && !((!skipHidden || node.revealed) ? node.nextSibling : null) && node.parent !== stayWithin) { if (info) info.depthChange -= 1; node = node.parent; } if (!node) return null; return (!skipHidden || node.revealed) ? node.nextSibling : null; }, traversePreviousNode: function(skipHidden, dontPopulate) { var node = (!skipHidden || this.revealed) ? this.previousSibling : null; if (!dontPopulate && node && node.hasChildren) node.dispatchEventToListeners("populate"); while (node && ((!skipHidden || (node.revealed && node.expanded)) ? node.children[node.children.length - 1] : null)) { if (!dontPopulate && node.hasChildren) node.dispatchEventToListeners("populate"); node = ((!skipHidden || (node.revealed && node.expanded)) ? 
node.children[node.children.length - 1] : null); } if (node) return node; if (!this.parent || this.parent.root) return null; return this.parent; }, isEventWithinDisclosureTriangle: function(event) { if (!this.hasChildren) return false; var cell = event.target.enclosingNodeOrSelfWithNodeName("td"); if (!cell.hasStyleClass("disclosure")) return false; var computedLeftPadding = window.getComputedStyle(cell).getPropertyCSSValue("padding-left").getFloatValue(CSSPrimitiveValue.CSS_PX); var left = cell.totalOffsetLeft + computedLeftPadding; return event.pageX >= left && event.pageX <= left + this.disclosureToggleWidth; }, _attach: function() { if (!this.dataGrid || this._attached) return; this._attached = true; var nextNode = null; var previousNode = this.traversePreviousNode(true, true); if (previousNode && previousNode.element.parentNode && previousNode.element.nextSibling) var nextNode = previousNode.element.nextSibling; if (!nextNode) nextNode = this.dataGrid.dataTableBody.lastChild; this.dataGrid.dataTableBody.insertBefore(this.element, nextNode); if (this.expanded) for (var i = 0; i < this.children.length; ++i) this.children[i]._attach(); }, _detach: function() { if (!this._attached) return; this._attached = false; if (this._element && this._element.parentNode) this._element.parentNode.removeChild(this._element); for (var i = 0; i < this.children.length; ++i) this.children[i]._detach(); }, savePosition: function() { if (this._savedPosition) return; if (!this.parent) throw("savePosition: Node must have a parent."); this._savedPosition = { parent: this.parent, index: this.parent.children.indexOf(this) }; }, restorePosition: function() { if (!this._savedPosition) return; if (this.parent !== this._savedPosition.parent) this._savedPosition.parent.insertChild(this, this._savedPosition.index); delete this._savedPosition; } } WebInspector.DataGridNode.prototype.__proto__ = WebInspector.Object.prototype; WebInspector.CreationDataGridNode = function(data, hasChildren) { 
WebInspector.DataGridNode.call(this, data, hasChildren); // delegate to the base DataGridNode constructor
this.isCreationNode = true; // marker checked by DataGrid (e.g. when an edit is committed, and in the data-table context menu)
}

// Placeholder "creation" row: a blank, editable node appended by
// DataGrid.addCreationNode so the user can enter a brand-new entry.
WebInspector.CreationDataGridNode.prototype = {
    // Invoked once the placeholder has been committed as real data:
    // drops the creation marker so the node is treated as an ordinary
    // DataGridNode from then on.
    makeNormal: function() {
        delete this.isCreationNode;
        delete this.makeNormal;
    }
}

WebInspector.CreationDataGridNode.prototype.__proto__ = WebInspector.DataGridNode.prototype;
{ "pile_set_name": "Github" }
NULL NULL NULL NULL NULL NULL NULL
{ "pile_set_name": "Github" }
#!/bin/bash
# conda-build install script for widgetsnbextension.
#
# Fix: the original script had no error handling, so a failed
# "setup.py install" was silently ignored and the build continued (and
# could be packaged) anyway. `set -e` makes any failing step abort the
# build so conda-build reports it.
set -e

# Install the package using the build environment's Python ($PYTHON is
# provided by conda-build). --old-and-unmanageable forces the legacy
# flat distutils install layout (no egg directory) that conda packaging
# expects.
$PYTHON setup.py install --old-and-unmanageable

# Register the notebook front-end assets with Jupyter inside this
# environment prefix only (--sys-prefix), replacing any previous copy.
jupyter-nbextension install widgetsnbextension --py --sys-prefix --overwrite
{ "pile_set_name": "Github" }
{ "exercise": "minesweeper", "version": "1.1.0", "comments": [ " The expected outputs are represented as arrays of strings to ", " improve readability in this JSON file. ", " Your track may choose whether to present the input as a single ", " string (concatenating all the lines) or as the list. " ], "cases": [ { "description": "annotate 9", "property": "annotate", "input": { "minefield": [ " ", " * ", " ", " ", " * " ] }, "expected": [ " 111", " 1*1", " 111", "111 ", "1*1 " ] }, { "description": "different len", "property": "annotate", "input": { "minefield": [ " ", "* ", " " ] }, "expected": {"error": "The minefield must have a consistent length"} }, { "description": "invalid char", "property": "annotate", "input": { "minefield": ["X * "] }, "expected": {"error": "The minefield contains an invalid character"} } ] }
{ "pile_set_name": "Github" }
/**
 * Copyright (C) 2007 - 2016, Jens Lehmann
 *
 * This file is part of DL-Learner.
 *
 * DL-Learner is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 3 of the License, or
 * (at your option) any later version.
 *
 * DL-Learner is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
package org.dllearner.algorithms.pattern;

// NOTE(review): Logger, IOException, OutputStream and PrintStream are
// currently unused below; left in place pending confirmation that no
// other (commented-out) code path needs them.
import org.apache.log4j.Logger;
import org.dllearner.core.StringRenderer;
import org.dllearner.core.StringRenderer.Rendering;
import org.dllearner.kb.repository.LocalDirectoryOntologyRepository;
import org.dllearner.kb.repository.OntologyRepository;
import org.dllearner.kb.repository.bioportal.BioPortalRepository;
import org.dllearner.kb.repository.tones.TONESRepository;
import org.junit.Test;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintStream;

/**
 * Manual driver for running the OWL axiom pattern finder against different
 * ontology repositories (TONES, BioPortal, or a local directory of
 * ontology files).
 *
 * NOTE(review): every JUnit annotation below ({@code @Before}, {@code @Test})
 * is commented out, so nothing in this class runs as part of the automated
 * test suite — presumably because these runs are long and depend on network
 * or local-disk resources. Confirm before re-enabling.
 */
public class OWLPatternDetectionTest {

//	@Before
	public void setUp() throws Exception {
		// Render axioms in DL syntax so the discovered patterns are
		// human-readable in the finder's output.
		StringRenderer.setRenderer(Rendering.DL_SYNTAX);
	}

	/** Mines axiom patterns over the ontologies of the TONES repository. */
//	@Test
	public void testTONESRepository(){
		OntologyRepository repository = new TONESRepository();
		repository.initialize();
		OWLAxiomPatternFinder patternFinder = new OWLAxiomPatternFinder(repository);
		patternFinder.start();
	}

	/** Mines axiom patterns over the ontologies of the BioPortal repository. */
//	@Test
	public void testBioPortalRepository(){
		OntologyRepository repository = new BioPortalRepository();
		repository.initialize();
		OWLAxiomPatternFinder patternFinder = new OWLAxiomPatternFinder(repository);
		patternFinder.start();
	}

	/**
	 * Mines axiom patterns over ontology files found in a local directory.
	 * NOTE(review): the path below is a hard-coded machine-specific location
	 * (the author's external drive); it must be adapted before this can run
	 * anywhere else.
	 */
//	@Test
	public void testLocalDir(){
		OntologyRepository repository = new LocalDirectoryOntologyRepository(new File("/media/me/Work-Ext/datasets/owlxml_mowlcorp/files"));
		repository.initialize();
		OWLAxiomPatternFinder patternFinder = new OWLAxiomPatternFinder(repository);
		patternFinder.start();
	}

}
{ "pile_set_name": "Github" }
;;; widget-d.el - widget class definitions ;;; ;;; Copyright (C) 1995,1996, 1999 Eric M. Ludlam ;;; ;;; Author: <zappo@gnu.ai.mit.edu> ;;; RCS: $Id: widget-d.el,v 1.14 1999/02/03 19:31:08 zappo Exp $ ;;; Keywords: OO widget ;;; ;;; This program is free software; you can redistribute it and/or modify ;;; it under the terms of the GNU General Public License as published by ;;; the Free Software Foundation; either version 2, or (at your option) ;;; any later version. ;;; ;;; This program is distributed in the hope that it will be useful, ;;; but WITHOUT ANY WARRANTY; without even the implied warranty of ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the ;;; GNU General Public License for more details. ;;; ;;; You should have received a copy of the GNU General Public License ;;; along with this program; if not, you can either send email to this ;;; program's author (see below) or write to: ;;; ;;; The Free Software Foundation, Inc. ;;; 675 Mass Ave. ;;; Cambridge, MA 02139, USA. ;;; ;;; Please send bug reports, etc. to zappo@gnu.ai.mit.edu. ;;; ;;; ;;; Commentary: ;;; ;;; This file defines all the classes needed to create and maintain ;;; widgets in an emacs controlled environment using the eieio ;;; package. Only definitions exist in this file. ;;; (require 'eieio) (defvar widget-d-load-hooks nil "List of hooks run after this file is loaded. Permits users to customize the default widget behavior using `oset-default'") ;;; Data object definition ;;; ;;; A data object, as discussed in the Fresco documentation, is just a ;;; blob where we store stuff. Widgets store values in these objects, ;;; and follow their interface, so when the data is updated, other ;;; functions (gadgets, widgets, etc) can update themselves to the ;;; changes in the environment. (defclass data-object () ((value :initarg :value :initform nil :accessor get-value :documentation "Lisp object which represents the data this object maintains." 
:protection private) (reference :initarg :reference :initform nil :documentation "List of objects looking at me. The method `update-symbol' is called for each member of `reference' whenever `value' is modified." :protection private) ) "This defines a `data-object' which is used for all widgets maintaining some value. For example, a `widget-label' displays a string or number, and a `widget-scrollbar' edits a number. These widgets will use data-object to store their data.") ;;; Widget definitions for the base set of widgets ;;; (defclass widget-core () ((parent :initarg :parent :initform nil :accessor get-parent :documentation "A widget of type `widget-group' of which this is a child.") (watched-symbols :initarg :watched-symbols :initform nil :documentation "List of symbols this widget cares about." :protection private) (help-hook :initarg :help-hook :initform nil :documentation "Function to call when help is requested about this button. Default value is to display instructions about the operation of this widget in the minibuffer. This takes two paramters which are the widget for which help was requested and the reason, which us either 'click for a mouse event, or the keypress initiating the call.") ) "Class for core widget. This is the widget all other widgets are based from.") (defclass widget-gadget-translator (widget-core) ((watch :initarg :watch :documentation "A `data-object' to watch. When it changes, run the translator function.") (change :initarg :change :documentation "A `data-object' to change whenever `watch' is modified.") (translate-function :initarg :translate-function :initform (lambda-default (watch change) nil) :documentation "Function to call when `watch' changes. It should modify the `data-object' `change' from it's value. It takes two parameters WATCH and CHANGE.") ) "Non-visible class for a gadget translator. The translator's job is to convert the `data-object' in `watch' to some new value, and store it in `change'. 
This is useful for translating indices into descriptions or something like that.") (defclass widget-visual (widget-core) ((x :initarg :x :initform nil :documentation "The X position in a buffer relative to parent.") (y :initarg :y :initform nil :documentation "The Y position in a buffer relative to parent.") ; (resizeable :initarg :resizeable ; :initform nil ; :documentation "(unused) t if this widget has a tendency to resize itself.") (nx :initform 0 :documentation "The normalized X position relative to parent. (After geometry management)") (ny :initform 0 :documentation "The normalized Y position relative to parent. (After geometry management)") ; (marker :initarg :marker ; :initform nil ; :protection private ; :documentation "(Unused) Marker in the dialog buffer from which all drawing commands are based.") (face :initarg :face :initform widget-default-face :protection private :documentation "Face used to draw this widget.") (handle-io :initarg :handle-io :initform nil :documentation "t if this widget accepts keyboard or mouse input.") (handle-motion :initform nil :documentation "t if this widget handles it's own motion events, or 'traditional if it uses traditional motion events. Traditional events are not passed to a widget unless the motion moves the cursor outside of the widget's boundary.") (rx :documentation "Real X position in buffer" :protection private) (ry :documentation "Real Y position in buffer" :protection private) ) "Class for visual widget. This is the widget all visible widgets are derived from. Its definition includes an X,Y position which defines it's offset inside the parent, and can include its offset from other widgets which are children of `parent'. 
@xref{(dialog) Geometry Management}") (defclass widget-square (widget-visual) ((width :initarg :width :documentation "Width in characters") (height :initarg :height :documentation "Height in characters") (boxed :initarg :boxed :initform nil :documentation "t if a box is to be drawn around this widget") (box-face :initarg :box-face :initform widget-box-face :documentation "Face used on the box (if drawn)" :protection private) (box-char :initarg :box-char :initform [?+ ?+ ?+ ?+ ?- ?- ?| ?|] :documentation "Character set used the draw the box. The vector is [ upper-right upper-left bottom-right bottom-left horizontal vertical ]" :protection private) (box-sides :initarg :box-sides :initform [ t t t t ] :documentation "Vector which represents those sides of the box which will be drawn, where a t in a position indicates the side is to be drawn. The vector is of the form [ left right top bottom ]") ) "This is the definition for visible widgets which have a square shape. This provides basic sizing schemes and box drawing utilities for widgets that are square.") ;; ;; Some group types ;; (defclass widget-group (widget-square) ((child-list :initarg :child-list :initform nil :accessor get-children :documentation "List of children this group needs to manage") ) "Definition for the group widget. This is an intermediary type whose job is to provide basic child management for higher level widgets which contain children such as `widget-toplevel' and `widget-frame'. This widget knows how to add new children, and manage its size based on the positions and sizes of it's children. 
It also knows how to create navigation lists.") (defclass widget-toplevel (widget-group) ((rx :initarg :rx) ;create initargs for real parts (ry :initarg :ry) ;for toplevel only (buffer :initarg :buffer :initform current-buffer :documentation "The buffer this dialog resides in.") (logical-child-list :initform nil :documentation "Contains a list of all the children and grand-children in their logical order for the purpose of tab-stepping across them" :protection private) (handle-io :initform t) ) "Definition for a top-level shell. This maintains the interface to emacs' buffer, and is a parent of all children displayed in the buffer. This will be created automatically with a call to `dialog-mode' when designing a screen.") (defclass widget-frame (widget-group) ((handle-io :initform t) (boxed :initform t) (frame-label :initarg :frame-label :initform t :documentation "Label to place on the edge of our frame. An initial value of t means to use the object name. An initial value of nil means no title. If this is initialized with a string, then that string is used as the label's string. The created widget will be a `widget-label'. If this is a widget, then that widget is positioned on the edge of the screen." :protection private) (position :initarg :position :initform top-left :documentation "Where the `frame-label' will reside. Valid values are symbols consisting of substrings of left, right, center, top, and bottom." :protection private) ) "Definition for a frame, which can contain several children grouped in a box with a `widget-label' on one edge (covering a part of the box).") (defclass widget-radio-frame (widget-frame) ((state :initarg :state :initform 0 :documentation "Current index of the selected radio button") ) "Special frame class which behaves as a radio box. 
Designed to only contain widgets of type `widget-radio-button'.") (defclass widget-labeled-text (widget-group) ((handle-io :initform t) (label :initarg :label :initform nil :documentation "Text object displayed with a `widget-label' before a `widget-text-field'.") (unit :initarg :unit :initform nil :documentation "Text object displayed with a `widget-label' after the `widget-text-field' which represents some sort of typing which would be useful to know.") (value :initarg :value :initform nil :documentation "The `data-object' we are going to edit with the text widget") (text-length :initarg :text-length :initform 20 :documentation "The width passed to the `widget-text-field'") ) "Special group widget which makes creating text fields with labels next to them more convenient.") (defclass widget-option-text (widget-group) ((handle-io :initform t) (value :initarg :value :initform nil :documentation "Text object displayed with a `widget-option-button' just to the right and a label to the left. This contains the value used by the option button and the text widget so they can communicate.") (label :initarg :label :initform nil :documentation "Text object displayed with a `widget-label' before a `widget-text-field'.") (text-length :initarg :text-length :initform 20 :documentation "The width passed to the `widget-text-field'") (option-list :initarg :option-list :initform nil :documentation "List of strings which are the options to appear in the option list.") ) "Specialized text widget which will have an optional `widget-label' followed by a `widget-text-field' which will be followed by a `widget-option-button'. The menu button will appear as a down-arrow. 
Items selected from the menu will then appear in the text field.") (defclass widget-scrolled-text (widget-group) ((handle-io :initform t) (boxed :initform t) (value :initarg :value :initform nil :documentation "The `data-object' we are going to edit with the text widget") (state :initarg :state :initform 0 :documentation "Current value of the built-in scrollbar") (maximum :initarg :maximum :initform 1 :documentation "Largest allowed value for the built-in scrollbar") (scrollbar :initform nil :documentation "Holder for scrollbar so we don't make too many of them" :protection private) ) "Specialized composite widget which will build a `widget-text-block' of the same dimensions given for our width/height. A scrollbar will be created just off the edge of our box and it's `maximum' and `minimum' will be controlled by the text widget (as it's text gets larger/smaller), and the scrollbar's value will alter the text widget's positioning of text.") ;; ;; The important label type ;; (defclass widget-label (widget-square) ((label-value :initarg :label-value :initform nil :documentation "The `data-object' to display on ourselves") (label-list :initarg nil :initform nil :documentation "The `label-value' is transformed into this list, which is broken into substrings around carriage returns." :protection private) (justification :initarg :justification :initform center :documentation "how to justify the text. 
Valid values are 'left, 'center, 'right") (focus-face :initarg :focus-face :initform nil :documentation "Face used when mouse passes over `label-value'" :protection private) (leftmargin :initarg :leftmargin :initform 0 :documentation "Size of left space to format around") (rightmargin :initarg :rightmargin :initform 0 :documentation "Size of right space to format around") (topmargin :initarg :topmargin :initform 0 :documentation "Size of space above this label to format around") (bottommargin :initarg :bottommargin :initform 0 :documentation "Size of space below this label to format around.") ) "Class for displaying labels. The value of the label is determined by the `data-object' stored in `label-value' which can be initialized with a string, number, or other lisp object. Supports strings with carriage returns in them.") ;; ;; Button types ;; (defclass widget-button (widget-label) ((arm-face :initarg :arm-face :initform widget-arm-face :documentation "Face used when this button has been pushed." :protection private) (focus-face :initform widget-focus-face :protection private) (activate-hook :initarg :activate-hook :initform nil :documentation "Function to call when a user clicks this button. It must take two paramters. The object representing the object being clicked, and the reason it was clicked. This usually has the value 'click, or the keyboard event that caused a press.") (handle-io :initarg :handle-io :initform t) ) "Class for a button widget. This is the starting point for all interactive buttons. This button will be CLICKED on, or will have RET or SPC pressed while selected, and it will then call `activate-hook'. If a push button is desired, it is better to use a widget of type `widget-push-button' instead as it has a better visual effects.") (defclass widget-push-button (widget-button) ((boxed :initform t) (box-char :initform [? ? ? ? ? ? 
?< ?> ] :protection private) (box-sides :initform [ t t nil nil ]) (box-face :initform widget-indicator-face :protection private) ;; Add a little bit of margin (leftmargin :initform 1) (rightmargin :initform 1) ) "Class for a push button. This button behaves as a `widget-button' but with a different visual effect. This is the preferred widget to use as the `widget-button' is used as a starting point for all button types.") (defclass widget-arrow-button (widget-button) ((activate-hook :initform (lambda-default (obj reason) "Arrow button Activate-Hook" (let ((state (oref obj state))) (set-value state (+ (get-value state) (oref obj adjustment)))))) (state :initarg :state :initarg nil :documentation "The value which will be adjusted when we are activated") (direction :initarg :direction :initarg 'up :documentation "Direction this arrow button points. Valid values are 'up, 'down, 'left, and 'right. The values 'up and 'left will default `adjustment' to -1, and 'right and 'down will set it to 1. This field will also automatically set the `label-value' slot if it is not specified.") (adjustment :initarg :adjustment :initarg nil :documentation "How much to adjust `state' by when activated. If it is not specified at creation time, it's value will be generated from the value of the `direction' slot.") ) "An arrow button is a specialized button used to increment or decrement a state variable. 
Arrow buttons are usually used to adjust `widget-scale' values.") (defclass widget-option-button (widget-button) ((option-indicator :initarg :option-indicator :initform "<=>" :documentation "String printed to the left of the label in `left-margin' used to show this is an option button.") (option-list :initarg :option-list :initform nil :documentation "List of strings which are the options to appear in the pull down menu.") (option-obarray :initform nil :protection private :documentation "Obarray used for command line reading of symbols") (title :initarg :title :initform "Option" :documentation "String that appears as the completion-prompt, or as the title to a popup menu. When used in a prompt, the form is \"Select\" title \": \".") (ind-face :initarg :ind-face :initform widget-indicator-face :documentation "Face used on the `option-indicator' string" :protection private) (justification :initarg :justification :initform left) (dynamic-label :initarg :dynamic-label :initform t :documentation "t means that the label of this button will always show the selected element from option-list. nil will leave the label string alone." :protection private) (boxed :initform nil) (state :initarg :state :initform 0 :documentation "`data-object' used as a numerical index into list of strings representing the current value.") ) "Class for option button widget. This button will provide a menu when clicked on. The menu will consist of those items in `option-list', and the chosen item will appear in the button's text.") (defclass widget-toggle-button (widget-button) ((boxed :initform nil) (state :initarg :state :initform nil :documentation "Current value of the toggle button") (ind-face :initarg :ind-face :initform widget-indicator-face :documentation "Face used on toggle indicator" :protection private) (showvec :initarg :showvec :initform [ "[ ]" "[X]" ] :documentation "Vector [ false true ] of strings used to show the state") ) "Class for toggle button widget. 
This button will be CLICKED, and when successful clicks occur, a boolean value will be turned ON or OFF, and a visible piece will be modified based on `showvec'.") (defclass widget-radio-button (widget-toggle-button) ((radio-index :initarg :radioindex :initform 0 :documentation "Index indexing the parent's state, which then lets us know if we are toggled on or off. ie, if the parent's state is 1, and our index is 0, then the state of this button will become nil. This value does not change during use.") (parent-state :initform nil :documentation "Data object pointing the parent's state" :protection private) (showvec :initform [ "< >" "<O>" ]) ;change type of indicator ) "Subclass of `widget-toggle-button' which knows how to talk with several other instantiations of itself in order to radio between different values.") ;; ;; Scrollbar types ;; (defclass widget-scale (widget-group) ((handle-io :initform t) (focus-face :initarg :focus-face :initform widget-focus-face :documentation "Face used on thumb and step buttons when the mouse is over them." :protection private) (state :initarg :state :initform 0 :documentation "Current value of this scale") (minimum :initarg :minimum :initform 0 :documentation "Smallest allowed value") (maximum :initarg :maximum :initform 10 :documentation "Largest allowed value") (direction :initarg :direction :initform 'horizontal :documentation "Direction to draw the scale") (end-buttons :initarg :end-buttons :initform nil :documentation "t means to create two buttons to inc/dec the scale value") (trough-face :initarg :trough-face :initform nil :documentation "Face used when rendering the trough of a scale widget") (trough-chars :initarg :trough-chars :initform [ ?- ?| ] :documentation "Characters used when drawing the trough, the vector is of the form [ horizontal-char vertical-char ]") (thumb :initarg :thumb :initform "#" :documentation "Character used to draw the value thumb button indicator") ) "Class of scale. 
A scale is merely a thumb marker displaying the current value of some number graphically across some random number of text characters.") (defclass widget-scrollbar (widget-scale) ((end-buttons :initarg :end-buttons :initform t) (range :initarg :range :initform 10 :documentation "Range of currently viewable area (Not used)")) "Class for a scrollbar. A scrollbar also will have a visual range where the thumbtack changes size based on RANGE.") ;; ;; Text types ;; (defclass widget-text-field (widget-square) ((handle-io :initarg :handle-io :initform t) (handle-motion :initform 'traditional) (height :initform 1) (face :initarg :face :initform widget-text-face :protection private) (spface :initarg :spface :initform widget-text-button-face :documentation "Face used on text buttons which appear to the left and right of the editable text. They indicate unseen text to the left or right of the field." :protection private) (focus-face :initarg :focus-face :initform widget-text-focus-face :protection private) (keymap :initarg :keymap :initform nil :documentation "Keymap used to interpret text. By default, the global map is used when this value is nil. Otherwise, additional mode-specific keymaps could be substituted to lend additional behaviors.") (display-column :initarg :display-column :initform 0 :documentation "Current horizontal position in a text buffer where the display starts") (display-row :initarg :display-row :initform 0 :documentation "Current vertical position in a text buffer where the display starts") (display-num-rows :initarg :display-num-rows :initform nil :documentation "The number of rows of text displayed in this text widget. This is different from the number of rows displayed as some are clipped.") ; This isn't used, but may be useful in the future. 
; (keycache :initform nil ; :documentation "Collecting keypresses for multi keystroke keys.") (value :initarg :value :initform nil :documentation "A `data-object' representing the string we are editing.") ) "Class for a text field widget. This will accept user-typed text which is no more than 1 line high. Extra text will not be printed, but characters on either side of the field will display `<' or `>' to indicate that there is more to see outside of the visible part.") (defclass widget-text-box (widget-text-field) ((height :initform nil) (boxed :initform t) (face :initform nil :protection private) (focus-face :initform nil :protection private)) "A text box is a multi-line `widget-text-field'. It specifies differing features needed to make a multi-line text box look better.") (run-hooks 'widget-d-load-hooks) ;;; end of lisp (provide 'widget-d)
{ "pile_set_name": "Github" }
/* Copyright The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // Code generated by client-gen. DO NOT EDIT. package v1beta1 import ( "time" v1beta1 "k8s.io/api/extensions/v1beta1" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" scheme "k8s.io/client-go/kubernetes/scheme" rest "k8s.io/client-go/rest" ) // DeploymentsGetter has a method to return a DeploymentInterface. // A group's client should implement this interface. type DeploymentsGetter interface { Deployments(namespace string) DeploymentInterface } // DeploymentInterface has methods to work with Deployment resources. 
type DeploymentInterface interface { Create(*v1beta1.Deployment) (*v1beta1.Deployment, error) Update(*v1beta1.Deployment) (*v1beta1.Deployment, error) UpdateStatus(*v1beta1.Deployment) (*v1beta1.Deployment, error) Delete(name string, options *v1.DeleteOptions) error DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error Get(name string, options v1.GetOptions) (*v1beta1.Deployment, error) List(opts v1.ListOptions) (*v1beta1.DeploymentList, error) Watch(opts v1.ListOptions) (watch.Interface, error) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1beta1.Deployment, err error) GetScale(deploymentName string, options v1.GetOptions) (*v1beta1.Scale, error) UpdateScale(deploymentName string, scale *v1beta1.Scale) (*v1beta1.Scale, error) DeploymentExpansion } // deployments implements DeploymentInterface type deployments struct { client rest.Interface ns string } // newDeployments returns a Deployments func newDeployments(c *ExtensionsV1beta1Client, namespace string) *deployments { return &deployments{ client: c.RESTClient(), ns: namespace, } } // Get takes name of the deployment, and returns the corresponding deployment object, and an error if there is any. func (c *deployments) Get(name string, options v1.GetOptions) (result *v1beta1.Deployment, err error) { result = &v1beta1.Deployment{} err = c.client.Get(). Namespace(c.ns). Resource("deployments"). Name(name). VersionedParams(&options, scheme.ParameterCodec). Do(). Into(result) return } // List takes label and field selectors, and returns the list of Deployments that match those selectors. func (c *deployments) List(opts v1.ListOptions) (result *v1beta1.DeploymentList, err error) { var timeout time.Duration if opts.TimeoutSeconds != nil { timeout = time.Duration(*opts.TimeoutSeconds) * time.Second } result = &v1beta1.DeploymentList{} err = c.client.Get(). Namespace(c.ns). Resource("deployments"). VersionedParams(&opts, scheme.ParameterCodec). 
Timeout(timeout). Do(). Into(result) return } // Watch returns a watch.Interface that watches the requested deployments. func (c *deployments) Watch(opts v1.ListOptions) (watch.Interface, error) { var timeout time.Duration if opts.TimeoutSeconds != nil { timeout = time.Duration(*opts.TimeoutSeconds) * time.Second } opts.Watch = true return c.client.Get(). Namespace(c.ns). Resource("deployments"). VersionedParams(&opts, scheme.ParameterCodec). Timeout(timeout). Watch() } // Create takes the representation of a deployment and creates it. Returns the server's representation of the deployment, and an error, if there is any. func (c *deployments) Create(deployment *v1beta1.Deployment) (result *v1beta1.Deployment, err error) { result = &v1beta1.Deployment{} err = c.client.Post(). Namespace(c.ns). Resource("deployments"). Body(deployment). Do(). Into(result) return } // Update takes the representation of a deployment and updates it. Returns the server's representation of the deployment, and an error, if there is any. func (c *deployments) Update(deployment *v1beta1.Deployment) (result *v1beta1.Deployment, err error) { result = &v1beta1.Deployment{} err = c.client.Put(). Namespace(c.ns). Resource("deployments"). Name(deployment.Name). Body(deployment). Do(). Into(result) return } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). func (c *deployments) UpdateStatus(deployment *v1beta1.Deployment) (result *v1beta1.Deployment, err error) { result = &v1beta1.Deployment{} err = c.client.Put(). Namespace(c.ns). Resource("deployments"). Name(deployment.Name). SubResource("status"). Body(deployment). Do(). Into(result) return } // Delete takes name of the deployment and deletes it. Returns an error if one occurs. func (c *deployments) Delete(name string, options *v1.DeleteOptions) error { return c.client.Delete(). Namespace(c.ns). Resource("deployments"). Name(name). 
Body(options). Do(). Error() } // DeleteCollection deletes a collection of objects. func (c *deployments) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error { var timeout time.Duration if listOptions.TimeoutSeconds != nil { timeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second } return c.client.Delete(). Namespace(c.ns). Resource("deployments"). VersionedParams(&listOptions, scheme.ParameterCodec). Timeout(timeout). Body(options). Do(). Error() } // Patch applies the patch and returns the patched deployment. func (c *deployments) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1beta1.Deployment, err error) { result = &v1beta1.Deployment{} err = c.client.Patch(pt). Namespace(c.ns). Resource("deployments"). SubResource(subresources...). Name(name). Body(data). Do(). Into(result) return } // GetScale takes name of the deployment, and returns the corresponding v1beta1.Scale object, and an error if there is any. func (c *deployments) GetScale(deploymentName string, options v1.GetOptions) (result *v1beta1.Scale, err error) { result = &v1beta1.Scale{} err = c.client.Get(). Namespace(c.ns). Resource("deployments"). Name(deploymentName). SubResource("scale"). VersionedParams(&options, scheme.ParameterCodec). Do(). Into(result) return } // UpdateScale takes the top resource name and the representation of a scale and updates it. Returns the server's representation of the scale, and an error, if there is any. func (c *deployments) UpdateScale(deploymentName string, scale *v1beta1.Scale) (result *v1beta1.Scale, err error) { result = &v1beta1.Scale{} err = c.client.Put(). Namespace(c.ns). Resource("deployments"). Name(deploymentName). SubResource("scale"). Body(scale). Do(). Into(result) return }
{ "pile_set_name": "Github" }
/*
 * COPYRIGHT (c) 1989-2012.
 * On-Line Applications Research Corporation (OAR).
 *
 * The license and distribution terms for this file may be
 * found in the file LICENSE in this distribution or at
 * http://www.rtems.org/license/LICENSE.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <tmacros.h>
#include <termios.h>
#include <rtems/termiostypes.h>
#include <rtems/dumpbuf.h>

/* forward declarations to avoid warnings */
void ppp_test_driver_set_rx(const char *expected, size_t len);
void pppasyncattach(void);
int pppopen(struct rtems_termios_tty *tty);
int pppclose(struct rtems_termios_tty *tty);
int pppread(struct rtems_termios_tty *tty, rtems_libio_rw_args_t *rw_args);
int pppwrite(struct rtems_termios_tty *tty, rtems_libio_rw_args_t *rw_args);
int pppioctl(struct rtems_termios_tty *tty, rtems_libio_ioctl_args_t *args);
int pppinput(int c, struct rtems_termios_tty *tty);
int pppstart(struct rtems_termios_tty *tp);

/*
 * Define the PPP line discipline.
 *
 * This is a stub line-discipline table used by the test: each slot points
 * at one of the trace functions below so the test can observe which
 * termios entry points get invoked.
 */
static struct rtems_termios_linesw pppdisc = {
  pppopen, pppclose, pppread, pppwrite,
  pppinput, pppstart, pppioctl, NULL
};

/*
 * Buffer the test expects the driver to "receive" next.  Set via
 * ppp_test_driver_set_rx() and consumed (then cleared) by pppread().
 */
const char *RXExpected;
size_t RXLength;

/*
 * Record the byte sequence that the next pppread() call should enqueue
 * as received characters.  EXPECTED must stay valid until pppread()
 * consumes it; LEN is its length in bytes.
 */
void ppp_test_driver_set_rx( const char *expected, size_t len )
{
  RXExpected = expected;
  RXLength = len;
}

/*
 * Install the stub PPP line discipline into the global termios
 * line-discipline table under the PPPDISC slot.
 */
void pppasyncattach(void)
{
  rtems_termios_linesw[PPPDISC] = pppdisc;
}

/* Line-discipline open hook: trace only, always succeeds. */
int pppopen(struct rtems_termios_tty *tty)
{
  puts( "pppopen called" );
  return 0;
}

/* Line-discipline close hook: trace only, always succeeds. */
int pppclose(struct rtems_termios_tty *tty)
{
  puts( "pppclose called" );
  return 0;
}

/*
 * Line-discipline read hook: replay the pending RX buffer (set by
 * ppp_test_driver_set_rx) into the tty's raw input queue, then clear
 * the pending buffer so it is delivered only once.
 */
int pppread(struct rtems_termios_tty *tty, rtems_libio_rw_args_t *rw_args)
{
  puts( "pppread called" );

  /* cast drops const: rtems_termios_enqueue_raw_characters takes char* */
  rtems_termios_enqueue_raw_characters( tty, (char *)RXExpected, RXLength );
  RXExpected = NULL;
  RXLength = 0;
  return 0;
}

/*
 * Line-discipline write hook: dump the outgoing bytes for the test log,
 * report them all as moved, and ack one character back to termios so the
 * transmit state machine advances.
 */
int pppwrite(struct rtems_termios_tty *tty, rtems_libio_rw_args_t *rw_args)
{
  int   maximum     = rw_args->count;
  char *out_buffer  = rw_args->buffer;

  printf( "pppwrite called - %d bytes\n", maximum );
  rtems_print_buffer( (unsigned char *) out_buffer, maximum );
  rw_args->bytes_moved = maximum;
  rtems_termios_dequeue_characters( tty, 1 );
  return 0;
}

/* Line-discipline ioctl hook: trace only, always succeeds. */
int pppioctl(struct rtems_termios_tty *tty, rtems_libio_ioctl_args_t *args)
{
  puts( "pppioctl called" );
  return 0;
}

/* Line-discipline per-character input hook: trace the character only. */
int pppinput(int c, struct rtems_termios_tty *tty)
{
  printf( "pppinput called - with (%c)\n", c );
  return 0;
}

/* Line-discipline transmit-start hook: trace only. */
int pppstart(struct rtems_termios_tty *tp)
{
  puts( "pppstart called" );
  return 0;
}
{ "pile_set_name": "Github" }
 ##权限表 INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (1, '用户管理', null, '0', '1',''); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (2, '新增修改', '/user-post', '1', '2','upost'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (3, '列表查询', '/user-get', '1', '2','uget'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (4, '删除用户', '/user/{id}-delete', '1', '2','udelete'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (5, '角色管理', null, '0', '1',''); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (6, '新增修改', '/role-post', '5', '2','rpost'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (7, '列表查询', '/role-get', '5', '2','rget'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (8, '删除角色', '/role/{id}-delete', '5', '2','rdelete'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (9, '角色授权', '/role-authorization-post', '5', '2','rauth'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (10, '权限管理', null, '0', '1',''); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (11, '新增修改', '/permission-post', '10', '2','ppost'); 
INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (12, '列表查询', '/permission-get', '10', '2','pget'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (13, '删除权限', '/permission/{id}-delete', '10', '2','pdelete'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (14, '预览权限', '/permission-view-get', '10', '2','pview'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (15, '菜单', null, '0', '1',''); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (16, '标签取数','/tag-index','15','2','tindex'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (17,'模板取数','/tag-fetch-data','15','2','tftemplet'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (18, '客户群','/tag-cluster','15','2','tcluster'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (19, '常规标签配置','/tag-simple','15','2','tsconfig'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (20, '标签工厂','/tag-factory','15','2','tfconfig'); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (21, 'tag服务权限控制',null,'0','1',''); INSERT INTO ida_permission(permission_id, permission_name, permission_url, parent_permission_id, permission_lv, permission_auth) VALUES (22, 
'通过id获取用户信息','/api-tag/tag/getUserById','21','2','tag:get'); ##角色表 INSERT INTO ida_role(role_id, role_name) VALUES (1, '用户管理员'); INSERT INTO ida_role(role_id, role_name) VALUES (2, '角色管理员'); INSERT INTO ida_role(role_id, role_name) VALUES (3, '权限管理员'); INSERT INTO ida_role(role_id, role_name) VALUES (4, '超级管理员'); ##用户表 INSERT INTO ida_user(user_id, user_name, user_role_names, user_account, user_password) VALUES (1, '许耀辉', '超级管理员','admin', '123456'); INSERT INTO ida_user(user_id, user_name, user_role_names, user_account, user_password) VALUES (2, '用户管理员', '用户管理员', 'user', '123456'); ##用户角色表 INSERT INTO ida_user_role(user_id, role_id) VALUES (1,4); INSERT INTO ida_user_role(user_id, role_id) VALUES (2,1); ##角色权限表 INSERT INTO ida_role_permission(role_id, permission_id) VALUES (4,2); INSERT INTO ida_role_permission(role_id, permission_id) VALUES (4,3); INSERT INTO ida_role_permission(role_id, permission_id) VALUES (4,4); INSERT INTO ida_role_permission(role_id, permission_id) VALUES (4,6); INSERT INTO ida_role_permission(role_id, permission_id) VALUES (4,7); INSERT INTO ida_role_permission(role_id, permission_id) VALUES (4,8); INSERT INTO ida_role_permission(role_id, permission_id) VALUES (4,9); INSERT INTO ida_role_permission(role_id, permission_id) VALUES (4,11); INSERT INTO ida_role_permission(role_id, permission_id) VALUES (4,12); INSERT INTO ida_role_permission(role_id, permission_id) VALUES (4,13); INSERT INTO ida_role_permission(role_id, permission_id) VALUES (4,14); INSERT INTO ida_role_permission(role_id, permission_id) VALUES (1,1); INSERT INTO ida_role_permission(role_id, permission_id) VALUES (1,2);
{ "pile_set_name": "Github" }
module Rack
  module OAuth2
    class Server

      # Base class for all OAuth errors. These map to error codes in the spec.
      #
      # Each subclass fixes a symbolic error code (exposed via #code) and a
      # human-readable message (via StandardError#message).
      class OAuthError < StandardError
        # code    -- spec error code; stored as a Symbol.
        # message -- human-readable description, passed to StandardError.
        def initialize(code, message)
          super message
          @code = code.to_sym
        end

        # The OAuth error code.
        attr_reader :code
      end

      # The end-user or authorization server denied the request.
      class AccessDeniedError < OAuthError
        def initialize
          # Bug fix: message previously read "You are now allowed ..."
          # ("now" was a typo for "not"; compare UnauthorizedClientError).
          super :access_denied, "You are not allowed to access this resource."
        end
      end

      # Access token expired, client expected to request new one using refresh
      # token.
      class ExpiredTokenError < OAuthError
        def initialize
          super :expired_token, "The access token has expired."
        end
      end

      # The client identifier provided is invalid, the client failed to
      # authenticate, the client did not include its credentials, provided
      # multiple client credentials, or used unsupported credentials type.
      class InvalidClientError < OAuthError
        def initialize
          super :invalid_client, "Client ID and client secret do not match."
        end
      end

      # The provided access grant is invalid, expired, or revoked (e.g. invalid
      # assertion, expired authorization token, bad end-user password credentials,
      # or mismatching authorization code and redirection URI).
      class InvalidGrantError < OAuthError
        # message -- optional override for the default description.
        def initialize(message = nil)
          super :invalid_grant, message || "This access grant is no longer valid."
        end
      end

      # Invalid_request, the request is missing a required parameter, includes an
      # unsupported parameter or parameter value, repeats the same parameter, uses
      # more than one method for including an access token, or is otherwise
      # malformed.
      class InvalidRequestError < OAuthError
        # message -- description of what is wrong with the request; a default
        # is substituted when nil is passed.
        def initialize(message)
          super :invalid_request, message || "The request has the wrong parameters."
        end
      end

      # The requested scope is invalid, unknown, or malformed.
      class InvalidScopeError < OAuthError
        def initialize
          super :invalid_scope, "The requested scope is not supported."
        end
      end

      # Access token expired, client cannot refresh and needs new authorization.
      class InvalidTokenError < OAuthError
        def initialize
          super :invalid_token, "The access token is no longer valid."
        end
      end

      # The redirection URI provided does not match a pre-registered value.
      class RedirectUriMismatchError < OAuthError
        def initialize
          super :redirect_uri_mismatch, "Must use the same redirect URI you registered with us."
        end
      end

      # The authenticated client is not authorized to use the access grant type provided.
      class UnauthorizedClientError < OAuthError
        def initialize
          super :unauthorized_client, "You are not allowed to access this resource."
        end
      end

      # This access grant type is not supported by this server.
      class UnsupportedGrantType < OAuthError
        def initialize
          super :unsupported_grant_type, "This access grant type is not supported by this server."
        end
      end

      # The requested response type is not supported by the authorization server.
      class UnsupportedResponseTypeError < OAuthError
        def initialize
          super :unsupported_response_type, "The requested response type is not supported."
        end
      end

    end
  end
end
{ "pile_set_name": "Github" }
## Samples The SDK comes with various samples that demonstrate how various SDK features can be used. A list of the included samples is below. For all of the samples listed, a target is generated when the build process described in the README.md file is followed. Specific targets are mentioned along with the sample description. The "Basic Subscribe Publish" and "Shadow Delta" sample can also be used with any of the provided reference wrappers by specifying the corresponding argument in the CMake call: * OpenSSL (Default if no argument is provided)- `cmake <path_to_sdk>` OR `cmake <path_to_sdk> -DNETWORK_LIBRARY=OpenSSL` * MbedTLS - `cmake <path_to_sdk> -DNETWORK_LIBRARY=MbedTLS` * WebSocket - `cmake <path_to_sdk> -DNETWORK_LIBRARY=WebSocket` Place the IoT certs for your thing in the `BASE_SDK_DIRECTORY/certs/` folder and modify the SampleConfig.json in the `BASE_SDK_DIRECTORY/common/` with your thing-specific values. ### Basic Subscribe Publish This is a basic sample that demonstrates how MQTT Subscribe and Publish operations can be performed using the default MQTT Client. * Code for this sample is located [here](./PubSub) * Target for this sample is `pub-sub-sample` ### Shadow Delta This sample demonstrates how various Shadow operations can be performed. * Code for this sample is located [here](./ShadowDelta) * Target for this sample is `shadow-delta-sample` Note: The shadow client token is set as the thing name by default in the sample. The shadow client token is limited to 64 bytes and will return an error if a token longer than 64 bytes is used (`"code":400,"message":"invalid client token"`, although receiving a 400 does not necessarily mean that it is due to the length of the client token). Modify the code [here](../ShadowDelta/ShadowDelta.cpp#L184) if your thing name is longer than 64 bytes to prevent this error. 
### Jobs Sample This sample demonstrates how various Jobs API operations can be performed including subscribing to Jobs notifications and publishing Job execution updates. * Code for this sample is located [here](./Jobs) * Target for this sample is `jobs-sample` ### Jobs Agent Sample This sample is a full featured example of a Jobs Agent that uses Jobs API operations to perform a variety of management tasks such as installing additional files/programs, rebooting a device, and collecting device status information. It can be run on a device as-is or it can be modified to suit specific use cases. Example job documents are provided below. For more information see the AWS IoT connected device management documentation [here](https://aws.amazon.com/iot-device-management/). * Code for this sample is located [here](./JobsAgent) * Target for this sample is `jobs-agent` #### Using the jobs-agent ##### systemStatus operation The jobs-agent will respond to the AWS IoT jobs management platform with system status information when it receives a job execution notification with a job document that looks like this: ``` { "operation": "systemStatus" } ``` ##### reboot operation When the jobs-agent receives a reboot job document it will attempt to reboot the device it is running on while sending updates on its progress to the AWS IoT jobs management platform. After the reboot the job execution status will be marked as IN_PROGRESS until the jobs-agent is also restarted at which point the status will be updated to SUCCESS. To avoid manual steps during reboot it is suggested that device be configured to automatically start the jobs-agent at device startup time. Job document format: ``` { "operation": "reboot" } ``` ##### shutdown operation When the jobs-agent receives a shutdown job document it will attempt to shutdown the device. 
``` { "operation": "shutdown" } ``` ##### install operation When the jobs-agent receives an install job document it will attempt to install the files specified in the job document. An install job document should follow this general format. ``` { "operation": "install", "packageName": "uniquePackageName", "workingDirectory": ".", "launchCommand": "program-name program-arguments", "autoStart": "true", "files": [ { "fileName": "program-name", "fileVersion": "1.0.2.10", "fileSource": { "url": "https://some-bucket.s3.amazonaws.com/program-name" }, "checksum": { "inline": { "value": "9569257356cfc5c7b2b849e5f58b5d287f183e08627743498d9bd52801a2fbe4" }, "hashAlgorithm": "SHA256" } }, { "fileName": "config.json", "fileSource": { "url": "https://some-bucket.s3.amazonaws.com/config.json" } } ] } ``` * `packageName`: Each install operation must have a unique package name. If the packageName matches a previous install operation then the new install operation overwrites the previous one. * `workingDirectory`: Optional property for working directory * `launchCommand`: Optional property for launching an application/package. If omitted copy files only. * `autoStart`: If set to true then agent will execute launch command when agent starts up. * `files`: Specifies files to be installed * `fileName`: Name of file as written to file system * `fileSource.url`: Location of file to be downloaded from * `checksum`: Optional file checksum (currently ignored) * `inline.value`: Checksum value * `hashAlgorithm`: Checksum hash algorithm used ##### start operation When the jobs-agent receives a start job document it will attempt to startup the specified package. ``` { "operation": "start", "packageName": "somePackageName" } ``` ##### stop operation When the jobs-agent receives a stop job document it will attempt to stop the specified package. 
``` { "operation": "stop", "packageName": "somePackageName" } ``` ##### restart operation When the jobs-agent receives a restart job document it will attempt to restart the specified package. ``` { "operation": "restart", "packageName": "somePackageName" } ``` ### Discovery Sample This sample demonstrates how the discovery operation can be performed to get the connectivity information to connect to a Greengrass Core (GGC). The configuration for this example is slightly different as the Discovery operation is a HTTP call, and uses port 8443, instead of port 8883 which is used for MQTT operations. The endpoint is the same IoT host endpoint used to connect the IoT thing to the cloud. More information about the Discovery process is located [here](../GreengrassDiscovery.md) The sample uses the DiscoveryResponse parser class to convert the Discovery Response JSON into a vector of connectivity information. It then iterates through the vector and tries to connect to one of the GGCs using the port, endpoint and root CA for that group. On successfully connecting to a core, it proceeds to subscribe and publish messages to the GGC. To ensure that the messages get sent back to the device, the GGC has to have this device authorized and the specific route for this device programmed into the GGC during deployment. * Code for this sample is located [here](./Discovery) * Target for this sample is `discovery-sample` ### StorySwitch This sample demonstrates how the SDK can be used to create a device which uses Greengrass. This sample should be used in conjunction with the StoryRobotArm sample and the Greengrass storyline. In this sample the switch is a device in a simple storyline. The storyline involves a switch (this device) controlling a robot arm through a Greengrass Core (GGC). The switch device will use the Discovery process to find the connectivity information of the GGC associated with its group and use the information to connect to the GGC. 
Once connected to the GGC the switch will accept input from the user: "1" (on), "0" (off) or "q" (quit). Pressing "1" will publish a desired state of "on" to the robot arm shadows, thereby telling the robot arm to turn on. Pressing "0" will publish a desired state of "off" to the robot arm shadows, thereby telling the robot arm to turn off. Pressing "q" will quit the program. Please see the documentation for Greengrass service in order to experience the full storyline. More information about the Discovery process is located [here](../GreengrassDiscovery.md) * Code for this sample is located [here](./StorySwitch) * Target for this sample is `switch-sample` ### StoryRobotArm This sample demonstrates how the SDK can be used to create a device which uses Greengrass. This sample should be used in conjunction with the StorySwitch sample and the Greengrass storyline. In this sample the robot arm is a device in a simple storyline. The storyline involves a switch controlling a robot arm (this device) through a Greengrass Core (GGC). The robot arm device will use the Discovery process to find the connectivity information of the GGC associated with its group and use the information to connect to the GGC. Once connected the robot arm will wait for changes on its shadows to know whether to turn "on" or "off". If the shadows report a delta, indicating that there is a difference between reported and desired states, the robot arm will check the delta and take the appropriate action to turn "on" or "off". Please see the documentation for Greengrass service in order to experience the full storyline. More information about the Discovery process is located [here](../GreengrassDiscovery.md) * Code for this sample is located [here](./StoryRobotArm) * Target for this sample is `robot-arm-sample` For further information about the provided MQTT and Shadow Classes, please refer to the [Development Guide](../DevGuide.md)
{ "pile_set_name": "Github" }
/*========================================================================= * * Copyright NumFOCUS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0.txt * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *=========================================================================*/ #ifndef itkRigid3DTransform_hxx #define itkRigid3DTransform_hxx #include "itkRigid3DTransform.h" namespace itk { // Constructor with default arguments template <typename TParametersValueType> Rigid3DTransform<TParametersValueType>::Rigid3DTransform() : Superclass(ParametersDimension) {} // Constructor with default arguments template <typename TParametersValueType> Rigid3DTransform<TParametersValueType>::Rigid3DTransform(unsigned int paramDim) : Superclass(paramDim) {} // Constructor with default arguments template <typename TParametersValueType> Rigid3DTransform<TParametersValueType>::Rigid3DTransform(const MatrixType & matrix, const OutputVectorType & offset) : Superclass(matrix, offset) {} // Print self template <typename TParametersValueType> void Rigid3DTransform<TParametersValueType>::PrintSelf(std::ostream & os, Indent indent) const { Superclass::PrintSelf(os, indent); } // Check if input matrix is orthogonal to within tolerance template <typename TParametersValueType> bool Rigid3DTransform<TParametersValueType>::MatrixIsOrthogonal(const MatrixType & matrix, const TParametersValueType tolerance) { typename MatrixType::InternalMatrixType test = matrix.GetVnlMatrix() * matrix.GetTranspose(); if (!test.is_identity(tolerance)) { return false; } return 
true; } // Directly set the rotation matrix template <typename TParametersValueType> void Rigid3DTransform<TParametersValueType>::SetMatrix(const MatrixType & matrix) { const TParametersValueType tolerance = MatrixOrthogonalityTolerance<TParametersValueType>::GetTolerance(); this->SetMatrix(matrix, tolerance); } template <typename TParametersValueType> void Rigid3DTransform<TParametersValueType>::SetMatrix(const MatrixType & matrix, const TParametersValueType tolerance) { if (!this->MatrixIsOrthogonal(matrix, tolerance)) { itkExceptionMacro(<< "Attempting to set a non-orthogonal rotation matrix"); } this->Superclass::SetMatrix(matrix); } // Set optimizable parameters from array template <typename TParametersValueType> void Rigid3DTransform<TParametersValueType>::SetParameters(const ParametersType & parameters) { // Save parameters. Needed for proper operation of TransformUpdateParameters. if (&parameters != &(this->m_Parameters)) { this->m_Parameters = parameters; } unsigned int par = 0; MatrixType matrix; OutputVectorType translation; for (unsigned int row = 0; row < 3; row++) { for (unsigned int col = 0; col < 3; col++) { matrix[row][col] = this->m_Parameters[par]; ++par; } } for (unsigned int dim = 0; dim < 3; dim++) { translation[dim] = this->m_Parameters[par]; ++par; } const TParametersValueType tolerance = MatrixOrthogonalityTolerance<TParametersValueType>::GetTolerance(); if (!this->MatrixIsOrthogonal(matrix, tolerance)) { itkExceptionMacro(<< "Attempting to set a non-orthogonal rotation matrix"); } this->SetVarMatrix(matrix); this->SetVarTranslation(translation); // Update matrix and offset. // Technically ComputeMatrix() is not require as the parameters are // directly the elements of the matrix. 
this->ComputeMatrix(); this->ComputeOffset(); this->Modified(); } // Compose with a translation template <typename TParametersValueType> void Rigid3DTransform<TParametersValueType>::Translate(const OffsetType & offset, bool) { OutputVectorType newOffset = this->GetOffset(); newOffset += offset; this->SetOffset(newOffset); this->ComputeTranslation(); } } // namespace itk #endif
{ "pile_set_name": "Github" }
package com.forgeessentials.multiworld.gen; import java.util.ArrayList; import java.util.List; import java.util.Random; import net.minecraft.init.Biomes; import net.minecraft.util.WeightedRandom; import net.minecraft.world.biome.Biome; import net.minecraft.world.gen.layer.GenLayer; import net.minecraft.world.gen.layer.IntCache; import net.minecraftforge.common.BiomeManager; import net.minecraftforge.common.BiomeManager.BiomeEntry; import com.forgeessentials.multiworld.WorldServerMultiworld; import com.google.common.collect.ImmutableList; /** * * @author Olee */ public class GenLayerMultiworldBiome extends GenLayer { protected Random random = new Random(); @SuppressWarnings("unchecked") protected List<BiomeEntry>[] biomes = new ArrayList[BiomeManager.BiomeType.values().length]; public GenLayerMultiworldBiome(long seed, GenLayer parent, WorldServerMultiworld currentMultiworld) { super(seed); this.parent = parent; for (BiomeManager.BiomeType type : BiomeManager.BiomeType.values()) { ImmutableList<BiomeEntry> biomesToAdd = net.minecraftforge.common.BiomeManager.getBiomes(type); int idx = type.ordinal(); if (biomes[idx] == null) biomes[idx] = new java.util.ArrayList<BiomeManager.BiomeEntry>(); if (biomesToAdd != null) biomes[idx].addAll(biomesToAdd); } biomes[BiomeManager.BiomeType.DESERT.ordinal()].add(new BiomeEntry(Biomes.PLAINS, 10)); biomes[BiomeManager.BiomeType.WARM.ordinal()].add(new BiomeEntry(Biomes.PLAINS, 10)); biomes[BiomeManager.BiomeType.COOL.ordinal()].add(new BiomeEntry(Biomes.PLAINS, 10)); biomes[BiomeManager.BiomeType.ICY.ordinal()].add(new BiomeEntry(Biomes.PLAINS, 10)); } /** * Returns a list of integer values generated by this layer. These may be interpreted as temperatures, rainfall * amounts, or biomeList[] indices based on the particular GenLayer subclass. 
*/ @Override public int[] getInts(int p_75904_1_, int p_75904_2_, int p_75904_3_, int p_75904_4_) { int[] aint = this.parent.getInts(p_75904_1_, p_75904_2_, p_75904_3_, p_75904_4_); int[] aint1 = IntCache.getIntCache(p_75904_3_ * p_75904_4_); for (int i1 = 0; i1 < p_75904_4_; ++i1) { for (int j1 = 0; j1 < p_75904_3_; ++j1) { this.initChunkSeed(j1 + p_75904_1_, i1 + p_75904_2_); int k1 = aint[j1 + i1 * p_75904_3_]; int l1 = (k1 & 3840) >> 8; k1 &= -3841; if (isBiomeOceanic(k1)) { aint1[j1 + i1 * p_75904_3_] = k1; } else if (k1 == Biome.REGISTRY.getIDForObject(Biomes.MUSHROOM_ISLAND)) { aint1[j1 + i1 * p_75904_3_] = k1; } else if (k1 == 1) { if (l1 > 0) { if (this.nextInt(3) == 0) { aint1[j1 + i1 * p_75904_3_] = Biome.REGISTRY.getIDForObject(Biomes.MESA_ROCK); } else { aint1[j1 + i1 * p_75904_3_] = Biome.REGISTRY.getIDForObject(Biomes.MESA_CLEAR_ROCK); } } else { aint1[j1 + i1 * p_75904_3_] = Biome.REGISTRY.getIDForObject(getWeightedBiomeEntry(BiomeManager.BiomeType.DESERT).biome); } } else if (k1 == 2) { if (l1 > 0) { aint1[j1 + i1 * p_75904_3_] = Biome.REGISTRY.getIDForObject(Biomes.JUNGLE); } else { aint1[j1 + i1 * p_75904_3_] = Biome.REGISTRY.getIDForObject(getWeightedBiomeEntry(BiomeManager.BiomeType.WARM).biome); } } else if (k1 == 3) { if (l1 > 0) { aint1[j1 + i1 * p_75904_3_] = Biome.REGISTRY.getIDForObject(Biomes.COLD_TAIGA); } else { aint1[j1 + i1 * p_75904_3_] = Biome.REGISTRY.getIDForObject(getWeightedBiomeEntry(BiomeManager.BiomeType.COOL).biome); } } else if (k1 == 4) { aint1[j1 + i1 * p_75904_3_] = Biome.REGISTRY.getIDForObject(getWeightedBiomeEntry(BiomeManager.BiomeType.ICY).biome); } else { aint1[j1 + i1 * p_75904_3_] = Biome.REGISTRY.getIDForObject(Biomes.MUSHROOM_ISLAND); } } } return aint1; } protected BiomeEntry getWeightedBiomeEntry(BiomeManager.BiomeType type) { List<BiomeEntry> biomeList = biomes[type.ordinal()]; int totalWeight = WeightedRandom.getTotalWeight(biomeList); int rand = nextInt(totalWeight / 10) * 10; int weight = rand + 
(BiomeManager.isTypeListModded(type) ? nextInt(Math.min(10, totalWeight - rand)) : 0); return (BiomeEntry) WeightedRandom.getRandomItem(random, biomeList, weight); } }
{ "pile_set_name": "Github" }
# --------------------------------------------------------------------------- BCC32=bcc32 CPP32=cpp32 !if !$d(BCB) BCB = $(MAKEDIR)\.. !endif # --------------------------------------------------------------------------- # IDE SECTION # --------------------------------------------------------------------------- # The following section of the project makefile is managed by the BCB IDE. # It is recommended to use the IDE to change any of the values in this # section. # --------------------------------------------------------------------------- # --------------------------------------------------------------------------- PROJECT = eh_test.exe OBJFILES = TestClass.obj \ nc_alloc.obj \ random_number.obj \ test_algo.obj \ test_algobase.obj \ test_bit_vector.obj \ test_bitset.obj \ test_deque.obj \ test_hash_map.obj \ test_hash_set.obj \ test_list.obj \ test_map.obj \ test_rope.obj \ test_set.obj \ test_slist.obj \ test_string.obj \ test_valarray.obj \ test_vector.obj main.obj # --------------------------------------------------------------------------- PATHCPP = .; PATHPAS = .; PATHASM = .; PATHRC = .; # USERDEFINES = _STLP_NO_OWN_IOSTREAMS USERDEFINES = _DEBUG SYSDEFINES = _RTLDLL;NO_STRICT;USEPACKAGES # SYSDEFINES = NO_STRICT;USEPACKAGES # --------------------------------------------------------------------------- CFLAG1 = -w- -jb -j1 -I.;..\..\stlport;$(BCB)\include; -Od -v -N -x -xp -tWC -D$(SYSDEFINES);$(USERDEFINES) LDFLAGS = -L..\..\lib;$(BCB)\..\lib cw32i.lib stlp.4.5.lib .autodepend # --------------------------------------------------------------------------- all : $(PROJECT) cd ..\..\lib ..\test\eh\eh_test.exe -s 100 $(PROJECT) : $(OBJFILES) $(BCC32) -e$(PROJECT) $(CFLAG1) $(LDFLAGS) $(OBJFILES) clean: del *.obj *.exe *.core *.tds # --------------------------------------------------------------------------- .cpp.obj: $(BCC32) $(CFLAG1) -n$(@D) -c $< .cpp.exe: $(BCC32) $(CFLAG1) $(LDFLAGS) -n$(@D) $< .cpp.i: $(CPP32) $(CFLAG1) -n. 
-Sr -Ss -Sd {$< } # ---------------------------------------------------------------------------
{ "pile_set_name": "Github" }
// // version.hpp // ~~~~~~~~~~~ // // Copyright (c) 2003-2018 Christopher M. Kohlhoff (chris at kohlhoff dot com) // // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // #ifndef ASIO_VERSION_HPP #define ASIO_VERSION_HPP #if defined(_MSC_VER) && (_MSC_VER >= 1200) # pragma once #endif // defined(_MSC_VER) && (_MSC_VER >= 1200) // ASIO_VERSION % 100 is the sub-minor version // ASIO_VERSION / 100 % 1000 is the minor version // ASIO_VERSION / 100000 is the major version #define ASIO_VERSION 101201 // 1.12.1 #endif // ASIO_VERSION_HPP
{ "pile_set_name": "Github" }
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef TOOLS_ANDROID_COMMON_DAEMON_H_ #define TOOLS_ANDROID_COMMON_DAEMON_H_ namespace base { class CommandLine; } namespace tools { bool HasHelpSwitch(const base::CommandLine& command_line); bool HasNoSpawnDaemonSwitch(const base::CommandLine& command_line); void ShowHelp(const char* program, const char* extra_title, const char* extra_descriptions); // Spawns a daemon process and exits the current process with exit_status. // Any code executed after this function returns will be executed in the // spawned daemon process. void SpawnDaemon(int exit_status); } // namespace tools #endif // TOOLS_ANDROID_COMMON_DAEMON_H_
{ "pile_set_name": "Github" }
package linkedql import ( "encoding/json" "fmt" "github.com/cayleygraph/cayley/graph" "github.com/cayleygraph/cayley/query/path" "github.com/cayleygraph/quad" "github.com/cayleygraph/quad/voc" ) // PropertyPathI is an interface to be used where a path of properties is expected. type PropertyPathI interface { BuildPath(qs graph.QuadStore, ns *voc.Namespaces) (*path.Path, error) } // PropertyPath is a struct wrapping PropertyPathI type PropertyPath struct { PropertyPathI } // NewPropertyPath constructs a new PropertyPath func NewPropertyPath(p PropertyPathI) *PropertyPath { return &PropertyPath{PropertyPathI: p} } // Description implements Step. func (*PropertyPath) Description() string { return "PropertyPath is a string, multiple strins or path describing a set of properties" } // UnmarshalJSON implements RawMessage func (p *PropertyPath) UnmarshalJSON(data []byte) error { var errors []error var propertyIRIs PropertyIRIs err := json.Unmarshal(data, &propertyIRIs) if err == nil { p.PropertyPathI = propertyIRIs return nil } errors = append(errors, err) var propertyIRIStrings PropertyIRIStrings err = json.Unmarshal(data, &propertyIRIStrings) if err == nil { p.PropertyPathI = propertyIRIStrings return nil } errors = append(errors, err) var propertyIRI PropertyIRI err = json.Unmarshal(data, &propertyIRI) if err == nil { p.PropertyPathI = propertyIRI return nil } errors = append(errors, err) var propertyIRIString PropertyIRIString err = json.Unmarshal(data, &propertyIRIString) if err == nil { p.PropertyPathI = propertyIRIString return nil } errors = append(errors, err) step, err := Unmarshal(data) if err == nil { pathStep, ok := step.(PathStep) if ok { p.PropertyPathI = pathStep return nil } errors = append(errors, fmt.Errorf("Step of type %T is not a PathStep. A PropertyPath step must be a PathStep", step)) } errors = append(errors, err) return formatMultiError(errors) } // PropertyIRIs is a slice of property IRIs. 
type PropertyIRIs []PropertyIRI // BuildPath implements PropertyPath. func (p PropertyIRIs) BuildPath(qs graph.QuadStore, ns *voc.Namespaces) (*path.Path, error) { var values []quad.Value for _, iri := range p { values = append(values, iri.full(ns)) } return path.StartPath(qs, values...), nil } // PropertyIRIStrings is a slice of property IRI strings. type PropertyIRIStrings []string // PropertyIRIs casts PropertyIRIStrings into PropertyIRIs func (p PropertyIRIStrings) PropertyIRIs() PropertyIRIs { var iris PropertyIRIs for _, iri := range p { iris = append(iris, PropertyIRI(iri)) } return iris } // BuildPath implements PropertyPath. func (p PropertyIRIStrings) BuildPath(qs graph.QuadStore, ns *voc.Namespaces) (*path.Path, error) { return p.PropertyIRIs().BuildPath(qs, ns) } // PropertyIRI is an IRI of a Property type PropertyIRI quad.IRI func (p PropertyIRI) full(ns *voc.Namespaces) quad.IRI { return quad.IRI(p).FullWith(ns) } // BuildPath implements PropertyPath func (p PropertyIRI) BuildPath(qs graph.QuadStore, ns *voc.Namespaces) (*path.Path, error) { return path.StartPath(qs, p.full(ns)), nil } // PropertyIRIString is a string of IRI of a Property type PropertyIRIString string // BuildPath implements PropertyPath func (p PropertyIRIString) BuildPath(qs graph.QuadStore, ns *voc.Namespaces) (*path.Path, error) { iri := PropertyIRI(p) return iri.BuildPath(qs, ns) } // PropertyStep is a step that should resolve to a path of properties type PropertyStep struct { PathStep } // BuildPath implements PropertyPath func (p PropertyStep) BuildPath(qs graph.QuadStore, ns *voc.Namespaces) (*path.Path, error) { return p.BuildPath(qs, ns) }
{ "pile_set_name": "Github" }
# # This ini file runs Telnet sessions on the DemoNetworkEth network, using # TelnetApp+TcpGenericServerApp. # # See also fileTransfer.ini and basicHTTP.ini for different kinds of # network traffic. # [General] network = DemoNetworkEth # number of client and server computers *.n = 2 # tcp apps **.cli[*].numApps = 1 **.cli[*].app[*].typename = "TelnetApp" **.cli[*].app[0].localAddress = "" **.cli[*].app[0].localPort = 1000 **.cli[*].app[0].connectAddress = "srv[1]" **.cli[*].app[0].connectPort = 1000 **.cli[*].app[0].startTime = uniform(10s,15s) **.cli[*].app[0].numCommands = int(exponential(10)) **.cli[*].app[0].commandLength = intWithUnit(exponential(10B)) **.cli[*].app[0].keyPressDelay = exponential(0.1s) **.cli[*].app[0].commandOutputLength = intWithUnit(exponential(40B)) **.cli[*].app[0].thinkTime = truncnormal(2s,3s) **.cli[*].app[0].idleInterval = truncnormal(3600s,1200s) **.cli[*].app[0].reconnectInterval = 30s **.srv[*].numApps = 1 **.srv[*].app[*].typename = "TcpGenericServerApp" **.srv[*].app[0].localAddress = "" **.srv[*].app[0].localPort = 1000 **.srv[*].app[0].replyDelay = 0s # tcp settings **.app[*].dataTransferMode = "object" # Ethernet NIC configuration **.eth[*].mac.queue.typename = "EtherQosQueue" **.eth[*].mac.queue.dataQueue.typename = "DropTailQueue" # in routers **.eth[*].mac.queue.dataQueue.packetCapacity = 10 # in routers **.eth[*].duplexMode = true
{ "pile_set_name": "Github" }
import subprocess def plot( x, y, width=80, height=25, label=None, xlim=None, ylim=None, xlabel=None, title=None, extra_gnuplot_arguments=None, plot_command="plot '-' w lines", ticks_scale=0, ): p = subprocess.Popen( ["gnuplot"], stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, ) gnuplot_input = [] gnuplot_input.append(f"set term dumb mono {width},{height}") # gnuplot_input.append("set tics nomirror") gnuplot_input.append(f"set tics scale {ticks_scale}") if xlim: gnuplot_input.append("set xrange [{}:{}]".format(xlim[0], xlim[1])) if ylim: gnuplot_input.append("set yrange [{}:{}]".format(ylim[0], ylim[1])) if xlabel: gnuplot_input.append(f'set xlabel "{xlabel}"') if title: gnuplot_input.append(f'set title "{title}"') if extra_gnuplot_arguments: gnuplot_input += extra_gnuplot_arguments string = plot_command if label: string += f" title '{label}'" else: string += " notitle" gnuplot_input.append(string) for xx, yy in zip(x, y): gnuplot_input.append(f"{xx:e} {yy:e}") gnuplot_input.append("e") out = p.communicate(input="\n".join(gnuplot_input).encode())[0] return _remove_empty_lines(out.decode()) def _remove_empty_lines(string): return string.split("\n")[1:-2]
{ "pile_set_name": "Github" }
func a(){t-- {t-- {t--} {t-- {t--}} {t--}} {t--}}
{ "pile_set_name": "Github" }
require('../../../modules/es6.array.filter'); module.exports = require('../../../modules/_entry-virtual')('Array').filter;
{ "pile_set_name": "Github" }
// Copyright 2009 the Sputnik authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /** * Operator uses PutValue * * @path ch11/11.13/11.13.2/S11.13.2_A2.2_T10.js * @description If Type(LeftHandSideExpression) is not Reference, throw ReferenceError (or SyntaxError). Check operator is "x ^= y" * @negative */ //CHECK#1 try { var z = (1 ^= 1); $ERROR('#1.1: 1 ^= 1 throw ReferenceError (or SyntaxError). Actual: ' + (z)); } catch (e) { if ((e instanceof ReferenceError) !== true) { $ERROR('#1.2: 1 ^= 1 throw ReferenceError (or SyntaxError). Actual: ' + (e)); } else { var z = (1 ^= 1); } }
{ "pile_set_name": "Github" }
/* * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ #include "modules/audio_processing/transient/transient_suppressor.h" #include <math.h> #include <string.h> #include <cmath> #include <complex> #include <deque> #include <set> #include "common_audio/include/audio_util.h" #include "common_audio/signal_processing/include/signal_processing_library.h" #include "common_audio/third_party/fft4g/fft4g.h" #include "modules/audio_processing/ns/windows_private.h" #include "modules/audio_processing/transient/common.h" #include "modules/audio_processing/transient/transient_detector.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" namespace webrtc { static const float kMeanIIRCoefficient = 0.5f; static const float kVoiceThreshold = 0.02f; // TODO(aluebs): Check if these values work also for 48kHz. 
static const size_t kMinVoiceBin = 3; static const size_t kMaxVoiceBin = 60; namespace { float ComplexMagnitude(float a, float b) { return std::abs(a) + std::abs(b); } } // namespace TransientSuppressor::TransientSuppressor() : data_length_(0), detection_length_(0), analysis_length_(0), buffer_delay_(0), complex_analysis_length_(0), num_channels_(0), window_(NULL), detector_smoothed_(0.f), keypress_counter_(0), chunks_since_keypress_(0), detection_enabled_(false), suppression_enabled_(false), use_hard_restoration_(false), chunks_since_voice_change_(0), seed_(182), using_reference_(false) {} TransientSuppressor::~TransientSuppressor() {} int TransientSuppressor::Initialize(int sample_rate_hz, int detection_rate_hz, int num_channels) { switch (sample_rate_hz) { case ts::kSampleRate8kHz: analysis_length_ = 128u; window_ = kBlocks80w128; break; case ts::kSampleRate16kHz: analysis_length_ = 256u; window_ = kBlocks160w256; break; case ts::kSampleRate32kHz: analysis_length_ = 512u; window_ = kBlocks320w512; break; case ts::kSampleRate48kHz: analysis_length_ = 1024u; window_ = kBlocks480w1024; break; default: return -1; } if (detection_rate_hz != ts::kSampleRate8kHz && detection_rate_hz != ts::kSampleRate16kHz && detection_rate_hz != ts::kSampleRate32kHz && detection_rate_hz != ts::kSampleRate48kHz) { return -1; } if (num_channels <= 0) { return -1; } detector_.reset(new TransientDetector(detection_rate_hz)); data_length_ = sample_rate_hz * ts::kChunkSizeMs / 1000; if (data_length_ > analysis_length_) { RTC_NOTREACHED(); return -1; } buffer_delay_ = analysis_length_ - data_length_; complex_analysis_length_ = analysis_length_ / 2 + 1; RTC_DCHECK_GE(complex_analysis_length_, kMaxVoiceBin); num_channels_ = num_channels; in_buffer_.reset(new float[analysis_length_ * num_channels_]); memset(in_buffer_.get(), 0, analysis_length_ * num_channels_ * sizeof(in_buffer_[0])); detection_length_ = detection_rate_hz * ts::kChunkSizeMs / 1000; detection_buffer_.reset(new 
float[detection_length_]); memset(detection_buffer_.get(), 0, detection_length_ * sizeof(detection_buffer_[0])); out_buffer_.reset(new float[analysis_length_ * num_channels_]); memset(out_buffer_.get(), 0, analysis_length_ * num_channels_ * sizeof(out_buffer_[0])); // ip[0] must be zero to trigger initialization using rdft(). size_t ip_length = 2 + sqrtf(analysis_length_); ip_.reset(new size_t[ip_length]()); memset(ip_.get(), 0, ip_length * sizeof(ip_[0])); wfft_.reset(new float[complex_analysis_length_ - 1]); memset(wfft_.get(), 0, (complex_analysis_length_ - 1) * sizeof(wfft_[0])); spectral_mean_.reset(new float[complex_analysis_length_ * num_channels_]); memset(spectral_mean_.get(), 0, complex_analysis_length_ * num_channels_ * sizeof(spectral_mean_[0])); fft_buffer_.reset(new float[analysis_length_ + 2]); memset(fft_buffer_.get(), 0, (analysis_length_ + 2) * sizeof(fft_buffer_[0])); magnitudes_.reset(new float[complex_analysis_length_]); memset(magnitudes_.get(), 0, complex_analysis_length_ * sizeof(magnitudes_[0])); mean_factor_.reset(new float[complex_analysis_length_]); static const float kFactorHeight = 10.f; static const float kLowSlope = 1.f; static const float kHighSlope = 0.3f; for (size_t i = 0; i < complex_analysis_length_; ++i) { mean_factor_[i] = kFactorHeight / (1.f + exp(kLowSlope * static_cast<int>(i - kMinVoiceBin))) + kFactorHeight / (1.f + exp(kHighSlope * static_cast<int>(kMaxVoiceBin - i))); } detector_smoothed_ = 0.f; keypress_counter_ = 0; chunks_since_keypress_ = 0; detection_enabled_ = false; suppression_enabled_ = false; use_hard_restoration_ = false; chunks_since_voice_change_ = 0; seed_ = 182; using_reference_ = false; return 0; } int TransientSuppressor::Suppress(float* data, size_t data_length, int num_channels, const float* detection_data, size_t detection_length, const float* reference_data, size_t reference_length, float voice_probability, bool key_pressed) { if (!data || data_length != data_length_ || num_channels != 
num_channels_ || detection_length != detection_length_ || voice_probability < 0 || voice_probability > 1) { return -1; } UpdateKeypress(key_pressed); UpdateBuffers(data); int result = 0; if (detection_enabled_) { UpdateRestoration(voice_probability); if (!detection_data) { // Use the input data of the first channel if special detection data is // not supplied. detection_data = &in_buffer_[buffer_delay_]; } float detector_result = detector_->Detect(detection_data, detection_length, reference_data, reference_length); if (detector_result < 0) { return -1; } using_reference_ = detector_->using_reference(); // |detector_smoothed_| follows the |detector_result| when this last one is // increasing, but has an exponential decaying tail to be able to suppress // the ringing of keyclicks. float smooth_factor = using_reference_ ? 0.6 : 0.1; detector_smoothed_ = detector_result >= detector_smoothed_ ? detector_result : smooth_factor * detector_smoothed_ + (1 - smooth_factor) * detector_result; for (int i = 0; i < num_channels_; ++i) { Suppress(&in_buffer_[i * analysis_length_], &spectral_mean_[i * complex_analysis_length_], &out_buffer_[i * analysis_length_]); } } // If the suppression isn't enabled, we use the in buffer to delay the signal // appropriately. This also gives time for the out buffer to be refreshed with // new data between detection and suppression getting enabled. for (int i = 0; i < num_channels_; ++i) { memcpy(&data[i * data_length_], suppression_enabled_ ? &out_buffer_[i * analysis_length_] : &in_buffer_[i * analysis_length_], data_length_ * sizeof(*data)); } return result; } // This should only be called when detection is enabled. UpdateBuffers() must // have been called. At return, |out_buffer_| will be filled with the // processed output. void TransientSuppressor::Suppress(float* in_ptr, float* spectral_mean, float* out_ptr) { // Go to frequency domain. 
for (size_t i = 0; i < analysis_length_; ++i) { // TODO(aluebs): Rename windows fft_buffer_[i] = in_ptr[i] * window_[i]; } WebRtc_rdft(analysis_length_, 1, fft_buffer_.get(), ip_.get(), wfft_.get()); // Since WebRtc_rdft puts R[n/2] in fft_buffer_[1], we move it to the end // for convenience. fft_buffer_[analysis_length_] = fft_buffer_[1]; fft_buffer_[analysis_length_ + 1] = 0.f; fft_buffer_[1] = 0.f; for (size_t i = 0; i < complex_analysis_length_; ++i) { magnitudes_[i] = ComplexMagnitude(fft_buffer_[i * 2], fft_buffer_[i * 2 + 1]); } // Restore audio if necessary. if (suppression_enabled_) { if (use_hard_restoration_) { HardRestoration(spectral_mean); } else { SoftRestoration(spectral_mean); } } // Update the spectral mean. for (size_t i = 0; i < complex_analysis_length_; ++i) { spectral_mean[i] = (1 - kMeanIIRCoefficient) * spectral_mean[i] + kMeanIIRCoefficient * magnitudes_[i]; } // Back to time domain. // Put R[n/2] back in fft_buffer_[1]. fft_buffer_[1] = fft_buffer_[analysis_length_]; WebRtc_rdft(analysis_length_, -1, fft_buffer_.get(), ip_.get(), wfft_.get()); const float fft_scaling = 2.f / analysis_length_; for (size_t i = 0; i < analysis_length_; ++i) { out_ptr[i] += fft_buffer_[i] * window_[i] * fft_scaling; } } void TransientSuppressor::UpdateKeypress(bool key_pressed) { const int kKeypressPenalty = 1000 / ts::kChunkSizeMs; const int kIsTypingThreshold = 1000 / ts::kChunkSizeMs; const int kChunksUntilNotTyping = 4000 / ts::kChunkSizeMs; // 4 seconds. 
if (key_pressed) { keypress_counter_ += kKeypressPenalty; chunks_since_keypress_ = 0; detection_enabled_ = true; } keypress_counter_ = std::max(0, keypress_counter_ - 1); if (keypress_counter_ > kIsTypingThreshold) { if (!suppression_enabled_) { RTC_LOG(LS_INFO) << "[ts] Transient suppression is now enabled."; } suppression_enabled_ = true; keypress_counter_ = 0; } if (detection_enabled_ && ++chunks_since_keypress_ > kChunksUntilNotTyping) { if (suppression_enabled_) { RTC_LOG(LS_INFO) << "[ts] Transient suppression is now disabled."; } detection_enabled_ = false; suppression_enabled_ = false; keypress_counter_ = 0; } } void TransientSuppressor::UpdateRestoration(float voice_probability) { const int kHardRestorationOffsetDelay = 3; const int kHardRestorationOnsetDelay = 80; bool not_voiced = voice_probability < kVoiceThreshold; if (not_voiced == use_hard_restoration_) { chunks_since_voice_change_ = 0; } else { ++chunks_since_voice_change_; if ((use_hard_restoration_ && chunks_since_voice_change_ > kHardRestorationOffsetDelay) || (!use_hard_restoration_ && chunks_since_voice_change_ > kHardRestorationOnsetDelay)) { use_hard_restoration_ = not_voiced; chunks_since_voice_change_ = 0; } } } // Shift buffers to make way for new data. Must be called after // |detection_enabled_| is updated by UpdateKeypress(). void TransientSuppressor::UpdateBuffers(float* data) { // TODO(aluebs): Change to ring buffer. memmove(in_buffer_.get(), &in_buffer_[data_length_], (buffer_delay_ + (num_channels_ - 1) * analysis_length_) * sizeof(in_buffer_[0])); // Copy new chunk to buffer. for (int i = 0; i < num_channels_; ++i) { memcpy(&in_buffer_[buffer_delay_ + i * analysis_length_], &data[i * data_length_], data_length_ * sizeof(*data)); } if (detection_enabled_) { // Shift previous chunk in out buffer. memmove(out_buffer_.get(), &out_buffer_[data_length_], (buffer_delay_ + (num_channels_ - 1) * analysis_length_) * sizeof(out_buffer_[0])); // Initialize new chunk in out buffer. 
for (int i = 0; i < num_channels_; ++i) { memset(&out_buffer_[buffer_delay_ + i * analysis_length_], 0, data_length_ * sizeof(out_buffer_[0])); } } } // Restores the unvoiced signal if a click is present. // Attenuates by a certain factor every peak in the |fft_buffer_| that exceeds // the spectral mean. The attenuation depends on |detector_smoothed_|. // If a restoration takes place, the |magnitudes_| are updated to the new value. void TransientSuppressor::HardRestoration(float* spectral_mean) { const float detector_result = 1.f - pow(1.f - detector_smoothed_, using_reference_ ? 200.f : 50.f); // To restore, we get the peaks in the spectrum. If higher than the previous // spectral mean we adjust them. for (size_t i = 0; i < complex_analysis_length_; ++i) { if (magnitudes_[i] > spectral_mean[i] && magnitudes_[i] > 0) { // RandU() generates values on [0, int16::max()] const float phase = 2 * ts::kPi * WebRtcSpl_RandU(&seed_) / std::numeric_limits<int16_t>::max(); const float scaled_mean = detector_result * spectral_mean[i]; fft_buffer_[i * 2] = (1 - detector_result) * fft_buffer_[i * 2] + scaled_mean * cosf(phase); fft_buffer_[i * 2 + 1] = (1 - detector_result) * fft_buffer_[i * 2 + 1] + scaled_mean * sinf(phase); magnitudes_[i] = magnitudes_[i] - detector_result * (magnitudes_[i] - spectral_mean[i]); } } } // Restores the voiced signal if a click is present. // Attenuates by a certain factor every peak in the |fft_buffer_| that exceeds // the spectral mean and that is lower than some function of the current block // frequency mean. The attenuation depends on |detector_smoothed_|. // If a restoration takes place, the |magnitudes_| are updated to the new value. void TransientSuppressor::SoftRestoration(float* spectral_mean) { // Get the spectral magnitude mean of the current block. 
float block_frequency_mean = 0; for (size_t i = kMinVoiceBin; i < kMaxVoiceBin; ++i) { block_frequency_mean += magnitudes_[i]; } block_frequency_mean /= (kMaxVoiceBin - kMinVoiceBin); // To restore, we get the peaks in the spectrum. If higher than the // previous spectral mean and lower than a factor of the block mean // we adjust them. The factor is a double sigmoid that has a minimum in the // voice frequency range (300Hz - 3kHz). for (size_t i = 0; i < complex_analysis_length_; ++i) { if (magnitudes_[i] > spectral_mean[i] && magnitudes_[i] > 0 && (using_reference_ || magnitudes_[i] < block_frequency_mean * mean_factor_[i])) { const float new_magnitude = magnitudes_[i] - detector_smoothed_ * (magnitudes_[i] - spectral_mean[i]); const float magnitude_ratio = new_magnitude / magnitudes_[i]; fft_buffer_[i * 2] *= magnitude_ratio; fft_buffer_[i * 2 + 1] *= magnitude_ratio; magnitudes_[i] = new_magnitude; } } } } // namespace webrtc
{ "pile_set_name": "Github" }
# /* ************************************************************************** # * * # * (C) Copyright Paul Mensonides 2002. # * Distributed under the Boost Software License, Version 1.0. (See # * accompanying file LICENSE_1_0.txt or copy at # * http://www.boost.org/LICENSE_1_0.txt) # * * # ************************************************************************** */ # # /* See http://www.boost.org for most recent version. */ # # if BOOST_PP_LOCAL_R(256) BOOST_PP_LOCAL_MACRO(256) # endif # if BOOST_PP_LOCAL_R(255) BOOST_PP_LOCAL_MACRO(255) # endif # if BOOST_PP_LOCAL_R(254) BOOST_PP_LOCAL_MACRO(254) # endif # if BOOST_PP_LOCAL_R(253) BOOST_PP_LOCAL_MACRO(253) # endif # if BOOST_PP_LOCAL_R(252) BOOST_PP_LOCAL_MACRO(252) # endif # if BOOST_PP_LOCAL_R(251) BOOST_PP_LOCAL_MACRO(251) # endif # if BOOST_PP_LOCAL_R(250) BOOST_PP_LOCAL_MACRO(250) # endif # if BOOST_PP_LOCAL_R(249) BOOST_PP_LOCAL_MACRO(249) # endif # if BOOST_PP_LOCAL_R(248) BOOST_PP_LOCAL_MACRO(248) # endif # if BOOST_PP_LOCAL_R(247) BOOST_PP_LOCAL_MACRO(247) # endif # if BOOST_PP_LOCAL_R(246) BOOST_PP_LOCAL_MACRO(246) # endif # if BOOST_PP_LOCAL_R(245) BOOST_PP_LOCAL_MACRO(245) # endif # if BOOST_PP_LOCAL_R(244) BOOST_PP_LOCAL_MACRO(244) # endif # if BOOST_PP_LOCAL_R(243) BOOST_PP_LOCAL_MACRO(243) # endif # if BOOST_PP_LOCAL_R(242) BOOST_PP_LOCAL_MACRO(242) # endif # if BOOST_PP_LOCAL_R(241) BOOST_PP_LOCAL_MACRO(241) # endif # if BOOST_PP_LOCAL_R(240) BOOST_PP_LOCAL_MACRO(240) # endif # if BOOST_PP_LOCAL_R(239) BOOST_PP_LOCAL_MACRO(239) # endif # if BOOST_PP_LOCAL_R(238) BOOST_PP_LOCAL_MACRO(238) # endif # if BOOST_PP_LOCAL_R(237) BOOST_PP_LOCAL_MACRO(237) # endif # if BOOST_PP_LOCAL_R(236) BOOST_PP_LOCAL_MACRO(236) # endif # if BOOST_PP_LOCAL_R(235) BOOST_PP_LOCAL_MACRO(235) # endif # if BOOST_PP_LOCAL_R(234) BOOST_PP_LOCAL_MACRO(234) # endif # if BOOST_PP_LOCAL_R(233) BOOST_PP_LOCAL_MACRO(233) # endif # if BOOST_PP_LOCAL_R(232) BOOST_PP_LOCAL_MACRO(232) # endif # if BOOST_PP_LOCAL_R(231) 
BOOST_PP_LOCAL_MACRO(231) # endif # if BOOST_PP_LOCAL_R(230) BOOST_PP_LOCAL_MACRO(230) # endif # if BOOST_PP_LOCAL_R(229) BOOST_PP_LOCAL_MACRO(229) # endif # if BOOST_PP_LOCAL_R(228) BOOST_PP_LOCAL_MACRO(228) # endif # if BOOST_PP_LOCAL_R(227) BOOST_PP_LOCAL_MACRO(227) # endif # if BOOST_PP_LOCAL_R(226) BOOST_PP_LOCAL_MACRO(226) # endif # if BOOST_PP_LOCAL_R(225) BOOST_PP_LOCAL_MACRO(225) # endif # if BOOST_PP_LOCAL_R(224) BOOST_PP_LOCAL_MACRO(224) # endif # if BOOST_PP_LOCAL_R(223) BOOST_PP_LOCAL_MACRO(223) # endif # if BOOST_PP_LOCAL_R(222) BOOST_PP_LOCAL_MACRO(222) # endif # if BOOST_PP_LOCAL_R(221) BOOST_PP_LOCAL_MACRO(221) # endif # if BOOST_PP_LOCAL_R(220) BOOST_PP_LOCAL_MACRO(220) # endif # if BOOST_PP_LOCAL_R(219) BOOST_PP_LOCAL_MACRO(219) # endif # if BOOST_PP_LOCAL_R(218) BOOST_PP_LOCAL_MACRO(218) # endif # if BOOST_PP_LOCAL_R(217) BOOST_PP_LOCAL_MACRO(217) # endif # if BOOST_PP_LOCAL_R(216) BOOST_PP_LOCAL_MACRO(216) # endif # if BOOST_PP_LOCAL_R(215) BOOST_PP_LOCAL_MACRO(215) # endif # if BOOST_PP_LOCAL_R(214) BOOST_PP_LOCAL_MACRO(214) # endif # if BOOST_PP_LOCAL_R(213) BOOST_PP_LOCAL_MACRO(213) # endif # if BOOST_PP_LOCAL_R(212) BOOST_PP_LOCAL_MACRO(212) # endif # if BOOST_PP_LOCAL_R(211) BOOST_PP_LOCAL_MACRO(211) # endif # if BOOST_PP_LOCAL_R(210) BOOST_PP_LOCAL_MACRO(210) # endif # if BOOST_PP_LOCAL_R(209) BOOST_PP_LOCAL_MACRO(209) # endif # if BOOST_PP_LOCAL_R(208) BOOST_PP_LOCAL_MACRO(208) # endif # if BOOST_PP_LOCAL_R(207) BOOST_PP_LOCAL_MACRO(207) # endif # if BOOST_PP_LOCAL_R(206) BOOST_PP_LOCAL_MACRO(206) # endif # if BOOST_PP_LOCAL_R(205) BOOST_PP_LOCAL_MACRO(205) # endif # if BOOST_PP_LOCAL_R(204) BOOST_PP_LOCAL_MACRO(204) # endif # if BOOST_PP_LOCAL_R(203) BOOST_PP_LOCAL_MACRO(203) # endif # if BOOST_PP_LOCAL_R(202) BOOST_PP_LOCAL_MACRO(202) # endif # if BOOST_PP_LOCAL_R(201) BOOST_PP_LOCAL_MACRO(201) # endif # if BOOST_PP_LOCAL_R(200) BOOST_PP_LOCAL_MACRO(200) # endif # if BOOST_PP_LOCAL_R(199) BOOST_PP_LOCAL_MACRO(199) # endif # if 
BOOST_PP_LOCAL_R(198) BOOST_PP_LOCAL_MACRO(198) # endif # if BOOST_PP_LOCAL_R(197) BOOST_PP_LOCAL_MACRO(197) # endif # if BOOST_PP_LOCAL_R(196) BOOST_PP_LOCAL_MACRO(196) # endif # if BOOST_PP_LOCAL_R(195) BOOST_PP_LOCAL_MACRO(195) # endif # if BOOST_PP_LOCAL_R(194) BOOST_PP_LOCAL_MACRO(194) # endif # if BOOST_PP_LOCAL_R(193) BOOST_PP_LOCAL_MACRO(193) # endif # if BOOST_PP_LOCAL_R(192) BOOST_PP_LOCAL_MACRO(192) # endif # if BOOST_PP_LOCAL_R(191) BOOST_PP_LOCAL_MACRO(191) # endif # if BOOST_PP_LOCAL_R(190) BOOST_PP_LOCAL_MACRO(190) # endif # if BOOST_PP_LOCAL_R(189) BOOST_PP_LOCAL_MACRO(189) # endif # if BOOST_PP_LOCAL_R(188) BOOST_PP_LOCAL_MACRO(188) # endif # if BOOST_PP_LOCAL_R(187) BOOST_PP_LOCAL_MACRO(187) # endif # if BOOST_PP_LOCAL_R(186) BOOST_PP_LOCAL_MACRO(186) # endif # if BOOST_PP_LOCAL_R(185) BOOST_PP_LOCAL_MACRO(185) # endif # if BOOST_PP_LOCAL_R(184) BOOST_PP_LOCAL_MACRO(184) # endif # if BOOST_PP_LOCAL_R(183) BOOST_PP_LOCAL_MACRO(183) # endif # if BOOST_PP_LOCAL_R(182) BOOST_PP_LOCAL_MACRO(182) # endif # if BOOST_PP_LOCAL_R(181) BOOST_PP_LOCAL_MACRO(181) # endif # if BOOST_PP_LOCAL_R(180) BOOST_PP_LOCAL_MACRO(180) # endif # if BOOST_PP_LOCAL_R(179) BOOST_PP_LOCAL_MACRO(179) # endif # if BOOST_PP_LOCAL_R(178) BOOST_PP_LOCAL_MACRO(178) # endif # if BOOST_PP_LOCAL_R(177) BOOST_PP_LOCAL_MACRO(177) # endif # if BOOST_PP_LOCAL_R(176) BOOST_PP_LOCAL_MACRO(176) # endif # if BOOST_PP_LOCAL_R(175) BOOST_PP_LOCAL_MACRO(175) # endif # if BOOST_PP_LOCAL_R(174) BOOST_PP_LOCAL_MACRO(174) # endif # if BOOST_PP_LOCAL_R(173) BOOST_PP_LOCAL_MACRO(173) # endif # if BOOST_PP_LOCAL_R(172) BOOST_PP_LOCAL_MACRO(172) # endif # if BOOST_PP_LOCAL_R(171) BOOST_PP_LOCAL_MACRO(171) # endif # if BOOST_PP_LOCAL_R(170) BOOST_PP_LOCAL_MACRO(170) # endif # if BOOST_PP_LOCAL_R(169) BOOST_PP_LOCAL_MACRO(169) # endif # if BOOST_PP_LOCAL_R(168) BOOST_PP_LOCAL_MACRO(168) # endif # if BOOST_PP_LOCAL_R(167) BOOST_PP_LOCAL_MACRO(167) # endif # if BOOST_PP_LOCAL_R(166) BOOST_PP_LOCAL_MACRO(166) 
# endif # if BOOST_PP_LOCAL_R(165) BOOST_PP_LOCAL_MACRO(165) # endif # if BOOST_PP_LOCAL_R(164) BOOST_PP_LOCAL_MACRO(164) # endif # if BOOST_PP_LOCAL_R(163) BOOST_PP_LOCAL_MACRO(163) # endif # if BOOST_PP_LOCAL_R(162) BOOST_PP_LOCAL_MACRO(162) # endif # if BOOST_PP_LOCAL_R(161) BOOST_PP_LOCAL_MACRO(161) # endif # if BOOST_PP_LOCAL_R(160) BOOST_PP_LOCAL_MACRO(160) # endif # if BOOST_PP_LOCAL_R(159) BOOST_PP_LOCAL_MACRO(159) # endif # if BOOST_PP_LOCAL_R(158) BOOST_PP_LOCAL_MACRO(158) # endif # if BOOST_PP_LOCAL_R(157) BOOST_PP_LOCAL_MACRO(157) # endif # if BOOST_PP_LOCAL_R(156) BOOST_PP_LOCAL_MACRO(156) # endif # if BOOST_PP_LOCAL_R(155) BOOST_PP_LOCAL_MACRO(155) # endif # if BOOST_PP_LOCAL_R(154) BOOST_PP_LOCAL_MACRO(154) # endif # if BOOST_PP_LOCAL_R(153) BOOST_PP_LOCAL_MACRO(153) # endif # if BOOST_PP_LOCAL_R(152) BOOST_PP_LOCAL_MACRO(152) # endif # if BOOST_PP_LOCAL_R(151) BOOST_PP_LOCAL_MACRO(151) # endif # if BOOST_PP_LOCAL_R(150) BOOST_PP_LOCAL_MACRO(150) # endif # if BOOST_PP_LOCAL_R(149) BOOST_PP_LOCAL_MACRO(149) # endif # if BOOST_PP_LOCAL_R(148) BOOST_PP_LOCAL_MACRO(148) # endif # if BOOST_PP_LOCAL_R(147) BOOST_PP_LOCAL_MACRO(147) # endif # if BOOST_PP_LOCAL_R(146) BOOST_PP_LOCAL_MACRO(146) # endif # if BOOST_PP_LOCAL_R(145) BOOST_PP_LOCAL_MACRO(145) # endif # if BOOST_PP_LOCAL_R(144) BOOST_PP_LOCAL_MACRO(144) # endif # if BOOST_PP_LOCAL_R(143) BOOST_PP_LOCAL_MACRO(143) # endif # if BOOST_PP_LOCAL_R(142) BOOST_PP_LOCAL_MACRO(142) # endif # if BOOST_PP_LOCAL_R(141) BOOST_PP_LOCAL_MACRO(141) # endif # if BOOST_PP_LOCAL_R(140) BOOST_PP_LOCAL_MACRO(140) # endif # if BOOST_PP_LOCAL_R(139) BOOST_PP_LOCAL_MACRO(139) # endif # if BOOST_PP_LOCAL_R(138) BOOST_PP_LOCAL_MACRO(138) # endif # if BOOST_PP_LOCAL_R(137) BOOST_PP_LOCAL_MACRO(137) # endif # if BOOST_PP_LOCAL_R(136) BOOST_PP_LOCAL_MACRO(136) # endif # if BOOST_PP_LOCAL_R(135) BOOST_PP_LOCAL_MACRO(135) # endif # if BOOST_PP_LOCAL_R(134) BOOST_PP_LOCAL_MACRO(134) # endif # if BOOST_PP_LOCAL_R(133) 
BOOST_PP_LOCAL_MACRO(133) # endif # if BOOST_PP_LOCAL_R(132) BOOST_PP_LOCAL_MACRO(132) # endif # if BOOST_PP_LOCAL_R(131) BOOST_PP_LOCAL_MACRO(131) # endif # if BOOST_PP_LOCAL_R(130) BOOST_PP_LOCAL_MACRO(130) # endif # if BOOST_PP_LOCAL_R(129) BOOST_PP_LOCAL_MACRO(129) # endif # if BOOST_PP_LOCAL_R(128) BOOST_PP_LOCAL_MACRO(128) # endif # if BOOST_PP_LOCAL_R(127) BOOST_PP_LOCAL_MACRO(127) # endif # if BOOST_PP_LOCAL_R(126) BOOST_PP_LOCAL_MACRO(126) # endif # if BOOST_PP_LOCAL_R(125) BOOST_PP_LOCAL_MACRO(125) # endif # if BOOST_PP_LOCAL_R(124) BOOST_PP_LOCAL_MACRO(124) # endif # if BOOST_PP_LOCAL_R(123) BOOST_PP_LOCAL_MACRO(123) # endif # if BOOST_PP_LOCAL_R(122) BOOST_PP_LOCAL_MACRO(122) # endif # if BOOST_PP_LOCAL_R(121) BOOST_PP_LOCAL_MACRO(121) # endif # if BOOST_PP_LOCAL_R(120) BOOST_PP_LOCAL_MACRO(120) # endif # if BOOST_PP_LOCAL_R(119) BOOST_PP_LOCAL_MACRO(119) # endif # if BOOST_PP_LOCAL_R(118) BOOST_PP_LOCAL_MACRO(118) # endif # if BOOST_PP_LOCAL_R(117) BOOST_PP_LOCAL_MACRO(117) # endif # if BOOST_PP_LOCAL_R(116) BOOST_PP_LOCAL_MACRO(116) # endif # if BOOST_PP_LOCAL_R(115) BOOST_PP_LOCAL_MACRO(115) # endif # if BOOST_PP_LOCAL_R(114) BOOST_PP_LOCAL_MACRO(114) # endif # if BOOST_PP_LOCAL_R(113) BOOST_PP_LOCAL_MACRO(113) # endif # if BOOST_PP_LOCAL_R(112) BOOST_PP_LOCAL_MACRO(112) # endif # if BOOST_PP_LOCAL_R(111) BOOST_PP_LOCAL_MACRO(111) # endif # if BOOST_PP_LOCAL_R(110) BOOST_PP_LOCAL_MACRO(110) # endif # if BOOST_PP_LOCAL_R(109) BOOST_PP_LOCAL_MACRO(109) # endif # if BOOST_PP_LOCAL_R(108) BOOST_PP_LOCAL_MACRO(108) # endif # if BOOST_PP_LOCAL_R(107) BOOST_PP_LOCAL_MACRO(107) # endif # if BOOST_PP_LOCAL_R(106) BOOST_PP_LOCAL_MACRO(106) # endif # if BOOST_PP_LOCAL_R(105) BOOST_PP_LOCAL_MACRO(105) # endif # if BOOST_PP_LOCAL_R(104) BOOST_PP_LOCAL_MACRO(104) # endif # if BOOST_PP_LOCAL_R(103) BOOST_PP_LOCAL_MACRO(103) # endif # if BOOST_PP_LOCAL_R(102) BOOST_PP_LOCAL_MACRO(102) # endif # if BOOST_PP_LOCAL_R(101) BOOST_PP_LOCAL_MACRO(101) # endif # if 
BOOST_PP_LOCAL_R(100) BOOST_PP_LOCAL_MACRO(100) # endif # if BOOST_PP_LOCAL_R(99) BOOST_PP_LOCAL_MACRO(99) # endif # if BOOST_PP_LOCAL_R(98) BOOST_PP_LOCAL_MACRO(98) # endif # if BOOST_PP_LOCAL_R(97) BOOST_PP_LOCAL_MACRO(97) # endif # if BOOST_PP_LOCAL_R(96) BOOST_PP_LOCAL_MACRO(96) # endif # if BOOST_PP_LOCAL_R(95) BOOST_PP_LOCAL_MACRO(95) # endif # if BOOST_PP_LOCAL_R(94) BOOST_PP_LOCAL_MACRO(94) # endif # if BOOST_PP_LOCAL_R(93) BOOST_PP_LOCAL_MACRO(93) # endif # if BOOST_PP_LOCAL_R(92) BOOST_PP_LOCAL_MACRO(92) # endif # if BOOST_PP_LOCAL_R(91) BOOST_PP_LOCAL_MACRO(91) # endif # if BOOST_PP_LOCAL_R(90) BOOST_PP_LOCAL_MACRO(90) # endif # if BOOST_PP_LOCAL_R(89) BOOST_PP_LOCAL_MACRO(89) # endif # if BOOST_PP_LOCAL_R(88) BOOST_PP_LOCAL_MACRO(88) # endif # if BOOST_PP_LOCAL_R(87) BOOST_PP_LOCAL_MACRO(87) # endif # if BOOST_PP_LOCAL_R(86) BOOST_PP_LOCAL_MACRO(86) # endif # if BOOST_PP_LOCAL_R(85) BOOST_PP_LOCAL_MACRO(85) # endif # if BOOST_PP_LOCAL_R(84) BOOST_PP_LOCAL_MACRO(84) # endif # if BOOST_PP_LOCAL_R(83) BOOST_PP_LOCAL_MACRO(83) # endif # if BOOST_PP_LOCAL_R(82) BOOST_PP_LOCAL_MACRO(82) # endif # if BOOST_PP_LOCAL_R(81) BOOST_PP_LOCAL_MACRO(81) # endif # if BOOST_PP_LOCAL_R(80) BOOST_PP_LOCAL_MACRO(80) # endif # if BOOST_PP_LOCAL_R(79) BOOST_PP_LOCAL_MACRO(79) # endif # if BOOST_PP_LOCAL_R(78) BOOST_PP_LOCAL_MACRO(78) # endif # if BOOST_PP_LOCAL_R(77) BOOST_PP_LOCAL_MACRO(77) # endif # if BOOST_PP_LOCAL_R(76) BOOST_PP_LOCAL_MACRO(76) # endif # if BOOST_PP_LOCAL_R(75) BOOST_PP_LOCAL_MACRO(75) # endif # if BOOST_PP_LOCAL_R(74) BOOST_PP_LOCAL_MACRO(74) # endif # if BOOST_PP_LOCAL_R(73) BOOST_PP_LOCAL_MACRO(73) # endif # if BOOST_PP_LOCAL_R(72) BOOST_PP_LOCAL_MACRO(72) # endif # if BOOST_PP_LOCAL_R(71) BOOST_PP_LOCAL_MACRO(71) # endif # if BOOST_PP_LOCAL_R(70) BOOST_PP_LOCAL_MACRO(70) # endif # if BOOST_PP_LOCAL_R(69) BOOST_PP_LOCAL_MACRO(69) # endif # if BOOST_PP_LOCAL_R(68) BOOST_PP_LOCAL_MACRO(68) # endif # if BOOST_PP_LOCAL_R(67) BOOST_PP_LOCAL_MACRO(67) # 
endif # if BOOST_PP_LOCAL_R(66) BOOST_PP_LOCAL_MACRO(66) # endif # if BOOST_PP_LOCAL_R(65) BOOST_PP_LOCAL_MACRO(65) # endif # if BOOST_PP_LOCAL_R(64) BOOST_PP_LOCAL_MACRO(64) # endif # if BOOST_PP_LOCAL_R(63) BOOST_PP_LOCAL_MACRO(63) # endif # if BOOST_PP_LOCAL_R(62) BOOST_PP_LOCAL_MACRO(62) # endif # if BOOST_PP_LOCAL_R(61) BOOST_PP_LOCAL_MACRO(61) # endif # if BOOST_PP_LOCAL_R(60) BOOST_PP_LOCAL_MACRO(60) # endif # if BOOST_PP_LOCAL_R(59) BOOST_PP_LOCAL_MACRO(59) # endif # if BOOST_PP_LOCAL_R(58) BOOST_PP_LOCAL_MACRO(58) # endif # if BOOST_PP_LOCAL_R(57) BOOST_PP_LOCAL_MACRO(57) # endif # if BOOST_PP_LOCAL_R(56) BOOST_PP_LOCAL_MACRO(56) # endif # if BOOST_PP_LOCAL_R(55) BOOST_PP_LOCAL_MACRO(55) # endif # if BOOST_PP_LOCAL_R(54) BOOST_PP_LOCAL_MACRO(54) # endif # if BOOST_PP_LOCAL_R(53) BOOST_PP_LOCAL_MACRO(53) # endif # if BOOST_PP_LOCAL_R(52) BOOST_PP_LOCAL_MACRO(52) # endif # if BOOST_PP_LOCAL_R(51) BOOST_PP_LOCAL_MACRO(51) # endif # if BOOST_PP_LOCAL_R(50) BOOST_PP_LOCAL_MACRO(50) # endif # if BOOST_PP_LOCAL_R(49) BOOST_PP_LOCAL_MACRO(49) # endif # if BOOST_PP_LOCAL_R(48) BOOST_PP_LOCAL_MACRO(48) # endif # if BOOST_PP_LOCAL_R(47) BOOST_PP_LOCAL_MACRO(47) # endif # if BOOST_PP_LOCAL_R(46) BOOST_PP_LOCAL_MACRO(46) # endif # if BOOST_PP_LOCAL_R(45) BOOST_PP_LOCAL_MACRO(45) # endif # if BOOST_PP_LOCAL_R(44) BOOST_PP_LOCAL_MACRO(44) # endif # if BOOST_PP_LOCAL_R(43) BOOST_PP_LOCAL_MACRO(43) # endif # if BOOST_PP_LOCAL_R(42) BOOST_PP_LOCAL_MACRO(42) # endif # if BOOST_PP_LOCAL_R(41) BOOST_PP_LOCAL_MACRO(41) # endif # if BOOST_PP_LOCAL_R(40) BOOST_PP_LOCAL_MACRO(40) # endif # if BOOST_PP_LOCAL_R(39) BOOST_PP_LOCAL_MACRO(39) # endif # if BOOST_PP_LOCAL_R(38) BOOST_PP_LOCAL_MACRO(38) # endif # if BOOST_PP_LOCAL_R(37) BOOST_PP_LOCAL_MACRO(37) # endif # if BOOST_PP_LOCAL_R(36) BOOST_PP_LOCAL_MACRO(36) # endif # if BOOST_PP_LOCAL_R(35) BOOST_PP_LOCAL_MACRO(35) # endif # if BOOST_PP_LOCAL_R(34) BOOST_PP_LOCAL_MACRO(34) # endif # if BOOST_PP_LOCAL_R(33) 
BOOST_PP_LOCAL_MACRO(33) # endif # if BOOST_PP_LOCAL_R(32) BOOST_PP_LOCAL_MACRO(32) # endif # if BOOST_PP_LOCAL_R(31) BOOST_PP_LOCAL_MACRO(31) # endif # if BOOST_PP_LOCAL_R(30) BOOST_PP_LOCAL_MACRO(30) # endif # if BOOST_PP_LOCAL_R(29) BOOST_PP_LOCAL_MACRO(29) # endif # if BOOST_PP_LOCAL_R(28) BOOST_PP_LOCAL_MACRO(28) # endif # if BOOST_PP_LOCAL_R(27) BOOST_PP_LOCAL_MACRO(27) # endif # if BOOST_PP_LOCAL_R(26) BOOST_PP_LOCAL_MACRO(26) # endif # if BOOST_PP_LOCAL_R(25) BOOST_PP_LOCAL_MACRO(25) # endif # if BOOST_PP_LOCAL_R(24) BOOST_PP_LOCAL_MACRO(24) # endif # if BOOST_PP_LOCAL_R(23) BOOST_PP_LOCAL_MACRO(23) # endif # if BOOST_PP_LOCAL_R(22) BOOST_PP_LOCAL_MACRO(22) # endif # if BOOST_PP_LOCAL_R(21) BOOST_PP_LOCAL_MACRO(21) # endif # if BOOST_PP_LOCAL_R(20) BOOST_PP_LOCAL_MACRO(20) # endif # if BOOST_PP_LOCAL_R(19) BOOST_PP_LOCAL_MACRO(19) # endif # if BOOST_PP_LOCAL_R(18) BOOST_PP_LOCAL_MACRO(18) # endif # if BOOST_PP_LOCAL_R(17) BOOST_PP_LOCAL_MACRO(17) # endif # if BOOST_PP_LOCAL_R(16) BOOST_PP_LOCAL_MACRO(16) # endif # if BOOST_PP_LOCAL_R(15) BOOST_PP_LOCAL_MACRO(15) # endif # if BOOST_PP_LOCAL_R(14) BOOST_PP_LOCAL_MACRO(14) # endif # if BOOST_PP_LOCAL_R(13) BOOST_PP_LOCAL_MACRO(13) # endif # if BOOST_PP_LOCAL_R(12) BOOST_PP_LOCAL_MACRO(12) # endif # if BOOST_PP_LOCAL_R(11) BOOST_PP_LOCAL_MACRO(11) # endif # if BOOST_PP_LOCAL_R(10) BOOST_PP_LOCAL_MACRO(10) # endif # if BOOST_PP_LOCAL_R(9) BOOST_PP_LOCAL_MACRO(9) # endif # if BOOST_PP_LOCAL_R(8) BOOST_PP_LOCAL_MACRO(8) # endif # if BOOST_PP_LOCAL_R(7) BOOST_PP_LOCAL_MACRO(7) # endif # if BOOST_PP_LOCAL_R(6) BOOST_PP_LOCAL_MACRO(6) # endif # if BOOST_PP_LOCAL_R(5) BOOST_PP_LOCAL_MACRO(5) # endif # if BOOST_PP_LOCAL_R(4) BOOST_PP_LOCAL_MACRO(4) # endif # if BOOST_PP_LOCAL_R(3) BOOST_PP_LOCAL_MACRO(3) # endif # if BOOST_PP_LOCAL_R(2) BOOST_PP_LOCAL_MACRO(2) # endif # if BOOST_PP_LOCAL_R(1) BOOST_PP_LOCAL_MACRO(1) # endif # if BOOST_PP_LOCAL_R(0) BOOST_PP_LOCAL_MACRO(0) # endif
{ "pile_set_name": "Github" }
# Gemfile: development, test and profiling dependencies.
source 'https://rubygems.org'

gemspec

group :test do
  if RUBY_VERSION.to_i < 2
    # Ruby 1.x: pin rake and json to the last releases that still support it.
    gem 'rake', '>= 0.8.7', '< 11'
    gem 'json', '< 2'
  else
    gem 'rake'
  end
  gem 'simplecov', '>= 0.14.1'
  gem 'test-unit'
end

group :profile do
  gem 'ruby-prof', :platforms => :ruby
end

platforms :rbx do
  gem 'rubysl'
  gem 'rubysl-test-unit'
  gem 'racc'
  gem 'rubinius-coverage', '~> 2.0'
end
{ "pile_set_name": "Github" }
# React Server-Side Rendering Example

Above is an example playground for you to play with React & Redux on the client and server sides. Before jumping into it, make sure to [read the SSR tutorial on Freecodecamp (outdated)](https://www.freecodecamp.org/news/server-side-rendering-your-react-app-in-three-simple-steps-7a82b95db82e/).

Here’s what we will build in this tutorial:

![Clash of Clans app card](https://cdn-images-1.medium.com/max/1000/1*wk04sWGQkw36_XLFvPACrA.png)

## Development

- Clone the repo:

```bash
$ git clone https://github.com/Rohitkrops/ssr.git
```

- Go to the project directory and install dependencies:

```bash
$ cd ssr && yarn install
```

- Launch the server:

```bash
$ yarn start
```

Now, the application is running at [http://localhost:3000](http://localhost:3000). The homepage is server-side rendered and ['/client'](http://localhost:3000/client) is client-side rendered.

### Note

This tutorial is to help you understand the concepts behind server-side rendering of a React application. Don't use this in production. For that, use mature solutions; I would recommend Next.js 🥳
{ "pile_set_name": "Github" }
<?php namespace Faker\Provider\ja_JP; class Text extends \Faker\Provider\Text { protected static $separator = ''; protected static $separatorLen = 0; /** * All punctuation in $baseText: 、 。 「 」 『 』 ! ? ー , : ; */ protected static $notEndPunct = array('、', '「', '『', 'ー', ',', ':', ';'); protected static $endPunct = array('。', '」', '』', '!', '?'); protected static $notBeginPunct = array('、', '。', '」', '』', '!', '?', 'ー', ',', ':', ';'); /** * Title: 銀河鉄道の夜 Night On The Milky Way Train * Author: 宮沢賢治 Kenji Miyazawa * Language: Japanese * * @see http://www.aozora.gr.jp/cards/000081/files/43737_19215.html * @var string */ protected static $baseText = <<<'EOT' 一 午後の授業 「ではみなさんは、そういうふうに川だと言いわれたり、乳ちちの流ながれたあとだと言いわれたりしていた、このぼんやりと白いものがほんとうは何かご承知しょうちですか」先生は、黒板こくばんにつるした大きな黒い星座せいざの図の、上から下へ白くけぶった銀河帯ぎんがたいのようなところを指さしながら、みんなに問といをかけました。 カムパネルラが手をあげました。それから四、五人手をあげました。ジョバンニも手をあげようとして、急いそいでそのままやめました。たしかにあれがみんな星だと、いつか雑誌ざっしで読んだのでしたが、このごろはジョバンニはまるで毎日教室でもねむく、本を読むひまも読む本もないので、なんだかどんなこともよくわからないという気持きもちがするのでした。 ところが先生は早くもそれを見つけたのでした。 「ジョバンニさん。あなたはわかっているのでしょう」 ジョバンニは勢いきおいよく立ちあがりましたが、立ってみるともうはっきりとそれを答えることができないのでした。ザネリが前の席せきからふりかえって、ジョバンニを見てくすっとわらいました。ジョバンニはもうどぎまぎしてまっ赤になってしまいました。先生がまた言いいました。 「大きな望遠鏡ぼうえんきょうで銀河ぎんがをよっく調しらべると銀河ぎんがはだいたい何でしょう」 やっぱり星だとジョバンニは思いましたが、こんどもすぐに答えることができませんでした。 先生はしばらく困こまったようすでしたが、眼めをカムパネルラの方へ向むけて、 「ではカムパネルラさん」と名指なざしました。 するとあんなに元気に手をあげたカムパネルラが、やはりもじもじ立ち上がったままやはり答えができませんでした。 先生は意外いがいなようにしばらくじっとカムパネルラを見ていましたが、急いそいで、 「では、よし」と言いいながら、自分で星図を指さしました。 「このぼんやりと白い銀河ぎんがを大きないい望遠鏡ぼうえんきょうで見ますと、もうたくさんの小さな星に見えるのです。ジョバンニさんそうでしょう」 ジョバンニはまっ赤かになってうなずきました。けれどもいつかジョバンニの眼めのなかには涙なみだがいっぱいになりました。そうだ僕ぼくは知っていたのだ、もちろんカムパネルラも知っている、それはいつかカムパネルラのお父さんの博士はかせのうちでカムパネルラといっしょに読んだ雑誌ざっしのなかにあったのだ。それどこでなくカムパネルラは、その雑誌ざっしを読むと、すぐお父さんの書斎しょさいから巨おおきな本をもってきて、ぎんがというところをひろげ、まっ黒な頁ページいっぱいに白に点々てんてんのある美うつくしい写真しゃしんを二人でいつまでも見たのでした。それをカムパネルラが忘わすれるはずもなかったのに、すぐに返事へんじをしなかったのは、このごろぼくが、朝にも午後にも仕事しごとがつらく、学校に出てももうみんなともはきはき遊あそばず、カムパネルラともあんまり物を言いわないようになったので、カムパネルラがそれを知ってきのどくがってわざと返事へんじをしなかったのだ、そう考えるとたまらないほど、じぶんもカムパネルラもあわれなような気がするのでした。 
先生はまた言いいました。 「ですからもしもこの天の川がほんとうに川だと考えるなら、その一つ一つの小さな星はみんなその川のそこの砂すなや砂利じゃりの粒つぶにもあたるわけです。またこれを巨おおきな乳ちちの流ながれと考えるなら、もっと天の川とよく似にています。つまりその星はみな、乳ちちのなかにまるで細こまかにうかんでいる脂油あぶらの球たまにもあたるのです。そんなら何がその川の水にあたるかと言いいますと、それは真空しんくうという光をある速はやさで伝つたえるもので、太陽たいようや地球ちきゅうもやっぱりそのなかに浮うかんでいるのです。つまりは私わたしどもも天の川の水のなかに棲すんでいるわけです。そしてその天の川の水のなかから四方を見ると、ちょうど水が深いほど青く見えるように、天の川の底そこの深ふかく遠いところほど星がたくさん集まって見え、したがって白くぼんやり見えるのです。この模型もけいをごらんなさい」 先生は中にたくさん光る砂すなのつぶのはいった大きな両面りょうめんの凸とつレンズを指さしました。 「天の川の形はちょうどこんななのです。このいちいちの光るつぶがみんな私わたしどもの太陽たいようと同じようにじぶんで光っている星だと考えます。私どもの太陽たいようがこのほぼ中ごろにあって地球ちきゅうがそのすぐ近くにあるとします。みなさんは夜にこのまん中に立ってこのレンズの中を見まわすとしてごらんなさい。こっちの方はレンズが薄うすいのでわずかの光る粒つぶすなわち星しか見えないでしょう。こっちやこっちの方はガラスが厚あついので、光る粒つぶすなわち星がたくさん見えその遠いのはぼうっと白く見えるという、これがつまり今日の銀河ぎんがの説せつなのです。そんならこのレンズの大きさがどれくらいあるか、またその中のさまざまの星についてはもう時間ですから、この次つぎの理科の時間にお話します。では今日はその銀河ぎんがのお祭まつりなのですから、みなさんは外へでてよくそらをごらんなさい。ではここまでです。本やノートをおしまいなさい」 そして教室じゅうはしばらく机つくえの蓋ふたをあけたりしめたり本を重かさねたりする音がいっぱいでしたが、まもなくみんなはきちんと立って礼れいをすると教室を出ました。 二 活版所かっぱんじょ ジョバンニが学校の門を出るとき、同じ組の七、八人は家へ帰らずカムパネルラをまん中にして校庭こうていの隅すみの桜さくらの木のところに集あつまっていました。それはこんやの星祭ほしまつりに青いあかりをこしらえて川へ流ながす烏瓜からすうりを取とりに行く相談そうだんらしかったのです。 けれどもジョバンニは手を大きく振ふってどしどし学校の門もんを出て来ました。すると町の家々ではこんやの銀河ぎんがの祭まつりにいちいの葉はの玉たまをつるしたり、ひのきの枝えだにあかりをつけたり、いろいろしたくをしているのでした。 家へは帰らずジョバンニが町を三つ曲まがってある大きな活版所かっぱんじょにはいって靴くつをぬいで上がりますと、突つき当たりの大きな扉とびらをあけました。中にはまだ昼ひるなのに電燈でんとうがついて、たくさんの輪転機りんてんきがばたりばたりとまわり、きれで頭をしばったりラムプシェードをかけたりした人たちが、何か歌うように読んだり数えたりしながらたくさん働はたらいておりました。 ジョバンニはすぐ入口から三番目の高い卓子テーブルにすわった人の所ところへ行っておじぎをしました。その人はしばらく棚たなをさがしてから、 「これだけ拾ひろって行けるかね」と言いいながら、一枚の紙切れを渡わたしました。ジョバンニはその人の卓子テーブルの足もとから一つの小さな平ひらたい函はこをとりだして向むこうの電燈でんとうのたくさんついた、たてかけてある壁かべの隅すみの所ところへしゃがみ込こむと、小さなピンセットでまるで粟粒あわつぶぐらいの活字かつじを次つぎから次つぎへと拾ひろいはじめました。青い胸むねあてをした人がジョバンニのうしろを通りながら、 「よう、虫めがね君くん、お早う」と言いいますと、近くの四、五人の人たちが声もたてずこっちも向むかずに冷つめたくわらいました。 ジョバンニは何べんも眼めをぬぐいながら活字かつじをだんだんひろいました。 六時がうってしばらくたったころ、ジョバンニは拾ひろった活字かつじをいっぱいに入れた平ひらたい箱はこをもういちど手にもった紙きれと引き合わせてから、さっきの卓子テーブルの人へ持もって来ました。その人は黙だまってそれを受うけ取とってかすかにうなずきました。 
ジョバンニはおじぎをすると扉とびらをあけて計算台のところに来ました。すると白服しろふくを着きた人がやっぱりだまって小さな銀貨ぎんかを一つジョバンニに渡わたしました。ジョバンニはにわかに顔いろがよくなって威勢いせいよくおじぎをすると、台の下に置おいた鞄かばんをもっておもてへ飛とびだしました。それから元気よく口笛くちぶえを吹ふきながらパン屋やへ寄よってパンの塊かたまりを一つと角砂糖かくざとうを一袋ふくろ買いますといちもくさんに走りだしました。 三 家 ジョバンニが勢いきおいよく帰って来たのは、ある裏町うらまちの小さな家でした。その三つならんだ入口のいちばん左側ひだりがわには空箱あきばこに紫むらさきいろのケールやアスパラガスが植うえてあって小さな二つの窓まどには日覆ひおおいがおりたままになっていました。 「お母さん、いま帰ったよ。ぐあい悪わるくなかったの」ジョバンニは靴くつをぬぎながら言いました。 「ああ、ジョバンニ、お仕事しごとがひどかったろう。今日きょうは涼すずしくてね。わたしはずうっとぐあいがいいよ」 ジョバンニは玄関げんかんを上がって行きますとジョバンニのお母さんがすぐ入口の室へやに白い巾きれをかぶって寝やすんでいたのでした。ジョバンニは窓まどをあけました。 「お母さん、今日は角砂糖かくざとうを買ってきたよ。牛乳ぎゅうにゅうに入れてあげようと思って」 「ああ、お前さきにおあがり。あたしはまだほしくないんだから」 「お母さん。姉ねえさんはいつ帰ったの」 「ああ、三時ころ帰ったよ。みんなそこらをしてくれてね」 「お母さんの牛乳ぎゅうにゅうは来ていないんだろうか」 「来なかったろうかねえ」 「ぼく行ってとって来よう」 「ああ、あたしはゆっくりでいいんだからお前さきにおあがり、姉ねえさんがね、トマトで何かこしらえてそこへ置おいて行ったよ」 「ではぼくたべよう」 ジョバンニは[#「 ジョバンニは」は底本では「「ジョバンニは」]窓まどのところからトマトの皿さらをとってパンといっしょにしばらくむしゃむしゃたべました。 「ねえお母さん。ぼくお父さんはきっとまもなく帰ってくると思うよ」 「ああ、あたしもそう思う。けれどもおまえはどうしてそう思うの」 「だって今朝けさの新聞に今年は北の方の漁りょうはたいへんよかったと書いてあったよ」 「ああだけどねえ、お父さんは漁りょうへ出ていないかもしれない」 「きっと出ているよ。お父さんが監獄かんごくへはいるようなそんな悪わるいことをしたはずがないんだ。この前お父さんが持ってきて学校へ寄贈きぞうした巨おおきな蟹かにの甲こうらだのとなかいの角つのだの今だってみんな標本室ひょうほんしつにあるんだ。六年生なんか授業じゅぎょうのとき先生がかわるがわる教室へ持もって行くよ」 「お父さんはこの次つぎはおまえにラッコの上着うわぎをもってくるといったねえ」 「みんながぼくにあうとそれを言いうよ。ひやかすように言いうんだ」 「おまえに悪口わるくちを言いうの」 「うん、けれどもカムパネルラなんか決けっして言いわない。カムパネルラはみんながそんなことを言いうときはきのどくそうにしているよ」 「カムパネルラのお父さんとうちのお父さんとは、ちょうどおまえたちのように小さいときからのお友達ともだちだったそうだよ」 「ああだからお父さんはぼくをつれてカムパネルラのうちへもつれて行ったよ。あのころはよかったなあ。ぼくは学校から帰る途中とちゅうたびたびカムパネルラのうちに寄よった。カムパネルラのうちにはアルコールランプで走る汽車があったんだ。レールを七つ組み合わせるとまるくなってそれに電柱でんちゅうや信号標しんごうひょうもついていて信号標しんごうひょうのあかりは汽車が通るときだけ青くなるようになっていたんだ。いつかアルコールがなくなったとき石油せきゆをつかったら、缶かんがすっかりすすけたよ」 「そうかねえ」 「いまも毎朝新聞をまわしに行くよ。けれどもいつでも家じゅうまだしいんとしているからな」 「早いからねえ」 「ザウエルという犬がいるよ。しっぽがまるで箒ほうきのようだ。ぼくが行くと鼻はなを鳴らしてついてくるよ。ずうっと町の角かどまでついてくる。もっとついてくることもあるよ。今夜はみんなで烏瓜からすうりのあかりを川へながしに行くんだって。きっと犬もついて行くよ」 「そうだ。今晩こんばんは銀河ぎんがのお祭まつりだねえ」 「うん。ぼく牛乳ぎゅうにゅうをとりながら見てくるよ」 「ああ行っておいで。川へははいらないでね」 「ああぼく岸きしから見るだけなんだ。一時間で行ってくるよ」 「もっと遊あそんでおいで。カムパネルラさんといっしょなら心配しんぱいはないから」 「ああきっといっしょだよ。お母さん、窓をしめておこうか」 
「ああ、どうか。もう涼すずしいからね」 ジョバンニは立って窓まどをしめ、お皿さらやパンの袋ふくろをかたづけると勢いきおいよく靴くつをはいて、 「では一時間半はんで帰ってくるよ」と言いいながら暗くらい戸口とぐちを出ました。 四 ケンタウル祭さいの夜 ジョバンニは、口笛くちぶえを吹ふいているようなさびしい口つきで、檜ひのきのまっ黒にならんだ町の坂さかをおりて来たのでした。 坂さかの下に大きな一つの街燈がいとうが、青白く立派りっぱに光って立っていました。ジョバンニが、どんどん電燈でんとうの方へおりて行きますと、いままでばけもののように、長くぼんやり、うしろへ引いていたジョバンニの影かげぼうしは、だんだん濃こく黒くはっきりなって、足をあげたり手を振ふったり、ジョバンニの横よこの方へまわって来るのでした。 (ぼくは立派りっぱな機関車きかんしゃだ。ここは勾配こうばいだから速はやいぞ。ぼくはいまその電燈でんとうを通り越こす。そうら、こんどはぼくの影法師かげぼうしはコンパスだ。あんなにくるっとまわって、前の方へ来た) とジョバンニが思いながら、大股おおまたにその街燈がいとうの下を通り過すぎたとき、いきなりひるまのザネリが、新しいえりのとがったシャツを着きて、電燈でんとうの向むこう側がわの暗くらい小路こうじから出て来て、ひらっとジョバンニとすれちがいました。 「ザネリ、烏瓜からすうりながしに行くの」ジョバンニがまだそう言いってしまわないうちに、 「ジョバンニ、お父さんから、ラッコの上着うわぎが来るよ」その子が投なげつけるようにうしろから叫さけびました。 ジョバンニは、ばっと胸むねがつめたくなり、そこらじゅうきいんと鳴るように思いました。 「なんだい、ザネリ」とジョバンニは高く叫さけび返かえしましたが、もうザネリは向むこうのひばの植うわった家の中へはいっていました。 (ザネリはどうしてぼくがなんにもしないのにあんなことを言いうのだろう。走るときはまるで鼠ねずみのようなくせに。ぼくがなんにもしないのにあんなことを言いうのはザネリがばかなからだ) ジョバンニは、せわしくいろいろのことを考えながら、さまざまの灯あかりや木の枝えだで、すっかりきれいに飾かざられた街まちを通って行きました。時計屋とけいやの店には明るくネオン燈とうがついて、一秒びょうごとに石でこさえたふくろうの赤い眼めが、くるっくるっとうごいたり、いろいろな宝石ほうせきが海のような色をした厚あつい硝子ガラスの盤ばんに載のって、星のようにゆっくり循めぐったり、また向むこう側がわから、銅どうの人馬がゆっくりこっちへまわって来たりするのでした。そのまん中にまるい黒い星座早見せいざはやみが青いアスパラガスの葉はで飾かざってありました。 ジョバンニはわれを忘わすれて、その星座せいざの図に見入りました。 それはひる学校で見たあの図よりはずうっと小さかったのですが、その日と時間に合わせて盤ばんをまわすと、そのとき出ているそらがそのまま楕円形だえんけいのなかにめぐってあらわれるようになっており、やはりそのまん中には上から下へかけて銀河ぎんががぼうとけむったような帯おびになって、その下の方ではかすかに爆発ばくはつして湯ゆげでもあげているように見えるのでした。またそのうしろには三本の脚あしのついた小さな望遠鏡ぼうえんきょうが黄いろに光って立っていましたし、いちばんうしろの壁かべには空じゅうの星座せいざをふしぎな獣けものや蛇へびや魚や瓶びんの形に書いた大きな図ずがかかっていました。ほんとうにこんなような蠍さそりだの勇士ゆうしだのそらにぎっしりいるだろうか、ああぼくはその中をどこまでも歩いてみたいと思ってたりしてしばらくぼんやり立っていました。 それからにわかにお母さんの牛乳ぎゅうにゅうのことを思いだしてジョバンニはその店をはなれました。 そしてきゅうくつな上着うわぎの肩かたを気にしながら、それでもわざと胸むねを張はって大きく手を振ふって町を通って行きました。 空気は澄すみきって、まるで水のように通りや店の中を流ながれましたし、街燈がいとうはみなまっ青なもみや楢ならの枝えだで包つつまれ、電気会社の前の六本のプラタナスの木などは、中にたくさんの豆電燈まめでんとうがついて、ほんとうにそこらは人魚の都みやこのように見えるのでした。子どもらは、みんな新しい折おりのついた着物きものを着きて、星めぐりの口笛くちぶえを吹ふいたり、 「ケンタウルス、露つゆをふらせ」と叫さけんで走ったり、青いマグネシヤの花火を燃もしたりして、たのしそうに遊あそんでいるのでした。けれどもジョバンニは、いつかまた深ふかく首くびをたれて、そこらのにぎやかさとはまるでちがったことを考えながら、牛乳屋ぎゅうにゅうやの方へ急いそぐのでした。 
ジョバンニは、いつか町はずれのポプラの木が幾本いくほんも幾本いくほんも、高く星ぞらに浮うかんでいるところに来ていました。その牛乳屋ぎゅうにゅうやの黒い門もんをはいり、牛のにおいのするうすくらい台所だいどころの前に立って、ジョバンニは帽子ぼうしをぬいで、 「今晩こんばんは」と言いいましたら、家の中はしいんとして誰だれもいたようではありませんでした。 「今晩こんばんは、ごめんなさい」ジョバンニはまっすぐに立ってまた叫さけびました。するとしばらくたってから、年とった女の人が、どこかぐあいが悪わるいようにそろそろと出て来て、何か用かと口の中で言いいました。 「あの、今日、牛乳ぎゅうにゅうが僕ぼく※[#小書き平仮名ん、183-7]とこへ来なかったので、もらいにあがったんです」ジョバンニが一生けん命めい勢いきおいよく言いいました。 「いま誰だれもいないでわかりません。あしたにしてください」その人は赤い眼めの下のとこをこすりながら、ジョバンニを見おろして言いいました。 「おっかさんが病気びょうきなんですから今晩こんばんでないと困こまるんです」 「ではもう少したってから来てください」その人はもう行ってしまいそうでした。 「そうですか。ではありがとう」ジョバンニは、お辞儀じぎをして台所だいどころから出ました。 十字になった町のかどを、まがろうとしましたら、向むこうの橋はしへ行く方の雑貨店ざっかてんの前で、黒い影かげやぼんやり白いシャツが入り乱みだれて、六、七人の生徒らが、口笛くちぶえを吹ふいたり笑わらったりして、めいめい烏瓜からすうりの燈火あかりを持もってやって来くるのを見みました。その笑わらい声も口笛くちぶえも、みんな聞きおぼえのあるものでした。ジョバンニの同級どうきゅうの子供こどもらだったのです。ジョバンニは思わずどきっとして戻もどろうとしましたが、思い直なおして、いっそう勢いきおいよくそっちへ歩いて行きました。 「川へ行くの」ジョバンニが言いおうとして、少しのどがつまったように思ったとき、 「ジョバンニ、ラッコの上着うわぎが来るよ」さっきのザネリがまた叫さけびました。 「ジョバンニ、ラッコの上着うわぎが来るよ」すぐみんなが、続つづいて叫さけびました。ジョバンニはまっ赤になって、もう歩いているかもわからず、急いそいで行きすぎようとしましたら、そのなかにカムパネルラがいたのです。カムパネルラはきのどくそうに、だまって少しわらって、おこらないだろうかというようにジョバンニの方を見ていました。 ジョバンニは、にげるようにその眼めを避さけ、そしてカムパネルラのせいの高いかたちが過すぎて行ってまもなく、みんなはてんでに口笛くちぶえを吹ふきました。町かどを曲まがるとき、ふりかえって見ましたら、ザネリがやはりふりかえって見ていました。そしてカムパネルラもまた、高く口笛くちぶえを吹ふいて向むこうにぼんやり見える橋はしの方へ歩いて行ってしまったのでした。ジョバンニは、なんとも言いえずさびしくなって、いきなり走りだしました。すると耳に手をあてて、わあわあと言いいながら片足かたあしでぴょんぴょん跳とんでいた小さな子供こどもらは、ジョバンニがおもしろくてかけるのだと思って、わあいと叫さけびました。 まもなくジョバンニは走りだして黒い丘おかの方へ急いそぎました。 五 天気輪てんきりんの柱はしら 牧場ぼくじょうのうしろはゆるい丘おかになって、その黒い平たいらな頂上ちょうじょうは、北の大熊星おおくまぼしの下に、ぼんやりふだんよりも低ひくく、連つらなって見えました。 ジョバンニは、もう露つゆの降おりかかった小さな林のこみちを、どんどんのぼって行きました。まっくらな草や、いろいろな形に見えるやぶのしげみの間を、その小さなみちが、一すじ白く星あかりに照てらしだされてあったのです。草の中には、ぴかぴか青びかりを出す小さな虫もいて、ある葉はは青くすかし出され、ジョバンニは、さっきみんなの持もって行った烏瓜からすうりのあかりのようだとも思いました。 そのまっ黒な、松まつや楢ならの林を越こえると、にわかにがらんと空がひらけて、天の川がしらしらと南から北へ亙わたっているのが見え、また頂いただきの、天気輪てんきりんの柱はしらも見わけられたのでした。つりがねそうか野ぎくかの花が、そこらいちめんに、夢ゆめの中からでもかおりだしたというように咲さき、鳥が一疋ぴき、丘おかの上を鳴き続つづけながら通って行きました。 ジョバンニは、頂いただきの天気輪てんきりんの柱はしらの下に来て、どかどかするからだを、つめたい草に投なげました。 
町の灯あかりは、暗やみの中をまるで海の底そこのお宮みやのけしきのようにともり、子供こどもらの歌う声や口笛くちぶえ、きれぎれの叫さけび声もかすかに聞こえて来るのでした。風が遠くで鳴り、丘おかの草もしずかにそよぎ、ジョバンニの汗あせでぬれたシャツもつめたく冷ひやされました。 野原から汽車の音が聞こえてきました。その小さな列車れっしゃの窓まどは一列いちれつ小さく赤く見え、その中にはたくさんの旅人たびびとが、苹果りんごをむいたり、わらったり、いろいろなふうにしていると考えますと、ジョバンニは、もうなんとも言いえずかなしくなって、また眼めをそらに挙あげました。 (この間原稿げんこう五枚分まいぶんなし) ところがいくら見ていても、そのそらは、ひる先生の言いったような、がらんとした冷つめたいとこだとは思われませんでした。それどころでなく、見れば見るほど、そこは小さな林や牧場ぼくじょうやらある野原のはらのように考えられてしかたなかったのです。そしてジョバンニは青い琴ことの星が、三つにも四つにもなって、ちらちらまたたき、脚あしが何べんも出たり引っ込こんだりして、とうとう蕈きのこのように長く延のびるのを見ました。またすぐ眼めの下のまちまでが、やっぱりぼんやりしたたくさんの星の集あつまりか一つの大きなけむりかのように見えるように思いました。 六 銀河ぎんがステーション そしてジョバンニはすぐうしろの天気輪てんきりんの柱はしらがいつかぼんやりした三角標さんかくひょうの形になって、しばらく蛍ほたるのように、ぺかぺか消きえたりともったりしているのを見ました。それはだんだんはっきりして、とうとうりんとうごかないようになり、濃こい鋼青はがねのそらの野原にたちました。いま新しく灼やいたばかりの青い鋼はがねの板いたのような、そらの野原に、まっすぐにすきっと立ったのです。 するとどこかで、ふしぎな声が、銀河ぎんがステーション、銀河ぎんがステーションと言いう声がしたと思うと、いきなり眼めの前が、ぱっと明るくなって、まるで億万おくまんの蛍烏賊ほたるいかの火を一ぺんに化石かせきさせて、そらじゅうに沈しずめたというぐあい、またダイアモンド会社で、ねだんがやすくならないために、わざと穫とれないふりをして、かくしておいた金剛石こんごうせきを、誰だれかがいきなりひっくりかえして、ばらまいたというふうに、眼めの前がさあっと明るくなって、ジョバンニは、思わず何べんも眼めをこすってしまいました。 気がついてみると、さっきから、ごとごとごとごと、ジョバンニの乗のっている小さな列車れっしゃが走りつづけていたのでした。ほんとうにジョバンニは、夜の軽便鉄道けいべんてつどうの、小さな黄いろの電燈でんとうのならんだ車室に、窓まどから外を見ながらすわっていたのです。車室の中は、青い天鵞絨ビロードを張はった腰掛こしかけが、まるでがらあきで、向むこうの鼠ねずみいろのワニスを塗ぬった壁かべには、真鍮しんちゅうの大きなぼたんが二つ光っているのでした。 すぐ前の席せきに、ぬれたようにまっ黒な上着うわぎを着た、せいの高い子供こどもが、窓から頭を出して外を見ているのに気がつきました。そしてそのこどもの肩かたのあたりが、どうも見たことのあるような気がして、そう思うと、もうどうしても誰だれだかわかりたくて、たまらなくなりました。いきなりこっちも窓まどから顔を出そうとしたとき、にわかにその子供こどもが頭を引っ込こめて、こっちを見ました。 それはカムパネルラだったのです。ジョバンニが、 カムパネルラ、きみは前からここにいたの、と言いおうと思ったとき、カムパネルラが、 「みんなはね、ずいぶん走ったけれども遅おくれてしまったよ。ザネリもね、ずいぶん走ったけれども追おいつかなかった」と言いいました。 ジョバンニは、 (そうだ、ぼくたちはいま、いっしょにさそって出かけたのだ)とおもいながら、 「どこかで待まっていようか」と言いいました。するとカムパネルラは、 「ザネリはもう帰ったよ。お父さんが迎むかいにきたんだ」 カムパネルラは、なぜかそう言いいながら、少し顔いろが青ざめて、どこか苦くるしいというふうでした。するとジョバンニも、なんだかどこかに、何か忘わすれたものがあるというような、おかしな気持きもちがしてだまってしまいました。 ところがカムパネルラは、窓まどから外をのぞきながら、もうすっかり元気が直なおって、勢いきおいよく言いいました。 「ああしまった。ぼく、水筒すいとうを忘わすれてきた。スケッチ帳ちょうも忘わすれてきた。けれどかまわない。もうじき白鳥の停車場ていしゃばだから。ぼく、白鳥を見るなら、ほんとうにすきだ。川の遠くを飛とんでいたって、ぼくはきっと見える」 
そして、カムパネルラは、まるい板いたのようになった地図ちずを、しきりにぐるぐるまわして見ていました。まったく、その中に、白くあらわされた天の川の左の岸きしに沿そって一条じょうの鉄道線路てつどうせんろが、南へ南へとたどって行くのでした。そしてその地図の立派りっぱなことは、夜のようにまっ黒な盤ばんの上に、一々の停車場ていしゃばや三角標さんかくひょう、泉水せんすいや森が、青や橙だいだいや緑みどりや、うつくしい光でちりばめられてありました。 ジョバンニはなんだかその地図をどこかで見たようにおもいました。 「この地図ちずはどこで買ったの。黒曜石こくようせきでできてるねえ」 ジョバンニが言いいました。 「銀河ぎんがステーションで、もらったんだ。君きみもらわなかったの」 「ああ、ぼく銀河ぎんがステーションを通ったろうか。いまぼくたちのいるとこ、ここだろう」 ジョバンニは、白鳥と書いてある停車場ていしゃばのしるしの、すぐ北を指さしました。 「そうだ。おや、あの河原かわらは月夜だろうか」そっちを見ますと、青白く光る銀河ぎんがの岸きしに、銀ぎんいろの空のすすきが、もうまるでいちめん、風にさらさらさらさら、ゆられてうごいて、波なみを立てているのでした。 「月夜でないよ。銀河ぎんがだから光るんだよ」ジョバンニは言いいながら、まるではね上がりたいくらい愉快ゆかいになって、足をこつこつ鳴らし、窓まどから顔を出して、高く高く星めぐりの口笛くちぶえを吹ふきながら一生けん命めい延のびあがって、その天の川の水を、見きわめようとしましたが、はじめはどうしてもそれが、はっきりしませんでした。けれどもだんだん気をつけて見ると、そのきれいな水は、ガラスよりも水素すいそよりもすきとおって、ときどき眼めのかげんか、ちらちら紫むらさきいろのこまかな波なみをたてたり、虹にじのようにぎらっと光ったりしながら、声もなくどんどん流ながれて行き、野原にはあっちにもこっちにも、燐光りんこうの三角標さんかくひょうが、うつくしく立っていたのです。遠いものは小さく、近いものは大きく、遠いものは橙だいだいや黄いろではっきりし、近いものは青白く少しかすんで、あるいは三角形さんかくけい、あるいは四辺形しへんけい、あるいは電いなずまや鎖くさりの形、さまざまにならんで、野原いっぱいに光っているのでした。ジョバンニは、まるでどきどきして、頭をやけに振ふりました。するとほんとうに、そのきれいな野原のはらじゅうの青や橙だいだいや、いろいろかがやく三角標さんかくひょうも、てんでに息をつくように、ちらちらゆれたり顫ふるえたりしました。 「ぼくはもう、すっかり天の野原に来た」ジョバンニは言いいました。 「それに、この汽車石炭せきたんをたいていないねえ」ジョバンニが左手をつき出して窓まどから前の方を見ながら言いいました。 「アルコールか電気だろう」カムパネルラが言いいました。 するとちょうど、それに返事へんじするように、どこか遠くの遠くのもやのもやの中から、セロのようなごうごうした声がきこえて来ました。 「ここの汽車は、スティームや電気でうごいていない。ただうごくようにきまっているからうごいているのだ。ごとごと音をたてていると、そうおまえたちは思っているけれども、それはいままで音をたてる汽車にばかりなれているためなのだ」 「あの声、ぼくなんべんもどこかできいた」 「ぼくだって、林の中や川で、何べんも聞いた」 ごとごとごとごと、その小さなきれいな汽車は、そらのすすきの風にひるがえる中を、天の川の水や、三角点さんかくてんの青じろい微光びこうの中を、どこまでもどこまでもと、走って行くのでした。 「ああ、りんどうの花が咲さいている。もうすっかり秋だねえ」カムパネルラが、窓まどの外を指ゆびさして言いいました。 線路せんろのへりになったみじかい芝草しばくさの中に、月長石げっちょうせきででも刻きざまれたような、すばらしい紫むらさきのりんどうの花が咲さいていました。 「ぼく飛とびおりて、あいつをとって、また飛とび乗のってみせようか」ジョバンニは胸むねをおどらせて言いいました。 「もうだめだ。あんなにうしろへ行ってしまったから」 カムパネルラが、そう言いってしまうかしまわないうち、次つぎのりんどうの花が、いっぱいに光って過すぎて行きました。 と思ったら、もう次つぎから次つぎから、たくさんのきいろな底そこをもったりんどうの花のコップが、湧わくように、雨のように、眼めの前を通り、三角標さんかくひょうの列れつは、けむるように燃もえるように、いよいよ光って立ったのです。 七 北十字きたじゅうじとプリオシン海岸かいがん 「おっかさんは、ぼくをゆるしてくださるだろうか」 
いきなり、カムパネルラが、思い切ったというように、少しどもりながら、せきこんで言いいました。 ジョバンニは、 (ああ、そうだ、ぼくのおっかさんは、あの遠い一つのちりのように見える橙だいだいいろの三角標さんかくひょうのあたりにいらっしゃって、いまぼくのことを考えているんだった)と思いながら、ぼんやりしてだまっていました。 「ぼくはおっかさんが、ほんとうに幸さいわいになるなら、どんなことでもする。けれども、いったいどんなことが、おっかさんのいちばんの幸さいわいなんだろう」カムパネルラは、なんだか、泣なきだしたいのを、一生けん命めいこらえているようでした。 「きみのおっかさんは、なんにもひどいことないじゃないの」ジョバンニはびっくりして叫さけびました。 「ぼくわからない。けれども、誰だれだって、ほんとうにいいことをしたら、いちばん幸さいわいなんだねえ。だから、おっかさんは、ぼくをゆるしてくださると思う」カムパネルラは、なにかほんとうに決心けっしんしているように見えました。 にわかに、車のなかが、ぱっと白く明るくなりました。見ると、もうじつに、金剛石こんごうせきや草の露つゆやあらゆる立派りっぱさをあつめたような、きらびやかな銀河ぎんがの河床かわどこの上を、水は声もなくかたちもなく流ながれ、その流ながれのまん中に、ぼうっと青白く後光ごこうの射さした一つの島しまが見えるのでした。その島しまの平たいらないただきに、立派りっぱな眼めもさめるような、白い十字架じゅうじかがたって、それはもう、凍こおった北極ほっきょくの雲で鋳いたといったらいいか、すきっとした金いろの円光をいただいて、しずかに永久えいきゅうに立っているのでした。 「ハレルヤ、ハレルヤ」前からもうしろからも声が起おこりました。ふりかえって見ると、車室の中の旅人たびびとたちは、みなまっすぐにきもののひだを垂たれ、黒いバイブルを胸むねにあてたり、水晶すいしょうの数珠じゅずをかけたり、どの人もつつましく指ゆびを組み合わせて、そっちに祈いのっているのでした。思わず二人ふたりともまっすぐに立ちあがりました。カムパネルラの頬ほおは、まるで熟じゅくした苹果りんごのあかしのようにうつくしくかがやいて見えました。 そして島しまと十字架じゅうじかとは、だんだんうしろの方へうつって行きました。 向むこう岸ぎしも、青じろくぼうっと光ってけむり、時々、やっぱりすすきが風にひるがえるらしく、さっとその銀ぎんいろがけむって、息いきでもかけたように見え、また、たくさんのりんどうの花が、草をかくれたり出たりするのは、やさしい狐火きつねびのように思われました。 それもほんのちょっとの間、川と汽車との間は、すすきの列れつでさえぎられ、白鳥の島しまは、二度どばかり、うしろの方に見えましたが、じきもうずうっと遠く小さく、絵えのようになってしまい、またすすきがざわざわ鳴って、とうとうすっかり見えなくなってしまいました。ジョバンニのうしろには、いつから乗のっていたのか、せいの高い、黒いかつぎをしたカトリックふうの尼あまさんが、まんまるな緑みどりの瞳ひとみを、じっとまっすぐに落おとして、まだ何かことばか声かが、そっちから伝つたわって来るのを、虔つつしんで聞いているというように見えました。旅人たびびとたちはしずかに席せきに戻もどり、二人ふたりも胸むねいっぱいのかなしみに似にた新しい気持きもちを、何気なくちがった語ことばで、そっと談はなし合ったのです。 「もうじき白鳥の停車場ていしゃばだねえ」 「ああ、十一時かっきりには着つくんだよ」 早くも、シグナルの緑みどりの燈と、ぼんやり白い柱はしらとが、ちらっと窓まどのそとを過すぎ、それから硫黄いおうのほのおのようなくらいぼんやりした転てんてつ機きの前のあかりが窓まどの下を通り、汽車はだんだんゆるやかになって、まもなくプラットホームの一列れつの電燈でんとうが、うつくしく規則きそく正しくあらわれ、それがだんだん大きくなってひろがって、二人はちょうど白鳥停車場ていしゃじょうの、大きな時計とけいの前に来てとまりました。 さわやかな秋の時計とけいの盤面ばんめんには、青く灼やかれたはがねの二本の針はりが、くっきり十一時を指さしました。みんなは、一ぺんにおりて、車室の中はがらんとなってしまいました。 〔二十分停車ていしゃ〕と時計とけいの下に書いてありました。 「ぼくたちも降おりて見ようか」ジョバンニが言いいました。 
「降おりよう」二人ふたりは一度どにはねあがってドアを飛とび出して改札口かいさつぐちへかけて行きました。ところが改札口かいさつぐちには、明るい紫むらさきがかった電燈でんとうが、一つ点ついているばかり、誰だれもいませんでした。そこらじゅうを見ても、駅長えきちょうや赤帽あかぼうらしい人の、影かげもなかったのです。 二人ふたりは、停車場ていしゃばの前の、水晶細工すいしょうざいくのように見える銀杏いちょうの木に囲かこまれた、小さな広場に出ました。 そこから幅はばの広いみちが、まっすぐに銀河ぎんがの青光あおびかりの中へ通っていました。 さきに降おりた人たちは、もうどこへ行ったか一人ひとりも見えませんでした。二人ふたりがその白い道を、肩かたをならべて行きますと、二人ふたりの影かげは、ちょうど四方に窓まどのある室へやの中の、二本の柱はしらの影かげのように、また二つの車輪しゃりんの輻やのように幾本いくほんも幾本いくほんも四方へ出るのでした。そしてまもなく、あの汽車から見えたきれいな河原かわらに来ました。 カムパネルラは、そのきれいな砂すなを一つまみ、掌てのひらにひろげ、指ゆびできしきしさせながら、夢ゆめのように言いっているのでした。 「この砂すなはみんな水晶すいしょうだ。中で小さな火が燃もえている」 「そうだ」どこでぼくは、そんなことを習ならったろうと思いながら、ジョバンニもぼんやり答えていました。 河原かわらの礫こいしは、みんなすきとおって、たしかに水晶すいしょうや黄玉トパーズや、またくしゃくしゃの皺曲しゅうきょくをあらわしたのや、また稜かどから霧きりのような青白い光を出す鋼玉コランダムやらでした。ジョバンニは、走ってその渚なぎさに行って、水に手をひたしました。けれどもあやしいその銀河ぎんがの水は、水素すいそよりももっとすきとおっていたのです。それでもたしかに流ながれていたことは、二人ふたりの手首てくびの、水にひたったとこが、少し水銀すいぎんいろに浮ういたように見え、その手首てくびにぶっつかってできた波なみは、うつくしい燐光りんこうをあげて、ちらちらと燃もえるように見えたのでもわかりました。 川上の方を見ると、すすきのいっぱいにはえている崖がけの下に、白い岩いわが、まるで運動場うんどうじょうのように平たいらに川に沿そって出ているのでした。そこに小さな五、六人の人かげが、何か掘ほり出すか埋うめるかしているらしく、立ったりかがんだり、時々なにかの道具どうぐが、ピカッと光ったりしました。 「行ってみよう」二人ふたりは、まるで一度どに叫さけんで、そっちの方へ走りました。その白い岩いわになったところの入口に、〔プリオシン海岸かいがん〕という、瀬戸物せともののつるつるした標札ひょうさつが立って、向こうの渚なぎさには、ところどころ、細ほそい鉄てつの欄干らんかんも植うえられ、木製もくせいのきれいなベンチも置おいてありました。 「おや、変へんなものがあるよ」カムパネルラが、不思議ふしぎそうに立ちどまって、岩いわから黒い細長ほそながいさきのとがったくるみの実みのようなものをひろいました。 「くるみの実みだよ。そら、たくさんある。流ながれて来たんじゃない。岩いわの中にはいってるんだ」 「大きいね、このくるみ、倍ばいあるね。こいつはすこしもいたんでない」 「早くあすこへ行って見よう。きっと何か掘ほってるから」 二人ふたりは、ぎざぎざの黒いくるみの実みを持もちながら、またさっきの方へ近よって行きました。左手の渚なぎさには、波なみがやさしい稲妻いなずまのように燃もえて寄よせ、右手の崖がけには、いちめん銀ぎんや貝殻かいがらでこさえたようなすすきの穂ほがゆれたのです。 だんだん近づいて見ると、一人のせいの高い、ひどい近眼鏡きんがんきょうをかけ、長靴ながぐつをはいた学者がくしゃらしい人が、手帳てちょうに何かせわしそうに書きつけながら、つるはしをふりあげたり、スコップをつかったりしている、三人の助手じょしゅらしい人たちに夢中むちゅうでいろいろ指図さしずをしていました。 「そこのその突起とっきをこわさないように、スコップを使いたまえ、スコップを。おっと、も少し遠くから掘ほって。いけない、いけない、なぜそんな乱暴らんぼうをするんだ」 見ると、その白い柔やわらかな岩いわの中から、大きな大きな青じろい獣けものの骨ほねが、横に倒たおれてつぶれたというふうになって、半分以上はんぶんいじょう掘ほり出されていました。そして気をつけて見ると、そこらには、蹄ひづめの二つある足跡あしあとのついた岩いわが、四角しかくに十ばかり、きれいに切り取られて番号ばんごうがつけられてありました。 「君たちは参観さんかんかね」その大学士だいがくしらしい人が、眼鏡めがねをきらっとさせて、こっちを見て話しかけました。 
「くるみがたくさんあったろう。それはまあ、ざっと百二十万年まんねんぐらい前のくるみだよ。ごく新しい方さ。ここは百二十万年前まんねんまえ、第三紀だいさんきのあとのころは海岸かいがんでね、この下からは貝かいがらも出る。いま川の流れているとこに、そっくり塩水しおみずが寄よせたり引いたりもしていたのだ。このけものかね、これはボスといってね、おいおい、そこ、つるはしはよしたまえ。ていねいに鑿のみでやってくれたまえ。ボスといってね、いまの牛うしの先祖せんぞで、昔むかしはたくさんいたのさ」 「標本ひょうほんにするんですか」 「いや、証明しょうめいするに要いるんだ。ぼくらからみると、ここは厚あつい立派りっぱな地層ちそうで、百二十万年まんねんぐらい前にできたという証拠しょうこもいろいろあがるけれども、ぼくらとちがったやつからみてもやっぱりこんな地層ちそうに見えるかどうか、あるいは風か水や、がらんとした空かに見えやしないかということなのだ。わかったかい。けれども、おいおい、そこもスコップではいけない。そのすぐ下に肋骨ろっこつが埋うもれてるはずじゃないか」 大学士だいがくしはあわてて走って行きました。 「もう時間だよ。行こう」カムパネルラが地図と腕時計うでどけいとをくらべながら言いいました。 「ああ、ではわたくしどもは失礼しつれいいたします」ジョバンニは、ていねいに大学士だいがくしにおじぎしました。 「そうですか。いや、さよなら」大学士だいがくしは、また忙いそがしそうに、あちこち歩きまわって監督かんとくをはじめました。 二人ふたりは、その白い岩いわの上を、一生けん命めい汽車におくれないように走りました。そしてほんとうに、風のように走れたのです。息いきも切れず膝ひざもあつくなりませんでした。 こんなにしてかけるなら、もう世界せかいじゅうだってかけれると、ジョバンニは思いました。 そして二人ふたりは、前のあの河原かわらを通り、改札口かいさつぐちの電燈でんとうがだんだん大きくなって、まもなく二人ふたりは、もとの車室の席せきにすわっていま行って来た方を、窓まどから見ていました。 八 鳥を捕とる人 「ここへかけてもようございますか」 がさがさした、けれども親切そうな、大人おとなの声が、二人ふたりのうしろで聞こえました。 それは、茶いろの少しぼろぼろの外套がいとうを着きて、白い巾きれでつつんだ荷物にもつを、二つに分けて肩かたに掛かけた、赤髯あかひげのせなかのかがんだ人でした。 「ええ、いいんです」ジョバンニは、少し肩かたをすぼめてあいさつしました。その人は、ひげの中でかすかに微笑わらいながら荷物にもつをゆっくり網棚あみだなにのせました。ジョバンニは、なにかたいへんさびしいようなかなしいような気がして、だまって正面しょうめんの時計とけいを見ていましたら、ずうっと前の方で、硝子ガラスの笛ふえのようなものが鳴りました。汽車はもう、しずかにうごいていたのです。カムパネルラは、車室の天井てんじょうを、あちこち見ていました。その一つのあかりに黒い甲虫かぶとむしがとまって、その影かげが大きく天井てんじょうにうつっていたのです。赤ひげの人は、なにかなつかしそうにわらいながら、ジョバンニやカムパネルラのようすを見ていました。汽車はもうだんだん早くなって、すすきと川と、かわるがわる窓まどの外から光りました。 赤ひげの人が、少しおずおずしながら、二人に訊ききました。 「あなた方は、どちらへいらっしゃるんですか」 「どこまでも行くんです」ジョバンニは、少しきまり悪わるそうに答えました。 「それはいいね。この汽車は、じっさい、どこまででも行きますぜ」 「あなたはどこへ行くんです」カムパネルラが、いきなり、喧嘩けんかのようにたずねましたので、ジョバンニは思わずわらいました。すると、向むこうの席せきにいた、とがった帽子ぼうしをかぶり、大きな鍵かぎを腰こしに下げた人も、ちらっとこっちを見てわらいましたので、カムパネルラも、つい顔を赤くして笑わらいだしてしまいました。ところがその人は別べつにおこったでもなく、頬ほおをぴくぴくしながら返事へんじをしました。 「わっしはすぐそこで降おります。わっしは、鳥をつかまえる商売しょうばいでね」 「何鳥ですか」 「鶴つるや雁がんです。さぎも白鳥もです」 「鶴つるはたくさんいますか」 「いますとも、さっきから鳴いてまさあ。聞かなかったのですか」 「いいえ」 「いまでも聞こえるじゃありませんか。そら、耳をすまして聴きいてごらんなさい」 二人ふたりは眼めを挙あげ、耳をすましました。ごとごと鳴る汽車のひびきと、すすきの風との間から、ころんころんと水の湧わくような音が聞こえて来るのでした。 「鶴つる、どうしてとるんですか」 「鶴つるですか、それとも鷺さぎですか」 
「鷺さぎです」ジョバンニは、どっちでもいいと思いながら答えました。 「そいつはな、雑作ぞうさない。さぎというものは、みんな天の川の砂すなが凝かたまって、ぼおっとできるもんですからね、そして始終しじゅう川へ帰りますからね、川原で待まっていて、鷺さぎがみんな、脚あしをこういうふうにしておりてくるとこを、そいつが地べたへつくかつかないうちに、ぴたっと押おさえちまうんです。するともう鷺さぎは、かたまって安心あんしんして死しんじまいます。あとはもう、わかり切ってまさあ。押おし葉ばにするだけです」 「鷺さぎを押おし葉ばにするんですか。標本ひょうほんですか」 「標本ひょうほんじゃありません。みんなたべるじゃありませんか」 「おかしいねえ」カムパネルラが首くびをかしげました。 「おかしいも不審ふしんもありませんや。そら」その男は立って、網棚あみだなから包つつみをおろして、手ばやくくるくると解ときました。 「さあ、ごらんなさい。いまとって来たばかりです」 「ほんとうに鷺さぎだねえ」二人ふたりは思わず叫さけびました。まっ白な、あのさっきの北の十字架じゅうじかのように光る鷺さぎのからだが、十ばかり、少しひらべったくなって、黒い脚あしをちぢめて、浮彫うきぼりのようにならんでいたのです。 「眼めをつぶってるね」カムパネルラは、指ゆびでそっと、鷺さぎの三日月みかづきがたの白いつぶった眼めにさわりました。頭の上の槍やりのような白い毛もちゃんとついていました。 「ね、そうでしょう」鳥捕とりとりは風呂敷ふろしきを重かさねて、またくるくると包つつんで紐ひもでくくりました。誰だれがいったいここらで鷺さぎなんぞたべるだろうとジョバンニは思いながら訊ききました。 「鷺さぎはおいしいんですか」 「ええ、毎日注文ちゅうもんがあります。しかし雁がんの方が、もっと売れます。雁がんの方がずっと柄がらがいいし、第一だいいち手数てすうがありませんからな。そら」鳥捕とりとりは、また別べつの方の包つつみを解ときました。すると黄と青じろとまだらになって、なにかのあかりのようにひかる雁がんが、ちょうどさっきの鷺さぎのように、くちばしをそろえて、少しひらべったくなって、ならんでいました。 「こっちはすぐたべられます。どうです、少しおあがりなさい」鳥捕とりとりは、黄いろの雁がんの足を、軽かるくひっぱりました。するとそれは、チョコレートででもできているように、すっときれいにはなれました。 「どうです。すこしたべてごらんなさい」鳥捕とりとりは、それを二つにちぎってわたしました。ジョバンニは、ちょっとたべてみて、 (なんだ、やっぱりこいつはお菓子かしだ。チョコレートよりも、もっとおいしいけれども、こんな雁がんが飛とんでいるもんか。この男は、どこかそこらの野原の菓子屋かしやだ。けれどもぼくは、このひとをばかにしながら、この人のお菓子かしをたべているのは、たいへんきのどくだ)とおもいながら、やっぱりぽくぽくそれをたべていました。 「も少しおあがりなさい」鳥捕とりとりがまた包つつみを出しました。ジョバンニは、もっとたべたかったのですけれども、 「ええ、ありがとう」といって遠慮えんりょしましたら、鳥捕とりとりは、こんどは向むこうの席せきの、鍵かぎをもった人に出しました。 「いや、商売しょうばいものをもらっちゃすみませんな」その人は、帽子ぼうしをとりました。 「いいえ、どういたしまして。どうです、今年の渡わたり鳥どりの景気けいきは」 「いや、すてきなもんですよ。一昨日おとといの第二限だいにげんころなんか、なぜ燈台とうだいの灯ひを、規則以外きそくいがいに間(一時空白)させるかって、あっちからもこっちからも、電話で故障こしょうが来ましたが、なあに、こっちがやるんじゃなくて、渡わたり鳥どりどもが、まっ黒にかたまって、あかしの前を通るのですからしかたありませんや、わたしぁ、べらぼうめ、そんな苦情くじょうは、おれのとこへ持もって来たってしかたがねえや、ばさばさのマントを着きて脚あしと口との途方とほうもなく細ほそい大将たいしょうへやれって、こう言いってやりましたがね、はっは」 すすきがなくなったために、向むこうの野原から、ぱっとあかりが射さして来ました。 「鷺さぎの方はなぜ手数てすうなんですか」カムパネルラは、さっきから、訊きこうと思っていたのです。 「それはね、鷺さぎをたべるには」鳥捕とりとりは、こっちに向むき直なおりました。「天の川の水あかりに、十日もつるしておくかね、そうでなけぁ、砂すなに三、四日うずめなけぁいけないんだ。そうすると、水銀すいぎんがみんな蒸発じょうはつして、たべられるようになるよ」 
「こいつは鳥じゃない。ただのお菓子かしでしょう」やっぱりおなじことを考えていたとみえて、カムパネルラが、思い切ったというように、尋たずねました。鳥捕とりとりは、何かたいへんあわてたふうで、 「そうそう、ここで降おりなけぁ」と言いいながら、立って荷物にもつをとったと思うと、もう見えなくなっていました。 「どこへ行ったんだろう」二人ふたりは顔を見合わせましたら、燈台守とうだいもりは、にやにや笑わらって、少し伸のびあがるようにしながら、二人の横よこの窓まどの外をのぞきました。二人ふたりもそっちを見ましたら、たったいまの鳥捕とりとりが、黄いろと青じろの、うつくしい燐光りんこうを出す、いちめんのかわらははこぐさの上に立って、まじめな顔をして両手りょうてをひろげて、じっとそらを見ていたのです。 「あすこへ行ってる。ずいぶん奇体きたいだねえ。きっとまた鳥をつかまえるとこだねえ。汽車が走って行かないうちに、早く鳥がおりるといいな」と言いったとたん、がらんとした桔梗ききょういろの空から、さっき見たような鷺さぎが、まるで雪の降ふるように、ぎゃあぎゃあ叫さけびながら、いっぱいに舞まいおりて来ました。するとあの鳥捕とりとりは、すっかり注文ちゅうもん通りだというようにほくほくして、両足りょうあしをかっきり六十度どに開いて立って、鷺さぎのちぢめて降おりて来る黒い脚あしを両手りょうてで片かたっぱしから押おさえて、布ぬのの袋ふくろの中に入れるのでした。すると鷺さぎは、蛍ほたるのように、袋ふくろの中でしばらく、青くぺかぺか光ったり消きえたりしていましたが、おしまいとうとう、みんなぼんやり白くなって、眼めをつぶるのでした。ところが、つかまえられる鳥よりは、つかまえられないで無事ぶじに天の川の砂すなの上に降おりるものの方が多おおかったのです。それは見ていると、足が砂すなへつくや否いなや、まるで雪ゆきの解とけるように、縮ちぢまってひらべったくなって、まもなく溶鉱炉ようこうろから出た銅どうの汁しるのように、砂すなや砂利じゃりの上にひろがり、しばらくは鳥の形が、砂すなについているのでしたが、それも二、三度ど明るくなったり暗くらくなったりしているうちに、もうすっかりまわりと同じいろになってしまうのでした。 鳥捕とりとりは、二十疋ぴきばかり、袋ふくろに入れてしまうと、急きゅうに両手りょうてをあげて、兵隊へいたいが鉄砲弾てっぽうだまにあたって、死しぬときのような形をしました。と思ったら、もうそこに鳥捕とりとりの形はなくなって、かえって、 「ああせいせいした。どうもからだにちょうど合うほど稼かせいでいるくらい、いいことはありませんな」というききおぼえのある声が、ジョバンニの隣となりにしました。見ると鳥捕とりとりは、もうそこでとって来た鷺さぎを、きちんとそろえて、一つずつ重かさね直なおしているのでした。 「どうして、あすこから、いっぺんにここへ来たんですか」ジョバンニが、なんだかあたりまえのような、あたりまえでないような、おかしな気がして問といました。 「どうしてって、来ようとしたから来たんです。ぜんたいあなた方は、どちらからおいでですか」 ジョバンニは、すぐ返事へんじをしようと思いましたけれども、さあ、ぜんたいどこから来たのか、もうどうしても考えつきませんでした。カムパネルラも、顔をまっ赤にして何か思い出そうとしているのでした。 「ああ、遠くからですね」鳥捕とりとりは、わかったというように雑作ぞうさなくうなずきました。 九 ジョバンニの切符きっぷ 「もうここらは白鳥区くのおしまいです。ごらんなさい。あれが名高いアルビレオの観測所かんそくじょです」 
窓まどの外の、まるで花火でいっぱいのような、あまの川のまん中に、黒い大きな建物たてものが四棟むねばかり立って、その一つの平屋根ひらやねの上に、眼めもさめるような、青宝玉サファイアと黄玉トパーズの大きな二つのすきとおった球たまが、輪わになってしずかにくるくるとまわっていました。黄いろのがだんだん向むこうへまわって行って、青い小さいのがこっちへ進すすんで来、まもなく二つのはじは、重かさなり合って、きれいな緑みどりいろの両面凸りょうめんとつレンズのかたちをつくり、それもだんだん、まん中がふくらみだして、とうとう青いのは、すっかりトパーズの正面しょうめんに来ましたので、緑みどりの中心と黄いろな明るい環わとができました。それがまただんだん横よこへ外それて、前のレンズの形を逆ぎゃくにくり返かえし、とうとうすっとはなれて、サファイアは向むこうへめぐり、黄いろのはこっちへ進すすみ、またちょうどさっきのようなふうになりました。銀河ぎんがの、かたちもなく音もない水にかこまれて、ほんとうにその黒い測候所そっこうじょが、睡ねむっているように、しずかによこたわったのです。 「あれは、水の速はやさをはかる器械きかいです。水も……」鳥捕とりとりが言いいかけたとき、 「切符きっぷを拝見はいけんいたします」三人の席せきの横よこに、赤い帽子ぼうしをかぶったせいの高い車掌しゃしょうが、いつかまっすぐに立っていて言いいました。鳥捕とりとりは、だまってかくしから、小さな紙きれを出しました。車掌しゃしょうはちょっと見て、すぐ眼めをそらして(あなた方のは?)というように、指ゆびをうごかしながら、手をジョバンニたちの方へ出しました。 「さあ」ジョバンニは困こまって、もじもじしていましたら、カムパネルラはわけもないというふうで、小さな鼠ねずみいろの切符きっぷを出しました。ジョバンニは、すっかりあわててしまって、もしか上着うわぎのポケットにでも、はいっていたかとおもいながら、手を入れてみましたら、何か大きなたたんだ紙きれにあたりました。こんなものはいっていたろうかと思って、急いそいで出してみましたら、それは四つに折おったはがきぐらいの大さ[#「大さ」はママ]の緑みどりいろの紙でした。車掌しゃしょうが手を出しているもんですからなんでもかまわない、やっちまえと思って渡わたしましたら、車掌しゃしょうはまっすぐに立ち直なおってていねいにそれを開いて見ていました。そして読みながら上着うわぎのぼたんやなんかしきりに直なおしたりしていましたし燈台看守とうだいかんしゅも下からそれを熱心ねっしんにのぞいていましたから、ジョバンニはたしかにあれは証明書しょうめいしょか何かだったと考えて少し胸むねが熱あつくなるような気がしました。 「これは三次空間じくうかんの方からお持もちになったのですか」車掌しゃしょうがたずねました。 「なんだかわかりません」もう大丈夫だいじょうぶだと安心しながらジョバンニはそっちを見あげてくつくつ笑わらいました。 「よろしゅうございます。南十字サウザンクロスへ着つきますのは、次つぎの第だい三時ころになります」車掌しゃしょうは紙をジョバンニに渡わたして向むこうへ行きました。 カムパネルラは、その紙切れが何だったか待まちかねたというように急いそいでのぞきこみました。ジョバンニも全まったく早く見たかったのです。ところがそれはいちめん黒い唐草からくさのような模様もようの中に、おかしな十ばかりの字を印刷いんさつしたもので、だまって見ているとなんだかその中へ吸すい込こまれてしまうような気がするのでした。すると鳥捕とりとりが横からちらっとそれを見てあわてたように言いいました。 「おや、こいつはたいしたもんですぜ。こいつはもう、ほんとうの天上へさえ行ける切符きっぷだ。天上どこじゃない、どこでもかってにあるける通行券つうこうけんです。こいつをお持もちになれぁ、なるほど、こんな不完全ふかんぜんな幻想第四次げんそうだいよじの銀河鉄道ぎんがてつどうなんか、どこまででも行けるはずでさあ、あなた方たいしたもんですね」 「なんだかわかりません」ジョバンニが赤くなって答えながら、それをまたたたんでかくしに入れました。そしてきまりが悪わるいのでカムパネルラと二人ふたり、また窓まどの外をながめていましたが、その鳥捕とりとりの時々たいしたもんだというように、ちらちらこっちを見ているのがぼんやりわかりました。 「もうじき鷲わしの停車場ていしゃじょうだよ」カムパネルラが向むこう岸ぎしの、三つならんだ小さな青じろい三角標さんかくひょうと、地図とを見くらべて言いいました。 
ジョバンニはなんだかわけもわからずに、にわかにとなりの鳥捕とりとりがきのどくでたまらなくなりました。鷺さぎをつかまえてせいせいしたとよろこんだり、白いきれでそれをくるくる包つつんだり、ひとの切符きっぷをびっくりしたように横目よこめで見てあわててほめだしたり、そんなことを一々考えていると、もうその見ず知らずの鳥捕とりとりのために、ジョバンニの持もっているものでも食べるものでもなんでもやってしまいたい、もうこの人のほんとうの幸さいわいになるなら、自分があの光る天の川の河原かわらに立って百年つづけて立って鳥をとってやってもいいというような気がして、どうしてももう黙だまっていられなくなりました。ほんとうにあなたのほしいものはいったい何ですかと訊きこうとして、それではあんまり出し抜ぬけだから、どうしようかと考えてふり返かえって見ましたら、そこにはもうあの鳥捕とりとりがいませんでした。網棚あみだなの上には白い荷物にもつも見えなかったのです。また窓まどの外で足をふんばってそらを見上げて鷺さぎを捕とるしたくをしているのかと思って、急いそいでそっちを見ましたが、外はいちめんのうつくしい砂子すなごと白いすすきの波なみばかり、あの鳥捕とりとりの広いせなかもとがった帽子ぼうしも見えませんでした。 「あの人どこへ行ったろう」カムパネルラもぼんやりそう言いっていました。 「どこへ行ったろう。いったいどこでまたあうのだろう。僕ぼくはどうしても少しあの人に物ものを言いわなかったろう」 「ああ、僕ぼくもそう思っているよ」 「僕ぼくはあの人が邪魔じゃまなような気がしたんだ。だから僕ぼくはたいへんつらい」ジョバンニはこんなへんてこな気もちは、ほんとうにはじめてだし、こんなこと今まで言いったこともないと思いました。 「なんだか苹果りんごのにおいがする。僕ぼくいま苹果りんごのことを考えたためだろうか」カムパネルラが不思議ふしぎそうにあたりを見まわしました。 「ほんとうに苹果りんごのにおいだよ。それから野茨のいばらのにおいもする」 ジョバンニもそこらを見ましたがやっぱりそれは窓まどからでもはいって来るらしいのでした。いま秋だから野茨のいばらの花のにおいのするはずはないとジョバンニは思いました。 そしたらにわかにそこに、つやつやした黒い髪かみの六つばかりの男の子が赤いジャケツのぼたんもかけず、ひどくびっくりしたような顔をして、がたがたふるえてはだしで立っていました。隣となりには黒い洋服ようふくをきちんと着きたせいの高い青年がいっぱいに風に吹ふかれているけやきの木のような姿勢しせいで、男の子の手をしっかりひいて立っていました。 「あら、ここどこでしょう。まあ、きれいだわ」青年のうしろに、もひとり、十二ばかりの眼めの茶いろな可愛かわいらしい女の子が、黒い外套がいとうを着きて青年の腕うでにすがって不思議ふしぎそうに窓まどの外を見ているのでした。 「ああ、ここはランカシャイヤだ。いや、コンネクテカット州しゅうだ。いや、ああ、ぼくたちはそらへ来たのだ。わたしたちは天へ行くのです。ごらんなさい。あのしるしは天上のしるしです。もうなんにもこわいことありません。わたくしたちは神かみさまに召めされているのです」黒服くろふくの青年はよろこびにかがやいてその女の子に言いいました。けれどもなぜかまた額ひたいに深ふかく皺しわを刻きざんで、それにたいへんつかれているらしく、無理むりに笑わらいながら男の子をジョバンニのとなりにすわらせました。それから女の子にやさしくカムパネルラのとなりの席せきを指ゆびさしました。女の子はすなおにそこへすわって、きちんと両手りょうてを組み合わせました。 「ぼく、おおねえさんのとこへ行くんだよう」腰掛こしかけたばかりの男の子は顔を変へんにして燈台看守とうだいかんしゅの向むこうの席せきにすわったばかりの青年に言いいました。青年はなんとも言いえず悲かなしそうな顔をして、じっとその子の、ちぢれたぬれた頭を見ました。女の子は、いきなり両手りょうてを顔にあててしくしく泣ないてしまいました。 「お父さんやきくよねえさんはまだいろいろお仕事しごとがあるのです。けれどももうすぐあとからいらっしゃいます。それよりも、おっかさんはどんなに永ながく待まっていらっしゃったでしょう。わたしの大事だいじなタダシはいまどんな歌をうたっているだろう、雪ゆきの降ふる朝にみんなと手をつないで、ぐるぐるにわとこのやぶをまわってあそんでいるだろうかと考えたり、ほんとうに待まって心配しんぱいしていらっしゃるんですから、早く行って、おっかさんにお目にかかりましょうね」 「うん、だけど僕ぼく、船に乗のらなけぁよかったなあ」 
「ええ、けれど、ごらんなさい、そら、どうです、あの立派りっぱな川、ね、あすこはあの夏じゅう、ツィンクル、ツィンクル、リトル、スターをうたってやすむとき、いつも窓まどからぼんやり白く見えていたでしょう。あすこですよ。ね、きれいでしょう、あんなに光っています」 泣ないていた姉あねもハンケチで眼めをふいて外を見ました。青年は教えるようにそっと姉弟きょうだいにまた言いいました。 「わたしたちはもう、なんにもかなしいことないのです。わたしたちはこんないいとこを旅たびして、じき神かみさまのとこへ行きます。そこならもう、ほんとうに明るくてにおいがよくて立派りっぱな人たちでいっぱいです。そしてわたしたちの代かわりにボートへ乗のれた人たちは、きっとみんな助たすけられて、心配しんぱいして待まっているめいめいのお父さんやお母さんや自分のお家へやら行くのです。さあ、もうじきですから元気を出しておもしろくうたって行きましょう」青年は男の子のぬれたような黒い髪かみをなで、みんなを慰なぐさめながら、自分もだんだん顔いろがかがやいてきました。 「あなた方はどちらからいらっしゃったのですか。どうなすったのですか」 さっきの燈台看守とうだいかんしゅがやっと少しわかったように青年にたずねました。青年はかすかにわらいました。 「いえ、氷山ひょうざんにぶっつかって船が沈しずみましてね、わたしたちはこちらのお父さんが急きゅうな用ようで二か月前、一足さきに本国へお帰りになったので、あとから発たったのです。私は大学へはいっていて、家庭教師かていきょうしにやとわれていたのです。ところがちょうど十二日目、今日か昨日きのうのあたりです、船が氷山ひょうざんにぶっつかって一ぺんに傾かたむきもう沈しずみかけました。月のあかりはどこかぼんやりありましたが、霧きりが非常ひじょうに深ふかかったのです。ところがボートは左舷さげんの方半分はんぶんはもうだめになっていましたから、とてもみんなは乗のり切らないのです。もうそのうちにも船は沈しずみますし、私は必死ひっしとなって、どうか小さな人たちを乗のせてくださいと叫さけびました。近くの人たちはすぐみちを開いて、そして子供たちのために祈いのってくれました。けれどもそこからボートまでのところには、まだまだ小さな子どもたちや親たちやなんかいて、とても押おしのける勇気ゆうきがなかったのです。それでもわたくしはどうしてもこの方たちをお助たすけするのが私の義務ぎむだと思いましたから前にいる子供らを押おしのけようとしました。けれどもまた、そんなにして助たすけてあげるよりはこのまま神かみの御前みまえにみんなで行く方が、ほんとうにこの方たちの幸福こうふくだとも思いました。それからまた、その神かみにそむく罪つみはわたくしひとりでしょってぜひとも助たすけてあげようと思いました。けれども、どうしても見ているとそれができないのでした。子どもらばかりのボートの中へはなしてやって、お母さんが狂気きょうきのようにキスを送おくりお父さんがかなしいのをじっとこらえてまっすぐに立っているなど、とてももう腸はらわたもちぎれるようでした。そのうち船はもうずんずん沈しずみますから、私たちはかたまって、もうすっかり覚悟かくごして、この人たち二人を抱だいて、浮うかべるだけは浮うかぼうと船の沈しずむのを待まっていました。誰だれが投なげたかライフヴイが一つ飛とんで来ましたけれどもすべってずうっと向むこうへ行ってしまいました。私は一生けん命めいで甲板かんぱんの格子こうしになったとこをはなして、三人それにしっかりとりつきました。どこからともなく三〇六番の声があがりました。たちまちみんなはいろいろな国語で一ぺんにそれをうたいました。そのときにわかに大きな音がして私たちは水に落おち、もう渦うずにはいったと思いながらしっかりこの人たちをだいて、それからぼうっとしたと思ったらもうここへ来ていたのです。この方たちのお母さんは一昨年さくねん没なくなられました。ええ、ボートはきっと助たすかったにちがいありません、なにせよほど熟練じゅくれんな水夫すいふたちが漕こいで、すばやく船からはなれていましたから」 そこらから小さな嘆息たんそくやいのりの声が聞こえジョバンニもカムパネルラもいままで忘わすれていたいろいろのことをぼんやり思い出して眼めが熱あつくなりました。 (ああ、その大きな海はパシフィックというのではなかったろうか。その氷山ひょうざんの流ながれる北のはての海で、小さな船に乗のって、風や凍こおりつく潮水しおみずや、はげしい寒さむさとたたかって、たれかが一生けんめいはたらいている。ぼくはそのひとにほんとうにきのどくでそしてすまないような気がする。ぼくはそのひとのさいわいのためにいったいどうしたらいいのだろう) 
ジョバンニは首くびをたれて、すっかりふさぎ込こんでしまいました。 「なにがしあわせかわからないです。ほんとうにどんなつらいことでもそれがただしいみちを進すすむ中でのできごとなら、峠とうげの上りも下りもみんなほんとうの幸福こうふくに近づく一あしずつですから」 燈台守とうだいもりがなぐさめていました。 「ああそうです。ただいちばんのさいわいに至いたるためにいろいろのかなしみもみんなおぼしめしです」 青年が祈いのるようにそう答えました。 そしてあの姉弟きょうだいはもうつかれてめいめいぐったり席せきによりかかって睡ねむっていました。さっきのあのはだしだった足にはいつか白い柔やわらかな靴くつをはいていたのです。 ごとごとごとごと汽車はきらびやかな燐光りんこうの川の岸きしを進すすみました。向むこうの方の窓まどを見ると、野原はまるで幻燈げんとうのようでした。百も千もの大小さまざまの三角標さんかくひょう、その大きなものの上には赤い点々をうった測量旗そくりょうきも見え、野原のはらのはてはそれらがいちめん、たくさんたくさん集あつまってぼおっと青白い霧きりのよう、そこからか、またはもっと向むこうからか、ときどきさまざまの形のぼんやりした狼煙のろしのようなものが、かわるがわるきれいな桔梗ききょういろのそらにうちあげられるのでした。じつにそのすきとおった奇麗きれいな風は、ばらのにおいでいっぱいでした。 「いかがですか。こういう苹果りんごはおはじめてでしょう」向むこうの席せきの燈台看守とうだいかんしゅがいつか黄金きんと紅べにでうつくしくいろどられた大きな苹果りんごを落おとさないように両手りょうてで膝ひざの上にかかえていました。 「おや、どっから来たのですか。立派りっぱですねえ。ここらではこんな苹果りんごができるのですか」青年はほんとうにびっくりしたらしく、燈台看守とうだいかんしゅの両手りょうてにかかえられた一もりの苹果りんごを、眼めを細ほそくしたり首くびをまげたりしながら、われを忘わすれてながめていました。 「いや、まあおとりください。どうか、まあおとりください」 青年は一つとってジョバンニたちの方をちょっと見ました。 「さあ、向むこうの坊ぼっちゃんがた。いかがですか。おとりください」 ジョバンニは坊ぼっちゃんといわれたので、すこししゃくにさわってだまっていましたが、カムパネルラは、 「ありがとう」と言いいました。 すると青年は自分でとって一つずつ二人に送おくってよこしましたので、ジョバンニも立って、ありがとうと言いいました。 燈台看守とうだいかんしゅはやっと両腕りょううでがあいたので、こんどは自分で一つずつ睡ねむっている姉弟きょうだいの膝ひざにそっと置おきました。 「どうもありがとう。どこでできるのですか。こんな立派りっぱな苹果りんごは」 青年はつくづく見ながら言いいました。 「この辺あたりではもちろん農業のうぎょうはいたしますけれどもたいていひとりでにいいものができるような約束やくそくになっております。農業のうぎょうだってそんなにほねはおれはしません。たいてい自分の望のぞむ種子たねさえ播まけばひとりでにどんどんできます。米だってパシフィック辺へんのように殻からもないし十倍ばいも大きくてにおいもいいのです。けれどもあなたがたのいらっしゃる方なら農業のうぎょうはもうありません。苹果りんごだってお菓子かしだって、かすが少しもありませんから、みんなそのひとそのひとによってちがった、わずかのいいかおりになって毛あなからちらけてしまうのです」 にわかに男の子がばっちり眼めをあいて言いいました。 「ああぼくいまお母っかさんの夢ゆめをみていたよ。お母っかさんがね、立派りっぱな戸棚とだなや本のあるとこにいてね、ぼくの方を見て手をだしてにこにこにこにこわらったよ。ぼく、おっかさん。りんごをひろってきてあげましょうか、と言いったら眼めがさめちゃった。ああここ、さっきの汽車のなかだねえ」 「その苹果りんごがそこにあります。このおじさんにいただいたのですよ」青年が言いいました。 「ありがとうおじさん。おや、かおるねえさんまだねてるねえ、ぼくおこしてやろう。ねえさん。ごらん、りんごをもらったよ。おきてごらん」 姉あねはわらって眼めをさまし、まぶしそうに両手りょうてを眼めにあてて、それから苹果りんごを見ました。 男の子はまるでパイをたべるように、もうそれをたべていました。またせっかくむいたそのきれいな皮かわも、くるくるコルク抜ぬきのような形になって床ゆかへ落おちるまでの間にはすうっと、灰はいいろに光って蒸発じょうはつしてしまうのでした。 二人ふたりはりんごをたいせつにポケットにしまいました。 
川下の向むこう岸ぎしに青く茂しげった大きな林が見え、その枝えだには熟じゅくしてまっ赤に光るまるい実みがいっぱい、その林のまん中に高い高い三角標さんかくひょうが立って、森の中からはオーケストラベルやジロフォンにまじってなんとも言いえずきれいな音ねいろが、とけるように浸しみるように風につれて流ながれて来るのでした。 青年はぞくっとしてからだをふるうようにしました。 だまってその譜ふを聞いていると、そこらにいちめん黄いろや、うすい緑みどりの明るい野原のはらか敷物しきものかがひろがり、またまっ白な蝋ろうのような露つゆが太陽たいようの面めんをかすめて行くように思われました。 「まあ、あの烏からす」カムパネルラのとなりの、かおると呼よばれた女の子が叫さけびました。 「からすでない。みんなかささぎだ」カムパネルラがまた何気なくしかるように叫さけびましたので、ジョバンニはまた思わず笑わらい、女の子はきまり悪わるそうにしました。まったく河原かわらの青じろいあかりの上に、黒い鳥がたくさんたくさんいっぱいに列れつになってとまってじっと川の微光びこうを受けているのでした。 「かささぎですねえ、頭のうしろのとこに毛がぴんと延のびてますから」青年はとりなすように言いいました。 向むこうの青い森の中の三角標さんかくひょうはすっかり汽車の正面しょうめんに来ました。そのとき汽車のずうっとうしろの方から、あの聞きなれた三〇六番の讃美歌さんびかのふしが聞こえてきました。よほどの人数で合唱がっしょうしているらしいのでした。青年はさっと顔いろが青ざめ、たって一ぺんそっちへ行きそうにしましたが思いかえしてまたすわりました。かおる子はハンケチを顔にあててしまいました。 ジョバンニまでなんだか鼻はなが変へんになりました。けれどもいつともなく誰だれともなくその歌は歌い出されだんだんはっきり強くなりました。思わずジョバンニもカムパネルラもいっしょにうたいだしたのです。 そして青い橄欖かんらんの森が、見えない天の川の向むこうにさめざめと光りながらだんだんうしろの方へ行ってしまい、そこから流ながれて来るあやしい楽器がっきの音も、もう汽車のひびきや風の音にすりへらされてずうっとかすかになりました。 「あ、孔雀くじゃくがいるよ。あ、孔雀くじゃくがいるよ」 「あの森琴ライラの宿やどでしょう。あたしきっとあの森の中にむかしの大きなオーケストラの人たちが集あつまっていらっしゃると思うわ、まわりには青い孔雀くじゃくやなんかたくさんいると思うわ」 「ええ、たくさんいたわ」女の子がこたえました。 ジョバンニはその小さく小さくなっていまはもう一つの緑みどりいろの貝かいぼたんのように見える森の上にさっさっと青じろく時々光ってその孔雀くじゃくがはねをひろげたりとじたりする光の反射はんしゃを見ました。 「そうだ、孔雀くじゃくの声だってさっき聞こえた」カムパネルラが女の子に言いいました。 「ええ、三十疋ぴきぐらいはたしかにいたわ」女の子が答えました。 ジョバンニはにわかになんとも言いえずかなしい気がして思わず、 「カムパネルラ、ここからはねおりて遊あそんで行こうよ」とこわい顔をして言いおうとしたくらいでした。 ところがそのときジョバンニは川下の遠くの方に不思議ふしぎなものを見ました。それはたしかになにか黒いつるつるした細長ほそながいもので、あの見えない天の川の水の上に飛とび出してちょっと弓ゆみのようなかたちに進すすんで、また水の中にかくれたようでした。おかしいと思ってまたよく気をつけていましたら、こんどはずっと近くでまたそんなことがあったらしいのでした。そのうちもうあっちでもこっちでも、その黒いつるつるした変へんなものが水から飛とび出して、まるく飛とんでまた頭から水へくぐるのがたくさん見えてきました。みんな魚のように川上へのぼるらしいのでした。 「まあ、なんでしょう。たあちゃん。ごらんなさい。まあたくさんだわね。なんでしょうあれ」 睡ねむそうに眼めをこすっていた男の子はびっくりしたように立ちあがりました。 「なんだろう」青年も立ちあがりました。 「まあ、おかしな魚だわ、なんでしょうあれ」 「海豚いるかです」カムパネルラがそっちを見ながら答えました。 「海豚いるかだなんてあたしはじめてだわ。けどここ海じゃないんでしょう」 「いるかは海にいるときまっていない」あの不思議ふしぎな低ひくい声がまたどこからかしました。 
ほんとうにそのいるかのかたちのおかしいことは、二つのひれをちょうど両手りょうてをさげて不動ふどうの姿勢しせいをとったようなふうにして水の中から飛とび出して来て、うやうやしく頭を下にして不動ふどうの姿勢しせいのまままた水の中へくぐって行くのでした。見えない天の川の水もそのときはゆらゆらと青い焔ほのおのように波なみをあげるのでした。 「いるかお魚でしょうか」女の子がカムパネルラにはなしかけました。男の子はぐったりつかれたように席せきにもたれて睡ねむっていました。 「いるか、魚じゃありません。くじらと同じようなけだものです」カムパネルラが答えました。 「あなたくじら見たことあって」 「僕ぼくあります。くじら、頭と黒いしっぽだけ見えます。潮しおを吹ふくとちょうど本にあるようになります」 「くじらなら大きいわねえ」 「くじら大きいです。子供こどもだっているかぐらいあります」 「そうよ、あたしアラビアンナイトで見たわ」姉あねは細ほそい銀ぎんいろの指輪ゆびわをいじりながらおもしろそうにはなししていました。 (カムパネルラ、僕ぼくもう行っちまうぞ。僕ぼくなんか鯨くじらだって見たことないや) ジョバンニはまるでたまらないほどいらいらしながら、それでも堅かたく、唇くちびるを噛かんでこらえて窓まどの外を見ていました。その窓まどの外には海豚いるかのかたちももう見えなくなって川は二つにわかれました。そのまっくらな島しまのまん中に高い高いやぐらが一つ組まれて、その上に一人の寛ゆるい服ふくを着きて赤い帽子ぼうしをかぶった男が立っていました。そして両手りょうてに赤と青の旗はたをもってそらを見上げて信号しんごうしているのでした。 ジョバンニが見ている間その人はしきりに赤い旗はたをふっていましたが、にわかに赤旗あかはたをおろしてうしろにかくすようにし、青い旗はたを高く高くあげてまるでオーケストラの指揮者しきしゃのようにはげしく振ふりました。すると空中にざあっと雨のような音がして、何かまっくらなものが、いくかたまりもいくかたまりも鉄砲丸てっぽうだまのように川の向むこうの方へ飛とんで行くのでした。ジョバンニは思わず窓まどからからだを半分出して、そっちを見あげました。美うつくしい美うつくしい桔梗ききょういろのがらんとした空の下を、実じつに何万なんまんという小さな鳥どもが、幾組いくくみも幾組いくくみもめいめいせわしくせわしく鳴いて通って行くのでした。 「鳥が飛とんで行くな」ジョバンニが窓まどの外で言いました。 「どら」カムパネルラもそらを見ました。 そのときあのやぐらの上のゆるい服ふくの男はにわかに赤い旗はたをあげて狂気きょうきのようにふりうごかしました。するとぴたっと鳥の群むれは通らなくなり、それと同時にぴしゃあんというつぶれたような音が川下の方で起おこって、それからしばらくしいんとしました。と思ったらあの赤帽あかぼうの信号手しんごうしゅがまた青い旗はたをふって叫さけんでいたのです。 「いまこそわたれわたり鳥、いまこそわたれわたり鳥」その声もはっきり聞こえました。 それといっしょにまた幾万いくまんという鳥の群むれがそらをまっすぐにかけたのです。二人ふたりの顔を出しているまん中の窓まどからあの女の子が顔を出して美うつくしい頬ほおをかがやかせながらそらを仰あおぎました。 「まあ、この鳥、たくさんですわねえ、あらまあそらのきれいなこと」女の子はジョバンニにはなしかけましたけれどもジョバンニは生意気なまいきな、いやだいと思いながら、だまって口をむすんでそらを見あげていました。女の子は小さくほっと息いきをして、だまって席せきへ戻もどりました。カムパネルラがきのどくそうに窓まどから顔を引っ込こめて地図を見ていました。 「あの人鳥へ教えてるんでしょうか」女の子がそっとカムパネルラにたずねました。 「わたり鳥へ信号しんごうしてるんです。きっとどこからかのろしがあがるためでしょう」 カムパネルラが少しおぼつかなそうに答えました。そして車の中はしいんとなりました。ジョバンニはもう頭を引っ込こめたかったのですけれども明るいとこへ顔を出すのがつらかったので、だまってこらえてそのまま立って口笛くちぶえを吹ふいていました。 (どうして僕ぼくはこんなにかなしいのだろう。僕ぼくはもっとこころもちをきれいに大きくもたなければいけない。あすこの岸きしのずうっと向むこうにまるでけむりのような小さな青い火が見える。あれはほんとうにしずかでつめたい。僕ぼくはあれをよく見てこころもちをしずめるんだ) ジョバンニは熱ほてって痛いたいあたまを両手りょうてで押おさえるようにして、そっちの方を見ました。 
(ああほんとうにどこまでもどこまでも僕ぼくといっしょに行くひとはないだろうか。カムパネルラだってあんな女の子とおもしろそうに談はなしているし僕ぼくはほんとうにつらいなあ) ジョバンニの眼めはまた泪なみだでいっぱいになり、天の川もまるで遠くへ行いったようにぼんやり白く見えるだけでした。 そのとき汽車はだんだん川からはなれて崖がけの上を通るようになりました。向むこう岸ぎしもまた黒いいろの崖がけが川の岸きしを下流かりゅうに下るにしたがって、だんだん高くなっていくのでした。そしてちらっと大きなとうもろこしの木を見ました。その葉ははぐるぐるに縮ちぢれ葉はの下にはもう美しい緑みどりいろの大きな苞ほうが赤い毛を吐はいて真珠しんじゅのような実みもちらっと見えたのでした。それはだんだん数を増ましてきて、もういまは列れつのように崖がけと線路せんろとの間にならび、思わずジョバンニが窓まどから顔を引っ込こめて向むこう側がわの窓まどを見ましたときは、美うつくしいそらの野原の地平線ちへいせんのはてまで、その大きなとうもろこしの木がほとんどいちめんに植うえられて、さやさや風にゆらぎ、その立派りっぱなちぢれた葉はのさきからは、まるでひるの間にいっぱい日光を吸すった金剛石こんごうせきのように露つゆがいっぱいについて、赤や緑みどりやきらきら燃もえて光っているのでした。カムパネルラが、 「あれとうもろこしだねえ」とジョバンニに言いいましたけれども、ジョバンニはどうしても気持きもちがなおりませんでしたから、ただぶっきらぼうに野原を見たまま、 「そうだろう」と答えました。 そのとき汽車はだんだんしずかになって、いくつかのシグナルとてんてつ器きの灯あかりを過ぎ、小さな停車場ていしゃばにとまりました。 その正面しょうめんの青じろい時計とけいはかっきり第二時だいにじを示しめし、風もなくなり汽車もうごかず、しずかなしずかな野原のなかにその振ふり子こはカチッカチッと正しく時を刻きざんでいくのでした。 そしてまったくその振ふり子この音のたえまを遠くの遠くの野原のはてから、かすかなかすかな旋律せんりつが糸のように流ながれて来るのでした。 「新世界交響楽しんせかいこうきょうがくだわ」向むこうの席せきの姉あねがひとりごとのようにこっちを見ながらそっと言いいました。 全まったくもう車の中ではあの黒服くろふくの丈高たけたかい青年も誰だれもみんなやさしい夢ゆめを見ているのでした。 (こんなしずかないいとこで僕ぼくはどうしてもっと愉快ゆかいになれないだろう。どうしてこんなにひとりさびしいのだろう。けれどもカムパネルラなんかあんまりひどい、僕ぼくといっしょに汽車に乗のっていながら、まるであんな女の子とばかり談はなしているんだもの。僕ぼくはほんとうにつらい) ジョバンニはまた手で顔を半分はんぶんかくすようにして向むこうの窓まどのそとを見つめていました。 すきとおった硝子ガラスのような笛ふえが鳴って汽車はしずかに動きだし、カムパネルラもさびしそうに星めぐりの口笛くちぶえを吹ふきました。 「ええ、ええ、もうこの辺へんはひどい高原ですから」 うしろの方で誰だれかとしよりらしい人の、いま眼めがさめたというふうではきはき談はなしている声がしました。 「とうもろこしだって棒ぼうで二尺も孔あなをあけておいてそこへ播まかないとはえないんです」 「そうですか。川まではよほどありましょうかねえ」 「ええ、ええ、河かわまでは二千尺じゃくから六千尺じゃくあります。もうまるでひどい峡谷きょうこくになっているんです」 そうそうここはコロラドの高原じゃなかったろうか、ジョバンニは思わずそう思いました。 あの姉あねは弟を自分の胸むねによりかからせて睡ねむらせながら黒い瞳ひとみをうっとりと遠くへ投なげて何を見るでもなしに考え込こんでいるのでしたし、カムパネルラはまださびしそうにひとり口笛くちぶえを吹ふき、男の子はまるで絹きぬで包つつんだ苹果りんごのような顔いろをしてジョバンニの見る方を見ているのでした。 突然とつぜんとうもろこしがなくなって巨おおきな黒い野原のはらがいっぱいにひらけました。 新世界交響楽しんせかいこうきょうがくはいよいよはっきり地平線ちへいせんのはてから湧わき、そのまっ黒な野原のはらのなかを一人のインデアンが白い鳥の羽根はねを頭につけ、たくさんの石を腕うでと胸むねにかざり、小さな弓ゆみに矢やをつがえていちもくさんに汽車を追おって来るのでした。 「あら、インデアンですよ。インデアンですよ。おねえさまごらんなさい」 黒服くろふくの青年も眼めをさましました。 ジョバンニもカムパネルラも立ちあがりました。 「走って来るわ、あら、走って来るわ。追おいかけているんでしょう」 
「いいえ、汽車を追おってるんじゃないんですよ。猟りょうをするか踊おどるかしてるんですよ」 青年はいまどこにいるか忘わすれたというふうにポケットに手を入れて立ちながら言いいました。 まったくインデアンは半分はんぶんは踊おどっているようでした。第一だいいちかけるにしても足のふみようがもっと経済けいざいもとれ本気にもなれそうでした。にわかにくっきり白いその羽根はねは前の方へ倒たおれるようになり、インデアンはぴたっと立ちどまって、すばやく弓ゆみを空にひきました。そこから一羽わの鶴つるがふらふらと落おちて来て、また走り出したインデアンの大きくひろげた両手りょうてに落おちこみました。インデアンはうれしそうに立ってわらいました。そしてその鶴つるをもってこっちを見ている影かげも、もうどんどん小さく遠くなり、電しんばしらの碍子がいしがきらっきらっと続つづいて二つばかり光って、またとうもろこしの林になってしまいました。こっち側がわの窓まどを見ますと汽車はほんとうに高い高い崖がけの上を走っていて、その谷の底そこには川がやっぱり幅はばひろく明るく流ながれていたのです。 「ええ、もうこの辺へんから下りです。なんせこんどは一ぺんにあの水面すいめんまでおりて行くんですから容易よういじゃありません。この傾斜けいしゃがあるもんですから汽車は決けっして向むこうからこっちへは来ないんです。そら、もうだんだん早くなったでしょう」さっきの老人ろうじんらしい声が言いいました。 どんどんどんどん汽車は降おりて行きました。崖がけのはじに鉄道てつどうがかかるときは川が明るく下にのぞけたのです。ジョバンニはだんだんこころもちが明るくなってきました。汽車が小さな小屋こやの前を通って、その前にしょんぼりひとりの子供こどもが立ってこっちを見ているときなどは思わず、ほう、と叫さけびました。 どんどんどんどん汽車は走って行きました。室中へやじゅうのひとたちは半分はんぶんうしろの方へ倒たおれるようになりながら腰掛こしかけにしっかりしがみついていました。ジョバンニは思わずカムパネルラとわらいました。もうそして天の川は汽車のすぐ横手よこてをいままでよほど激はげしく流ながれて来たらしく、ときどきちらちら光ってながれているのでした。うすあかい河原かわらなでしこの花があちこち咲さいていました。汽車はようやく落おち着ついたようにゆっくり走っていました。 向むこうとこっちの岸きしに星のかたちとつるはしを書いた旗はたがたっていました。 「あれなんの旗はただろうね」ジョバンニがやっとものを言いいました。 「さあ、わからないねえ、地図にもないんだもの。鉄てつの舟ふねがおいてあるねえ」 「ああ」 「橋はしを架かけるとこじゃないんでしょうか」女の子が言いいました。 「ああ、あれ工兵こうへいの旗はただねえ。架橋演習かきょうえんしゅうをしてるんだ。けれど兵隊へいたいのかたちが見えないねえ」 その時向むこう岸ぎしちかくの少し下流かりゅうの方で、見えない天の川の水がぎらっと光って、柱はしらのように高くはねあがり、どおとはげしい音がしました。 「発破はっぱだよ、発破はっぱだよ」カムパネルラはこおどりしました。 その柱はしらのようになった水は見えなくなり、大きな鮭さけや鱒ますがきらっきらっと白く腹はらを光らせて空中にほうり出されてまるい輪わを描えがいてまた水に落おちました。ジョバンニはもうはねあがりたいくらい気持きもちが軽かるくなって言いいました。 「空の工兵大隊こうへいだいたいだ。どうだ、鱒ますなんかがまるでこんなになってはねあげられたねえ。僕ぼくこんな愉快ゆかいな旅たびはしたことない。いいねえ」 「あの鱒ますなら近くで見たらこれくらいあるねえ、たくさんさかないるんだな、この水の中に」 「小さなお魚もいるんでしょうか」女の子が談はなしにつり込こまれて言いいました。 「いるんでしょう。大きなのがいるんだから小さいのもいるんでしょう。けれど遠くだから、いま小さいの見えなかったねえ」ジョバンニはもうすっかり機嫌きげんが直なおっておもしろそうにわらって女の子に答えました。 「あれきっと双子ふたごのお星さまのお宮みやだよ」男の子がいきなり窓まどの外をさして叫さけびました。 右手の低ひくい丘おかの上に小さな水晶すいしょうででもこさえたような二つのお宮みやがならんで立っていました。 「双子ふたごのお星さまのお宮みやってなんだい」 「あたし前になんべんもお母っかさんから聞いたわ。ちゃんと小さな水晶すいしょうのお宮みやで二つならんでいるからきっとそうだわ」 「はなしてごらん。双子ふたごのお星さまが何をしたっての」 「ぼくも知ってらい。双子ふたごのお星さまが野原へ遊あそびにでて、からすと喧嘩けんかしたんだろう」 
「そうじゃないわよ。あのね、天の川の岸きしにね、おっかさんお話しなすったわ、……」 「それから彗星ほうきぼしがギーギーフーギーギーフーて言いって来たねえ」 「いやだわ、たあちゃん、そうじゃないわよ。それはべつの方だわ」 「するとあすこにいま笛ふえを吹ふいているんだろうか」 「いま海へ行ってらあ」 「いけないわよ。もう海からあがっていらっしゃったのよ」 「そうそう。ぼく知ってらあ、ぼくおはなししよう」 川の向こう岸ぎしがにわかに赤くなりました。 楊やなぎの木や何かもまっ黒にすかし出され、見えない天の川の波なみも、ときどきちらちら針はりのように赤く光りました。まったく向むこう岸ぎしの野原に大きなまっ赤な火が燃もやされ、その黒いけむりは高く桔梗ききょういろのつめたそうな天をも焦こがしそうでした。ルビーよりも赤くすきとおり、リチウムよりもうつくしく酔よったようになって、その火は燃もえているのでした。 「あれはなんの火だろう。あんな赤く光る火は何を燃もやせばできるんだろう」ジョバンニが言いいました。 「蠍さそりの火だな」カムパネルラがまた地図と首くびっぴきして答えました。 「あら、蠍さそりの火のことならあたし知ってるわ」 「蠍さそりの火ってなんだい」ジョバンニがききました。 「蠍さそりがやけて死んだのよ。その火がいまでも燃もえてるって、あたし何べんもお父さんから聴きいたわ」 「蠍さそりって、虫だろう」 「ええ、蠍さそりは虫よ。だけどいい虫だわ」 「蠍さそりいい虫じゃないよ。僕ぼく博物館はくぶつかんでアルコールにつけてあるの見た。尾おにこんなかぎがあってそれで螫さされると死しぬって先生が言いってたよ」 「そうよ。だけどいい虫だわ、お父さんこう言いったのよ。むかしのバルドラの野原に一ぴきの蠍さそりがいて小さな虫やなんか殺ころしてたべて生きていたんですって。するとある日いたちに見つかって食べられそうになったんですって。さそりは一生けん命めいにげてにげたけど、とうとういたちに押おさえられそうになったわ、そのときいきなり前に井戸いどがあってその中に落おちてしまったわ、もうどうしてもあがられないで、さそりはおぼれはじめたのよ。そのときさそりはこう言いってお祈いのりしたというの。 ああ、わたしはいままで、いくつのものの命いのちをとったかわからない、そしてその私がこんどいたちにとられようとしたときはあんなに一生けん命めいにげた。それでもとうとうこんなになってしまった。ああなんにもあてにならない。どうしてわたしはわたしのからだを、だまっていたちにくれてやらなかったろう。そしたらいたちも一日生きのびたろうに。どうか神かみさま。私の心をごらんください。こんなにむなしく命いのちをすてず、どうかこの次つぎには、まことのみんなの幸さいわいのために私のからだをおつかいください。って言いったというの。 そしたらいつか蠍さそりはじぶんのからだが、まっ赤なうつくしい火になって燃もえて、よるのやみを照てらしているのを見たって。いまでも燃もえてるってお父さんおっしゃったわ。ほんとうにあの火、それだわ」 「そうだ。見たまえ。そこらの三角標さんかくひょうはちょうどさそりの形にならんでいるよ」 ジョバンニはまったくその大きな火の向むこうに三つの三角標さんかくひょうが、ちょうどさそりの腕うでのように、こっちに五つの三角標さんかくひょうがさそりの尾おやかぎのようにならんでいるのを見ました。そしてほんとうにそのまっ赤なうつくしいさそりの火は音なくあかるくあかるく燃もえたのです。 その火がだんだんうしろの方になるにつれて、みんなはなんとも言いえずにぎやかな、さまざまの楽がくの音ねや草花のにおいのようなもの、口笛くちぶえや人々のざわざわ言いう声やらを聞きました。それはもうじきちかくに町か何かがあって、そこにお祭まつりでもあるというような気がするのでした。 「ケンタウル露つゆをふらせ」いきなりいままで睡ねむっていたジョバンニのとなりの男の子が向むこうの窓まどを見ながら叫さけんでいました。 ああそこにはクリスマストリイのようにまっ青な唐檜とうひかもみの木がたって、その中にはたくさんのたくさんの豆電燈まめでんとうがまるで千の蛍ほたるでも集あつまったようについていました。 「ああ、そうだ、今夜ケンタウル祭さいだねえ」 「ああ、ここはケンタウルの村だよ」カムパネルラがすぐ言いいました。 (此この間原稿げんこうなし) 「ボール投げなら僕ぼく決けっしてはずさない」 男の子が大いばりで言いいました。 「もうじきサウザンクロスです。おりるしたくをしてください」青年がみんなに言いいました。 「僕ぼく、も少し汽車に乗ってるんだよ」男の子が言いいました。 
カムパネルラのとなりの女の子はそわそわ立ってしたくをはじめましたけれどもやっぱりジョバンニたちとわかれたくないようなようすでした。 「ここでおりなけぁいけないのです」青年はきちっと口を結むすんで男の子を見おろしながら言いいました。 「厭いやだい。僕ぼくもう少し汽車へ乗のってから行くんだい」 ジョバンニがこらえかねて言いいました。 「僕ぼくたちといっしょに乗のって行こう。僕ぼくたちどこまでだって行ける切符きっぷ持もってるんだ」 「だけどあたしたち、もうここで降おりなけぁいけないのよ。ここ天上へ行くとこなんだから」 女の子がさびしそうに言いいました。 「天上へなんか行かなくたっていいじゃないか。ぼくたちここで天上よりももっといいとこをこさえなけぁいけないって僕ぼくの先生が言いったよ」 「だっておっ母かさんも行ってらっしゃるし、それに神かみさまがおっしゃるんだわ」 「そんな神かみさまうその神かみさまだい」 「あなたの神かみさまうその神かみさまよ」 「そうじゃないよ」 「あなたの神かみさまってどんな神かみさまですか」青年は笑わらいながら言いいました。 「ぼくほんとうはよく知りません。けれどもそんなんでなしに、ほんとうのたった一人ひとりの神かみさまです」 「ほんとうの神かみさまはもちろんたった一人ひとりです」 「ああ、そんなんでなしに、たったひとりのほんとうのほんとうの神かみさまです」 「だからそうじゃありませんか。わたくしはあなた方がいまにそのほんとうの神かみさまの前に、わたくしたちとお会いになることを祈いのります」青年はつつましく両手りょうてを組みました。 女の子もちょうどその通りにしました。みんなほんとうに別わかれが惜おしそうで、その顔いろも少し青ざめて見えました。ジョバンニはあぶなく声をあげて泣なき出そうとしました。 「さあもうしたくはいいんですか。じきサウザンクロスですから」 ああそのときでした。見えない天の川のずうっと川下に青や橙だいだいや、もうあらゆる光でちりばめられた十字架じゅうじかが、まるで一本の木というふうに川の中から立ってかがやき、その上には青じろい雲がまるい環わになって後光のようにかかっているのでした。汽車の中がまるでざわざわしました。みんなあの北の十字のときのようにまっすぐに立ってお祈いのりをはじめました。あっちにもこっちにも子供が瓜うりに飛とびついたときのようなよろこびの声や、なんとも言いようない深ふかいつつましいためいきの音ばかりきこえました。そしてだんだん十字架じゅうじかは窓まどの正面しょうめんになり、あの苹果りんごの肉にくのような青じろい環わの雲も、ゆるやかにゆるやかに繞めぐっているのが見えました。 「ハレルヤ、ハレルヤ」明るくたのしくみんなの声はひびき、みんなはそのそらの遠くから、つめたいそらの遠くから、すきとおったなんとも言いえずさわやかなラッパの声をききました。そしてたくさんのシグナルや電燈でんとうの灯あかりのなかを汽車はだんだんゆるやかになり、とうとう十字架じゅうじかのちょうどま向むかいに行ってすっかりとまりました。 「さあ、おりるんですよ」青年は男の子の手をひき姉あねは互たがいにえりや肩かたをなおしてやってだんだん向むこうの出口の方へ歩き出しました。 「じゃさよなら」女の子がふりかえって二人に言いいました。 「さよなら」ジョバンニはまるで泣なき出したいのをこらえておこったようにぶっきらぼうに言いいました。 女の子はいかにもつらそうに眼めを大きくして、も一度どこっちをふりかえって、それからあとはもうだまって出て行ってしまいました。汽車の中はもう半分以上はんぶんいじょうも空すいてしまいにわかにがらんとして、さびしくなり風がいっぱいに吹ふき込こみました。 そして見ているとみんなはつつましく列れつを組んで、あの十字架じゅうじかの前の天の川のなぎさにひざまずいていました。そしてその見えない天の川の水をわたって、ひとりのこうごうしい白いきものの人が手をのばしてこっちへ来るのを二人は見ました。けれどもそのときはもう硝子ガラスの呼よび子は鳴らされ汽車はうごきだし、と思ううちに銀ぎんいろの霧きりが川下の方から、すうっと流ながれて来て、もうそっちは何も見えなくなりました。ただたくさんのくるみの木が葉はをさんさんと光らしてその霧きりの中に立ち、黄金きんの円光をもった電気栗鼠でんきりすが可愛かわいい顔をその中からちらちらのぞいているだけでした。 
そのとき、すうっと霧きりがはれかかりました。どこかへ行く街道かいどうらしく小さな電燈でんとうの一列いちれつについた通りがありました。それはしばらく線路せんろに沿そって進すすんでいました。そして二人ふたりがそのあかしの前を通って行くときは、その小さな豆いろの火はちょうどあいさつでもするようにぽかっと消きえ、二人ふたりが過ぎて行くときまた点つくのでした。 ふりかえって見ると、さっきの十字架じゅうじかはすっかり小さくなってしまい、ほんとうにもうそのまま胸むねにもつるされそうになり、さっきの女の子や青年たちがその前の白い渚なぎさにまだひざまずいているのか、それともどこか方角ほうがくもわからないその天上へ行ったのか、ぼんやりして見分けられませんでした。 ジョバンニは、ああ、と深ふかく息いきしました。 「カムパネルラ、また僕ぼくたち二人ふたりきりになったねえ、どこまでもどこまでもいっしょに行こう。僕ぼくはもう、あのさそりのように、ほんとうにみんなの幸さいわいのためならば僕ぼくのからだなんか百ぺん灼やいてもかまわない」 「うん。僕ぼくだってそうだ」カムパネルラの眼めにはきれいな涙なみだがうかんでいました。 「けれどもほんとうのさいわいはいったいなんだろう」 ジョバンニが言いいました。 「僕ぼくわからない」カムパネルラがぼんやり言いいました。 「僕ぼくたちしっかりやろうねえ」ジョバンニが胸むねいっぱい新しい力が湧わくように、ふうと息いきをしながら言いいました。 「あ、あすこ石炭袋せきたんぶくろだよ。そらの孔あなだよ」カムパネルラが少しそっちを避さけるようにしながら天の川のひととこを指ゆびさしました。 ジョバンニはそっちを見て、まるでぎくっとしてしまいました。天の川の一とこに大きなまっくらな孔あなが、どおんとあいているのです。その底そこがどれほど深ふかいか、その奥おくに何があるか、いくら眼めをこすってのぞいてもなんにも見えず、ただ眼めがしんしんと痛いたむのでした。ジョバンニが言いいました。 「僕ぼくもうあんな大きな暗やみの中だってこわくない。きっとみんなのほんとうのさいわいをさがしに行く。どこまでもどこまでも僕ぼくたちいっしょに進すすんで行こう」 「ああきっと行くよ。ああ、あすこの野原はなんてきれいだろう。みんな集あつまってるねえ。あすこがほんとうの天上なんだ。あっ、あすこにいるのはぼくのお母さんだよ」 カムパネルラはにわかに窓まどの遠くに見えるきれいな野原を指さして叫さけびました。 ジョバンニもそっちを見ましたけれども、そこはぼんやり白くけむっているばかり、どうしてもカムパネルラが言いったように思われませんでした。 なんとも言いえずさびしい気がして、ぼんやりそっちを見ていましたら、向むこうの河岸かわぎしに二本の電信でんしんばしらが、ちょうど両方りょうほうから腕うでを組んだように赤い腕木うでぎをつらねて立っていました。 「カムパネルラ、僕ぼくたちいっしょに行こうねえ」ジョバンニがこう言いいながらふりかえって見ましたら、そのいままでカムパネルラのすわっていた席せきに、もうカムパネルラの形は見えず、ただ黒いびろうどばかりひかっていました。 ジョバンニはまるで鉄砲丸てっぽうだまのように立ちあがりました。そして誰だれにも聞こえないように窓まどの外へからだを乗のり出して、力いっぱいはげしく胸むねをうって叫さけび、それからもう咽喉のどいっぱい泣なきだしました。 もうそこらが一ぺんにまっくらになったように思いました。そのとき、 「おまえはいったい何を泣ないているの。ちょっとこっちをごらん」いままでたびたび聞こえた、あのやさしいセロのような声が、ジョバンニのうしろから聞こえました。 ジョバンニは、はっと思って涙なみだをはらってそっちをふり向むきました、さっきまでカムパネルラのすわっていた席せきに黒い大きな帽子ぼうしをかぶった青白い顔のやせた大人おとなが、やさしくわらって大きな一冊さつの本をもっていました。 「おまえのともだちがどこかへ行ったのだろう。あのひとはね、ほんとうにこんや遠くへ行ったのだ。おまえはもうカムパネルラをさがしてもむだだ」 「ああ、どうしてなんですか。ぼくはカムパネルラといっしょにまっすぐに行こうと言いったんです」 「ああ、そうだ。みんながそう考える。けれどもいっしょに行けない。そしてみんながカムパネルラだ。おまえがあうどんなひとでも、みんな何べんもおまえといっしょに苹果りんごをたべたり汽車に乗のったりしたのだ。だからやっぱりおまえはさっき考えたように、あらゆるひとのいちばんの幸福こうふくをさがし、みんなといっしょに早くそこに行くがいい、そこでばかりおまえはほんとうにカムパネルラといつまでもいっしょに行けるのだ」 
「ああぼくはきっとそうします。ぼくはどうしてそれをもとめたらいいでしょう」 「ああわたくしもそれをもとめている。おまえはおまえの切符きっぷをしっかりもっておいで。そして一しんに勉強べんきょうしなけぁいけない。おまえは化学かがくをならったろう、水は酸素さんそと水素すいそからできているということを知っている。いまはたれだってそれを疑うたがやしない。実験じっけんしてみるとほんとうにそうなんだから。けれども昔むかしはそれを水銀すいぎんと塩しおでできていると言いったり、水銀すいぎんと硫黄いおうでできていると言いったりいろいろ議論ぎろんしたのだ。みんながめいめいじぶんの神かみさまがほんとうの神さまだというだろう、けれどもお互たがいほかの神かみさまを信しんずる人たちのしたことでも涙なみだがこぼれるだろう。それからぼくたちの心がいいとかわるいとか議論ぎろんするだろう。そして勝負しょうぶがつかないだろう。けれども、もしおまえがほんとうに勉強べんきょうして実験じっけんでちゃんとほんとうの考えと、うその考えとを分けてしまえば、その実験じっけんの方法ほうほうさえきまれば、もう信仰しんこうも化学かがくと同じようになる。けれども、ね、ちょっとこの本をごらん、いいかい、これは地理ちりと歴史れきしの辞典じてんだよ。この本のこの頁ページはね、紀元前きげんぜん二千二百年の地理ちりと歴史れきしが書いてある。よくごらん、紀元前きげんぜん二千二百年のことでないよ、紀元前きげんぜん二千二百年のころにみんなが考えていた地理ちりと歴史れきしというものが書いてある。 だからこの頁ページ一つが一冊さつの地歴ちれきの本にあたるんだ。いいかい、そしてこの中に書いてあることは紀元前きげんぜん二千二百年ころにはたいてい本当ほんとうだ。さがすと証拠しょうこもぞくぞく出ている。けれどもそれが少しどうかなとこう考えだしてごらん、そら、それは次つぎの頁ページだよ。 紀元前きげんぜん一千年。だいぶ、地理ちりも歴史れきしも変かわってるだろう。このときにはこうなのだ。変へんな顔をしてはいけない。ぼくたちはぼくたちのからだだって考えだって、天の川だって汽車だって歴史れきしだって、ただそう感じているのなんだから、そらごらん、ぼくといっしょにすこしこころもちをしずかにしてごらん。いいか」 そのひとは指ゆびを一本あげてしずかにそれをおろしました。するといきなりジョバンニは自分というものが、じぶんの考えというものが、汽車やその学者がくしゃや天の川や、みんないっしょにぽかっと光って、しいんとなくなって、ぽかっとともってまたなくなって、そしてその一つがぽかっとともると、あらゆる広ひろい世界せかいががらんとひらけ、あらゆる歴史れきしがそなわり、すっと消きえると、もうがらんとした、ただもうそれっきりになってしまうのを見ました。だんだんそれが早くなって、まもなくすっかりもとのとおりになりました。 「さあいいか。だからおまえの実験じっけんは、このきれぎれの考えのはじめから終おわりすべてにわたるようでなければいけない。それがむずかしいことなのだ。けれども、もちろんそのときだけのでもいいのだ。ああごらん、あすこにプレシオスが見える。おまえはあのプレシオスの鎖くさりを解とかなければならない」 そのときまっくらな地平線ちへいせんの向むこうから青じろいのろしが、まるでひるまのようにうちあげられ、汽車の中はすっかり明るくなりました。そしてのろしは高くそらにかかって光りつづけました。 「ああマジェランの星雲せいうんだ。さあもうきっと僕ぼくは僕ぼくのために、僕ぼくのお母さんのために、カムパネルラのために、みんなのために、ほんとうのほんとうの幸福こうふくをさがすぞ」 ジョバンニは唇くちびるを噛かんで、そのマジェランの星雲せいうんをのぞんで立ちました。そのいちばん幸福こうふくなそのひとのために! 
「さあ、切符きっぷをしっかり持もっておいで。お前はもう夢ゆめの鉄道てつどうの中でなしにほんとうの世界せかいの火やはげしい波なみの中を大股おおまたにまっすぐに歩いて行かなければいけない。天の川のなかでたった一つの、ほんとうのその切符きっぷを決けっしておまえはなくしてはいけない」 あのセロのような声がしたと思うとジョバンニは、あの天の川がもうまるで遠く遠くなって風が吹ふき自分はまっすぐに草の丘おかに立っているのを見、また遠くからあのブルカニロ博士はかせの足おとのしずかに近づいて来るのをききました。 「ありがとう。私はたいへんいい実験じっけんをした。私はこんなしずかな場所ばしょで遠くから私の考えを人に伝つたえる実験じっけんをしたいとさっき考えていた。お前の言いった語はみんな私の手帳てちょうにとってある。さあ帰っておやすみ。お前は夢ゆめの中で決心けっしんしたとおりまっすぐに進すすんで行くがいい。そしてこれからなんでもいつでも私のとこへ相談そうだんにおいでなさい」 「僕ぼくきっとまっすぐに進すすみます。きっとほんとうの幸福こうふくを求もとめます」ジョバンニは力強ちからづよく言いいました。 「ああではさよなら。これはさっきの切符きっぷです」 博士はかせは小さく折おった緑みどりいろの紙をジョバンニのポケットに入れました。そしてもうそのかたちは天気輪てんきりんの柱はしらの向むこうに見えなくなっていました。 ジョバンニはまっすぐに走って丘おかをおりました。 そしてポケットがたいへん重おもくカチカチ鳴るのに気がつきました。林の中でとまってそれをしらべてみましたら、あの緑みどりいろのさっき夢ゆめの中で見たあやしい天の切符きっぷの中に大きな二枚まいの金貨きんかが包つつんでありました。 「博士はかせありがとう、おっかさん。すぐ乳ちちをもって行きますよ」 ジョバンニは叫さけんでまた走りはじめました。何かいろいろのものが一ぺんにジョバンニの胸むねに集あつまってなんとも言いえずかなしいような新しいような気がするのでした。 琴ことの星がずうっと西の方へ移うつってそしてまた夢ゆめのように足をのばしていました。 ジョバンニは眼めをひらきました。もとの丘おかの草の中につかれてねむっていたのでした。胸むねはなんだかおかしく熱ほてり、頬ほおにはつめたい涙なみだがながれていました。 ジョバンニはばねのようにはね起おきました。町はすっかりさっきの通りに下でたくさんの灯あかりを綴つづってはいましたが、その光はなんだかさっきよりは熱ねっしたというふうでした。 そしてたったいま夢ゆめであるいた天の川もやっぱりさっきの通りに白くぼんやりかかり、まっ黒な南の地平線ちへいせんの上ではことにけむったようになって、その右には蠍座さそりざの赤い星がうつくしくきらめき、そらぜんたいの位置いちはそんなに変かわってもいないようでした。 ジョバンニはいっさんに丘おかを走って下りました。まだ夕ごはんをたべないで待まっているお母さんのことが胸むねいっぱいに思いだされたのです。どんどん黒い松まつの林の中を通って、それからほの白い牧場ぼくじょうの柵さくをまわって、さっきの入口から暗くらい牛舎ぎゅうしゃの前へまた来ました。そこには誰だれかがいま帰ったらしく、さっきなかった一つの車が何かの樽たるを二つ載のっけて置おいてありました。 「今晩こんばんは」ジョバンニは叫さけびました。 「はい」白い太いずぼんをはいた人がすぐ出て来て立ちました。 「なんのご用ですか」 「今日牛乳ぎゅうにゅうがぼくのところへ来なかったのですが」 「あ、済すみませんでした」その人はすぐ奥おくへ行って一本の牛乳瓶ぎゅうにゅうびんをもって来てジョバンニに渡わたしながら、また言いいました。 「ほんとうに済すみませんでした。今日はひるすぎ、うっかりしてこうしの柵さくをあけておいたもんですから、大将たいしょうさっそく親牛おやうしのところへ行って半分はんぶんばかりのんでしまいましてね……」その人はわらいました。 「そうですか。ではいただいて行きます」 「ええ、どうも済すみませんでした」 「いいえ」 ジョバンニはまだ熱あつい乳ちちの瓶びんを両方りょうほうのてのひらで包つつむようにもって牧場ぼくじょうの柵さくを出ました。 そしてしばらく木のある町を通って大通りへ出てまたしばらく行きますとみちは十文字になって、その右手の方、通りのはずれにさっきカムパネルラたちのあかりを流ながしに行った川へかかった大きな橋はしのやぐらが夜のそらにぼんやり立っていました。 ところがその十字になった町かどや店の前に女たちが七、八人ぐらいずつ集あつまって橋はしの方を見ながら何かひそひそ談はなしているのです。それから橋はしの上にもいろいろなあかりがいっぱいなのでした。 
ジョバンニはなぜかさあっと胸むねが冷つめたくなったように思いました。そしていきなり近くの人たちへ、 「何かあったんですか」と叫さけぶようにききました。 「こどもが水へ落おちたんですよ」一人ひとりが言いいますと、その人たちは一斉いっせいにジョバンニの方を見ました。ジョバンニはまるで夢中むちゅうで橋はしの方へ走りました。橋はしの上は人でいっぱいで河かわが見えませんでした。白い服ふくを着きた巡査じゅんさも出ていました。 ジョバンニは橋はしの袂たもとから飛とぶように下の広い河原かわらへおりました。 その河原かわらの水ぎわに沿そってたくさんのあかりがせわしくのぼったり下ったりしていました。向むこう岸ぎしの暗くらいどてにも火が七つ八つうごいていました。そのまん中をもう烏瓜からすうりのあかりもない川が、わずかに音をたてて灰はいいろにしずかに流ながれていたのでした。 河原かわらのいちばん下流かりゅうの方へ洲すのようになって出たところに人の集あつまりがくっきりまっ黒に立っていました。ジョバンニはどんどんそっちへ走りました。するとジョバンニはいきなりさっきカムパネルラといっしょだったマルソに会あいました。マルソがジョバンニに走り寄よって言いいました。 「ジョバンニ、カムパネルラが川へはいったよ」 「どうして、いつ」 「ザネリがね、舟ふねの上から烏からすうりのあかりを水の流ながれる方へ押おしてやろうとしたんだ。そのとき舟ふねがゆれたもんだから水へ落おっこったろう。するとカムパネルラがすぐ飛とびこんだんだ。そしてザネリを舟ふねの方へ押おしてよこした。ザネリはカトウにつかまった。けれどもあとカムパネルラが見えないんだ」 「みんなさがしてるんだろう」 「ああ、すぐみんな来た。カムパネルラのお父さんも来た。けれども見つからないんだ。ザネリはうちへ連つれられてった」 ジョバンニはみんなのいるそっちの方へ行きました。そこに学生たちや町の人たちに囲かこまれて青じろいとがったあごをしたカムパネルラのお父さんが黒い服ふくを着きてまっすぐに立って左手に時計とけいを持もってじっと見つめていたのです。 みんなもじっと河かわを見ていました。誰だれも一言ひとことも物ものを言いう人もありませんでした。ジョバンニはわくわくわくわく足がふるえました。魚をとるときのアセチレンランプがたくさんせわしく行ったり来たりして、黒い川の水はちらちら小さな波なみをたてて流ながれているのが見えるのでした。 下流かりゅうの方の川はばいっぱい銀河ぎんがが巨おおきく写うつって、まるで水のないそのままのそらのように見えました。 ジョバンニは、そのカムパネルラはもうあの銀河ぎんがのはずれにしかいないというような気がしてしかたなかったのです。 けれどもみんなはまだ、どこかの波なみの間から、 「ぼくずいぶん泳およいだぞ」と言いながらカムパネルラが出て来るか、あるいはカムパネルラがどこかの人の知らない洲すにでも着ついて立っていて誰だれかの来るのを待まっているかというような気がしてしかたないらしいのでした。けれどもにわかにカムパネルラのお父さんがきっぱり言いいました。 「もう駄目だめです。落おちてから四十五分たちましたから」 ジョバンニは思わずかけよって博士はかせの前に立って、ぼくはカムパネルラの行った方を知っています、ぼくはカムパネルラといっしょに歩いていたのです、と言いおうとしましたが、もうのどがつまってなんとも言いえませんでした。すると博士はかせはジョバンニがあいさつに来たとでも思ったものですか、しばらくしげしげジョバンニを見ていましたが、 「あなたはジョバンニさんでしたね。どうも今晩こんばんはありがとう」とていねいに言いいました。 ジョバンニは何も言いえずにただおじぎをしました。 「あなたのお父さんはもう帰っていますか」博士はかせは堅かたく時計とけいを握にぎったまま、またききました。 「いいえ」ジョバンニはかすかに頭をふりました。 「どうしたのかなあ、ぼくには一昨日おとといたいへん元気な便たよりがあったんだが。今日きょうあたりもう着つくころなんだが。船ふねが遅おくれたんだな。ジョバンニさん。あした放課後ほうかごみなさんとうちへ遊あそびに来てくださいね」 そう言いいながら博士はかせはまた、川下の銀河ぎんがのいっぱいにうつった方へじっと眼めを送おくりました。 ジョバンニはもういろいろなことで胸むねがいっぱいで、なんにも言いえずに博士はかせの前をはなれて、早くお母さんに牛乳ぎゅうにゅうを持もって行って、お父さんの帰ることを知らせようと思うと、もういちもくさんに河原かわらを街まちの方へ走りました。 EOT; protected static function explode($text) { $chars = 
array(); foreach (preg_split('//u', preg_replace('/\s+/', '', $text)) as $char) { if ($char !== '') { $chars[] = $char; } } return $chars; } protected static function strlen($text) { return function_exists('mb_strlen') ? mb_strlen($text, 'UTF-8') : count(static::explode($text)); } protected static function validStart($word) { return !in_array($word, static::$notBeginPunct); } protected static function appendEnd($text) { // extract the last char of $text if (function_exists('mb_substr')) { $last = mb_substr($text, mb_strlen($text)-1, 'UTF-8'); } else { $chars = static::split($text); $last = end($chars); } // if the last char is a not-valid-end punctuation, remove it if (in_array($last, static::$notEndPunct)) { $text = preg_replace('/.$/u', '', $text); } // if the last char is not a valid punctuation, append a default one. return in_array($last, static::$endPunct) ? $text : $text.'。'; } }
{ "pile_set_name": "Github" }
"use strict"; function __export(m) { for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p]; } Object.defineProperty(exports, "__esModule", { value: true }); __export(require("rxjs-compat/observable/PairsObservable")); //# sourceMappingURL=PairsObservable.js.map
{ "pile_set_name": "Github" }
<!doctype html> <!--[if lte IE 8]><SCRIPT src='source/excanvas.js'></script><![endif]--><SCRIPT src='../ChartNew.js'></script> <!SCRIPT src='Add-ins/shapesInChart.js'><!/script> <SCRIPT> defCanvasWidth=1200; defCanvasHeight=600; var mydata1 = { labels : ["January","February","March","April","May","June","July"], datasets : [ { fillColor : "rgba(220,220,220,0.5)", strokeColor : "rgba(220,220,220,1)", pointColor : "rgba(220,220,220,1)", pointStrokeColor : "#fff", data : [0.65,0.59,-0.90,0.81,0.56,0.55,0.40], title : "pFirst data" }, { fillColor : "rgba(151,187,205,0.5)", strokeColor : "rgba(151,187,205,1)", pointColor : "rgba(151,187,205,1)", pointStrokeColor : "#fff", data : [0.28,0.48,0.40,-0.19,-0.96,0.27,1.00], title : "pSecond data" } ] } var mydata3 = { labels : ["January","February","March","April","May","June","July"], datasets : [ { fillColor : "rgba(220,220,220,0.5)", strokeColor : "rgba(220,220,220,1)", pointColor : "rgba(220,220,220,1)", pointStrokeColor : "#fff", pointDotRadius : [10,20,15,21,7,34,26], data : [0.65,0.59,-0.90,0.81,0.56,0.55,0.40], title : "pFirst data" }, { fillColor : "rgba(151,187,205,0.5)", strokeColor : "rgba(151,187,205,1)", pointColor : "rgba(151,187,205,1)", pointStrokeColor : "#fff", pointDotRadius : [10,20,15,21,7,34,26], data : [0.28,0.48,0.40,-0.19,-0.96,0.27,1.00], title : "pSecond data" } ] } var mydata2 = { labels : ["2014","2015","2016"], datasets : [ { data : [30,15,14], fillColor : "#D97041", title : "data1" }, { data : [90,,25], fillColor : "#C7604C", title : "data2" }, { data : [24,10], fillColor : "#21323D", title : "data3" }, { data : [58], fillColor : "#9D9B7F", title : "data4" }, { data : [,82,17], fillColor : "#7D4F6D", title : "data5" }, { data : [,8,], fillColor : "#584A5E", title : "data6" } ] }; var newopts = { // inGraphDataShow : isHighLighted, highLight : true, datasetFill : true, scaleLabel: "<%=value%>", scaleFontSize : 16, canvasBorders : true, graphTitle : "Sample ChartNew.js", graphTitleFontFamily : 
"'Arial'", graphTitleFontSize : 24, graphTitleFontStyle : "bold", graphTitleFontColor : "#666", inGraphDataShow : true, footNote : "Footnote for the graph", legend : true, yAxisLabel : "Y Axis Label", xAxisLabel : "X Axis Label", yAxisUnit : "Y Unit", annotateDisplay : true, animation : false, barBorderRadius : 5, dynamicDisplay : false } </SCRIPT> <html> <meta http-equiv="Content-Type" content="text/html;charset=utf-8" /> <head> <title>Demo ChartNew.js</title> </head> <body> <!div id="divCursor" style="position:absolute"> <!/div> <center> <FONT SIZE=6><B>Demo of ChartNew.js !</B></FONT> <BR> <script> document.write("<canvas id=\"canvas_radar\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>"); document.write("<canvas id=\"canvas_line\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>"); document.write("<canvas id=\"canvas_bubble\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>"); document.write("<canvas id=\"canvas_bar\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>"); document.write("<canvas id=\"canvas_stackedbar\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>"); document.write("<canvas id=\"canvas_horizontalbar\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>"); document.write("<canvas id=\"canvas_horizontalstackedbar\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>"); document.write("<canvas id=\"canvas_polararea\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>"); document.write("<canvas id=\"canvas_pie\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>"); document.write("<canvas id=\"canvas_doughnut\" height=\""+defCanvasHeight+"\" width=\""+defCanvasWidth+"\"></canvas>"); window.onload = function() { if(1==1) { var myLine = new Chart(document.getElementById("canvas_line").getContext("2d")).Line(mydata1,newopts); var myLine = new 
Chart(document.getElementById("canvas_bubble").getContext("2d")).Bubble(mydata3,newopts); var myLine = new Chart(document.getElementById("canvas_bar").getContext("2d")).Bar(mydata1,newopts); var myLine = new Chart(document.getElementById("canvas_stackedbar").getContext("2d")).StackedBar(mydata1,newopts); var myLine = new Chart(document.getElementById("canvas_horizontalbar").getContext("2d")).HorizontalBar(mydata1,newopts); var myLine = new Chart(document.getElementById("canvas_horizontalstackedbar").getContext("2d")).HorizontalStackedBar(mydata1,newopts); var myLine = new Chart(document.getElementById("canvas_radar").getContext("2d")).Radar(mydata1,newopts); var myLine = new Chart(document.getElementById("canvas_polararea").getContext("2d")).PolarArea(mydata2,newopts); var myLine = new Chart(document.getElementById("canvas_pie").getContext("2d")).Pie(mydata2,newopts); var myLine = new Chart(document.getElementById("canvas_doughnut").getContext("2d")).Doughnut(mydata2,newopts); } } </script> </body> </html>
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: 4dbba0f12e8184b3a8ab30d670ca9692 timeCreated: 1432070018 licenseType: Pro MonoImporter: serializedVersion: 2 defaultReferences: [] executionOrder: 0 icon: {instanceID: 0} userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
import React, { PureComponent } from 'react'; import PropTypes from 'prop-types'; import { Input, Icon, AutoComplete } from 'antd'; import classNames from 'classnames'; import Debounce from 'lodash-decorators/debounce'; import Bind from 'lodash-decorators/bind'; import styles from './index.less'; export default class HeaderSearch extends PureComponent { static propTypes = { className: PropTypes.string, placeholder: PropTypes.string, onSearch: PropTypes.func, onPressEnter: PropTypes.func, defaultActiveFirstOption: PropTypes.bool, dataSource: PropTypes.array, defaultOpen: PropTypes.bool, onVisibleChange: PropTypes.func, }; static defaultProps = { defaultActiveFirstOption: false, onPressEnter: () => {}, onSearch: () => {}, className: '', placeholder: '', dataSource: [], defaultOpen: false, onVisibleChange: () => {}, }; static getDerivedStateFromProps(props) { if ('open' in props) { return { searchMode: props.open, }; } return null; } constructor(props) { super(props); this.state = { searchMode: props.defaultOpen, value: '', }; } componentWillUnmount() { clearTimeout(this.timeout); } onKeyDown = e => { if (e.key === 'Enter') { const { onPressEnter } = this.props; const { value } = this.state; this.timeout = setTimeout(() => { onPressEnter(value); // Fix duplicate onPressEnter }, 0); } }; onChange = value => { const { onChange } = this.props; this.setState({ value }); if (onChange) { onChange(value); } }; enterSearchMode = () => { const { onVisibleChange } = this.props; onVisibleChange(true); this.setState({ searchMode: true }, () => { const { searchMode } = this.state; if (searchMode) { this.input.focus(); } }); }; leaveSearchMode = () => { this.setState({ searchMode: false, value: '', }); }; // NOTE: 不能小于500,如果长按某键,第一次触发auto repeat的间隔是500ms,小于500会导致触发2次 @Bind() @Debounce(500, { leading: true, trailing: false, }) debouncePressEnter() { const { onPressEnter } = this.props; const { value } = this.state; onPressEnter(value); } render() { const { className, placeholder, 
open, ...restProps } = this.props; const { searchMode, value } = this.state; delete restProps.defaultOpen; // for rc-select not affected const inputClass = classNames(styles.input, { [styles.show]: searchMode, }); return ( <span className={classNames(className, styles.headerSearch)} onClick={this.enterSearchMode} onTransitionEnd={({ propertyName }) => { if (propertyName === 'width' && !searchMode) { const { onVisibleChange } = this.props; onVisibleChange(searchMode); } }} > <Icon type="search" key="Icon" /> <AutoComplete key="AutoComplete" {...restProps} className={inputClass} value={value} onChange={this.onChange} > <Input ref={node => { this.input = node; }} aria-label={placeholder} placeholder={placeholder} onKeyDown={this.onKeyDown} onBlur={this.leaveSearchMode} /> </AutoComplete> </span> ); } }
{ "pile_set_name": "Github" }
/* * Copyright (C) 2013-2015 RoboVM AB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.robovm.apple.foundation; /*<imports>*/ import java.io.*; import java.nio.*; import java.util.*; import org.robovm.objc.*; import org.robovm.objc.annotation.*; import org.robovm.objc.block.*; import org.robovm.rt.*; import org.robovm.rt.annotation.*; import org.robovm.rt.bro.*; import org.robovm.rt.bro.annotation.*; import org.robovm.rt.bro.ptr.*; import org.robovm.apple.corefoundation.*; import org.robovm.apple.uikit.*; import org.robovm.apple.coretext.*; import org.robovm.apple.coreanimation.*; import org.robovm.apple.coredata.*; import org.robovm.apple.coregraphics.*; import org.robovm.apple.coremedia.*; import org.robovm.apple.security.*; import org.robovm.apple.dispatch.*; /*</imports>*/ /*<javadoc>*/ /*</javadoc>*/ /*<annotations>*/@Marshaler(ValuedEnum.AsMachineSizedSIntMarshaler.class)/*</annotations>*/ public enum /*<name>*/NSOperationQueuePriority/*</name>*/ implements ValuedEnum { /*<values>*/ VeryLow(-8L), Low(-4L), Normal(0L), High(4L), VeryHigh(8L); /*</values>*/ private final long n; private /*<name>*/NSOperationQueuePriority/*</name>*/(long n) { this.n = n; } public long value() { return n; } public static /*<name>*/NSOperationQueuePriority/*</name>*/ valueOf(long n) { for (/*<name>*/NSOperationQueuePriority/*</name>*/ v : values()) { if (v.n == n) { return v; } } throw new IllegalArgumentException("No constant with value " + n + " found in " + 
/*<name>*/NSOperationQueuePriority/*</name>*/.class.getName()); } }
{ "pile_set_name": "Github" }
/* * Copyright (c) 2018-2020 Atmosphère-NX * * This program is free software; you can redistribute it and/or modify it * under the terms and conditions of the GNU General Public License, * version 2, as published by the Free Software Foundation. * * This program is distributed in the hope it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ #pragma once #include <mesosphere/kern_common.hpp> #include <mesosphere/kern_select_interrupt_name.hpp> #if defined(ATMOSPHERE_ARCH_ARM64) #include <mesosphere/arch/arm64/kern_k_interrupt_controller.hpp> namespace ams::kern { using ams::kern::arch::arm64::KInterruptController; } #elif defined(ATMOSPHERE_ARCH_ARM) #include <mesosphere/arch/arm/kern_k_interrupt_controller.hpp> namespace ams::kern { using ams::kern::arch::arm::KInterruptController; } #else #error "Unknown architecture for KInterruptController" #endif
{ "pile_set_name": "Github" }
@font-face { font-family: 'Source Sans Pro'; src: url('source-sans-pro-regular.eot'); src: url('source-sans-pro-regular.eot?#iefix') format('embedded-opentype'), url('source-sans-pro-regular.woff') format('woff'), url('source-sans-pro-regular.ttf') format('truetype'); font-weight: normal; font-style: normal; } @font-face { font-family: 'Source Sans Pro'; src: url('source-sans-pro-italic.eot'); src: url('source-sans-pro-italic.eot?#iefix') format('embedded-opentype'), url('source-sans-pro-italic.woff') format('woff'), url('source-sans-pro-italic.ttf') format('truetype'); font-weight: normal; font-style: italic; } @font-face { font-family: 'Source Sans Pro'; src: url('source-sans-pro-semibold.eot'); src: url('source-sans-pro-semibold.eot?#iefix') format('embedded-opentype'), url('source-sans-pro-semibold.woff') format('woff'), url('source-sans-pro-semibold.ttf') format('truetype'); font-weight: 600; font-style: normal; } @font-face { font-family: 'Source Sans Pro'; src: url('source-sans-pro-semibolditalic.eot'); src: url('source-sans-pro-semibolditalic.eot?#iefix') format('embedded-opentype'), url('source-sans-pro-semibolditalic.woff') format('woff'), url('source-sans-pro-semibolditalic.ttf') format('truetype'); font-weight: 600; font-style: italic; }
{ "pile_set_name": "Github" }
# SPDX-License-Identifier: GPL-2.0 # Copyright (C) 2016-present Team LibreELEC (https://libreelec.tv) PKG_NAME="mediainfo" PKG_VERSION="18.05" PKG_SHA256="d94093aaf910759f302fb6b5ac23540a217eb940cfbb21834de2381de975fa65" PKG_LICENSE="GPL" PKG_SITE="http://mediaarea.net/en/MediaInfo/Download/Source" PKG_URL="http://mediaarea.net/download/source/mediainfo/${PKG_VERSION}/mediainfo_${PKG_VERSION}.tar.xz" PKG_DEPENDS_TARGET="toolchain libmediainfo" PKG_LONGDESC="A convenient unified display of the most relevant technical and tag data for video and audio files." PKG_TOOLCHAIN="manual" pre_configure_target() { export LDFLAGS="$LDFLAGS -L$(get_build_dir libmediainfo)/Project/GNU/Library/.libs -L$(get_build_dir libzen)/Project/GNU/Library/.libs" export LIBS="-lmediainfo -lzen" } make_target() { cd Project/GNU/CLI do_autoreconf echo $PATH ./configure \ --host=$TARGET_NAME \ --build=$HOST_NAME \ --prefix=/usr make }
{ "pile_set_name": "Github" }
import warnings warnings.warn("Importing discussion.django_comment_client.tests.utils instead of lms.djangoapps.discussion.django_comment_client.tests.utils is deprecated", stacklevel=2) from lms.djangoapps.discussion.django_comment_client.tests.utils import *
{ "pile_set_name": "Github" }
package io.youi.net

import scala.reflect.macros.blackbox

/**
 * A URL path represented as an ordered list of [[PathPart]]s.
 *
 * Parts may include `..` (UpLevelPathPart), `.` (SameLevelPathPart),
 * literal segments, and named `:argument` placeholders.
 */
case class Path(parts: List[PathPart]) extends Location {
  // The path with `..` and `.` segments resolved away:
  // `..` drops the previous entry, `.` is ignored, everything else is kept.
  lazy val absolute: Path = {
    var entries = Vector.empty[PathPart]
    parts.foreach {
      case UpLevelPathPart => entries = entries.dropRight(1)
      case SameLevelPathPart => // Ignore
      case part => entries = entries :+ part
    }
    Path(entries.toList)
  }
  // Absolute path rendered with each segment URL-encoded, e.g. "/a/b%20c".
  lazy val encoded: String = absolute.parts.map(_.value).map(URL.encode).mkString("/", "/", "")
  // Absolute path rendered with raw (unencoded) segment values.
  lazy val decoded: String = absolute.parts.map(_.value).mkString("/", "/", "")
  // Names of all `:argument` placeholder segments, in order.
  lazy val arguments: List[String] = parts.collect {
    case part: ArgumentPathPart => part.name
  }
  // Substitute literal values for any placeholder segments whose name
  // appears in `arguments`; unmatched placeholders are left as-is.
  def withArguments(arguments: Map[String, String]): Path = copy(parts = parts.map {
    case part: ArgumentPathPart if arguments.contains(part.name) => new LiteralPathPart(arguments(part.name))
    case part => part
  })
  // Render the path with an appended "?k=v&..." query string.
  // NOTE(review): params are not URL-encoded here — presumably callers
  // pass pre-encoded values; confirm before relying on it.
  def withParams(params: (String, String)*): String = if (params.nonEmpty) {
    s"$this?${params.map(t => s"${t._1}=${t._2}").mkString("&")}"
  } else {
    toString
  }
  // Given a literal path that matches this template path part-for-part,
  // return a map from placeholder name to the corresponding literal value.
  // Requires both paths to have the same number of parts.
  def extractArguments(literal: Path): Map[String, String] = {
    assert(parts.length == literal.parts.length, s"Literal path must have the same number of parts as the one being extracted for")
    parts.zip(literal.parts).flatMap {
      case (p1, p2) => p1 match {
        case ap: ArgumentPathPart => Some(ap.name -> p2.value)
        case _ => None
      }
    }.toMap
  }
  // Resolve `path` against this one: absolute paths replace entirely,
  // relative paths replace the last segment (like a browser URL).
  def append(path: String): Path = if (path.startsWith("/")) {
    Path.parse(path)
  } else {
    val left = parts.dropRight(1)
    val right = Path.parse(path, absolutize = false)
    Path(left ::: right.parts)
  }
  // Simple concatenation of the two part lists (no normalization).
  def merge(that: Path): Path = Path(this.parts ::: that.parts)
  // Structural equality delegated to PathPart.equals per part pair.
  override def equals(obj: Any): Boolean = obj match {
    case that: Path if this.parts.length == that.parts.length => {
      this.parts.zip(that.parts).forall {
        case (thisPart, thatPart) => PathPart.equals(thisPart, thatPart)
      }
    }
    case _ => false
  }
  override def toString: String = encoded
}

object Path {
  val empty = Path(Nil)

  /**
   * Parse a path string into a [[Path]]. A leading "/" is stripped,
   * segments are split on '/', URL-decoded, and turned into PathParts
   * (PathPart.apply may reject empty/invalid segments via Option).
   *
   * @param absolutize when true (default), resolve `.`/`..` segments
   */
  def parse(path: String, absolutize: Boolean = true): Path = {
    val updated = if (path.startsWith("/")) {
      path.substring(1)
    } else {
      path
    }
    val parts = updated.split('/').toList.map(URL.decode).flatMap(PathPart.apply)
    Path(parts) match {
      case p if absolutize => p.absolute
      case p => p
    }
  }

  // Macro implementation backing a `path"..."` string interpolator:
  // validates at compile time that the interpolation contains exactly one
  // string literal (no runtime arguments), parses it, and lifts the
  // resulting PathParts into a Path construction expression.
  def interpolate(c: blackbox.Context)(args: c.Expr[Any]*): c.Expr[Path] = {
    import c.universe._
    // Teach quasiquotes how to embed a PathPart: re-create it at runtime
    // from its string value, failing loudly if the value is invalid.
    implicit val liftablePathPart: Liftable[PathPart] = new Liftable[PathPart] {
      override def apply(value: PathPart): c.universe.Tree = {
        q"""io.youi.net.PathPart(${value.value}).getOrElse(throw new RuntimeException("Invalid PathPart value"))"""
      }
    }
    c.prefix.tree match {
      case Apply(_, List(Apply(_, rawParts))) => {
        val parts = rawParts map {
          case t @ Literal(Constant(const: String)) => (const, t.pos)
        }
        val b = new StringBuilder
        parts.zipWithIndex.foreach {
          case ((raw, _), index) => {
            // More than one part means interpolated arguments were used,
            // which this compile-time parser does not support.
            if (index > 0) {
              c.abort(c.enclosingPosition, "Path interpolation can only contain string literals. Use Path.parse for runtime parsing.")
            }
            b.append(raw)
          }
        }
        val path = Path.parse(b.toString())
        c.Expr[Path](q"Path(List(..${path.parts}))")
      }
      case _ => c.abort(c.enclosingPosition, "Bad usage of Path interpolation.")
    }
  }
}
{ "pile_set_name": "Github" }
#!/usr/bin/env sh

##############################################################################
##
##  Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
# Resolve APP_HOME to the physical (-P) directory containing this script,
# then restore the caller's working directory.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

warn () {
    echo "$*"
}

die () {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    # (plain /bin/sh has no arrays, hence the eval'd args0..argsN dance).
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"`                                 ### Determine if an option

        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then                    ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Escape application args
# save() single-quotes each argument (escaping embedded quotes) and joins
# them with trailing backslashes so the eval below re-splits them safely.
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=$(save "$@")

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
  cd "$(dirname "$0")"
fi

exec "$JAVACMD" "$@"
{ "pile_set_name": "Github" }
kopenbsd /kopenbsd -h
kopenbsd_ramdisk /ramdisk
boot
# Shouldn't happen
halt
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <meta http-equiv="content-type" content="text/html; charset=UTF-8" /> <title>The page you were looking for doesn't exist (404)</title> <style type="text/css"> body { background-color: #fff; color: #666; text-align: center; font-family: arial, sans-serif; } div.dialog { width: 25em; padding: 0 4em; margin: 4em auto 0 auto; border: 1px solid #ccc; border-right-color: #999; border-bottom-color: #999; } h1 { font-size: 100%; color: #f00; line-height: 1.5em; } </style> </head> <body> <!-- This file lives in public/404.html --> <div class="dialog"> <h1>The page you were looking for doesn't exist.</h1> <p>You may have mistyped the address or the page may have moved.</p> </div> </body> </html>
{ "pile_set_name": "Github" }
<resources> <!--Welcome Activity--> <string name="modules">Moodulid</string> <string name="superuser">Superkasutaja</string> <string name="logs">Logi</string> <string name="settings">Seaded</string> <string name="install">Installi</string> <string name="unsupport_magisk_title">Mittetoetatud Magisk\'i versioon</string> <!--Status Fragment--> <string name="invalid_update_channel">Sobimatu uuenduste kanal</string> <string name="safetynet_api_error">SafetyNet\'i API viga</string> <string name="safetynet_res_invalid">Vastus on sobimatu.</string> <string name="keep_force_encryption">Säilita sunnitud krüpteering</string> <string name="keep_dm_verity">Säilita AVB 2.0/dm-verity</string> <string name="recovery_mode">Taastusrežiim</string> <string name="uninstall_magisk_title">Eemalda Magisk</string> <string name="uninstall_magisk_msg">Kõik moodulid keelatakse/eemaldatakse. Juurkasutaja eemaldatakse ning su andmed potensiaalselt krüpteeritakse, kui need ei ole juba krüpteeritud.</string> <string name="update">Uuenda</string> <!--Module Fragment--> <string name="no_info_provided">(Info puudub)</string> <string name="reboot_recovery">Taaskäivita taastusesse</string> <string name="reboot_bootloader">Taaskäivita käivitushaldurisse</string> <string name="reboot_download">Taaskäivita allalaadimisrežiimi</string> <string name="reboot_edl">Taaskäivita EDL\'i</string> <!--Repo Fragment--> <string name="update_available">Uuendus saadaval</string> <string name="home_installed_version">Installitud</string> <string name="sorting_order">Sorteerimisjärjekord</string> <!--Log Fragment--> <string name="menuSaveLog">Salvesta logi</string> <string name="menuClearLog">Tühjenda logi nüüd</string> <string name="logs_cleared">Logi edukalt tühjendatud.</string> <!--About Activity--> <string name="app_changelog">Muutuste logi</string> <!-- System Components, Notifications --> <string name="update_channel">Magisk\'i uuendused</string> <string name="progress_channel">Edenemise teated</string> <string 
name="download_complete">Allalaadimine valmis</string> <string name="download_file_error">Faili allalaadimisel esines viga</string> <string name="download_open_parent">Kuva ülemkaustas</string> <string name="download_open_self">Kuva fail</string> <string name="magisk_update_title">Magisk\'ile on uuendus saadaval!</string> <string name="manager_update_title">Magisk Manager\'ile on uuendus saadaval!</string> <!-- Installation --> <string name="manager_download_install">Vajuta allalaadimiseks ja installimiseks.</string> <string name="download_zip_only">Laadi ainult ZIP alla</string> <string name="direct_install">Otsene install (soovitatud)</string> <string name="install_inactive_slot">Installi ebaaktiivsesse lahtrisse (pärast üle-õhu uuendust)</string> <string name="install_inactive_slot_msg">Sinu seade SUNNITAKSE peale taaskäivitust käivituma praegusesse ebaaktiivsesse lahtrisse!\nKasuta seda valikut vaid peale üle-õhu uuenduse teostamist.\nJätkad?</string> <string name="setup_title">Lisaseadistus</string> <string name="select_patch_file">Vali ja paika fail</string> <string name="patch_file_msg">Vali toortõmmis (*.img) või ODIN tar-fail (*.tar)</string> <string name="reboot_delay_toast">Taaskäivitamine 5 sekundi pärast…</string> <!--Toasts, Dialogs--> <string name="repo_install_title">Installi %1$s</string> <string name="repo_install_msg">Kas soovid kohe installida %1$s?</string> <string name="download">Allalaadimine</string> <string name="reboot">Taaskäivita</string> <string name="release_notes">Väljalaske märkmed</string> <string name="repo_cache_cleared">Hoidla vahemälu tühjendatud</string> <string name="flashing">Välgutamine</string> <string name="done">Valmis!</string> <string name="failure">Ebaõnnestus</string> <string name="hide_manager_title">Peidan Magisk Manager\'i…</string> <string name="hide_manager_fail_toast">Magisk Manager\'i peitmine ebaõnnestus.</string> <string name="open_link_failed_toast">Lingi avamiseks sobivat rakendust ei leitud.</string> 
<string name="complete_uninstall">Täielik eemaldus</string>
<string name="restore_img">Taasta tõmmised</string>
<string name="restore_img_msg">Taastamine…</string>
<string name="restore_done">Taastamine valmis!</string>
<string name="restore_fail">Originaalne varundus puudub!</string>
<string name="proprietary_title">Laadi alla suletud koodi</string>
<string name="proprietary_notice">Magisk Manager on vaba ja avatud lähtekoodiga ning ei sisalda Google\'i suletud SafetyNet\'i API koodi.\n\nKas lubad Magisk Manager\'il SafetyNet\'i kontrollide jaoks laadida alla laiendus (sisaldab GoogleApiClient\'i)?</string>
<string name="setup_fail">Seadistus ebaõnnestus.</string>
<string name="env_fix_title">Vajab lisaseadistust</string>
<string name="env_fix_msg">Sinu seade vajab lisaseadistust, et Magisk töötaks korralikult. Laadime alla Magisk\'i seadistus-zip\'i, kas soovid kohe jätkata?</string>
<string name="setup_msg">Käivitan keskkonnaseadistust…</string>
<!--Settings Activity -->
<string name="settings_download_path_title">Allalaadimise failitee</string>
<string name="settings_download_path_message">Failid salvestatakse kausta %1$s</string>
<string name="settings_clear_cache_title">Tühjenda hoidla vahemälu</string>
<string name="settings_clear_cache_summary">Tühjenda vahemälus olev teave võrgus olevate hoidlate kohta.
See sunnib rakendust võrgust värskendama.</string> <string name="settings_hide_manager_title">Peida Magisk Manager</string> <string name="settings_hide_manager_summary">Taaspaki Magisk Manager juhusliku nimega.</string> <string name="settings_restore_manager_title">Taasta Magisk Manager</string> <string name="settings_restore_manager_summary">Taasta Magisk Manager\'i originaalpakett</string> <string name="language">Keel</string> <string name="system_default">(Süsteemi vaikesäte)</string> <string name="settings_check_update_title">Kontrolli uuendusi</string> <string name="settings_check_update_summary">Kontrolli taustal perioodiliselt uuendusi.</string> <string name="settings_update_channel_title">Uuenduste kanal</string> <string name="settings_update_stable">Stabiilne</string> <string name="settings_update_beta">Beeta</string> <string name="settings_update_custom">Kohandatud</string> <string name="settings_update_custom_msg">Sisesta kohandatud URL</string> <string name="settings_magiskhide_summary">Peida Magisk erinevate tuvastuste eest</string> <string name="settings_hosts_title">Süsteemivaba hosts</string> <string name="settings_hosts_summary">Süsteemivaba hosts-tugi reklaamiblokeerijatest rakendustele</string> <string name="settings_hosts_toast">Süsteemivaba hosts\'i moodul lisatud</string> <string name="settings_su_app_adb">Rakendused ja ADB</string> <string name="settings_su_app">Ainult rakendused</string> <string name="settings_su_adb">Ainult ADB</string> <string name="settings_su_disable">Keelatud</string> <string name="settings_su_request_10">10 sekundit</string> <string name="settings_su_request_15">15 sekundit</string> <string name="settings_su_request_20">20 sekundit</string> <string name="settings_su_request_30">30 sekundit</string> <string name="settings_su_request_45">45 sekundit</string> <string name="settings_su_request_60">60 sekundit</string> <string name="superuser_access">Superkasutaja ligipääs</string> <string name="auto_response">Automaatne 
vastus</string> <string name="request_timeout">Taotluse ajalõpp</string> <string name="superuser_notification">Superkasutaja teade</string> <string name="settings_su_reauth_title">Taas-autendi peale täiendust</string> <string name="settings_su_reauth_summary">Taas-autendi superkasutaja õigused peale rakenduse täiendamist</string> <string name="multiuser_mode">Mitmikkasutaja režiim</string> <string name="settings_owner_only">Ainult seadme omanik</string> <string name="settings_owner_manage">Seadme omaniku hallatud</string> <string name="settings_user_independent">Kasutajast sõltumatu</string> <string name="owner_only_summary">Ainult omanikul on juurkasutaja õigused.</string> <string name="owner_manage_summary">Ainult omanik saab hallata juurkasutaja ligipääsu ja saada taotlusküsimusi.</string> <string name="user_indepenent_summary">Igal kasutajal on oma isiklikud juurkasutaja reeglid.</string> <string name="mount_namespace_mode">Nimeruumi monteerimisrežiim</string> <string name="settings_ns_global">Globaalne nimeruum</string> <string name="settings_ns_requester">Võta nimeruum üle</string> <string name="settings_ns_isolate">Isoleeritud nimeruum</string> <string name="global_summary">Kõik juurkasutaja sessioonid kasutavad globaalset monteerimise nimeruumi.</string> <string name="requester_summary">Juurkasutaja sessioonid võtavad üle selle taotleja nimeruumi.</string> <string name="isolate_summary">Iga juurkasutaja sessioon saab oma isoleeritud nimeruumi.</string> <!--Superuser--> <string name="su_request_title">Superkasutaja taotlus</string> <string name="deny">Keela</string> <string name="prompt">Küsi</string> <string name="grant">Luba</string> <string name="su_warning">Annab täieliku ligipääsu sinu seadmele.\nKeela, kui sa pole kindel!</string> <string name="forever">Igavesti</string> <string name="once">Üks kord</string> <string name="tenmin">10 min</string> <string name="twentymin">20 min</string> <string name="thirtymin">30 min</string> <string name="sixtymin">60 
min</string> <string name="su_allow_toast">Rakendusele %1$s anti superkasutaja õigused</string> <string name="su_deny_toast">Rakendusel %1$s keelati superkasutaja õigused</string> <string name="su_snack_grant">Superkasutaja õigused antud rakendusele %1$s</string> <string name="su_snack_deny">Superkasutaja õigused keelatud rakendusele %1$s</string> <string name="su_snack_notif_on">Teated lubatud rakendusele %1$s</string> <string name="su_snack_notif_off">Teated keelatud rakendusele %1$s</string> <string name="su_snack_log_on">Logimine lubatud rakendusele %1$s</string> <string name="su_snack_log_off">Logimine keelatud rakendusele %1$s</string> <string name="su_revoke_title">Eemaldad?</string> <string name="su_revoke_msg">Kinnitad rakenduse %1$s õiguste eemaldamise?</string> <string name="toast">Hüpik</string> <string name="none">Puudub</string> <!--Superuser logs--> <string name="pid">PID: %1$d</string> <string name="target_uid">Siht-UID: %1$d</string> <!-- MagiskHide --> <string name="show_system_app">Kuva süsteemirakendused</string> </resources>
{ "pile_set_name": "Github" }
//! moment.js locale configuration
//! locale : Luxembourgish (lb)
//! author : mweimerskirch : https://github.com/mweimerskirch, David Raison : https://github.com/kwisatz

// UMD wrapper: CommonJS -> require('../moment'), AMD -> define, else global.
(function (global, factory) {
   typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('../moment')) :
   typeof define === 'function' && define.amd ? define(['moment'], factory) :
   factory(global.moment)
}(this, function (moment) { 'use strict';

    // Singular relative-time units: index 0 is the form used without a
    // suffix ("eng Minutt"), index 1 the dative form used with one.
    function processRelativeTime(number, withoutSuffix, key, isFuture) {
        var format = {
            'm': ['eng Minutt', 'enger Minutt'],
            'h': ['eng Stonn', 'enger Stonn'],
            'd': ['een Dag', 'engem Dag'],
            'M': ['ee Mount', 'engem Mount'],
            'y': ['ee Joer', 'engem Joer']
        };
        return withoutSuffix ? format[key][0] : format[key][1];
    }

    // Future prefix: "a" vs "an" depending on the Eifeler Regel
    // (based on the leading number of the already-formatted string).
    function processFutureTime(string) {
        var number = string.substr(0, string.indexOf(' '));
        if (eifelerRegelAppliesToNumber(number)) {
            return 'a ' + string;
        }
        return 'an ' + string;
    }

    // Past prefix: "viru" vs "virun", same rule as above.
    function processPastTime(string) {
        var number = string.substr(0, string.indexOf(' '));
        if (eifelerRegelAppliesToNumber(number)) {
            return 'viru ' + string;
        }
        return 'virun ' + string;
    }

    /**
     * Returns true if the word before the given number loses the '-n' ending.
     * e.g. 'an 10 Deeg' but 'a 5 Deeg'
     *
     * @param number {integer}
     * @returns {boolean}
     */
    function eifelerRegelAppliesToNumber(number) {
        // NOTE: recursive calls may pass fractional values (e.g. 25/10);
        // this parseInt truncates them back to an integer digit.
        number = parseInt(number, 10);
        if (isNaN(number)) {
            return false;
        }
        if (number < 0) {
            // Negative Number --> always true
            return true;
        } else if (number < 10) {
            // Only 1 digit
            if (4 <= number && number <= 7) {
                return true;
            }
            return false;
        } else if (number < 100) {
            // 2 digits
            var lastDigit = number % 10, firstDigit = number / 10;
            if (lastDigit === 0) {
                return eifelerRegelAppliesToNumber(firstDigit);
            }
            return eifelerRegelAppliesToNumber(lastDigit);
        } else if (number < 10000) {
            // 3 or 4 digits --> recursively check first digit
            while (number >= 10) {
                number = number / 10;
            }
            return eifelerRegelAppliesToNumber(number);
        } else {
            // Anything larger than 4 digits: recursively check first n-3 digits
            number = number / 1000;
            return eifelerRegelAppliesToNumber(number);
        }
    }

    // Register the locale with moment. Week starts Monday; ISO-style
    // first week (contains Jan 4th).
    var lb = moment.defineLocale('lb', {
        months: 'Januar_Februar_Mäerz_Abrëll_Mee_Juni_Juli_August_September_Oktober_November_Dezember'.split('_'),
        monthsShort: 'Jan._Febr._Mrz._Abr._Mee_Jun._Jul._Aug._Sept._Okt._Nov._Dez.'.split('_'),
        weekdays: 'Sonndeg_Méindeg_Dënschdeg_Mëttwoch_Donneschdeg_Freideg_Samschdeg'.split('_'),
        weekdaysShort: 'So._Mé._Dë._Më._Do._Fr._Sa.'.split('_'),
        weekdaysMin: 'So_Mé_Dë_Më_Do_Fr_Sa'.split('_'),
        longDateFormat: {
            LT: 'H:mm [Auer]',
            LTS: 'H:mm:ss [Auer]',
            L: 'DD.MM.YYYY',
            LL: 'D. MMMM YYYY',
            LLL: 'D. MMMM YYYY H:mm [Auer]',
            LLLL: 'dddd, D. MMMM YYYY H:mm [Auer]'
        },
        calendar: {
            sameDay: '[Haut um] LT',
            sameElse: 'L',
            nextDay: '[Muer um] LT',
            nextWeek: 'dddd [um] LT',
            lastDay: '[Gëschter um] LT',
            lastWeek: function () {
                // Different date string for 'Dënschdeg' (Tuesday) and 'Donneschdeg' (Thursday) due to phonological rule
                switch (this.day()) {
                    case 2:
                    case 4:
                        return '[Leschten] dddd [um] LT';
                    default:
                        return '[Leschte] dddd [um] LT';
                }
            }
        },
        relativeTime : {
            future : processFutureTime,
            past : processPastTime,
            s : 'e puer Sekonnen',
            m : processRelativeTime,
            mm : '%d Minutten',
            h : processRelativeTime,
            hh : '%d Stonnen',
            d : processRelativeTime,
            dd : '%d Deeg',
            M : processRelativeTime,
            MM : '%d Méint',
            y : processRelativeTime,
            yy : '%d Joer'
        },
        ordinalParse: /\d{1,2}\./,
        ordinal: '%d.',
        week: {
            dow: 1, // Monday is the first day of the week.
            doy: 4  // The week that contains Jan 4th is the first week of the year.
        }
    });

    return lb;
}));
{ "pile_set_name": "Github" }
goog.provide('ga_search_type_directives'); goog.require('ga_debounce_service'); goog.require('ga_layerfilters_service'); goog.require('ga_layermetadatapopup_service'); goog.require('ga_layers_service'); goog.require('ga_maputils_service'); goog.require('ga_marker_overlay_service'); goog.require('ga_previewfeatures_service'); goog.require('ga_previewlayers_service'); goog.require('ga_search_service'); goog.require('ga_topic_service'); goog.require('ga_urlutils_service'); (function() { // We can't put strings in zoomlevel attribute of search results. That's // why we put huge numbers to indicate that we want to use the bbox for // zooming instead of the delivered zoomlevel. var ZOOM_LIMIT = 100; var parseExtent = function(stringBox2D) { var extent = stringBox2D.replace(/(BOX\(|\))/gi, '').replace(',', ' '). split(' '); return $.map(extent, parseFloat); }; var addOverlay = function(gaMarkerOverlay, map, res) { var visible = /^(address|parcel|gazetteer)$/.test(res.attrs.origin); var center = [res.attrs.y, res.attrs.x]; if (!res.attrs.y || !res.attrs.x) { center = ol.proj.transform([res.attrs.lon, res.attrs.lat], 'EPSG:4326', map.getView().getProjection()); } gaMarkerOverlay.add(map, center, visible, parseExtent(res.attrs.geom_st_box2d)); }; var removeOverlay = function(gaMarkerOverlay, map) { gaMarkerOverlay.remove(map); }; var listenerMoveEnd; var registerMove = function(gaMarkerOverlay, gaDebounce, map) { listenerMoveEnd = map.on('moveend', gaDebounce.debounce(function() { var zoom = map.getView().getZoom(); gaMarkerOverlay.setVisibility(zoom); }, 200, false, false)); }; var unregisterMove = function() { if (listenerMoveEnd) { ol.Observable.unByKey(listenerMoveEnd); listenerMoveEnd = undefined; } }; var tabStarts = [ 100000, 200000, 300000 ]; var nextTabGroup = function(val) { for (var i = 0; i < tabStarts.length - 1; i++) { if (val >= tabStarts[i] && val < tabStarts[i + 1]) { return tabStarts[i + 1]; } } return undefined; }; var prevTabGroup = function(val) { for 
(var i = tabStarts.length - 1; i > 0; i--) { if (val >= tabStarts[i]) { return tabStarts[i - 1]; } } return undefined; }; var focusElement = function(el, evt) { evt.preventDefault(); el.trigger('focus'); }; var elExists = function(el) { if (el.length === 1 && el[0].className.indexOf('ga-search-result') > -1) { return true; } return false; }; var focusToElement = function(next, step, evt) { var newEl; if (next) { newEl = $(evt.target).nextAll('.ga-search-result').first(); } else { newEl = $(evt.target).prevAll('.ga-search-result').first(); } if (elExists(newEl)) { var existingEl = newEl; step -= 1; while (step > 0 && elExists(newEl)) { existingEl = newEl; step -= 1; if (next) { newEl = newEl.nextAll('.ga-search-result').first(); } else { newEl = newEl.prevAll('.ga-search-result').first(); } } focusElement(existingEl, evt); } else { focusToCategory(next, evt); } }; var focusToCategory = function(next, evt) { var el = $(evt.target); if (el.length && el[0] && el[0].attributes && el[0].attributes.tabindex) { var jumpGroup, newEl; if (next) { jumpGroup = nextTabGroup(el[0].attributes.tabindex.value); while (jumpGroup) { newEl = $('[tabindex=' + jumpGroup + ']'); if (elExists(newEl)) { focusElement(newEl, evt); break; } jumpGroup = nextTabGroup(jumpGroup); } } else { jumpGroup = prevTabGroup(el[0].attributes.tabindex.value); while (jumpGroup) { newEl = $('[tabindex=' + jumpGroup + ']'); if (elExists(newEl)) { var existingEl = newEl; // Go to last element of category while (elExists(newEl)) { existingEl = newEl; jumpGroup += 1; newEl = $('[tabindex=' + jumpGroup + ']'); } focusElement(existingEl, evt); return; } jumpGroup = prevTabGroup(jumpGroup); } // Nothing found, so jump back to input (ignore bad design...) 
newEl = $('.ga-search-input'); if (newEl.length === 1 && newEl[0].className.indexOf('ga-search-input') > -1) { focusElement(newEl, evt); } } } }; var module = angular.module('ga_search_type_directives', [ 'ga_debounce_service', 'ga_layerfilters_service', 'ga_layermetadatapopup_service', 'ga_layers_service', 'ga_maputils_service', 'ga_marker_overlay_service', 'ga_previewfeatures_service', 'ga_previewlayers_service', 'ga_search_service', 'ga_urlutils_service', 'pascalprecht.translate', 'ga_topic_service' ]); /* * We have 3 distinct directives for each type of result * set (locations, features and layers) * * All 3 result directives share the same template and the * same controller code. Put anything that is common for * all 3 types in the controller code. * * Put type specific behaviour in the corresponding * directive's code. */ module.controller('GaSearchTypesController', function($scope, $http, $q, $sce, gaUrlUtils, gaSearchLabels, gaMarkerOverlay, gaDebounce) { // This value is used to block blur/mouseleave event, when a value // is selected. See #2284. It's reinitialized when a new search is // triggered. 
var blockEvent = false; var canceler; var cancel = function() { $scope.results = []; $scope.fuzzy = ''; if (canceler !== undefined) { canceler.resolve(); canceler = undefined; } }; var triggerSearch = gaDebounce.debounce(function() { if (!$scope.doSearch()) { $scope.options.announceResults($scope.type, 0); return; } canceler = $q.defer(); var url = gaUrlUtils.append($scope.options.searchUrl, 'type=' + $scope.type); url = $scope.typeSpecificUrl(url); $http.get(url, { cache: true, timeout: canceler.promise }).then(function(response) { var data = response.data; $scope.results = data.results; if (data.fuzzy) { $scope.fuzzy = '_fuzzy'; } $scope.options.announceResults($scope.type, data.results.length); }, function(response) { // If request is canceled, statuscode is 0 and we don't announce it if (response.status !== 0) { $scope.options.announceResults($scope.type, 0); } }); }, 133, false, false); // 133 filters out 'stuck key' events while staying responsive $scope.doSearch = function() { return true; }; $scope.typeSpecificUrl = function(url) { return url; }; $scope.keydown = function(evt, res) { if (evt.keyCode === 13) { // Enter key $scope.removePreview(); blockEvent = true; $scope.select(res); } else if (evt.keyCode === 9) { // Tab key focusToCategory(!evt.shiftKey, evt); } else if (evt.keyCode === 40 || evt.keyCode === 34) { // Down Arrow or PageDown key focusToElement(true, evt.keyCode === 40 ? 1 : 5, evt); } else if (evt.keyCode === 38 || evt.keyCode === 33) { // Up Arrow or PageUp key focusToElement(false, evt.keyCode === 38 ? 
1 : 5, evt); } }; $scope.click = function(res) { $scope.removePreview(); blockEvent = true; $scope.select(res); }; $scope.out = function(evt) { if (!blockEvent) { $scope.removePreview(); } }; $scope.preview = function(res) { addOverlay(gaMarkerOverlay, $scope.map, res); }; $scope.removePreview = function() { removeOverlay(gaMarkerOverlay, $scope.map); }; $scope.prepareLabel = function(attrs) { var h = gaSearchLabels.highlight(attrs.label, $scope.options.query); return $sce.trustAsHtml(h); }; $scope.cleanLabel = function(attrs) { return gaSearchLabels.cleanLabel(attrs.label); }; $scope.fuzzy = ''; $scope.$watch('options.searchUrl', function() { // cancel old requests cancel(); if ($scope.options.query !== '') { blockEvent = false; triggerSearch(); } else { unregisterMove(); } }); } ); module.directive('gaSearchLocations', function($sce, $translate, gaMarkerOverlay, gaSearchLabels, gaMapUtils, gaDebounce) { return { restrict: 'A', templateUrl: 'components/search/partials/searchtypes.html', scope: { options: '=gaSearchLocationsOptions', map: '=gaSearchLocationsMap', ol3d: '=gaSearchLocationsOl3d' }, controller: 'GaSearchTypesController', link: function($scope, element, attrs) { $scope.type = 'locations'; $scope.tabstart = tabStarts[0]; // Can be removed onnce real type contains gazetter $scope.typeSpecificUrl = function(url) { return url; }; $scope.select = function(res) { var isGazetteerPoly = false; var e = parseExtent(res.attrs.geom_st_box2d); unregisterMove(); // Gazetteer results that are not points zoom to full bbox extent if (res.attrs.origin === 'gazetteer') { isGazetteerPoly = (Math.abs(e[0] - e[2]) > 100 && Math.abs(e[1] - e[3]) > 100); } if (res.attrs.zoomlevel < ZOOM_LIMIT && !isGazetteerPoly) { gaMapUtils.moveTo($scope.map, $scope.ol3d, res.attrs.zoomlevel, [res.attrs.y, res.attrs.x]); } else { gaMapUtils.zoomToExtent($scope.map, $scope.ol3d, e); } addOverlay(gaMarkerOverlay, $scope.map, res); $scope.options.valueSelected( 
gaSearchLabels.cleanLabel(res.attrs.label)); registerMove(gaMarkerOverlay, gaDebounce, $scope.map); }; $scope.prepareLabel = function(attrs) { var l = gaSearchLabels.highlight(attrs.label, $scope.options.query); if (attrs.origin === 'zipcode') { l = '<span>' + $translate.instant('plz') + ' ' + l + '</span>'; } else if (attrs.origin === 'kantone') { l = '<span>' + $translate.instant('ct') + ' ' + l + '</span>'; } else if (attrs.origin === 'district') { l = '<span>' + $translate.instant('district') + ' ' + l + '</span>'; } else if (attrs.origin === 'parcel') { l += ' <span>' + $translate.instant('parcel') + ' ' + '</span>'; } return $sce.trustAsHtml(l); }; } }; }); module.directive('gaSearchFeatures', function($rootScope, $http, $q, $sce, $timeout, gaUrlUtils, gaLayerFilters, gaSearchLabels, gaLayers, gaMarkerOverlay, gaPreviewFeatures, gaTopic) { var selectedFeatures = {}; var loadGeometry = function(layerId, featureId, topic, urlbase, cb) { var key = layerId + featureId; if (!selectedFeatures.hasOwnProperty(key)) { var featureUrl = urlbase.replace('{Topic}', topic). replace('{Layer}', layerId). 
replace('{Feature}', featureId); $http.get(featureUrl, { params: { geometryFormat: 'geojson' } }).then(function(response) { var result = response.data; selectedFeatures[key] = result.feature; cb(result.feature); }); } else { $timeout(function() { cb(selectedFeatures[key]); }, 0, false); } }; return { restrict: 'A', templateUrl: 'components/search/partials/searchtypes.html', scope: { options: '=gaSearchFeaturesOptions', map: '=gaSearchFeaturesMap', ol3d: '=gaSearchFeaturesOl3d' }, controller: 'GaSearchTypesController', link: function($scope, element, attrs) { var geojsonParser = new ol.format.GeoJSON(); var searchableLayers = []; var timeEnabled = []; var timeStamps = []; $scope.type = 'featuresearch'; $scope.tabstart = tabStarts[1]; $scope.doSearch = function() { return searchableLayers.length > 0; }; $scope.typeSpecificUrl = function(url) { url = gaUrlUtils.append(url, 'features=' + searchableLayers.join(',')); url = gaUrlUtils.append(url, 'timeEnabled=' + timeEnabled.join(',')); return gaUrlUtils.append(url, 'timeStamps=' + timeStamps.join(',')); }; $scope.select = function(res) { unregisterMove(); loadGeometry(res.attrs.layer, res.attrs.featureId, gaTopic.get().id, $scope.options.featureUrl, function(f) { $rootScope.$broadcast('gaTriggerTooltipRequest', { features: [f], onCloseCB: angular.noop }); var feature = geojsonParser.readFeature(f); gaPreviewFeatures.zoom($scope.map, $scope.ol3d, feature); }); $scope.options.valueSelected( gaSearchLabels.cleanLabel(res.attrs.label)); }; $scope.prepareLabel = function(attrs) { var l = gaSearchLabels.highlight(attrs.label, $scope.options.query); if (attrs.origin === 'feature') { l = '<b>' + gaLayers.getLayerProperty(attrs.layer, 'label') + '</b><br>' + l; } return $sce.trustAsHtml(l); }; $scope.layers = $scope.map.getLayers().getArray(); $scope.searchableLayersFilter = gaLayerFilters.searchable; $scope.$watchCollection('layers | filter:searchableLayersFilter', function(layers) { // TODO: this isn't updated when layers 
param (like 'time') // changes searchableLayers = []; timeEnabled = []; timeStamps = []; angular.forEach(layers, function(layer) { var ts = ''; if (layer.time && layer.time.substr(0, 4) !== '9999' && layer.timeEnabled) { ts = layer.time.substr(0, 4); } searchableLayers.push(layer.bodId); timeEnabled.push(layer.timeEnabled); timeStamps.push(ts); }); }); } }; }); module.directive('gaSearchLayers', function($http, $q, $sce, gaUrlUtils, gaSearchLabels, gaPreviewLayers, gaMapUtils, gaLayers, gaLayerMetadataPopup) { return { restrict: 'A', templateUrl: 'components/search/partials/searchtypes.html', scope: { options: '=gaSearchLayersOptions', map: '=gaSearchLayersMap' }, controller: 'GaSearchTypesController', link: function($scope, element, attrs) { $scope.type = 'layers'; $scope.tabstart = tabStarts[2]; $scope.preview = function(res) { var layer = gaMapUtils.getMapOverlayForBodId($scope.map, res.attrs.layer); // Don't add preview layer if the layer is already on the map if (!layer || !layer.visible) { gaPreviewLayers.addBodLayer($scope.map, res.attrs.layer); } }; $scope.removePreview = function() { gaPreviewLayers.removeAll($scope.map); }; $scope.select = function(res) { unregisterMove(); var l = gaMapUtils.getMapOverlayForBodId($scope.map, res.attrs.layer); if (!angular.isDefined(l)) { var olLayer = gaLayers.getOlLayerById(res.attrs.layer); $scope.map.addLayer(olLayer); } else { // Assure layer is visible l.visible = true; } $scope.options.valueSelected( gaSearchLabels.cleanLabel(res.attrs.label)); }; $scope.getLegend = function(evt, bodId) { gaLayerMetadataPopup.toggle(bodId); evt.stopPropagation(); }; } }; }); })();
{ "pile_set_name": "Github" }
[OPTIONS] Compatibility=1.1 Full-text search=Yes Contents file=index.hhc Default Window=main Default topic=main.html Index file=index.hhk Language=0x409 English (United States) Binary TOC=YES Create CHI file=YES Title=uIP 1.0 [WINDOWS] main="uIP 1.0","index.hhc","index.hhk","main.html","main.html",,,,,0x23520,,0x387e,,,,,,,,0 [FILES] main.html files.html a00048.html a00049.html a00042.html a00043.html a00036.html a00037.html a00046.html a00047.html a00038.html a00039.html a00044.html a00045.html a00051.html a00050.html a00040.html a00041.html a00168.html a00169.html a00170.html a00171.html a00172.html a00173.html a00174.html a00175.html a00176.html a00177.html a00178.html a00179.html a00180.html a00181.html a00182.html a00183.html a00184.html a00185.html a00186.html a00187.html a00188.html a00189.html a00190.html a00191.html a00192.html a00193.html a00194.html a00195.html a00196.html a00197.html a00198.html a00199.html a00200.html a00201.html a00202.html a00203.html a00204.html a00205.html a00206.html a00207.html a00100.html a00101.html a00102.html a00103.html a00104.html a00105.html a00107.html a00110.html a00111.html a00112.html a00113.html a00114.html a00120.html a00121.html a00123.html a00124.html a00125.html a00127.html a00128.html a00129.html a00130.html a00131.html a00132.html a00134.html a00135.html a00136.html a00137.html a00138.html a00139.html a00140.html a00141.html annotated.html classes.html hierarchy.html functions.html functions_vars.html a00077.html a00078.html a00079.html a00080.html a00081.html a00082.html a00083.html a00084.html a00085.html a00086.html a00087.html a00088.html a00089.html a00090.html a00091.html a00092.html a00093.html a00094.html a00095.html a00096.html a00097.html a00142.html a00143.html a00144.html a00145.html a00146.html a00147.html a00148.html a00149.html a00150.html a00151.html a00152.html a00153.html a00154.html a00155.html a00156.html a00157.html a00158.html a00159.html a00160.html a00161.html a00162.html a00163.html 
a00164.html modules.html examples.html globals.html globals_0x61.html globals_0x62.html globals_0x64.html globals_0x65.html globals_0x66.html globals_0x68.html globals_0x69.html globals_0x6c.html globals_0x6d.html globals_0x6e.html globals_0x70.html globals_0x72.html globals_0x73.html globals_0x74.html globals_0x75.html globals_0x77.html globals_func.html globals_vars.html globals_type.html globals_defs.html globals_defs_0x61.html globals_defs_0x62.html globals_defs_0x64.html globals_defs_0x65.html globals_defs_0x66.html globals_defs_0x68.html globals_defs_0x69.html globals_defs_0x6c.html globals_defs_0x6d.html globals_defs_0x6e.html globals_defs_0x70.html globals_defs_0x72.html globals_defs_0x73.html globals_defs_0x74.html globals_defs_0x75.html globals_defs_0x77.html tabs.css tab_b.gif tab_l.gif tab_r.gif
{ "pile_set_name": "Github" }
xof 0303txt 0032 template XSkinMeshHeader { <3cf169ce-ff7c-44ab-93c0-f78f62d172e2> WORD nMaxSkinWeightsPerVertex; WORD nMaxSkinWeightsPerFace; WORD nBones; } template VertexDuplicationIndices { <b8d65549-d7c9-4995-89cf-53a9a8b031e3> DWORD nIndices; DWORD nOriginalVertices; array DWORD indices[nIndices]; } template SkinWeights { <6f0d123b-bad2-4167-a0d0-80224f25fabb> STRING transformNodeName; DWORD nWeights; array DWORD vertexIndices[nWeights]; array FLOAT weights[nWeights]; Matrix4x4 matrixOffset; } template AnimTicksPerSecond { <9e415a43-7ba6-4a73-8743-b73d47e88476> DWORD AnimTicksPerSecond; } template FVFData { <b6e70a0e-8ef9-4e83-94ad-ecc8b0c04897> DWORD dwFVF; DWORD nDWords; array DWORD data[nDWords]; } AnimTicksPerSecond { 24; } Frame Body { FrameTransformMatrix { 1.000000,0.000000,0.000000,0.000000,0.000000,1.000000,0.000000,0.000000,0.000000,0.000000,1.000000,0.000000,0.000000,0.000000,0.000000,1.000000;; } Mesh Body { 24; 2.224800;18.319014;-7.052299;, 2.224800;18.319014;7.052303;, -2.224800;18.319014;7.052303;, -2.224800;18.319014;-7.052299;, 2.224800;0.000000;-7.052299;, 2.224800;0.000000;7.052204;, 2.224800;18.319014;7.052303;, 2.224800;18.319014;-7.052299;, -2.224800;0.000000;-7.052299;, -2.224800;0.000000;7.052204;, 2.224800;0.000000;7.052204;, 2.224800;0.000000;-7.052299;, -2.224800;18.319014;-7.052299;, -2.224800;18.319014;7.052303;, -2.224800;0.000000;7.052204;, -2.224800;0.000000;-7.052299;, 2.224800;18.319014;7.052303;, 2.224800;0.000000;7.052204;, -2.224800;0.000000;7.052204;, -2.224800;18.319014;7.052303;, -2.224800;18.319014;-7.052299;, -2.224800;0.000000;-7.052299;, 2.224800;0.000000;-7.052299;, 2.224800;18.319014;-7.052299;; 12; 3;0,2,1;, 3;0,3,2;, 3;4,6,5;, 3;4,7,6;, 3;8,10,9;, 3;8,11,10;, 3;12,14,13;, 3;12,15,14;, 3;16,18,17;, 3;16,19,18;, 3;20,22,21;, 3;20,23,22;; MeshNormals { 24; 0.577350;0.577350;-0.577350;, 0.577351;0.577348;0.577351;, -0.577351;0.577348;0.577351;, -0.577350;0.577350;-0.577350;, 0.577350;-0.577350;-0.577350;, 
0.577349;-0.577352;0.577349;, 0.577351;0.577348;0.577351;, 0.577350;0.577350;-0.577350;, -0.577350;-0.577350;-0.577350;, -0.577349;-0.577352;0.577349;, 0.577349;-0.577352;0.577349;, 0.577350;-0.577350;-0.577350;, -0.577350;0.577350;-0.577350;, -0.577351;0.577348;0.577351;, -0.577349;-0.577352;0.577349;, -0.577350;-0.577350;-0.577350;, 0.577351;0.577348;0.577351;, 0.577349;-0.577352;0.577349;, -0.577349;-0.577352;0.577349;, -0.577351;0.577348;0.577351;, -0.577350;0.577350;-0.577350;, -0.577350;-0.577350;-0.577350;, 0.577350;-0.577350;-0.577350;, 0.577350;0.577350;-0.577350;; 12; 3;0,2,1;, 3;0,3,2;, 3;4,6,5;, 3;4,7,6;, 3;8,10,9;, 3;8,11,10;, 3;12,14,13;, 3;12,15,14;, 3;16,18,17;, 3;16,19,18;, 3;20,22,21;, 3;20,23,22;; } MeshTextureCoords { 24; 0.633046;0.356894;, 0.633046;0.069380;, 0.551681;0.069380;, 0.551681;0.356894;, 0.516444;0.430326;, 0.774361;0.430326;, 0.774363;0.056903;, 0.516444;0.056903;, 0.639446;0.069380;, 0.639446;0.356892;, 0.720812;0.356892;, 0.720812;0.069380;, 0.802791;0.053109;, 0.544873;0.053109;, 0.544874;0.426531;, 0.802791;0.426531;, 0.780763;0.056903;, 0.780763;0.430326;, 0.862129;0.430326;, 0.862129;0.056903;, 0.033710;0.049293;, 0.033710;0.659292;, 0.167302;0.659292;, 0.167302;0.049293;; } VertexDuplicationIndices { 24; 24; 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23; } MeshMaterialList { 1; 12; 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0; Material def_surf_mat { 0.992157;0.992157;0.992157;1.000000;; 128.000000; 0.149020;0.149020;0.149020;; 0.000000;0.000000;0.000000;; TextureFilename { "boxfile_a.jpg"; } } } XSkinMeshHeader { 1; 1; 1; } SkinWeights { "Body"; 24; 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23; 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000, 1.000000; 
1.000000,0.000000,0.000000,0.000000,0.000000,1.000000,0.000000,0.000000,0.000000,0.000000,1.000000,0.000000,0.000000,0.000000,0.000000,1.000000;; } } }
{ "pile_set_name": "Github" }
package com.sivalabs.domain;

import java.io.Serializable;
import java.util.Date;

/**
 * Serializable value object describing an application user account.
 * <p>
 * The {@code dob} (date of birth) property is stored and returned via
 * defensive copies: {@link Date} is mutable, so handing out the internal
 * reference would let callers silently alter this object's state
 * (CERT OBJ05-J / OBJ06-J). All other properties are immutable types.
 *
 * @author Siva
 */
public class User implements Serializable
{
    private static final long serialVersionUID = 1L;

    private Integer id;
    private String userName;
    private String password;
    private String email;
    private String firstName;
    private String lastName;
    // Always accessed through copy() to avoid exposing mutable internal state.
    private Date dob;
    // New users are active unless explicitly disabled.
    private boolean enabled = true;

    /** No-arg constructor for bean-style frameworks and serialization. */
    public User()
    {
    }

    /** Creates a user holding only its identifier. */
    public User(Integer id)
    {
        this.id = id;
    }

    /**
     * Creates a user with credentials and contact details.
     * Note the parameter order: {@code email} comes last, after
     * {@code firstName} (kept for backward compatibility with callers).
     */
    public User(Integer id, String userName, String password, String firstName, String email)
    {
        this.id = id;
        this.userName = userName;
        this.password = password;
        this.firstName = firstName;
        this.email = email;
    }

    /**
     * Creates a fully-populated user.
     *
     * @param dob date of birth; copied defensively, may be {@code null}
     */
    public User(Integer id, String userName, String password, String firstName,
            String lastName, String email, Date dob)
    {
        this.id = id;
        this.userName = userName;
        this.password = password;
        this.firstName = firstName;
        this.lastName = lastName;
        this.email = email;
        this.dob = copy(dob);
    }

    public Integer getId()
    {
        return id;
    }

    public void setId(Integer id)
    {
        this.id = id;
    }

    public String getUserName()
    {
        return userName;
    }

    public void setUserName(String userName)
    {
        this.userName = userName;
    }

    public String getPassword()
    {
        return password;
    }

    public void setPassword(String password)
    {
        this.password = password;
    }

    public String getFirstName()
    {
        return firstName;
    }

    public void setFirstName(String firstName)
    {
        this.firstName = firstName;
    }

    public String getLastName()
    {
        return lastName;
    }

    public void setLastName(String lastName)
    {
        this.lastName = lastName;
    }

    public String getEmail()
    {
        return email;
    }

    public void setEmail(String email)
    {
        this.email = email;
    }

    /**
     * Returns a defensive copy of the date of birth, or {@code null}.
     * Mutating the returned {@link Date} does not affect this user.
     */
    public Date getDob()
    {
        return copy(dob);
    }

    /**
     * Stores a defensive copy of the given date of birth ({@code null} allowed).
     * Later mutation of the caller's {@link Date} does not affect this user.
     */
    public void setDob(Date dob)
    {
        this.dob = copy(dob);
    }

    public boolean isEnabled()
    {
        return enabled;
    }

    public void setEnabled(boolean enabled)
    {
        this.enabled = enabled;
    }

    // Null-safe copy of a mutable Date.
    private static Date copy(Date d)
    {
        return (d == null) ? null : new Date(d.getTime());
    }
}
{ "pile_set_name": "Github" }
// -*- Mode: C++; c-basic-offset: 2; indent-tabs-mode: nil -*- // Copyright (c) 2005, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // --- // Author: Sanjay Ghemawat // // Routine that uses sbrk/mmap to allocate memory from the system. // Useful for implementing malloc. 
#ifndef TCMALLOC_SYSTEM_ALLOC_H_
#define TCMALLOC_SYSTEM_ALLOC_H_

#include <config.h>
#include <stddef.h>                     // for size_t

// Forward declaration only; the pluggable allocator interface is defined elsewhere.
class SysAllocator;

// REQUIRES: "alignment" is a power of two or "0" to indicate default alignment
//
// Allocate and return "N" bytes of zeroed memory.
//
// If actual_bytes is NULL then the returned memory is exactly the
// requested size.  If actual_bytes is non-NULL then the allocator
// may optionally return more bytes than asked for (i.e. return an
// entire "huge" page if a huge page allocator is in use).
//
// The returned pointer is a multiple of "alignment" if non-zero. The
// returned pointer will always be aligned suitably for holding a
// void*, double, or size_t. In addition, if this platform defines
// CACHELINE_ALIGNED, the return pointer will always be cacheline
// aligned.
//
// Returns NULL when out of memory.
extern PERFTOOLS_DLL_DECL
void* TCMalloc_SystemAlloc(size_t bytes, size_t *actual_bytes,
                           size_t alignment = 0);

// This call is a hint to the operating system that the pages
// contained in the specified range of memory will not be used for a
// while, and can be released for use by other processes or the OS.
// Pages which are released in this way may be destroyed (zeroed) by
// the OS.  The benefit of this function is that it frees memory for
// use by the system, the cost is that the pages are faulted back into
// the address space next time they are touched, which can impact
// performance.  (Only pages fully covered by the memory region will
// be released, partial pages will not.)
//
// Returns false if release failed or not supported.
extern PERFTOOLS_DLL_DECL
bool TCMalloc_SystemRelease(void* start, size_t length);

// Called to resurrect memory which has been previously released
// to the system via TCMalloc_SystemRelease.  An attempt to
// commit a page that is already committed does not cause this
// function to fail.
extern PERFTOOLS_DLL_DECL
void TCMalloc_SystemCommit(void* start, size_t length);

// The current system allocator.
extern PERFTOOLS_DLL_DECL SysAllocator* tcmalloc_sys_alloc;

// Number of bytes taken from system.
extern PERFTOOLS_DLL_DECL size_t TCMalloc_SystemTaken;

#endif /* TCMALLOC_SYSTEM_ALLOC_H_ */
{ "pile_set_name": "Github" }
// // © Copyright Henrik Ravn 2004 // // Use, modification and distribution are subject to the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // using System; using System.IO; using System.Runtime.InteropServices; using System.Text; namespace DotZLib { #region Internal types /// <summary> /// Defines constants for the various flush types used with zlib /// </summary> internal enum FlushTypes { None, Partial, Sync, Full, Finish, Block } #region ZStream structure // internal mapping of the zlib zstream structure for marshalling [StructLayoutAttribute(LayoutKind.Sequential, Pack=4, Size=0, CharSet=CharSet.Ansi)] internal struct ZStream { public IntPtr next_in; public uint avail_in; public uint total_in; public IntPtr next_out; public uint avail_out; public uint total_out; [MarshalAs(UnmanagedType.LPStr)] string msg; uint state; uint zalloc; uint zfree; uint opaque; int data_type; public uint adler; uint reserved; } #endregion #endregion #region Public enums /// <summary> /// Defines constants for the available compression levels in zlib /// </summary> public enum CompressLevel : int { /// <summary> /// The default compression level with a reasonable compromise between compression and speed /// </summary> Default = -1, /// <summary> /// No compression at all. The data are passed straight through. /// </summary> None = 0, /// <summary> /// The maximum compression rate available. /// </summary> Best = 9, /// <summary> /// The fastest available compression level. 
/// </summary> Fastest = 1 } #endregion #region Exception classes /// <summary> /// The exception that is thrown when an error occurs on the zlib dll /// </summary> public class ZLibException : ApplicationException { /// <summary> /// Initializes a new instance of the <see cref="ZLibException"/> class with a specified /// error message and error code /// </summary> /// <param name="errorCode">The zlib error code that caused the exception</param> /// <param name="msg">A message that (hopefully) describes the error</param> public ZLibException(int errorCode, string msg) : base(String.Format("ZLib error {0} {1}", errorCode, msg)) { } /// <summary> /// Initializes a new instance of the <see cref="ZLibException"/> class with a specified /// error code /// </summary> /// <param name="errorCode">The zlib error code that caused the exception</param> public ZLibException(int errorCode) : base(String.Format("ZLib error {0}", errorCode)) { } } #endregion #region Interfaces /// <summary> /// Declares methods and properties that enables a running checksum to be calculated /// </summary> public interface ChecksumGenerator { /// <summary> /// Gets the current value of the checksum /// </summary> uint Value { get; } /// <summary> /// Clears the current checksum to 0 /// </summary> void Reset(); /// <summary> /// Updates the current checksum with an array of bytes /// </summary> /// <param name="data">The data to update the checksum with</param> void Update(byte[] data); /// <summary> /// Updates the current checksum with part of an array of bytes /// </summary> /// <param name="data">The data to update the checksum with</param> /// <param name="offset">Where in <c>data</c> to start updating</param> /// <param name="count">The number of bytes from <c>data</c> to use</param> /// <exception cref="ArgumentException">The sum of offset and count is larger than the length of <c>data</c></exception> /// <exception cref="ArgumentNullException"><c>data</c> is a null reference</exception> 
/// <exception cref="ArgumentOutOfRangeException">Offset or count is negative.</exception> void Update(byte[] data, int offset, int count); /// <summary> /// Updates the current checksum with the data from a string /// </summary> /// <param name="data">The string to update the checksum with</param> /// <remarks>The characters in the string are converted by the UTF-8 encoding</remarks> void Update(string data); /// <summary> /// Updates the current checksum with the data from a string, using a specific encoding /// </summary> /// <param name="data">The string to update the checksum with</param> /// <param name="encoding">The encoding to use</param> void Update(string data, Encoding encoding); } /// <summary> /// Represents the method that will be called from a codec when new data /// are available. /// </summary> /// <paramref name="data">The byte array containing the processed data</paramref> /// <paramref name="startIndex">The index of the first processed byte in <c>data</c></paramref> /// <paramref name="count">The number of processed bytes available</paramref> /// <remarks>On return from this method, the data may be overwritten, so grab it while you can. /// You cannot assume that startIndex will be zero. /// </remarks> public delegate void DataAvailableHandler(byte[] data, int startIndex, int count); /// <summary> /// Declares methods and events for implementing compressors/decompressors /// </summary> public interface Codec { /// <summary> /// Occurs when more processed data are available. /// </summary> event DataAvailableHandler DataAvailable; /// <summary> /// Adds more data to the codec to be processed. /// </summary> /// <param name="data">Byte array containing the data to be added to the codec</param> /// <remarks>Adding data may, or may not, raise the <c>DataAvailable</c> event</remarks> void Add(byte[] data); /// <summary> /// Adds more data to the codec to be processed. 
/// </summary> /// <param name="data">Byte array containing the data to be added to the codec</param> /// <param name="offset">The index of the first byte to add from <c>data</c></param> /// <param name="count">The number of bytes to add</param> /// <remarks>Adding data may, or may not, raise the <c>DataAvailable</c> event</remarks> void Add(byte[] data, int offset, int count); /// <summary> /// Finishes up any pending data that needs to be processed and handled. /// </summary> void Finish(); /// <summary> /// Gets the checksum of the data that has been added so far /// </summary> uint Checksum { get; } } #endregion #region Classes /// <summary> /// Encapsulates general information about the ZLib library /// </summary> public class Info { #region DLL imports [DllImport("ZLIB1.dll", CallingConvention=CallingConvention.Cdecl)] private static extern uint zlibCompileFlags(); [DllImport("ZLIB1.dll", CallingConvention=CallingConvention.Cdecl)] private static extern string zlibVersion(); #endregion #region Private stuff private uint _flags; // helper function that unpacks a bitsize mask private static int bitSize(uint bits) { switch (bits) { case 0: return 16; case 1: return 32; case 2: return 64; } return -1; } #endregion /// <summary> /// Constructs an instance of the <c>Info</c> class. 
/// </summary> public Info() { _flags = zlibCompileFlags(); } /// <summary> /// True if the library is compiled with debug info /// </summary> public bool HasDebugInfo { get { return 0 != (_flags & 0x100); } } /// <summary> /// True if the library is compiled with assembly optimizations /// </summary> public bool UsesAssemblyCode { get { return 0 != (_flags & 0x200); } } /// <summary> /// Gets the size of the unsigned int that was compiled into Zlib /// </summary> public int SizeOfUInt { get { return bitSize(_flags & 3); } } /// <summary> /// Gets the size of the unsigned long that was compiled into Zlib /// </summary> public int SizeOfULong { get { return bitSize((_flags >> 2) & 3); } } /// <summary> /// Gets the size of the pointers that were compiled into Zlib /// </summary> public int SizeOfPointer { get { return bitSize((_flags >> 4) & 3); } } /// <summary> /// Gets the size of the z_off_t type that was compiled into Zlib /// </summary> public int SizeOfOffset { get { return bitSize((_flags >> 6) & 3); } } /// <summary> /// Gets the version of ZLib as a string, e.g. "1.2.1" /// </summary> public static string Version { get { return zlibVersion(); } } } #endregion }
{ "pile_set_name": "Github" }
#include "Stdafx.h"
#include "GameOption.h"

//////////////////////////////////////////////////////////////////////////

BEGIN_MESSAGE_MAP(CGameOption, CSkinDialogEx)
END_MESSAGE_MAP()

//////////////////////////////////////////////////////////////////////////

// Constructor: initialize option defaults (clockwise deal order on,
// sound on, default card spacing in pixels)
CGameOption::CGameOption() : CSkinDialogEx(IDD_OPTION)
{
	m_bDeasilOrder=true;
	m_bEnableSound=true;
	m_dwCardHSpace=DEFAULT_PELS;

	return;
}

// Destructor
CGameOption::~CGameOption()
{
}

// DDX control binding
void CGameOption::DoDataExchange(CDataExchange * pDX)
{
	__super::DoDataExchange(pDX);
	DDX_Control(pDX, IDOK, m_btOK);
	DDX_Control(pDX, IDCANCEL, m_btCancel);
}

// Dialog initialization: push the current option values into the controls
BOOL CGameOption::OnInitDialog()
{
	__super::OnInitDialog();

	// Set the dialog title ("game configuration")
	SetWindowText(TEXT("游戏配置"));

	// Clamp the card spacing back to the default when it is out of range
	if ((m_dwCardHSpace>MAX_PELS)||(m_dwCardHSpace<LESS_PELS)) m_dwCardHSpace=DEFAULT_PELS;

	// Reflect the boolean options into the check boxes
	if (m_bDeasilOrder==true) ((CButton *)GetDlgItem(IDC_DEASIL_ORDER))->SetCheck(BST_CHECKED);
	if (m_bEnableSound==true) ((CButton *)GetDlgItem(IDC_ENABLE_SOUND))->SetCheck(BST_CHECKED);

	// Populate the card-spacing combo box with one entry per pixel value,
	// selecting the entry that matches the current setting.
	// NOTE(review): sizeof(szBuffer) is a byte count, not an element count,
	// and _snprintf takes a char buffer, so this line only compiles/behaves
	// correctly in an MBCS build (TCHAR==char); a Unicode build would need
	// _sntprintf with _countof(szBuffer). Also "%2ld" formats a signed long
	// while i is a DWORD (unsigned) -- harmless for this small range, but
	// "%2lu" would match the type. TODO confirm build settings.
	int nItem=0;
	TCHAR szBuffer[32]=TEXT("");
	CComboBox * pComboBox=(CComboBox *)GetDlgItem(IDC_CARD_SPACE);
	for (DWORD i=LESS_PELS;i<=MAX_PELS;i++)
	{
		_snprintf(szBuffer,sizeof(szBuffer),TEXT("%2ld"),i);
		nItem=pComboBox->InsertString((i-LESS_PELS),szBuffer);
		pComboBox->SetItemData(nItem,i);
		if (m_dwCardHSpace==i) pComboBox->SetCurSel(nItem);
	}

	return TRUE;
}

// OK handler: read the option values back out of the controls
void CGameOption::OnOK()
{
	// Check-box state
	m_bDeasilOrder=(((CButton *)GetDlgItem(IDC_DEASIL_ORDER))->GetCheck()==BST_CHECKED);
	m_bEnableSound=(((CButton *)GetDlgItem(IDC_ENABLE_SOUND))->GetCheck()==BST_CHECKED);

	// Card spacing in pixels from the combo selection (stored as item data)
	CComboBox * pComboBox=(CComboBox *)GetDlgItem(IDC_CARD_SPACE);
	m_dwCardHSpace=(DWORD)pComboBox->GetItemData(pComboBox->GetCurSel());

	__super::OnOK();
}

//////////////////////////////////////////////////////////////////////////
{ "pile_set_name": "Github" }
// name: PlanarMechanicsForTesting.Examples.TestDryFrictionWheel.mos [simulation checking] // keywords: simulation PlanarMechanicsForTesting.Examples // status: correct // teardown_command: rm -rf _PlanarMechanicsForTesting.Examples.TestDryFrictionWheel.* PlanarMechanicsForTesting.Examples.TestDryFrictionWheel_i* PlanarMechanicsForTesting.Examples.TestDryFrictionWheel_d* PlanarMechanicsForTesting.Examples.TestDryFrictionWheel_r* PlanarMechanicsForTesting.Examples.TestDryFrictionWheel_f* PlanarMechanicsForTesting.Examples.TestDryFrictionWheel PlanarMechanicsForTesting.Examples.TestDryFrictionWheel.exe PlanarMechanicsForTesting.Examples.TestDryFrictionWheel.cpp PlanarMechanicsForTesting.Examples.TestDryFrictionWheel.makefile PlanarMechanicsForTesting.Examples.TestDryFrictionWheel.libs PlanarMechanicsForTesting.Examples.TestDryFrictionWheel.log PlanarMechanicsForTesting.Examples.TestDryFrictionWheel.c PlanarMechanicsForTesting.Examples.TestDryFrictionWheel.o output.log // // Simulation Results // Modelica Standard Library // loadModel(Modelica,{"3.1"}); getErrorString(); loadFile("PlanarMechanicsForTesting.mo"); getErrorString(); // adrpo: stopTime=3 is got from the experiment annotation in the model simulate(PlanarMechanicsForTesting.Examples.TestDryFrictionWheel, tolerance=0.0001); getErrorString(); res := OpenModelica.Scripting.compareSimulationResults("PlanarMechanicsForTesting.Examples.TestDryFrictionWheel_res.mat", getEnvironmentVar("REFERENCEFILES")+"/PlanarMechanics//TestDryFrictionWheel.mat","PlanarMechanicsForTesting.Examples.TestDryFrictionWheel_diff.csv",0.01,0.45, {"revolute.w", "prismatic.s", "dryFrictionWheelJoint.f_lat", "dryFrictionWheelJoint.f_long"}); // Result: // true // "" // true // "" // record SimulationResult // resultFile = "PlanarMechanicsForTesting.Examples.TestDryFrictionWheel_res.mat", // simulationOptions = "startTime = 0.0, stopTime = 20.0, numberOfIntervals = 500, tolerance = 0.0001, method = 'dassl', fileNamePrefix = 
'PlanarMechanicsForTesting.Examples.TestDryFrictionWheel', options = '', outputFormat = 'mat', variableFilter = '.*', cflags = '', simflags = ''", // messages = "LOG_SUCCESS | info | The initialization finished successfully without homotopy method. // LOG_SUCCESS | info | The simulation finished successfully. // " // end SimulationResult; // "Warning: The initial conditions are not fully specified. For more information set -d=initialization. In OMEdit Tools->Options->Simulation->OMCFlags, in OMNotebook call setCommandLineOptions(\"-d=initialization\"). // " // {"Files Equal!"} // endResult
{ "pile_set_name": "Github" }
(* this module helps android developers to publish automatically their apks useful links: https://developers.google.com/accounts/docs/OAuth2ServiceAccount#formingheader https://developers.google.com/android-publisher/api-ref/edits/insert note: I would like to use a JsonProvider to parse json but i don't know if it causes a problem with dependency "FSharp.Data.DesignTime.dll" So i used Newtonsoft.Json *) [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] module Fake.AndroidPublisher open System open System.Security.Cryptography open System.Security.Cryptography.X509Certificates open System.Text open System.Net open System.Collections.Specialized open System.Threading open System.IO open System.Diagnostics open Newtonsoft.Json open ICSharpCode.SharpZipLib.Zip open ICSharpCode.SharpZipLib.Core open System.Xml.Linq [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] type AndroidPublishConfig = { Certificate: X509Certificate2; PackageName: string; AccountId: string; Apk: string; } [<CLIMutable>] [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] type AndroidPublishParams = { Track: string; Config: AndroidPublishConfig; } [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] let ProductionSettings = { Track = "production"; Config = { Certificate = null; PackageName = null; AccountId = null; Apk = null; } } [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] let AlphaSettings = { ProductionSettings with Track = "alpha"; } [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. 
You can help by porting this module.")>] let BetaSettings = { ProductionSettings with Track = "beta"; } [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] let RolloutSettings = { ProductionSettings with Track = "rollout"; } type private ServiceCredentials = { Certificate: X509Certificate2; AccountId: string; //"xxxxxxx@developer.gserviceaccount.com" } type private ServiceSession = { Token: String; TokenType: String; Expiry: DateTime; } [<CLIMutable>] type private JwtHeader = { [<JsonProperty("alg")>] Algo:string; [<JsonProperty("typ")>] Type: string; } [<CLIMutable>] type private JwtClaimSet = { [<JsonProperty("iss")>] AccountId: string; [<JsonProperty("scope")>] Scope: string; [<JsonProperty("aud")>] Audience: string; [<JsonProperty("exp")>] Expiry: int64; [<JsonProperty("iat")>] Issue: int64; } [<CLIMutable>] type private AuthReply = { [<JsonProperty("access_token")>] AccessToken: String; [<JsonProperty("token_type")>] TokenType: String; [<JsonProperty("expires_in")>] ExpiresIn: int32; } [<CLIMutable>] [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] type EditResourceModel = { [<JsonProperty("id")>] Id: string; [<JsonProperty("expiryTimeSeconds")>] Expiry: int64; } [<CLIMutable>] [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] type ApkVersion = { [<JsonProperty("versionCode")>] Code: int; } [<CLIMutable>] [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] type AppEditListApksResult = { [<JsonProperty("apks")>] Apks: ApkVersion list } [<CLIMutable>] [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. 
You can help by porting this module.")>] type TrackModel = { [<JsonProperty("versionCodes")>] VersionCodes: int list; [<JsonProperty("track")>] Track: string; // [<JsonProperty("userFraction")>] // UserFraction: float; } type private Result<'t> = { Content: 't option; Error: string option; } type private HttpClient() = inherit WebClient() override __.GetWebRequest (address:Uri) = let rq = base.GetWebRequest address rq.Timeout <- int32 <| TimeSpan.FromHours(1.).TotalMilliseconds rq member x.CreateRequest (address:Uri) = x.GetWebRequest (address) [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] let mutable public AndroidPublisherScope = "https://www.googleapis.com/auth/androidpublisher" [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] let mutable public TokenServerUrl = "https://www.googleapis.com/oauth2/v3/token" [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] let mutable public AndroidPublisherBaseUrl = "https://www.googleapis.com/androidpublisher/v2/applications" [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. You can help by porting this module.")>] let mutable public AndroidUploadApkBaseUrl = "https://www.googleapis.com/upload/androidpublisher/v2/applications" let private ServiceAccountHeader = {Algo = "RS256"; Type = "JWT"} let private toJson = JsonConvert.SerializeObject let private fromJson<'t> v = JsonConvert.DeserializeObject<'t>(v) [<System.Obsolete("This API is obsolete. There is no alternative in FAKE 5 yet. 
You can help by porting this module.")>] let PublishApk (param:AndroidPublishParams)= let getZipEntry (filepath, path) = let clean (name:string) = if not(name.StartsWith("/")) then ("/" + name) else name let rec searchEntry (zip:ZipInputStream, current:ZipEntry) = match current.Name |> clean with | name when name = path -> use memory = new MemoryStream() zip.CopyTo(memory) zip.Flush() memory.Flush() Some(memory.ToArray()) | _ -> match zip.GetNextEntry() with | null -> None | next -> searchEntry(zip, next) use fs = File.OpenRead(filepath) let s = new ZipInputStream(fs) searchEntry(s, s.GetNextEntry()) let manifestDecode (data: byte array) = // inpired by http://stackoverflow.com/a/19063830/2554459 let endDocTag = 0x00100101 let startTag = 0x00100102 let endTag = 0x00100103 let lew (arr:byte array, off:int) = let p1 = uint32 (arr.[off + 3]) <<< 24 &&& 0xff000000u let p2 = uint32 (arr.[off + 2]) <<< 16 &&& 0xff0000u let p3 = uint32 (arr.[off + 1]) <<< 8 &&& 0xff00u let p4 = uint32 (arr.[off]) &&& 0xFFu p1 ||| p2 ||| p3 ||| p4 |> int32 let firstTagOffset = let rec scanStartOfFirstTag offset = match lew(data, offset) with | v when v = startTag -> Some (offset) | _ when offset < data.Length - 4 -> scanStartOfFirstTag (offset + 4) | _ -> None let s = lew(data, 3 * 4) match s |> scanStartOfFirstTag with | Some v -> v | None -> s let readStringAt offset = let len = data.[offset + 1] <<< 8 &&& byte(0xff00) ||| data.[offset] &&& byte(0xff) |> int [|for i in 0..len-1 -> data.[offset + 2 + i * 2] |] |> System.Text.Encoding.UTF8.GetString let readString (sitOff:int, stOff:int, strInd:int) = if strInd < 0 then null else readStringAt (stOff + lew(data, sitOff + strInd * 4)) let numbStrings = lew(data, 4 * 4) let sitOff = 0x24 // Offset of start of StringIndexTable let stOff = sitOff + numbStrings * 4 // StringTable follows StrIndexTable let xmlTagOff = firstTagOffset // Start from the offset in the 3rd word. 
let rec readNode acc off = let tag0 = lew(data, off) let lineNo = lew(data, off + 2 * 4) let nameSi = lew(data, off + 5 * 4) match tag0 with | t when t = startTag -> let numbAttrs = lew(data, off + 7 * 4) let name = readString(sitOff, stOff, nameSi) let mutable attrOff = off + 9 * 4 let sb = new StringBuilder(acc + "<" + name) for i in 0..numbAttrs-1 do let attrNameNsSi = lew(data, attrOff) let attrNameSi = lew(data, attrOff + 1 * 4) let attrValueSi = lew(data, attrOff + 2 * 4) let attrFlags = lew(data, attrOff + 3 * 4) let attrResId = lew(data, attrOff + 4 * 4) let attrName = readString(sitOff, stOff, attrNameSi) let attrValue = if not(attrValueSi = -1) then readString (sitOff, stOff, attrValueSi) else attrResId.ToString() (" " + attrName + "=\"" + attrValue + "\"") |> sb.Append |> ignore attrOff <- attrOff + 5 * 4 sb.Append(">") |> ignore readNode (sb.ToString()) attrOff | t when t = endTag -> let name = readString(sitOff, stOff, nameSi) readNode (acc + "</" + name + ">\r\n") (off + 6 * 4) | t when t = endDocTag -> acc | _ -> failwith "Invalid manifest format"; readNode "" xmlTagOff let getManifest (filepath) = match getZipEntry (filepath, "/AndroidManifest.xml") with | Some bytes -> Some (bytes |> manifestDecode |> System.Xml.Linq.XDocument.Parse) | None -> None let header (s:ServiceSession) = sprintf "%s %s" s.TokenType s.Token let (<<) (headers:WebHeaderCollection) session = headers.Add("Authorization", header session) let createRsaCrypto (credentials:ServiceCredentials) = let pkey = credentials.Certificate.PrivateKey :?> RSACryptoServiceProvider let blob = pkey.ExportCspBlob(true) let rsaCrypto = new RSACryptoServiceProvider() rsaCrypto.ImportCspBlob(blob) rsaCrypto let base64EncodeUrlBytes (b:byte[]) = b |> Convert.ToBase64String |> fun b64 -> b64.Replace("=", "").Replace('+', '-').Replace('/', '_') let base64EncodeUrl (s:string) = s |> Encoding.UTF8.GetBytes |> base64EncodeUrlBytes let toUnixTime d = (d - new DateTime(1970, 1, 1, 0, 0, 0, 
d.Kind)).TotalSeconds |> int64 let postUrl (url:string, data:NameValueCollection) = let client = new WebClient() client.UploadValues(url, data) let googleAuthenticate (credentials:ServiceCredentials) = let header = ServiceAccountHeader |> toJson |> base64EncodeUrl let now = DateTime.UtcNow |> toUnixTime let claimSet = { AccountId = credentials.AccountId; Scope = AndroidPublisherScope; Audience = TokenServerUrl; Issue = now; Expiry = now + 3600L } let payload = claimSet |> toJson |> base64EncodeUrl let rsa = createRsaCrypto credentials let assertion = header + "." + payload let bb = rsa.SignData(Encoding.ASCII.GetBytes(assertion), "SHA256") let signature = bb |> base64EncodeUrlBytes let signedAssertion = assertion + "." + signature let p = new NameValueCollection(); p.Add("assertion", signedAssertion) p.Add("grant_type", "urn:ietf:params:oauth:grant-type:jwt-bearer") let auth = postUrl(TokenServerUrl, p) |> Encoding.UTF8.GetString |> fromJson<AuthReply> { Token = auth.AccessToken; TokenType = auth.TokenType; Expiry = DateTime.UtcNow.AddSeconds(auth.ExpiresIn |> float) } let appEditInsert (session:ServiceSession, packageName:string) = let client = new WebClient() client.Headers << session client.UploadString(AndroidPublisherBaseUrl + "/" + packageName + "/edits", "") |> fromJson<EditResourceModel> let appListApks (session:ServiceSession, packageName:string, editId:string) = let client = new WebClient() client.Headers << session client.DownloadString(AndroidPublisherBaseUrl + "/" + packageName + "/edits/" + editId + "/apks") |> fromJson<AppEditListApksResult> let validateAppEdit (session:ServiceSession, packageName:string, editId:string) = let client = new WebClient() client.Headers << session client.UploadString(AndroidPublisherBaseUrl + "/" + packageName + "/edits/" + editId + ":validate", "") |> fromJson<EditResourceModel> let commitAppEdit (session:ServiceSession, packageName:string, editId:string) = let client = new WebClient() client.Headers << session 
client.UploadString(AndroidPublisherBaseUrl + "/" + packageName + "/edits/" + editId + ":commit", "") |> fromJson<EditResourceModel> let setAppTrack (session:ServiceSession, packageName:string, editId:string, track:string, versionCode:int) = let client = new WebClient() client.Headers << session client.Headers.Add ("Content-Type", "application/json") let m = { Track = track; VersionCodes = [versionCode];(* UserFraction = 1.*) } let data = JsonConvert.SerializeObject(m) client.UploadString(AndroidPublisherBaseUrl + "/" + packageName + "/edits/" + editId + "/tracks/" + track, "PUT", data) |> fromJson<EditResourceModel> let fopen fn = File.Open(fn, FileMode.Open) let uploadApk (session:ServiceSession, packageName:string, editId:string, apkPath:string) = let url = new Uri(AndroidUploadApkBaseUrl + "/" + packageName + "/edits/" + editId + "/apks?uploadType=media") let client = new HttpClient() let watch = Stopwatch.StartNew() let rq = client.CreateRequest (url) rq.Method <- "POST" rq.ContentType <- "application/octet-stream" rq.Headers << session use file = fopen apkPath use binary = new BinaryReader(file) rq.ContentLength <- file.Length use stream = rq.GetRequestStream() use out = new BinaryWriter(stream) let mutable lastProgress = 0L while file.Position < file.Length do let uploaded = (file.Position/1024L/1024L) let bytes = binary.ReadBytes(512) out.Write(bytes) out.Flush() stream.Flush() let percent = 100L * file.Position / file.Length if lastProgress = uploaded |> not then let elapsed = watch.ElapsedMilliseconds let estimated = 100L*elapsed/percent tracefn "Estimated remaining time: %s" (TimeSpan.FromMilliseconds(float(estimated - elapsed)).ToString()) tracefn @"Uploaded progress %d MB %d %% ..." uploaded percent lastProgress <- uploaded stream.Flush() stream.Close() watch.Stop() tracefn "APK file sent" try let rs = rq.GetResponse() use reader = new StreamReader(rs.GetResponseStream()) { Content=Some(fromJson<ApkVersion>(reader.ReadToEnd())); Error=None } with | :? 
WebException as e -> use reader = new StreamReader(e.Response.GetResponseStream()) { Content=None; Error=Some(reader.ReadToEnd()) } | e -> { Content=None; Error=Some(e.Message) } let credentials = { Certificate = param.Config.Certificate; AccountId = param.Config.AccountId; } let session = credentials |> googleAuthenticate let resource = appEditInsert (session, param.Config.PackageName) let apkList = appListApks (session, param.Config.PackageName, resource.Id) let manifest = match param.Config.Apk |> getManifest with | Some xml -> xml | None -> failwithf "cannot parse apk AndroidManifest" let versionCode = (manifest.Element("manifest" |> XName.Get).Attributes() |> Seq.filter(fun a -> a.Name.LocalName = "versionCode") |> Seq.exactlyOne).Value |> Convert.ToInt32 if apkList.Apks.Length > 0 && (apkList.Apks |> Seq.maxBy (fun a -> a.Code)).Code >= versionCode then failwithf "You must increase versionCode" let upResult = uploadApk (session, param.Config.PackageName, resource.Id, param.Config.Apk) match (upResult.Content, upResult.Error) with | Some content, None -> tracefn "upload success: version code %d \n" content.Code tracefn "setting track %s \n" param.Track setAppTrack (session, param.Config.PackageName, resource.Id, param.Track, content.Code) |> ignore tracefn "validating \n" validateAppEdit (session, param.Config.PackageName, resource.Id) |> ignore tracefn "committing app \n" commitAppEdit (session, param.Config.PackageName, resource.Id) |> ignore | None, Some error -> failwith error | _, _ -> failwith "upload failed" tracefn "app published"
{ "pile_set_name": "Github" }
version https://git-lfs.github.com/spec/v1 oid sha256:6dcf66ac4fc1bd986ad6d956789dbe765653c25b32aafcce24948db244216be9 size 24021
{ "pile_set_name": "Github" }
/* * jQuery FlexSlider v2.0 * http://www.woothemes.com/flexslider/ * * Copyright 2012 WooThemes * Free to use under the GPLv2 license. * http://www.gnu.org/licenses/gpl-2.0.html * * Contributing author: Tyler Smith (@mbmufffin) */ /* Browser Resets */ .flex-container a:active, .flexslider a:active, .flex-container a:focus, .flexslider a:focus {outline: none;} .slides, .flex-control-nav, .flex-direction-nav {margin: 0; padding: 0; list-style: none;} /* FlexSlider Necessary Styles *********************************/ .flexslider { margin: 0; padding: 0;} .flexslider .slides > li {display: none; -webkit-backface-visibility: hidden;} /* Hide the slides before the JS is loaded. Avoids image jumping */ .flexslider .slides img { width: 100%; display: block;} .flex-pauseplay span {text-transform: capitalize;} /* Clearfix for the .slides element */ .slides:after {content: "."; display: block; clear: both; visibility: hidden; line-height: 0; height: 0;} html[xmlns] .slides {display: block;} * html .slides {height: 1%;} /* No JavaScript Fallback */ /* If you are not using another script, such as Modernizr, make sure you * include js that eliminates this class on page load */ .no-js .slides > li:first-child {display: block;} /* FlexSlider Default Theme *********************************/ .flexslider {background:none; position: relative; zoom: 1;} .flex-viewport {max-height: 2000px; -webkit-transition: all 1s ease; -moz-transition: all 1s ease; transition: all 1s ease;} .loading .flex-viewport {max-height: 300px;} .flexslider .slides {zoom: 1;} .carousel li {margin-right: 5px} /* Caption style */ .flex-caption { background: rgba(0,0,0,.8); margin-left:5px;bottom:5px; position:absolute;padding:20px; z-index:99;} .flex-caption p{ font-size: 14px !important; line-height: 22px; font-weight:300; color: #fff } .flex-caption h2, .flex-caption h4 { color:#fff; } /* Direction Nav */ .flex-direction-nav {*height: 0;} .flex-direction-nav a {width: 30px; height: 40px; margin:0; display: 
block; background: url(../img/bg_direction_nav.png) no-repeat 0 0; position: absolute; top: 45%; z-index: 10; cursor: pointer; text-indent: -9999px; opacity: 0; -webkit-transition: all .3s ease;} .flex-direction-nav .flex-next {background-position: 100% 0; right: -36px; } .flex-direction-nav .flex-prev {left: -36px;} .flexslider:hover .flex-next {opacity: 0.8; right: 5px;} .flexslider:hover .flex-prev {opacity: 0.8; left: 5px;} .flexslider:hover .flex-next:hover, .flexslider:hover .flex-prev:hover {opacity: 1;} .flex-direction-nav .flex-disabled {opacity: .3!important; filter:alpha(opacity=30); cursor: default;} /* Control Nav */ .flex-control-nav {width: 100%; position: absolute; bottom: 0; text-align: center;} .flex-control-nav li {margin: 0 6px; display: inline-block; zoom: 1; *display: inline;} .flex-control-paging li a {width: 11px; height: 11px; display: block; background: #666; background: rgba(0,0,0,0.5); cursor: pointer; text-indent: -9999px; -webkit-border-radius: 20px; -moz-border-radius: 20px; -o-border-radius: 20px; border-radius: 20px; box-shadow: inset 0 0 3px rgba(0,0,0,0.3);} .flex-control-paging li a:hover { background: #333; background: rgba(0,0,0,0.7); } .flex-control-paging li a.flex-active { background: #000; background: rgba(0,0,0,0.9); cursor: default; } .flex-control-thumbs {margin: 5px 0 0; position: static; overflow: hidden;} .flex-control-thumbs li {width: 25%; float: left; margin: 0;} .flex-control-thumbs img {width: 100%; display: block; opacity: .7; cursor: pointer;} .flex-control-thumbs img:hover {opacity: 1;} .flex-control-thumbs .flex-active {opacity: 1; cursor: default;} @media screen and (max-width: 860px) { .flex-direction-nav .flex-prev {opacity: 1; left: 0;} .flex-direction-nav .flex-next {opacity: 1; right: 0;} }
{ "pile_set_name": "Github" }
{ "response": { "id": 123 } }
{ "pile_set_name": "Github" }
/* -*- C++ -*- */ /** * @file FilterFactory.h * * @author Pradeep Gore <pradeep@oomworks.com> */ #ifndef TAO_Notify_FILTERFACTORY_H #define TAO_Notify_FILTERFACTORY_H #include /**/ "ace/pre.h" #include "orbsvcs/Notify/notify_serv_export.h" #if !defined (ACE_LACKS_PRAGMA_ONCE) # pragma once #endif /* ACE_LACKS_PRAGMA_ONCE */ #include "ace/Service_Object.h" #include "tao/PortableServer/PortableServer.h" #include "Topology_Object.h" TAO_BEGIN_VERSIONED_NAMESPACE_DECL /** * @class TAO_Notify_FilterFactory * * @brief Service Object to obtain a CosNotifyFilter::FilterFactory reference. */ class TAO_Notify_Serv_Export TAO_Notify_FilterFactory : public ACE_Service_Object, public TAO_Notify::Topology_Object { public: /// Factory method to create a FilterFactory reference /// The Factory is activated in the default POA. The filters created are activated in the <filter_poa>. virtual CosNotifyFilter::FilterFactory_ptr create ( PortableServer::POA_ptr filter_poa) = 0; virtual void destroy (void) = 0; virtual TAO_Notify_Object::ID get_filter_id (CosNotifyFilter::Filter_ptr filter) = 0; virtual CosNotifyFilter::Filter_ptr get_filter (const TAO_Notify_Object::ID& id) = 0; }; TAO_END_VERSIONED_NAMESPACE_DECL #include /**/ "ace/post.h" #endif /* TAO_Notify_FILTERFACTORY_H */
{ "pile_set_name": "Github" }
<?php /** * TbActiveForm class file. * @author Christoffer Niska <ChristofferNiska@gmail.com> * @copyright Copyright &copy; Christoffer Niska 2011- * @license http://www.opensource.org/licenses/bsd-license.php New BSD License * @package bootstrap.widgets */ Yii::import('bootstrap.widgets.input.TbInput'); /** * Bootstrap active form widget. */ class TbActiveForm extends CActiveForm { // Form types. const TYPE_VERTICAL = 'vertical'; const TYPE_INLINE = 'inline'; const TYPE_HORIZONTAL = 'horizontal'; const TYPE_SEARCH = 'search'; // Input classes. const INPUT_HORIZONTAL = 'bootstrap.widgets.input.TbInputHorizontal'; const INPUT_INLINE = 'bootstrap.widgets.input.TbInputInline'; const INPUT_SEARCH = 'bootstrap.widgets.input.TbInputSearch'; const INPUT_VERTICAL = 'bootstrap.widgets.input.TbInputVertical'; /** * @var string the form type. See class constants. */ public $type = self::TYPE_VERTICAL; /** * @var string input class. */ public $input; /** * @var boolean indicates whether to display errors as blocks. */ public $inlineErrors; /** * Initializes the widget. * This renders the form open tag. */ public function init() { if (!isset($this->htmlOptions['class'])) $this->htmlOptions['class'] = 'form-'.$this->type; else $this->htmlOptions['class'] .= ' form-'.$this->type; if (!isset($this->inlineErrors)) $this->inlineErrors = $this->type === self::TYPE_HORIZONTAL; if ($this->inlineErrors) $this->errorMessageCssClass = 'help-inline error'; else $this->errorMessageCssClass = 'help-block error'; parent::init(); } /** * Renders a checkbox input row. * @param CModel $model the data model * @param string $attribute the attribute * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function checkBoxRow($model, $attribute, $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_CHECKBOX, $model, $attribute, null, $htmlOptions); } /** * Renders a checkbox list input row. 
* @param CModel $model the data model * @param string $attribute the attribute * @param array $data the list data * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function checkBoxListRow($model, $attribute, $data = array(), $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_CHECKBOXLIST, $model, $attribute, $data, $htmlOptions); } /** * Renders a checkbox list inline input row. * @param CModel $model the data model * @param string $attribute the attribute * @param array $data the list data * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function checkBoxListInlineRow($model, $attribute, $data = array(), $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_CHECKBOXLIST_INLINE, $model, $attribute, $data, $htmlOptions); } /** * Renders a drop-down list input row. * @param CModel $model the data model * @param string $attribute the attribute * @param array $data the list data * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function dropDownListRow($model, $attribute, $data = array(), $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_DROPDOWN, $model, $attribute, $data, $htmlOptions); } /** * Renders a file field input row. * @param CModel $model the data model * @param string $attribute the attribute * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function fileFieldRow($model, $attribute, $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_FILE, $model, $attribute, null, $htmlOptions); } /** * Renders a password field input row. 
* @param CModel $model the data model * @param string $attribute the attribute * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function passwordFieldRow($model, $attribute, $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_PASSWORD, $model, $attribute, null, $htmlOptions); } /** * Renders a radio button input row. * @param CModel $model the data model * @param string $attribute the attribute * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function radioButtonRow($model, $attribute, $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_RADIO, $model, $attribute, null, $htmlOptions); } /** * Renders a radio button list input row. * @param CModel $model the data model * @param string $attribute the attribute * @param array $data the list data * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function radioButtonListRow($model, $attribute, $data = array(), $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_RADIOLIST, $model, $attribute, $data, $htmlOptions); } /** * Renders a radio button list inline input row. * @param CModel $model the data model * @param string $attribute the attribute * @param array $data the list data * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function radioButtonListInlineRow($model, $attribute, $data = array(), $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_RADIOLIST_INLINE, $model, $attribute, $data, $htmlOptions); } /** * Renders a text field input row. 
* @param CModel $model the data model * @param string $attribute the attribute * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function textFieldRow($model, $attribute, $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_TEXT, $model, $attribute, null, $htmlOptions); } /** * Renders a text area input row. * @param CModel $model the data model * @param string $attribute the attribute * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function textAreaRow($model, $attribute, $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_TEXTAREA, $model, $attribute, null, $htmlOptions); } /** * Renders a captcha row. * @param CModel $model the data model * @param string $attribute the attribute * @param array $htmlOptions additional HTML attributes * @return string the generated row * @since 0.9.3 */ public function captchaRow($model, $attribute, $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_CAPTCHA, $model, $attribute, null, $htmlOptions); } /** * Renders an uneditable text field row. * @param CModel $model the data model * @param string $attribute the attribute * @param array $htmlOptions additional HTML attributes * @return string the generated row * @since 0.9.5 */ public function uneditableRow($model, $attribute, $htmlOptions = array()) { return $this->inputRow(TbInput::TYPE_UNEDITABLE, $model, $attribute, null, $htmlOptions); } /** * Renders a checkbox list for a model attribute. * This method is a wrapper of {@link CHtml::activeCheckBoxList}. * Please check {@link CHtml::activeCheckBoxList} for detailed information * about the parameters for this method. * @param CModel $model the data model * @param string $attribute the attribute * @param array $data value-label pairs used to generate the check box list. * @param array $htmlOptions additional HTML options. 
* @return string the generated check box list * @since 0.9.5 */ public function checkBoxList($model, $attribute, $data, $htmlOptions = array()) { return $this->inputsList(true, $model, $attribute, $data, $htmlOptions); } /** * Renders a radio button list for a model attribute. * This method is a wrapper of {@link CHtml::activeRadioButtonList}. * Please check {@link CHtml::activeRadioButtonList} for detailed information * about the parameters for this method. * @param CModel $model the data model * @param string $attribute the attribute * @param array $data value-label pairs used to generate the radio button list. * @param array $htmlOptions additional HTML options. * @return string the generated radio button list * @since 0.9.5 */ public function radioButtonList($model, $attribute, $data, $htmlOptions = array()) { return $this->inputsList(false, $model, $attribute, $data, $htmlOptions); } /** * Renders an input list. * @param boolean $checkbox flag that indicates if the list is a checkbox-list. * @param CModel $model the data model * @param string $attribute the attribute * @param array $data value-label pairs used to generate the input list. * @param array $htmlOptions additional HTML options. * @return string the generated input list. * @since 0.9.5 */ protected function inputsList($checkbox, $model, $attribute, $data, $htmlOptions = array()) { CHtml::resolveNameID($model, $attribute, $htmlOptions); $select = CHtml::resolveValue($model, $attribute); if ($model->hasErrors($attribute)) { if (isset($htmlOptions['class'])) $htmlOptions['class'] .= ' '.CHtml::$errorCss; else $htmlOptions['class'] = CHtml::$errorCss; } $name = $htmlOptions['name']; unset($htmlOptions['name']); if (array_key_exists('uncheckValue', $htmlOptions)) { $uncheck = $htmlOptions['uncheckValue']; unset($htmlOptions['uncheckValue']); } else $uncheck = ''; $hiddenOptions = isset($htmlOptions['id']) ? 
array('id' => CHtml::ID_PREFIX.$htmlOptions['id']) : array('id' => false); $hidden = $uncheck !== null ? CHtml::hiddenField($name, $uncheck, $hiddenOptions) : ''; if (isset($htmlOptions['template'])) $template = $htmlOptions['template']; else $template = '<label class="{labelCssClass}">{input}{label}</label>'; unset($htmlOptions['template'], $htmlOptions['separator'], $htmlOptions['hint']); if ($checkbox && substr($name, -2) !== '[]') $name .= '[]'; unset($htmlOptions['checkAll'], $htmlOptions['checkAllLast']); $labelOptions = isset($htmlOptions['labelOptions']) ? $htmlOptions['labelOptions'] : array(); unset($htmlOptions['labelOptions']); $items = array(); $baseID = CHtml::getIdByName($name); $id = 0; $method = $checkbox ? 'checkBox' : 'radioButton'; $labelCssClass = $checkbox ? 'checkbox' : 'radio'; if (isset($htmlOptions['inline'])) { $labelCssClass .= ' inline'; unset($htmlOptions['inline']); } foreach ($data as $value => $label) { $checked = !is_array($select) && !strcmp($value, $select) || is_array($select) && in_array($value, $select); $htmlOptions['value'] = $value; $htmlOptions['id'] = $baseID.'_'.$id++; $option = CHtml::$method($name, $checked, $htmlOptions); $label = CHtml::label($label, $htmlOptions['id'], $labelOptions); $items[] = strtr($template, array( '{labelCssClass}' => $labelCssClass, '{input}' => $option, '{label}' => $label, )); } return $hidden.implode('', $items); } /** * Displays a summary of validation errors for one or several models. * This method is very similar to {@link CHtml::errorSummary} except that it also works * when AJAX validation is performed. * @param mixed $models the models whose input errors are to be displayed. This can be either * a single model or an array of models. 
 * @param string $header a piece of HTML code that appears in front of the errors
 * @param string $footer a piece of HTML code that appears at the end of the errors
 * @param array $htmlOptions additional HTML attributes to be rendered in the container div tag.
 * @return string the error summary. Empty if no errors are found.
 * @see CHtml::errorSummary
 */
public function errorSummary($models, $header = null, $footer = null, $htmlOptions = array())
{
    if (!isset($htmlOptions['class']))
        $htmlOptions['class'] = 'alert alert-block alert-error'; // Bootstrap error class as default
    return parent::errorSummary($models, $header, $footer, $htmlOptions);
}

/**
 * Displays the first validation error for a model attribute.
 * @param CModel $model the data model
 * @param string $attribute the attribute name
 * @param array $htmlOptions additional HTML attributes to be rendered in the container div tag.
 * @param boolean $enableAjaxValidation whether to enable AJAX validation for the specified attribute.
 * @param boolean $enableClientValidation whether to enable client-side validation for the specified attribute.
 * @return string the validation result (error display or success message).
 */
public function error($model, $attribute, $htmlOptions = array(), $enableAjaxValidation = true, $enableClientValidation = true)
{
    // Per-attribute flags cannot enable what the form as a whole has disabled.
    if (!$this->enableAjaxValidation)
        $enableAjaxValidation = false;
    if (!$this->enableClientValidation)
        $enableClientValidation = false;
    if (!isset($htmlOptions['class']))
        $htmlOptions['class'] = $this->errorMessageCssClass;
    // Static rendering only: no client-side options need to be registered.
    if (!$enableAjaxValidation && !$enableClientValidation)
        return $this->renderError($model, $attribute, $htmlOptions);
    $id = CHtml::activeId($model,$attribute);
    // 'inputID' lets the caller point validation at a different input element.
    $inputID = isset($htmlOptions['inputID']) ? $htmlOptions['inputID'] : $id;
    unset($htmlOptions['inputID']);
    if (!isset($htmlOptions['id']))
        $htmlOptions['id'] = $inputID.'_em_';
    // Options handed to the client-side validation script for this attribute.
    $option = array(
        'id'=>$id,
        'inputID'=>$inputID,
        'errorID'=>$htmlOptions['id'],
        'model'=>get_class($model),
        'name'=>CHtml::resolveName($model, $attribute),
        'enableAjaxValidation'=>$enableAjaxValidation,
        'inputContainer'=>'div.control-group', // Bootstrap requires this
    );
    // Pass-through validation options: move any that were supplied from the
    // HTML attributes into the client-side option array.
    $optionNames = array(
        'validationDelay',
        'validateOnChange',
        'validateOnType',
        'hideErrorMessage',
        'inputContainer',
        'errorCssClass',
        'successCssClass',
        'validatingCssClass',
        'beforeValidateAttribute',
        'afterValidateAttribute',
    );
    foreach ($optionNames as $name)
    {
        if (isset($htmlOptions[$name]))
        {
            $option[$name] = $htmlOptions[$name];
            unset($htmlOptions[$name]);
        }
    }
    // Existing records start out as already-validated.
    if ($model instanceof CActiveRecord && !$model->isNewRecord)
        $option['status'] = 1;
    if ($enableClientValidation)
    {
        $validators = isset($htmlOptions['clientValidation']) ? array($htmlOptions['clientValidation']) : array();
        $attributeName = $attribute;
        if (($pos = strrpos($attribute, ']')) !== false && $pos !== strlen($attribute) - 1) // e.g. [a]name
            $attributeName = substr($attribute, $pos + 1);
        // Collect the client-side validation JS emitted by each enabled validator.
        foreach ($model->getValidators($attributeName) as $validator)
        {
            if ($validator->enableClientValidation)
                if (($js = $validator->clientValidateAttribute($model, $attributeName)) != '')
                    $validators[] = $js;
        }
        if ($validators !== array())
            $option['clientValidation'] = "js:function(value, messages, attribute) {\n".implode("\n", $validators)."\n}";
    }
    $html = $this->renderError($model, $attribute, $htmlOptions);
    // No current error: emit a hidden placeholder span that the client-side
    // script can reveal and fill in later.
    if ($html === '')
    {
        if (isset($htmlOptions['style']))
            $htmlOptions['style'] = rtrim($htmlOptions['style'], ';').'; display: none';
        else
            $htmlOptions['style'] = 'display: none';
        $html = CHtml::tag('span', $htmlOptions, '');
    }
    // Register the options so the form widget can emit them for this input.
    $this->attributes[$inputID] = $option;
    return $html;
}

/**
 * Displays the first validation error for a model attribute.
* @param CModel $model the data model * @param string $attribute the attribute name * @param array $htmlOptions additional HTML attributes to be rendered in the container div tag. * @return string the error display. Empty if no errors are found. * @see CModel::getErrors * @see errorMessageCss */ protected static function renderError($model, $attribute, $htmlOptions = array()) { CHtml::resolveName($model, $attribute); // turn [a][b]attr into attr $error = $model->getError($attribute); return $error != '' ? CHtml::tag('span', $htmlOptions, $error) : ''; } /** * Creates an input row of a specific type. * @param string $type the input type * @param CModel $model the data model * @param string $attribute the attribute * @param array $data the data for list inputs * @param array $htmlOptions additional HTML attributes * @return string the generated row */ public function inputRow($type, $model, $attribute, $data = null, $htmlOptions = array()) { ob_start(); Yii::app()->controller->widget($this->getInputClassName(), array( 'type'=>$type, 'form'=>$this, 'model'=>$model, 'attribute'=>$attribute, 'data'=>$data, 'htmlOptions'=>$htmlOptions, )); return ob_get_clean(); } /** * Returns the input widget class name suitable for the form. * @return string the class name */ protected function getInputClassName() { if (isset($this->input)) return $this->input; else { switch ($this->type) { case self::TYPE_HORIZONTAL: return self::INPUT_HORIZONTAL; break; case self::TYPE_INLINE: return self::INPUT_INLINE; break; case self::TYPE_SEARCH: return self::INPUT_SEARCH; break; case self::TYPE_VERTICAL: default: return self::INPUT_VERTICAL; break; } } } }
{ "pile_set_name": "Github" }
// // Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. // /*============================================================================ ** ** Source: test18.c ** ** Purpose: Test #18 for the vprintf function. Tests the uppercase ** shorthand notation double specifier (%G) ** ** **==========================================================================*/ #include <palsuite.h> #include "../vprintf.h" int __cdecl main(int argc, char *argv[]) { double val = 2560.001; double neg = -2560.001; if (PAL_Initialize(argc, argv)) { return FAIL; } DoDoubleTest("foo %G", val, "foo 2560", "foo 2560"); DoDoubleTest("foo %lG", val, "foo 2560", "foo 2560"); DoDoubleTest("foo %hG", val, "foo 2560", "foo 2560"); DoDoubleTest("foo %LG", val, "foo 2560", "foo 2560"); DoDoubleTest("foo %I64G", val, "foo 2560", "foo 2560"); DoDoubleTest("foo %5G", val, "foo 2560", "foo 2560"); DoDoubleTest("foo %-5G", val, "foo 2560 ", "foo 2560 "); DoDoubleTest("foo %.1G", val, "foo 3E+003", "foo 3E+03"); DoDoubleTest("foo %.2G", val, "foo 2.6E+003", "foo 2.6E+03"); DoDoubleTest("foo %.12G", val, "foo 2560.001", "foo 2560.001"); DoDoubleTest("foo %06G", val, "foo 002560", "foo 002560"); DoDoubleTest("foo %#G", val, "foo 2560.00", "foo 2560.00"); DoDoubleTest("foo %+G", val, "foo +2560", "foo +2560"); DoDoubleTest("foo % G", val, "foo 2560", "foo 2560"); DoDoubleTest("foo %+G", neg, "foo -2560", "foo -2560"); DoDoubleTest("foo % G", neg, "foo -2560", "foo -2560"); PAL_Terminate(); return PASS; }
{ "pile_set_name": "Github" }
import Vue from 'vue'

// Spec for v-model on <input type="radio">: checked state must track the bound
// model value in both directions (model -> DOM and DOM click -> model).
describe('Directive v-model radio', () => {
  // Basic two-way binding with static string values.
  it('should work', done => {
    const vm = new Vue({
      data: {
        test: '1'
      },
      template: `
        <div>
          <input type="radio" value="1" v-model="test" name="test">
          <input type="radio" value="2" v-model="test" name="test">
        </div>
      `
    }).$mount()
    document.body.appendChild(vm.$el)
    expect(vm.$el.children[0].checked).toBe(true)
    expect(vm.$el.children[1].checked).toBe(false)
    vm.test = '2'
    waitForUpdate(() => {
      expect(vm.$el.children[0].checked).toBe(false)
      expect(vm.$el.children[1].checked).toBe(true)
      vm.$el.children[0].click()
      expect(vm.$el.children[0].checked).toBe(true)
      expect(vm.$el.children[1].checked).toBe(false)
      expect(vm.test).toBe('1')
    }).then(() => {
      document.body.removeChild(vm.$el)
    }).then(done)
  })

  // Dynamic :value bindings keep the bound value's type (number here).
  it('should respect value bindings', done => {
    const vm = new Vue({
      data: {
        test: 1
      },
      template: `
        <div>
          <input type="radio" :value="1" v-model="test" name="test">
          <input type="radio" :value="2" v-model="test" name="test">
        </div>
      `
    }).$mount()
    document.body.appendChild(vm.$el)
    expect(vm.$el.children[0].checked).toBe(true)
    expect(vm.$el.children[1].checked).toBe(false)
    vm.test = 2
    waitForUpdate(() => {
      expect(vm.$el.children[0].checked).toBe(false)
      expect(vm.$el.children[1].checked).toBe(true)
      vm.$el.children[0].click()
      expect(vm.$el.children[0].checked).toBe(true)
      expect(vm.$el.children[1].checked).toBe(false)
      expect(vm.test).toBe(1)
    }).then(() => {
      document.body.removeChild(vm.$el)
    }).then(done)
  })

  // Object values are matched by loose (structural) equality, not identity.
  it('should respect value bindings (object loose equal)', done => {
    const vm = new Vue({
      data: {
        test: { a: 1 }
      },
      template: `
        <div>
          <input type="radio" :value="{ a: 1 }" v-model="test" name="test">
          <input type="radio" :value="{ a: 2 }" v-model="test" name="test">
        </div>
      `
    }).$mount()
    document.body.appendChild(vm.$el)
    expect(vm.$el.children[0].checked).toBe(true)
    expect(vm.$el.children[1].checked).toBe(false)
    vm.test = { a: 2 }
    waitForUpdate(() => {
      expect(vm.$el.children[0].checked).toBe(false)
      expect(vm.$el.children[1].checked).toBe(true)
      vm.$el.children[0].click()
      expect(vm.$el.children[0].checked).toBe(true)
      expect(vm.$el.children[1].checked).toBe(false)
      expect(vm.test).toEqual({ a: 1 })
    }).then(() => {
      document.body.removeChild(vm.$el)
    }).then(done)
  })

  // Several independent radio groups bound to elements of one array; clicking
  // in one group must only update that group's slot and fire the watcher.
  it('multiple radios ', (done) => {
    const spy = jasmine.createSpy()
    const vm = new Vue({
      data: {
        selections: ['a', '1'],
        radioList: [
          {
            name: 'questionA',
            data: ['a', 'b', 'c']
          },
          {
            name: 'questionB',
            data: ['1', '2']
          }
        ]
      },
      watch: {
        selections: spy
      },
      template:
        '<div>' +
        '<div v-for="(radioGroup, idx) in radioList">' +
        '<div>' +
        '<span v-for="(item, index) in radioGroup.data">' +
        '<input :name="radioGroup.name" type="radio" :value="item" v-model="selections[idx]" :id="idx"/>' +
        '<label>{{item}}</label>' +
        '</span>' +
        '</div>' +
        '</div>' +
        '</div>'
    }).$mount()
    document.body.appendChild(vm.$el)
    var inputs = vm.$el.getElementsByTagName('input')
    inputs[1].click()
    waitForUpdate(() => {
      expect(vm.selections).toEqual(['b', '1'])
      expect(spy).toHaveBeenCalled()
    }).then(done)
  })

  // v-model.number converts the DOM string value to a number on selection.
  it('.number modifier', () => {
    const vm = new Vue({
      data: {
        test: 1
      },
      template: `
        <div>
          <input type="radio" value="1" v-model="test" name="test">
          <input type="radio" value="2" v-model.number="test" name="test">
        </div>
      `
    }).$mount()
    document.body.appendChild(vm.$el)
    expect(vm.$el.children[0].checked).toBe(true)
    expect(vm.$el.children[1].checked).toBe(false)
    vm.$el.children[1].click()
    expect(vm.$el.children[0].checked).toBe(false)
    expect(vm.$el.children[1].checked).toBe(true)
    expect(vm.test).toBe(2)
  })

  // Matching uses loose equality, so 1 matches "1", 0 matches "0", etc.,
  // and each model value must check exactly one radio.
  it('should respect different primitive type value', (done) => {
    const vm = new Vue({
      data: {
        test: 1
      },
      template:
        '<div>' +
        '<input type="radio" value="" v-model="test" name="test">' +
        '<input type="radio" value="0" v-model="test" name="test">' +
        '<input type="radio" value="1" v-model="test" name="test">' +
        '<input type="radio" value="false" v-model="test" name="test">' +
        '<input type="radio" value="true" v-model="test" name="test">' +
        '</div>'
    }).$mount()
    var radioboxInput = vm.$el.children
    expect(radioboxInput[0].checked).toBe(false)
    expect(radioboxInput[1].checked).toBe(false)
    expect(radioboxInput[2].checked).toBe(true)
    expect(radioboxInput[3].checked).toBe(false)
    expect(radioboxInput[4].checked).toBe(false)
    vm.test = 0
    waitForUpdate(() => {
      expect(radioboxInput[0].checked).toBe(false)
      expect(radioboxInput[1].checked).toBe(true)
      expect(radioboxInput[2].checked).toBe(false)
      expect(radioboxInput[3].checked).toBe(false)
      expect(radioboxInput[4].checked).toBe(false)
      vm.test = ''
    }).then(() => {
      expect(radioboxInput[0].checked).toBe(true)
      expect(radioboxInput[1].checked).toBe(false)
      expect(radioboxInput[2].checked).toBe(false)
      expect(radioboxInput[3].checked).toBe(false)
      expect(radioboxInput[4].checked).toBe(false)
      vm.test = false
    }).then(() => {
      expect(radioboxInput[0].checked).toBe(false)
      expect(radioboxInput[1].checked).toBe(false)
      expect(radioboxInput[2].checked).toBe(false)
      expect(radioboxInput[3].checked).toBe(true)
      expect(radioboxInput[4].checked).toBe(false)
      vm.test = true
    }).then(() => {
      expect(radioboxInput[0].checked).toBe(false)
      expect(radioboxInput[1].checked).toBe(false)
      expect(radioboxInput[2].checked).toBe(false)
      expect(radioboxInput[3].checked).toBe(false)
      expect(radioboxInput[4].checked).toBe(true)
    }).then(done)
  })

  // #4521
  // A click handler on an ancestor must not interfere with radio selection,
  // and re-clicking the already-selected radio keeps it checked.
  it('should work with click event', (done) => {
    const vm = new Vue({
      data: {
        num: 1,
        checked: 1
      },
      template:
        '<div @click="add">' +
        'click {{ num }}<input name="test" type="radio" value="1" v-model="checked"/>' +
        '<input name="test" type="radio" value="2" v-model="checked"/>' +
        '</div>',
      methods: {
        add: function () {
          this.num++
        }
      }
    }).$mount()
    document.body.appendChild(vm.$el)
    const radios = vm.$el.getElementsByTagName('input')
    radios[0].click()
    waitForUpdate(() => {
      expect(radios[0].checked).toBe(true)
      expect(radios[1].checked).toBe(false)
      expect(vm.num).toBe(2)
      radios[0].click()
    }).then(() => {
      expect(radios[0].checked).toBe(true)
      expect(radios[1].checked).toBe(false)
      expect(vm.num).toBe(3)
      radios[1].click()
    }).then(() => {
      expect(radios[0].checked).toBe(false)
      expect(radios[1].checked).toBe(true)
      expect(vm.num).toBe(4)
    }).then(done)
  })

  // Model changes must update checked state even while the radio has focus
  // (after being clicked); note '1' (string) vs 2 (number) no longer match.
  it('should get updated with model when in focus', (done) => {
    const vm = new Vue({
      data: {
        a: '2'
      },
      template: '<input type="radio" value="1" v-model="a"/>'
    }).$mount()
    document.body.appendChild(vm.$el)
    vm.$el.click()
    waitForUpdate(() => {
      expect(vm.$el.checked).toBe(true)
      vm.a = 2
    }).then(() => {
      expect(vm.$el.checked).toBe(false)
    }).then(done)
  })
})
{ "pile_set_name": "Github" }
#! /usr/bin/python3 # -*- coding: utf-8 -*- # @Time : 2019/3/10 5:41 PM # @Author : xiaoliji # @Email : yutian9527@gmail.com """ 数组中的逆序对 >>> InversePairs([7, 5, 6, 4]) 5 """ from collections import deque def InversePairs(data: list) -> int: count = 0 def merge(left, right): nonlocal count q = deque() # 双端队列是为了更快地从头取出 l, r = len(left) - 1, len(right) - 1 while l >= 0 and r >= 0: if left[l] > right[r]: count += r + 1 q.appendleft(left[l]) l -= 1 else: q.appendleft(right[r]) r -= 1 # q.extendleft(left[l:-1:-1] or right[r:-1:-1]) q = left[:l + 1] + right[:r + 1] + list(q) return q def merge_sort(ary: list): if len(ary) <= 1: return ary mid = len(ary) // 2 left = merge_sort(ary[:mid]) right = merge_sort(ary[mid:]) return merge(left, right) merge_sort(data) return count % 1000000007
{ "pile_set_name": "Github" }
# Lines starting with '#' and sections without content # are not displayed by a call to 'details' # [Website] http://nmap.org/ [filters] http://nmap.org/shared/images/p/solarwinds/sw_banner728x90_lem_award2012.png http://nmap.org/shared/images/p/solarwinds/sw_siem_120x90.jpg http://nmap.org/shared/images/p/gfi/120x90_gfi_languard_rebranding.gif http://nmap.org/shared/images/p/Acunetix/120x90_Scanner.gif http://nmap.org/shared/images/p/solarwinds/solarwinds_logo_tag2012.png [other] # Any other details [comments] fanboy
{ "pile_set_name": "Github" }
// stylelint-disable declaration-no-important // Typography @mixin text-emphasis-variant($parent, $color) { #{$parent} { color: $color !important; } a#{$parent} { @include hover-focus { color: darken($color, 10%) !important; } } }
{ "pile_set_name": "Github" }
package jetbrains.mps.lang.feedback; /*Generated by MPS */ import jetbrains.mps.generator.runtime.TemplateModuleInterpreted2; import jetbrains.mps.smodel.language.LanguageRegistry; import jetbrains.mps.smodel.language.LanguageRuntime; import org.jetbrains.annotations.NotNull; import org.jetbrains.mps.openapi.module.SModuleReference; import org.jetbrains.mps.openapi.persistence.PersistenceFacade; import jetbrains.mps.lang.feedback.main.QueriesGenerated; import java.util.Collection; import org.jetbrains.mps.openapi.language.SLanguage; import java.util.Arrays; public class Generator extends TemplateModuleInterpreted2 { public Generator(LanguageRegistry languageRegistry, LanguageRuntime sourceLanguage, jetbrains.mps.smodel.Generator generator) { super(languageRegistry, sourceLanguage, generator); } @NotNull @Override public SModuleReference getModuleReference() { return PersistenceFacade.getInstance().createModuleReference("33a2686b-0855-4a16-a514-d60d6ed64889(jetbrains.mps.lang.feedback#01)"); } @Override protected void fillTemplateModels(TemplateModuleInterpreted2.TemplateModels models) { models.templates("r:a0d961bc-61c4-4d37-af2a-a614ae123460", QueriesGenerated.class); } @Override public Collection<SLanguage> getTargetLanguages() { SLanguage[] rv = new SLanguage[0]; return Arrays.asList(rv); } }
{ "pile_set_name": "Github" }
" I am an abstract class. My subclasses are some functions for a FastTable as search or filter. Description ------------------------------------------------- I can be use in two way. Implicitely I will not appear on the FastTable. Explicitely I can display a Widget on the FastTable. I work with a FTTableMorph. I cannot be use alone. Public API and Key Messages ------------------------------------------------- - #table: aTableMorph is my constructor - #keyStroke: anEvent This is the method that will allow to use me implicitely. With this I will receive an event from the FastTable. - #beExplicite This method will make my functionnality explicit. For example the FTFilterFunction will display a filter field. Example (Should only be create by a FTTableMorph) ------------------------------------------------- FTFunction table: (FTTableMorph with: (1 to: 200)) Internal Representation and Key Implementation Points. ------------------------------------------------- Instance Variables table I am a FTTableMorph that use this function. " Class { #name : #FTFunction, #superclass : #Object, #instVars : [ 'table' ], #category : #'Morphic-Widgets-FastTable-Functions' } { #category : #testing } FTFunction class >> isAbstract [ ^ self = FTFunction ] { #category : #'instance creation' } FTFunction class >> new [ self error: 'Use #table:' ] { #category : #'instance creation' } FTFunction class >> table: aFastTableMorph [ ^ self basicNew initializeTable: aFastTableMorph; yourself ] { #category : #accessing } FTFunction >> beExplicit [ "This method is call by the FastTable if the user want the widget to be explicit. Describe what to do to be explicit." self subclassResponsibility ] { #category : #protocol } FTFunction >> disable [ "I am call when a FastTable disable a function." self subclassResponsibility ] { #category : #initialization } FTFunction >> initializeTable: aTable [ table := aTable. 
self initialize ] { #category : #testing } FTFunction >> isExplicit [ "See FTFunctionWithField to get an example." self subclassResponsibility ] { #category : #'event handling' } FTFunction >> keyStroke: anEvent [ "If the widget is implicit this method will be call if the user want to communicate with the widget." self subclassResponsibility ] { #category : #initialization } FTFunction >> reset [ self subclassResponsibility ] { #category : #private } FTFunction >> resizeContainerFrom: topLefPoint to: bottomRightPoint [ "I takes takes the bounds of the container of the Fast Table. I return a rectangle that is the bounds of the container." self subclassResponsibility ] { #category : #private } FTFunction >> resizeWidget [ self subclassResponsibility ] { #category : #accessing } FTFunction >> showWidget [ "A FastTable can have a special function. Most of these functions are hidden by default. This method make them explicit. For exemple a FastTable can have a search field. By default the user need to type to show it. With this method the widget should be alwayse visible. My subclasses should describe what to do to be explicit." self subclassResponsibility ] { #category : #accessing } FTFunction >> table [ ^ table ]
{ "pile_set_name": "Github" }