content_type: stringclasses (8 values)
main_lang: stringclasses (7 values)
message: stringlengths (1-50)
sha: stringlengths (40-40)
patch: stringlengths (52-962k)
file_count: int64 (1-300)
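Each record below lists its fields in the column order given above: content_type, main_lang, message, sha, patch, file_count. As a rough illustration only (the dict shape and the Python representation are assumptions made here, not part of this dump; the values are copied from the first record below, with the patch text truncated), a single record can be read as:

    record = {
        "content_type": "Go",
        "main_lang": "Go",
        "message": "fix a small bug that could improperly cache",
        "sha": "3a6d1228a83ba0f9dd96138ccccfa758c0956a62",
        # patch holds the full diff as one string; truncated here for brevity
        "patch": "<ide><path>builder/dispatchers.go ...",
        "file_count": 1,
    }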
Go
Go
fix a small bug that could improperly cache
3a6d1228a83ba0f9dd96138ccccfa758c0956a62
<ide><path>builder/dispatchers.go <ide> func run(b *Builder, args []string, attributes map[string]bool) error { <ide> func cmd(b *Builder, args []string, attributes map[string]bool) error { <ide> b.Config.Cmd = handleJsonArgs(args, attributes) <ide> <del> if err := b.commit("", b.Config.Cmd, fmt.Sprintf("CMD %v", cmd)); err != nil { <add> if err := b.commit("", b.Config.Cmd, fmt.Sprintf("CMD %v", b.Config.Cmd)); err != nil { <ide> return err <ide> } <ide>
1
Javascript
Javascript
remove support.reliablehiddenoffsets detect
02d7f9aee3fbe9b568702ecc4edd664245dc8917
<ide><path>src/css.js <ide> jQuery(function() { <ide> <ide> if ( jQuery.expr && jQuery.expr.filters ) { <ide> jQuery.expr.filters.hidden = function( elem ) { <del> return ( elem.offsetWidth === 0 && elem.offsetHeight === 0 ) || (!jQuery.support.reliableHiddenOffsets && ((elem.style && elem.style.display) || jQuery.css( elem, "display" )) === "none"); <add> return elem.offsetWidth === 0 && elem.offsetHeight === 0; <ide> }; <ide> <ide> jQuery.expr.filters.visible = function( elem ) { <ide><path>src/support.js <ide> jQuery.support = (function() { <ide> <del> var support, all, a, select, opt, input, fragment, eventName, isSupported, i, <add> var support, all, a, select, opt, input, fragment, eventName, i, <ide> div = document.createElement("div"); <ide> <ide> // Setup <ide> jQuery.support = (function() { <ide> container = document.createElement("div"); <ide> container.style.cssText = "border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px"; <ide> <del> body.appendChild( container ).appendChild( div ); <del> <del> // Support: IE8 <del> // Check if table cells still have offsetWidth/Height when they are set <del> // to display:none and there are still other visible table cells in a <del> // table row; if so, offsetWidth/Height are not reliable for use when <del> // determining if an element has been hidden directly using <del> // display:none (it is still safe to use offsets if a parent element is <del> // hidden; don safety goggles and see bug #4512 for more information). <del> div.innerHTML = "<table><tr><td></td><td>t</td></tr></table>"; <del> tds = div.getElementsByTagName("td"); <del> tds[ 0 ].style.cssText = "padding:0;margin:0;border:0;display:none"; <del> isSupported = ( tds[ 0 ].offsetHeight === 0 ); <del> <del> tds[ 0 ].style.display = ""; <del> tds[ 1 ].style.display = "none"; <del> <del> // Support: IE8 <del> // Check if empty table cells still have offsetWidth/Height <del> support.reliableHiddenOffsets = isSupported && ( tds[ 0 ].offsetHeight === 0 ); <del> <ide> // Check box-sizing and margin behavior <add> body.appendChild( container ).appendChild( div ); <ide> div.innerHTML = ""; <ide> div.style.cssText = "box-sizing:border-box;-moz-box-sizing:border-box;-webkit-box-sizing:border-box;padding:1px;border:1px;display:block;width:4px;margin-top:1%;position:absolute;top:1%;"; <ide> support.boxSizing = ( div.offsetWidth === 4 );
2
Python
Python
read features.msgpack instead of features.pkl
f102ef6b54bbc0ddaf7c093dee7fcacaf667c2ed
<ide><path>spacy/lang/zh/__init__.py <ide> def to_bytes(self, **kwargs): <ide> self.pkuseg_seg.feature_extractor.save(tempdir) <ide> self.pkuseg_seg.model.save(tempdir) <ide> tempdir = Path(tempdir) <del> with open(tempdir / "features.pkl", "rb") as fileh: <add> with open(tempdir / "features.msgpack", "rb") as fileh: <ide> pkuseg_features_b = fileh.read() <ide> with open(tempdir / "weights.npz", "rb") as fileh: <ide> pkuseg_weights_b = fileh.read() <ide> def deserialize_pkuseg_processors(b): <ide> if pkuseg_data["features_b"] and pkuseg_data["weights_b"]: <ide> with tempfile.TemporaryDirectory() as tempdir: <ide> tempdir = Path(tempdir) <del> with open(tempdir / "features.pkl", "wb") as fileh: <add> with open(tempdir / "features.msgpack", "wb") as fileh: <ide> fileh.write(pkuseg_data["features_b"]) <ide> with open(tempdir / "weights.npz", "wb") as fileh: <ide> fileh.write(pkuseg_data["weights_b"])
1
Javascript
Javascript
pass cacheversion param via the new api
828cd78866f69f8dc0e969ce19f6e615a640ee26
<ide><path>local-cli/bundle/buildBundle.js <ide> async function buildBundle( <ide> assetExts: defaultAssetExts.concat(assetExts), <ide> assetRegistryPath: ASSET_REGISTRY_PATH, <ide> blacklistRE: config.getBlacklistRE(), <add> cacheVersion: config.cacheVersion, <ide> dynamicDepsInPackages: config.dynamicDepsInPackages, <ide> enableBabelRCLookup: config.getEnableBabelRCLookup(), <ide> extraNodeModules: config.extraNodeModules,
1
PHP
PHP
remove unused conditional
a3889bc48c89baaca8c274fd3f0f561e8799e3f5
<ide><path>src/Illuminate/Database/Eloquent/Model.php <ide> protected function performUpdate(Builder $query) <ide> // First we need to create a fresh query instance and touch the creation and <ide> // update timestamp on the model which are maintained by us for developer <ide> // convenience. Then we will just continue saving the model instances. <del> if ($this->usesTimestamps() && $this->shouldTouch()) { <add> if ($this->usesTimestamps()) { <ide> $this->updateTimestamps(); <ide> } <ide>
1
Javascript
Javascript
use correct comments
87a7690afa8c56d2aee2f902b49e3932bb3a2449
<ide><path>src/manipulation.js <ide> jQuery.extend({ <ide> inPage = jQuery.contains( elem.ownerDocument, elem ), <ide> clone = elem.cloneNode( true ); <ide> <del> // Fix IE cloning issues <add> // Support: IE >=9 <add> // Fix Cloning issues <ide> if ( !jQuery.support.noCloneChecked && ( elem.nodeType === 1 || elem.nodeType === 11 ) && !jQuery.isXMLDoc( elem ) ) { <ide> <ide> // We eschew Sizzle here for performance reasons: http://jsperf.com/getall-vs-sizzle/2 <ide> destElements = getAll( clone ); <ide> srcElements = getAll( elem ); <ide> <del> for ( i = 0; (node = srcElements[ i ]) != null; ++i ) { <add> for ( i = 0; ( node = srcElements[ i ] ) != null; ++i ) { <ide> // Ensure that the destination node is not null; Fixes #9587 <ide> if ( destElements[ i ] ) { <ide> fixCloneNodeIssues( node, destElements[ i ] ); <ide> jQuery.extend({ <ide> <ide> // Convert html into DOM nodes <ide> } else { <add> <ide> // Ensure a safe container <ide> container = container || context.createDocumentFragment(); <ide> tmp = tmp || container.appendChild( context.createElement("div") ); <ide> jQuery.extend({ <ide> <ide> core_push.apply( ret, tmp.childNodes ); <ide> <del> // Fix #12392 for WebKit and IE > 9 <add> // Fix #12392 - remove childNodes parent <ide> tmp.textContent = ""; <ide> <ide> // Remember the top-level container for proper cleanup <ide> function fixCloneNodeIssues( src, dest ) { <ide> dest.outerHTML = src.outerHTML; <ide> } <ide> <del> // This path appears unavoidable for IE9. When cloning an object <del> // element in IE9, the outerHTML strategy above is not sufficient. <add> // Support: IE 9 <add> // When cloning an object the outerHTML strategy above is not sufficient. <ide> // If the src has innerHTML and the destination does not, <ide> // copy the src.innerHTML into the dest.innerHTML. #10324 <ide> if ( src.innerHTML && !jQuery.trim( dest.innerHTML ) ) { <ide> dest.innerHTML = src.innerHTML; <ide> } <ide> <del> // IE9-10 fails to persist the checked state of a cloned checkbox or radio button. <add> // Support: IE >= 9 <add> // Fails to persist the checked state of a cloned checkbox or radio button. <ide> } else if ( nodeName === "input" && manipulation_rcheckableType.test( src.type ) ) { <ide> dest.checked = src.checked; <ide> <del> // IE9-10 fails to return the selected option to the default selected <del> // state when cloning options <add> // Support: IE >= 9 <add> // Fails to return the selected option to the default selected state when cloning options <ide> } else if ( nodeName === "input" || nodeName === "textarea" ) { <ide> dest.defaultValue = src.defaultValue; <ide> }
1
PHP
PHP
add error message when not using php 5.4
814669edf3134eae032eb6bb4b9ba863bdf150ac
<ide><path>src/Illuminate/Foundation/Console/ServeCommand.php <ide> class ServeCommand extends Command { <ide> */ <ide> public function fire() <ide> { <add> // The development server feature was added in PHP 5.4. <add> if (version_compare(PHP_VERSION, '5.4.0', '<')) <add> { <add> $this->error("PHP 5.4 is required to start the development server"); <add> return; <add> } <add> <ide> chdir($this->laravel['path.base']); <ide> <ide> $host = $this->input->getOption('host');
1
Javascript
Javascript
remove unnecessary template string
7cfd5f169c9f9f7765df190de2890e93331bb768
<ide><path>lib/internal/util/inspect.js <ide> function strEscape(str) { <ide> continue; <ide> } <ide> } <del> result += `${StringPrototypeSlice(str, last, i)}${`\\u${point.toString(16)}`}`; <add> result += `${StringPrototypeSlice(str, last, i)}\\u${point.toString(16)}`; <ide> last = i + 1; <ide> } <ide> }
1
PHP
PHP
remember bloc doc
542312858b60a9c471b1516a6c962e2f8ed3ef37
<ide><path>src/Cache/Cache.php <ide> public static function enabled(): bool <ide> * <ide> * ``` <ide> * $results = Cache::remember('all_articles', function () { <del> * return $this->find('all'); <add> * return $this->find('all')->toArray(); <ide> * }); <ide> * ``` <ide> *
1
Javascript
Javascript
use the correct terminology seq/iter/kind
9a50e89b259bab0394871e5657a44cc890860ee5
<ide><path>dist/immutable.js <ide> var $Iterable = Iterable; <ide> has: function(searchKey) { <ide> return this.get(searchKey, NOT_SET) !== NOT_SET; <ide> }, <del> isSubset: function(seq) { <del> seq = typeof seq.contains === 'function' ? seq : $Iterable(seq); <add> isSubset: function(iter) { <add> iter = typeof iter.contains === 'function' ? iter : $Iterable(iter); <ide> return this.every((function(value) { <del> return seq.contains(value); <add> return iter.contains(value); <ide> })); <ide> }, <del> isSuperset: function(seq) { <del> return seq.isSubset(this); <add> isSuperset: function(iter) { <add> return iter.isSubset(this); <ide> }, <ide> keySeq: function() { <ide> return this.toSeq().map(keyMapper).toIndexedSeq(); <ide> var $Iterable = Iterable; <ide> return this.maxBy(valueMapper, comparator); <ide> }, <ide> maxBy: function(mapper, comparator) { <add> var $__0 = this; <ide> comparator = comparator || defaultComparator; <del> var seq = this; <del> var maxEntry = seq.entrySeq().reduce((function(max, next) { <del> return comparator(mapper(next[1], next[0], seq), mapper(max[1], max[0], seq)) > 0 ? next : max; <add> var maxEntry = this.entrySeq().reduce((function(max, next) { <add> return comparator(mapper(next[1], next[0], $__0), mapper(max[1], max[0], $__0)) > 0 ? next : max; <ide> })); <ide> return maxEntry && maxEntry[1]; <ide> }, <ide> min: function(comparator) { <ide> return this.minBy(valueMapper, comparator); <ide> }, <ide> minBy: function(mapper, comparator) { <add> var $__0 = this; <ide> comparator = comparator || defaultComparator; <del> var seq = this; <del> var minEntry = seq.entrySeq().reduce((function(min, next) { <del> return comparator(mapper(next[1], next[0], seq), mapper(min[1], min[0], seq)) < 0 ? next : min; <add> var minEntry = this.entrySeq().reduce((function(min, next) { <add> return comparator(mapper(next[1], next[0], $__0), mapper(min[1], min[0], $__0)) < 0 ? next : min; <ide> })); <ide> return minEntry && minEntry[1]; <ide> }, <ide> var $Iterable = Iterable; <ide> return this.skipWhile(not(predicate), context); <ide> }, <ide> sortBy: function(mapper, comparator) { <add> var $__0 = this; <ide> comparator = comparator || defaultComparator; <del> var seq = this; <del> return reify(this, new ArraySequence(seq.entrySeq().entrySeq().toArray().sort((function(a, b) { <del> return comparator(mapper(a[1][1], a[1][0], seq), mapper(b[1][1], b[1][0], seq)) || a[0] - b[0]; <add> return reify(this, new ArraySequence(this.entrySeq().entrySeq().toArray().sort((function(a, b) { <add> return comparator(mapper(a[1][1], a[1][0], $__0), mapper(b[1][1], b[1][0], $__0)) || a[0] - b[0]; <ide> }))).fromEntrySeq().valueSeq().fromEntrySeq()); <ide> }, <ide> take: function(amount) { <ide> var IndexedIterable = function IndexedIterable(value) { <ide> return this.get(-1); <ide> }, <ide> skip: function(amount) { <del> var seq = this; <del> var skipSeq = skipFactory(seq, amount, false); <del> if (skipSeq !== seq) { <add> var iter = this; <add> var skipSeq = skipFactory(iter, amount, false); <add> if (isLazy(iter) && skipSeq !== iter) { <ide> skipSeq.get = function(index, notSetValue) { <ide> index = wrapIndex(this, index); <del> return index >= 0 ? seq.get(index + amount, notSetValue) : notSetValue; <add> return index >= 0 ? 
iter.get(index + amount, notSetValue) : notSetValue; <ide> }; <ide> } <ide> return reify(this, skipSeq); <ide> var IndexedIterable = function IndexedIterable(value) { <ide> return reify(this, skipWhileFactory(this, predicate, context, false)); <ide> }, <ide> sortBy: function(mapper, comparator) { <add> var $__0 = this; <ide> comparator = comparator || defaultComparator; <del> var seq = this; <ide> return reify(this, new ArraySequence(this.entrySeq().toArray().sort((function(a, b) { <del> return comparator(mapper(a[1], a[0], seq), mapper(b[1], b[0], seq)) || a[0] - b[0]; <add> return comparator(mapper(a[1], a[0], $__0), mapper(b[1], b[0], $__0)) || a[0] - b[0]; <ide> }))).fromEntrySeq().valueSeq()); <ide> }, <ide> take: function(amount) { <del> var seq = this; <del> var takeSeq = takeFactory(seq, amount); <del> if (takeSeq !== seq) { <add> var iter = this; <add> var takeSeq = takeFactory(iter, amount); <add> if (isLazy(iter) && takeSeq !== iter) { <ide> takeSeq.get = function(index, notSetValue) { <ide> index = wrapIndex(this, index); <del> return index >= 0 && index < amount ? seq.get(index, notSetValue) : notSetValue; <add> return index >= 0 && index < amount ? iter.get(index, notSetValue) : notSetValue; <ide> }; <ide> } <ide> return reify(this, takeSeq); <ide> Iterable.Keyed = KeyedIterable; <ide> Iterable.Set = SetIterable; <ide> Iterable.Indexed = IndexedIterable; <ide> Iterable.Iterator = Iterator; <del>function reify(kind, seq) { <del> return isLazy(kind) ? seq : kind.constructor(seq); <add>function reify(iter, seq) { <add> return isLazy(iter) ? seq : iter.constructor(seq); <ide> } <ide> function valueMapper(v) { <ide> return v; <ide><path>dist/immutable.min.js <ide> var s,u=(0===r?t.hash:t.hash>>>r)&$e,a=(0===r?n:n>>>r)&$e,o=u===a?[pe(t,e,r+He,n <ide> }function Ke(t,e){var r=Object.create(Cn);return r.size=t?t.size:0,r._map=t,r.__ownerID=e,r}function Pe(t,e,r,n){var i=Object.create(En.prototype);return i.size=t?t.size:0,i._map=t,i._vector=e,i.__ownerID=r,i.__hash=n,i}function We(t,e,r){var n=t._map,i=t._vector,s=n.get(e),u=void 0!==s,a=r===tr;if(!u&&a||u&&r===i.get(s)[1])return t;u||(s=i.size);var o=a?n.remove(e):u?n:n.set(e,s),h=a?i.remove(s):i.set(s,[e,r]);return t.__ownerID?(t.size=o.size,t._map=o,t._vector=h,t.__hash=void 0,t):Pe(o,h)}function Be(t,e,r){var n=Object.create(Object.getPrototypeOf(t));return n._map=e,n.__ownerID=r,n}function Le(t,e){return e?Je(e,t,"",{"":t}):Te(t)}function Je(t,e,r,n){return e&&(Array.isArray(e)||e.constructor===Object)?t.call(n,r,wr(e).map(function(r,n){return Je(t,r,n,e)})):e}function Te(t){if(t&&"object"==typeof t){if(Array.isArray(t))return wr(t).map(Te).toVector();if(t.constructor===Object)return wr(t).map(Te).toMap()}return t}function Ve(t,e,r,n){4>arguments.length&&(n=t.getIn(e));var i=M(n)?n.size:void 0,s=C(n)?Xn:Nn;return new s(t,e,r,i)}function Ne(t,e,r){return M(r)?Ye(t,e,r):r}function Ye(t,e,r){return Ve(t._rootData,t._keyPath.concat(e),t._onChange,r)}function Xe(t,e,r){var n=t._rootData.updateIn(t._keyPath,r?en.empty():void 0,e),i=t._keyPath||[];return t._onChange&&t._onChange.call(void 0,n,t._rootData,r?i.concat(r):i),Ve(n,t._keyPath,t._onChange)}var Ze=Object,Fe={};Fe.createClass=t,Fe.superCall=e,Fe.defaultSuperCall=r;var Ge="delete",He=5,Qe=1<<He,$e=Qe-1,tr={},er={value:!1},rr={value:!1},nr=function(){try{return Object.defineProperty({},"x",{}),!0}catch(t){return!1}}(),ir="function"==typeof WeakMap&&new WeakMap,sr=2147483647,ur=0,ar="__immutablehash__";"function"==typeof Symbol&&(ar=Symbol(ar));var 
or=16,hr=255,cr=0,fr={},_r=0,vr=1,lr=2,pr="@@iterator",dr="function"==typeof Symbol&&Symbol.iterator,yr=dr||pr,mr=function(t){this.next=t};Fe.createClass(mr,{toString:function(){return"[Iterator]"}},{});var gr=mr.prototype;gr.inspect=gr.toSource=function(){return""+this},gr[yr]=function(){return this};var wr=function(t){return M(t)?t:Cr.apply(void 0,arguments) <ide> },zr=wr;Fe.createClass(wr,{toArray:function(){h(this.size);var t=Array(this.size||0);return this.valueSeq().__iterate(function(e,r){t[r]=e}),t},toIndexedSeq:function(){return new Vr(this)},toJS:function(){return this.toSeq().map(function(t){return t&&"function"==typeof t.toJS?t.toJS():t}).__toJS()},toKeyedSeq:function(){return new Nr(this,!0)},toMap:function(){return h(this.size),en(this.toKeyedSeq())},toObject:function(){h(this.size);var t={};return this.__iterate(function(e,r){t[r]=e}),t},toOrderedMap:function(){return h(this.size),En(this.toKeyedSeq())},toSet:function(){return h(this.size),Mn(this)},toSetSeq:function(){return new Yr(this,!0)},toSeq:function(){return C(this)?this.toIndexedSeq():k(this)?this.toKeyedSeq():this.toSetSeq()},toStack:function(){return h(this.size),In(this)},toVector:function(){return h(this.size),yn(this)},toString:function(){return"[Iterable]"},__toString:function(t,e){return 0===this.size?t+e:t+" "+this.toSeq().map(this.__toStringMapper).join(", ")+" "+e},concat:function(){for(var t=[],e=0;arguments.length>e;e++)t[e]=arguments[e];return E(this,ne(this,t,!0))},contains:function(t){return this.some(function(e){return n(e,t)})},entries:function(){return this.__iterator(lr)},every:function(t,e){var r=!0;return this.__iterate(function(n,i,s){return t.call(e,n,i,s)?void 0:(r=!1,!1)}),r},filter:function(t,e){return E(this,H(this,t,e,!0))},find:function(t,e,r){var n=r;return this.__iterate(function(r,i,s){return t.call(e,r,i,s)?(n=r,!1):void 0}),n},forEach:function(t,e){return this.__iterate(e?t.bind(e):t)},join:function(t){t=void 0!==t?""+t:",";var e="",r=!0;return this.__iterate(function(n){r?r=!1:e+=t,e+=null!==n&&void 0!==n?n:""}),e},keys:function(){return this.__iterator(_r)},map:function(t,e){return E(this,F(this,t,e))},reduce:function(t,e,r){var n,i;return 2>arguments.length?i=!0:n=e,this.__iterate(function(e,s,u){i?(i=!1,n=e):n=t.call(r,n,e,s,u)}),n},reduceRight:function(){var t=this.toKeyedSeq().reverse();return t.reduce.apply(t,arguments)},reverse:function(){return E(this,G(this,!0)) <ide> },slice:function(t,e){if(v(t,e,this.size))return this;var r=l(t,this.size),n=p(e,this.size);if(r!==r||n!==n)return this.toSeq().cacheResult().slice(t,e);var i=0===r?this:this.skip(r);return E(this,void 0===n||n===this.size?i:i.take(n-r))},some:function(t,e){return!this.every(U(t),e)},sort:function(t){return this.sortBy(A,t)},values:function(){return this.__iterator(vr)},butLast:function(){return this.slice(0,-1)},count:function(t,e){return c(t?this.toSeq().filter(t,e):this)},countBy:function(t,e){var r=this,n={},i=[];return this.__iterate(function(s,u){var a=t.call(e,s,u,r),o=y(a);n.hasOwnProperty(o)?i[n[o]][1]++:(n[o]=i.length,i.push([a,1]))}),new Wr(i).fromEntrySeq()},equals:function(t){if(this===t)return!0;if(!t||"function"!=typeof t.equals)return!1;if(void 0!==this.size&&void 0!==t.size){if(this.size!==t.size)return!1;if(0===this.size&&0===t.size)return!0}return void 0!==this.__hash&&void 0!==t.__hash&&this.__hash!==t.__hash?!1:this.__deepEquals(t)},__deepEquals:function(t){var e=this.entries();return"function"==typeof t.every&&t.every(function(t,r){var i=e.next().value;return 
i&&n(i[0],r)&&n(i[1],t)})&&e.next().done},entrySeq:function(){var t=this;if(t._cache)return new Wr(t._cache);var e=t.toSeq().map(j).toIndexedSeq();return e.fromEntrySeq=function(){return t.toSeq()},e},filterNot:function(t,e){return this.filter(U(t),e)},findKey:function(t,e){var r;return this.__iterate(function(n,i,s){return t.call(e,n,i,s)?(r=i,!1):void 0}),r},findLast:function(t,e,r){return this.toKeyedSeq().reverse().find(t,e,r)},findLastKey:function(t,e){return this.toKeyedSeq().reverse().findKey(t,e)},first:function(){return this.find(_)},flatMap:function(t,e){var r=this,n=W(this);return E(this,this.toSeq().map(function(i,s){return n(t.call(e,i,s,r))}).flatten(!0))},flatten:function(t){return E(this,ie(this,t,!0))},fromEntrySeq:function(){return new Xr(this)},get:function(t,e){return this.find(function(e,r){return n(r,t)},void 0,e)},getIn:function(t,e){var r=this;if(t)for(var n=0;t.length>n;n++)if(r=r&&r.get?r.get(t[n],tr):tr,r===tr)return e; <del>return r},groupBy:function(t,e){return Q(this,t,e,!0)},has:function(t){return this.get(t,tr)!==tr},isSubset:function(t){return t="function"==typeof t.contains?t:zr(t),this.every(function(e){return t.contains(e)})},isSuperset:function(t){return t.isSubset(this)},keySeq:function(){return this.toSeq().map(R).toIndexedSeq()},last:function(){return this.toSeq().reverse().first()},max:function(t){return this.maxBy(A,t)},maxBy:function(t,e){e=e||P;var r=this,n=r.entrySeq().reduce(function(n,i){return e(t(i[1],i[0],r),t(n[1],n[0],r))>0?i:n});return n&&n[1]},min:function(t){return this.minBy(A,t)},minBy:function(t,e){e=e||P;var r=this,n=r.entrySeq().reduce(function(n,i){return e(t(i[1],i[0],r),t(n[1],n[0],r))<0?i:n});return n&&n[1]},rest:function(){return this.slice(1)},skip:function(t){return E(this,ee(this,t,!0))},skipLast:function(t){return E(this,this.toSeq().reverse().skip(t).reverse())},skipWhile:function(t,e){return E(this,re(this,t,e,!0))},skipUntil:function(t,e){return this.skipWhile(U(t),e)},sortBy:function(t,e){e=e||P;var r=this;return E(this,new Wr(r.entrySeq().entrySeq().toArray().sort(function(n,i){return e(t(n[1][1],n[1][0],r),t(i[1][1],i[1][0],r))||n[0]-i[0]})).fromEntrySeq().valueSeq().fromEntrySeq())},take:function(t){return E(this,$(this,t))},takeLast:function(t){return E(this,this.toSeq().reverse().take(t).reverse())},takeWhile:function(t,e){return E(this,te(this,t,e))},takeUntil:function(t,e){return this.takeWhile(U(t),e)},valueSeq:function(){return this.toIndexedSeq()},hashCode:function(){return this.__hash||(this.__hash=1/0===this.size?0:this.reduce(function(t,e,r){return t+(y(e)^(e===r?0:y(r)))&sr},0))}},{});var Sr="",qr="",Ir="",br=wr.prototype;br[Sr]=!0,br[yr]=br.values,br.toJSON=br.toJS,br.__toJS=br.toArray,br.__toStringMapper=K,br.inspect=br.toSource=function(){return""+this},br.chain=br.flatMap,function(){try{Object.defineProperty(br,"length",{get:function(){var t;try{throw Error()}catch(e){t=e.stack}return-1===t.indexOf("_wrapObject")?(console&&console.warn&&console.warn("iterable.length has been deprecated, use iterable.size or iterable.count(). This warning will become a silent error in a future version. 
"+t),this.size):void 0 <add>return r},groupBy:function(t,e){return Q(this,t,e,!0)},has:function(t){return this.get(t,tr)!==tr},isSubset:function(t){return t="function"==typeof t.contains?t:zr(t),this.every(function(e){return t.contains(e)})},isSuperset:function(t){return t.isSubset(this)},keySeq:function(){return this.toSeq().map(R).toIndexedSeq()},last:function(){return this.toSeq().reverse().first()},max:function(t){return this.maxBy(A,t)},maxBy:function(t,e){var r=this;e=e||P;var n=this.entrySeq().reduce(function(n,i){return e(t(i[1],i[0],r),t(n[1],n[0],r))>0?i:n});return n&&n[1]},min:function(t){return this.minBy(A,t)},minBy:function(t,e){var r=this;e=e||P;var n=this.entrySeq().reduce(function(n,i){return e(t(i[1],i[0],r),t(n[1],n[0],r))<0?i:n});return n&&n[1]},rest:function(){return this.slice(1)},skip:function(t){return E(this,ee(this,t,!0))},skipLast:function(t){return E(this,this.toSeq().reverse().skip(t).reverse())},skipWhile:function(t,e){return E(this,re(this,t,e,!0))},skipUntil:function(t,e){return this.skipWhile(U(t),e)},sortBy:function(t,e){var r=this;return e=e||P,E(this,new Wr(this.entrySeq().entrySeq().toArray().sort(function(n,i){return e(t(n[1][1],n[1][0],r),t(i[1][1],i[1][0],r))||n[0]-i[0]})).fromEntrySeq().valueSeq().fromEntrySeq())},take:function(t){return E(this,$(this,t))},takeLast:function(t){return E(this,this.toSeq().reverse().take(t).reverse())},takeWhile:function(t,e){return E(this,te(this,t,e))},takeUntil:function(t,e){return this.takeWhile(U(t),e)},valueSeq:function(){return this.toIndexedSeq()},hashCode:function(){return this.__hash||(this.__hash=1/0===this.size?0:this.reduce(function(t,e,r){return t+(y(e)^(e===r?0:y(r)))&sr},0))}},{});var Sr="",qr="",Ir="",br=wr.prototype;br[Sr]=!0,br[yr]=br.values,br.toJSON=br.toJS,br.__toJS=br.toArray,br.__toStringMapper=K,br.inspect=br.toSource=function(){return""+this},br.chain=br.flatMap,function(){try{Object.defineProperty(br,"length",{get:function(){var t;try{throw Error()}catch(e){t=e.stack}return-1===t.indexOf("_wrapObject")?(console&&console.warn&&console.warn("iterable.length has been deprecated, use iterable.size or iterable.count(). This warning will become a silent error in a future version. 
"+t),this.size):void 0 <ide> }})}catch(t){}}();var xr=function(t){return k(t)?t:Er.apply(void 0,arguments)};Fe.createClass(xr,{flip:function(){return E(this,Z(this))},mapEntries:function(t,e){var r=this,n=0;return E(this,this.toSeq().map(function(i,s){return t.call(e,[s,i],n++,r)}).fromEntrySeq())},mapKeys:function(t,e){var r=this;return E(this,this.toSeq().flip().map(function(n,i){return t.call(e,n,i,r)}).flip())}},{},wr);var Dr=xr.prototype;Dr[qr]=!0,Dr[yr]=br.entries,Dr.__toJS=br.toObject,Dr.__toStringMapper=function(t,e){return e+": "+K(t)};var Mr=function(t){return M(t)&&!O(t)?t:Rr.apply(void 0,arguments)};Fe.createClass(Mr,{get:function(t,e){return this.has(t)?t:e},contains:function(t){return this.has(t)},keySeq:function(){return this.valueSeq()}},{},wr),Mr.prototype.has=br.contains;var kr=function(t){return C(t)?t:Ur.apply(void 0,arguments)};Fe.createClass(kr,{toKeyedSeq:function(){return new Nr(this,!1)},concat:function(){for(var t=[],e=0;arguments.length>e;e++)t[e]=arguments[e];return E(this,ne(this,t,!1))},filter:function(t,e){return E(this,H(this,t,e,!1))},findIndex:function(t,e){var r=this.findKey(t,e);return void 0===r?-1:r},indexOf:function(t){return this.findIndex(function(e){return n(e,t)})},lastIndexOf:function(t){return this.toKeyedSeq().reverse().indexOf(t)},reverse:function(){return E(this,G(this,!1))},splice:function(t,e){var r=arguments.length;if(e=Math.max(0|e,0),0===r||2===r&&!e)return this;t=l(t,this.size);var n=this.slice(0,t);return E(this,1===r?n:n.concat(o(arguments,2),this.slice(t+e)))},findLastIndex:function(t,e){return this.toKeyedSeq().reverse().findIndex(t,e)},first:function(){return this.get(0)},flatten:function(t){return E(this,ie(this,t,!1))},get:function(t,e){return t=f(this,t),0>t||1/0===this.size||void 0!==this.size&&t>this.size?e:this.find(function(e,r){return r===t},void 0,e)},groupBy:function(t,e){return Q(this,t,e,!1)},has:function(t){return t=f(this,t),t>=0&&(void 0!==this.size?1/0===this.size||this.size>t:-1!==this.indexOf(t))},interpose:function(t){return E(this,se(this,t))},last:function(){return this.get(-1) <del>},skip:function(t){var e=this,r=ee(e,t,!1);return r!==e&&(r.get=function(r,n){return r=f(this,r),r>=0?e.get(r+t,n):n}),E(this,r)},skipWhile:function(t,e){return E(this,re(this,t,e,!1))},sortBy:function(t,e){e=e||P;var r=this;return E(this,new Wr(this.entrySeq().toArray().sort(function(n,i){return e(t(n[1],n[0],r),t(i[1],i[0],r))||n[0]-i[0]})).fromEntrySeq().valueSeq())},take:function(t){var e=this,r=$(e,t);return r!==e&&(r.get=function(r,n){return r=f(this,r),r>=0&&t>r?e.get(r,n):n}),E(this,r)}},{},wr),kr.prototype[Ir]=!0,wr.isIterable=M,wr.isKeyed=k,wr.isIndexed=C,wr.isAssociative=O,wr.Keyed=xr,wr.Set=Mr,wr.Indexed=kr,wr.Iterator=mr;var Cr=function(t){return 0===arguments.length?J():(M(t)?t:T(t,!1)).toSeq()},Or=Cr;Fe.createClass(Cr,{toSeq:function(){return this},toString:function(){return this.__toString("Seq {","}")},cacheResult:function(){return!this._cache&&this.__iterateUncached&&(h(this.size),this._cache=this.entrySeq().toArray(),void 0===this.size&&(this.size=this._cache.length)),this},__iterate:function(t,e){return Y(this,t,e,!0)},__iterator:function(t,e){return X(this,t,e,!0)}},{of:function(){return Or(arguments)}},wr);var Er=function(t){return 0===arguments.length?J().toKeyedSeq():(M(t)||(t=T(t,!1)),k(t)?t.toSeq():t.fromEntrySeq())},Ar=Er;Fe.createClass(Er,{toKeyedSeq:function(){return this},toSeq:function(){return this}},{empty:function(){return Ar()},of:function(){return Ar(arguments)}},Cr),B(Er,xr.prototype);var 
Rr=function(t){return 0===arguments.length?J().toSetSeq():(M(t)?t:T(t,!1)).toSetSeq()},jr=Rr;Fe.createClass(Rr,{toSetSeq:function(){return this}},{empty:function(){return jr()},of:function(){return jr(arguments)}},Cr),B(Rr,Mr.prototype);var Ur=function(t){return 0===arguments.length?J():(M(t)?t:T(t,!1)).toIndexedSeq()},Kr=Ur;Fe.createClass(Ur,{toIndexedSeq:function(){return this},toString:function(){return this.__toString("Seq [","]")},__iterate:function(t,e){return Y(this,t,e,!1)},__iterator:function(t,e){return X(this,t,e,!1)}},{empty:function(){return Kr()},of:function(){return Kr(arguments) <add>},skip:function(t){var e=this,r=ee(e,t,!1);return L(e)&&r!==e&&(r.get=function(r,n){return r=f(this,r),r>=0?e.get(r+t,n):n}),E(this,r)},skipWhile:function(t,e){return E(this,re(this,t,e,!1))},sortBy:function(t,e){var r=this;return e=e||P,E(this,new Wr(this.entrySeq().toArray().sort(function(n,i){return e(t(n[1],n[0],r),t(i[1],i[0],r))||n[0]-i[0]})).fromEntrySeq().valueSeq())},take:function(t){var e=this,r=$(e,t);return L(e)&&r!==e&&(r.get=function(r,n){return r=f(this,r),r>=0&&t>r?e.get(r,n):n}),E(this,r)}},{},wr),kr.prototype[Ir]=!0,wr.isIterable=M,wr.isKeyed=k,wr.isIndexed=C,wr.isAssociative=O,wr.Keyed=xr,wr.Set=Mr,wr.Indexed=kr,wr.Iterator=mr;var Cr=function(t){return 0===arguments.length?J():(M(t)?t:T(t,!1)).toSeq()},Or=Cr;Fe.createClass(Cr,{toSeq:function(){return this},toString:function(){return this.__toString("Seq {","}")},cacheResult:function(){return!this._cache&&this.__iterateUncached&&(h(this.size),this._cache=this.entrySeq().toArray(),void 0===this.size&&(this.size=this._cache.length)),this},__iterate:function(t,e){return Y(this,t,e,!0)},__iterator:function(t,e){return X(this,t,e,!0)}},{of:function(){return Or(arguments)}},wr);var Er=function(t){return 0===arguments.length?J().toKeyedSeq():(M(t)||(t=T(t,!1)),k(t)?t.toSeq():t.fromEntrySeq())},Ar=Er;Fe.createClass(Er,{toKeyedSeq:function(){return this},toSeq:function(){return this}},{empty:function(){return Ar()},of:function(){return Ar(arguments)}},Cr),B(Er,xr.prototype);var Rr=function(t){return 0===arguments.length?J().toSetSeq():(M(t)?t:T(t,!1)).toSetSeq()},jr=Rr;Fe.createClass(Rr,{toSetSeq:function(){return this}},{empty:function(){return jr()},of:function(){return jr(arguments)}},Cr),B(Rr,Mr.prototype);var Ur=function(t){return 0===arguments.length?J():(M(t)?t:T(t,!1)).toIndexedSeq()},Kr=Ur;Fe.createClass(Ur,{toIndexedSeq:function(){return this},toString:function(){return this.__toString("Seq [","]")},__iterate:function(t,e){return Y(this,t,e,!1)},__iterator:function(t,e){return X(this,t,e,!1)}},{empty:function(){return Kr()},of:function(){return Kr(arguments) <ide> }},Cr),B(Ur,kr.prototype),Cr.empty=J,Cr.isLazy=L,Cr.Keyed=Er,Cr.Set=Rr,Cr.Indexed=Ur;var Pr="";Cr.prototype[Pr]=!0;var Wr=function(t){this._array=t,this.size=t.length};Fe.createClass(Wr,{get:function(t,e){return this.has(t)?this._array[f(this,t)]:e},__iterate:function(t,e){for(var r=this._array,n=r.length-1,i=0;n>=i;i++)if(t(r[e?n-i:i],i,this)===!1)return i+1;return i},__iterator:function(t,e){var r=this._array,n=r.length-1,i=0;return new mr(function(){return i>n?q():S(t,i,r[e?n-i++:i++])})}},{},Ur);var Br=function(t){var e=Object.keys(t);this._object=t,this._keys=e,this.size=e.length};Fe.createClass(Br,{get:function(t,e){return void 0===e||this.has(t)?this._object[t]:e},has:function(t){return this._object.hasOwnProperty(t)},__iterate:function(t,e){for(var r=this._object,n=this._keys,i=n.length-1,s=0;i>=s;s++){var u=n[e?i-s:s];if(t(r[u],u,this)===!1)return 
s+1}return s},__iterator:function(t,e){var r=this._object,n=this._keys,i=n.length-1,s=0;return new mr(function(){var u=n[e?i-s:s];return s++>i?q():S(t,u,r[u])})}},{},Er);var Lr=function(t){this._iterable=t,this.size=t.length||t.size};Fe.createClass(Lr,{__iterateUncached:function(t,e){if(e)return this.cacheResult().__iterate(t,e);var r=this._iterable,n=x(r),i=0;if(b(n))for(var s;!(s=n.next()).done&&t(s.value,i++,this)!==!1;);return i},__iteratorUncached:function(t,e){if(e)return this.cacheResult().__iterator(t,e);var r=this._iterable,n=x(r);if(!b(n))return new mr(q);var i=0;return new mr(function(){var e=n.next();return e.done?e:S(t,i++,e.value)})}},{},Ur);var Jr=function(t){this._iterator=t,this._iteratorCache=[]};Fe.createClass(Jr,{__iterateUncached:function(t,e){if(e)return this.cacheResult().__iterate(t,e);for(var r=this._iterator,n=this._iteratorCache,i=0;n.length>i;)if(t(n[i],i++,this)===!1)return i;for(var s;!(s=r.next()).done;){var u=s.value;if(n[i]=u,t(u,i++,this)===!1)break}return i},__iteratorUncached:function(t,e){if(e)return this.cacheResult().__iterator(t,e);var r=this._iterator,n=this._iteratorCache,i=0;return new mr(function(){if(i>=n.length){var e=r.next(); <ide> if(e.done)return e;n[i]=e.value}return S(t,i,n[i++])})}},{},Ur);var Tr,Vr=function(t){this._seq=t,this.size=t.size};Fe.createClass(Vr,{contains:function(t){return this._seq.contains(t)},cacheResult:function(){return this._seq.cacheResult(),this.size=this._seq.size,this},__iterate:function(t,e){var r=this,n=0;return this._seq.__iterate(function(e){return t(e,n++,r)},e)},__iterator:function(t,e){var r=this._seq.__iterator(vr,e),n=0;return new mr(function(){var e=r.next();return e.done?e:S(t,n++,e.value,e)})}},{},Ur);var Nr=function(t,e){this._seq=t,this._useKeys=e,this.size=t.size};Fe.createClass(Nr,{get:function(t,e){return this._seq.get(t,e)},has:function(t){return this._seq.has(t)},valueSeq:function(){return this._seq.valueSeq()},reverse:function(){var t=this,e=G(this,!0);return this._useKeys||(e.valueSeq=function(){return t._seq.toSeq().reverse()}),e},map:function(t,e){var r=this,n=F(this,t,e);return this._useKeys||(n.valueSeq=function(){return r._seq.toSeq().map(t,e)}),n},cacheResult:function(){return this._seq.cacheResult(),this.size=this._seq.size,this},__iterate:function(t,e){var r,n=this;return this._seq.__iterate(this._useKeys?function(e,r){return t(e,r,n)}:(r=e?ae(this):0,function(i){return t(i,e?--r:r++,n)}),e)},__iterator:function(t,e){if(this._useKeys)return this._seq.__iterator(t,e);var r=this._seq.__iterator(vr,e),n=e?ae(this):0;return new mr(function(){var i=r.next();return i.done?i:S(t,e?--n:n++,i.value,i)})}},{},Er);var Yr=function(t){this._seq=t,this.size=t.size};Fe.createClass(Yr,{has:function(t){return this._seq.contains(t)},cacheResult:function(){return this._seq.cacheResult(),this.size=this._seq.size,this},__iterate:function(t,e){var r=this;return this._seq.__iterate(function(e){return t(e,e,r)},e)},__iterator:function(t,e){var r=this._seq.__iterator(vr,e);return new mr(function(){var e=r.next();return e.done?e:S(t,e.value,e.value,e)})}},{},Rr);var Xr=function(t){this._seq=t,this.size=t.size};Fe.createClass(Xr,{entrySeq:function(){return this._seq.toSeq()},cacheResult:function(){return this._seq.cacheResult(),this.size=this._seq.size,this <ide> },__iterate:function(t,e){var r=this;return this._seq.__iterate(function(e){return e?(ue(e),t(e[1],e[0],r)):void 0},e)},__iterator:function(t,e){var r=this._seq.__iterator(vr,e);return new mr(function(){for(;;){var e=r.next();if(e.done)return e;var 
n=e.value;if(n)return ue(n),t===lr?e:S(t,n[0],n[1],e)}})}},{},Er);var Zr=function(){throw TypeError("Abstract")};Fe.createClass(Zr,{},{},wr);var Fr=function(){Fe.defaultSuperCall(this,Gr.prototype,arguments)},Gr=Fr;Fe.createClass(Fr,{},{},Zr),B(Fr,xr.prototype);var Hr=function(){Fe.defaultSuperCall(this,Qr.prototype,arguments)},Qr=Hr;Fe.createClass(Hr,{},{},Zr),B(Hr,Mr.prototype);var $r=function(){Fe.defaultSuperCall(this,tn.prototype,arguments)},tn=$r;Fe.createClass($r,{},{},Zr),B($r,kr.prototype),Zr.Keyed=Fr,Zr.Set=Hr,Zr.Indexed=$r;var en=function(t){return 0===arguments.length?rn.empty():t&&t.constructor===rn?t:rn.empty().merge(t)},rn=en;Fe.createClass(en,{toString:function(){return this.__toString("Map {","}")},get:function(t,e){return this._root?this._root.get(0,y(t),t,e):e},set:function(t,e){return _e(this,t,e)},setIn:function(t,e){return i(t.length>0,"Requires non-empty key path."),this.updateIn(t,function(){return e})},remove:function(t){return _e(this,t,tr)},removeIn:function(t){return i(t.length>0,"Requires non-empty key path."),this.updateIn(t,function(){return tr})},update:function(t,e,r){return 1===arguments.length?t(this):this.updateIn([t],e,r)},updateIn:function(t,e,r){return r||(r=e,e=void 0),0===t.length?r(this):ze(this,t,e,r,0)},clear:function(){return 0===this.size?this:this.__ownerID?(this.size=0,this._root=null,this.__hash=void 0,this.__altered=!0,this):rn.empty()},merge:function(){return me(this,void 0,arguments)},mergeWith:function(t){for(var e=[],r=1;arguments.length>r;r++)e[r-1]=arguments[r];return me(this,t,e)},mergeDeep:function(){return me(this,ge(void 0),arguments)},mergeDeepWith:function(t){for(var e=[],r=1;arguments.length>r;r++)e[r-1]=arguments[r];return me(this,ge(t),e)},cursor:function(t,e){var r=0===arguments.length||"function"==typeof t&&(e=t)?[]:Array.isArray(t)?t:[t]; <ide><path>src/Iterable.js <ide> class Iterable { <ide> return this.get(searchKey, NOT_SET) !== NOT_SET; <ide> } <ide> <del> isSubset(seq) { <del> seq = typeof seq.contains === 'function' ? seq : Iterable(seq); <del> return this.every(value => seq.contains(value)); <add> isSubset(iter) { <add> iter = typeof iter.contains === 'function' ? iter : Iterable(iter); <add> return this.every(value => iter.contains(value)); <ide> } <ide> <del> isSuperset(seq) { <del> return seq.isSubset(this); <add> isSuperset(iter) { <add> return iter.isSubset(this); <ide> } <ide> <ide> keySeq() { <ide> class Iterable { <ide> <ide> maxBy(mapper, comparator) { <ide> comparator = comparator || defaultComparator; <del> var seq = this; <del> var maxEntry = seq.entrySeq().reduce((max, next) => { <add> var maxEntry = this.entrySeq().reduce((max, next) => { <ide> return comparator( <del> mapper(next[1], next[0], seq), <del> mapper(max[1], max[0], seq) <add> mapper(next[1], next[0], this), <add> mapper(max[1], max[0], this) <ide> ) > 0 ? next : max <ide> }); <ide> return maxEntry && maxEntry[1]; <ide> class Iterable { <ide> <ide> minBy(mapper, comparator) { <ide> comparator = comparator || defaultComparator; <del> var seq = this; <del> var minEntry = seq.entrySeq().reduce((min, next) => { <add> var minEntry = this.entrySeq().reduce((min, next) => { <ide> return comparator( <del> mapper(next[1], next[0], seq), <del> mapper(min[1], min[0], seq) <add> mapper(next[1], next[0], this), <add> mapper(min[1], min[0], this) <ide> ) < 0 ? 
next : min <ide> }); <ide> return minEntry && minEntry[1]; <ide> class Iterable { <ide> <ide> sortBy(mapper, comparator) { <ide> comparator = comparator || defaultComparator; <del> var seq = this; <del> return reify(this, new ArraySequence(seq.entrySeq().entrySeq().toArray().sort( <add> return reify(this, new ArraySequence(this.entrySeq().entrySeq().toArray().sort( <ide> (a, b) => comparator( <del> mapper(a[1][1], a[1][0], seq), <del> mapper(b[1][1], b[1][0], seq) <add> mapper(a[1][1], a[1][0], this), <add> mapper(b[1][1], b[1][0], this) <ide> ) || a[0] - b[0] <ide> )).fromEntrySeq().valueSeq().fromEntrySeq()); <ide> } <ide> class IndexedIterable extends Iterable { <ide> } <ide> <ide> <del> // ### More sequential methods <add> // ### More collection methods <ide> <ide> findLastIndex(predicate, context) { <ide> return this.toKeyedSeq().reverse().findIndex(predicate, context); <ide> class IndexedIterable extends Iterable { <ide> } <ide> <ide> skip(amount) { <del> var seq = this; <del> var skipSeq = skipFactory(seq, amount, false); <del> if (skipSeq !== seq) { <add> var iter = this; <add> var skipSeq = skipFactory(iter, amount, false); <add> if (isLazy(iter) && skipSeq !== iter) { <ide> skipSeq.get = function (index, notSetValue) { <ide> index = wrapIndex(this, index); <del> return index >= 0 ? seq.get(index + amount, notSetValue) : notSetValue; <add> return index >= 0 ? iter.get(index + amount, notSetValue) : notSetValue; <ide> } <ide> } <ide> return reify(this, skipSeq); <ide> class IndexedIterable extends Iterable { <ide> <ide> sortBy(mapper, comparator) { <ide> comparator = comparator || defaultComparator; <del> var seq = this; <ide> return reify(this, new ArraySequence(this.entrySeq().toArray().sort( <ide> (a, b) => comparator( <del> mapper(a[1], a[0], seq), <del> mapper(b[1], b[0], seq) <add> mapper(a[1], a[0], this), <add> mapper(b[1], b[0], this) <ide> ) || a[0] - b[0] <ide> )).fromEntrySeq().valueSeq()); <ide> } <ide> <ide> take(amount) { <del> var seq = this; <del> var takeSeq = takeFactory(seq, amount); <del> if (takeSeq !== seq) { <add> var iter = this; <add> var takeSeq = takeFactory(iter, amount); <add> if (isLazy(iter) && takeSeq !== iter) { <ide> takeSeq.get = function (index, notSetValue) { <ide> index = wrapIndex(this, index); <del> return index >= 0 && index < amount ? seq.get(index, notSetValue) : notSetValue; <add> return index >= 0 && index < amount ? iter.get(index, notSetValue) : notSetValue; <ide> } <ide> } <ide> return reify(this, takeSeq); <ide> Iterable.Iterator = Iterator; <ide> <ide> // #pragma Helper functions <ide> <del>function reify(kind, seq) { <del> return isLazy(kind) ? seq : kind.constructor(seq); <add>function reify(iter, seq) { <add> return isLazy(iter) ? seq : iter.constructor(seq); <ide> } <ide> <ide> function valueMapper(v) {
3
Ruby
Ruby
add more deprecations, disable some existing ones
2cbce1fbf0a7a361f2be8b3545998f1a36ab3588
<ide><path>Library/Homebrew/compat/ARGV.rb <ide> module HomebrewArgvExtension <ide> def build_32_bit? <del> odeprecated "ARGV.build_32_bit?" <del> include? "--32-bit" <add> odisabled "ARGV.build_32_bit?" <ide> end <ide> end <ide><path>Library/Homebrew/compat/ENV/shared.rb <ide> def j1 <ide> end <ide> <ide> def java_cache <del> # odeprecated "ENV.java_cache" <add> odeprecated "ENV.java_cache" <ide> end <ide> end <ide><path>Library/Homebrew/compat/build_options.rb <ide> class BuildOptions <ide> def build_32_bit? <del> odeprecated "build.build_32_bit?" <del> include?("32-bit") && option_defined?("32-bit") <add> odisabled "build.build_32_bit?" <ide> end <ide> <ide> def build_bottle? <del> odeprecated "build.build_bottle?", "build.bottle?" <del> bottle? <add> odisabled "build.build_bottle?", "build.bottle?" <ide> end <ide> end <ide><path>Library/Homebrew/compat/dependency_collector.rb <ide> class DependencyCollector <ide> <ide> def parse_string_spec(spec, tags) <ide> if (tag = tags.first) && LANGUAGE_MODULES.include?(tag) <add> odeprecated "'depends_on :#{tag}'" <ide> LanguageModuleRequirement.new(tag, spec, tags[1]) <ide> else <ide> _parse_string_spec(spec, tags) <ide> def parse_symbol_spec(spec, tags) <ide> when :clt <ide> odeprecated "'depends_on :clt'" <ide> when :tex <del> # odeprecated "'depends_on :tex'" <add> odeprecated "'depends_on :tex'" <ide> TeXRequirement.new(tags) <ide> when :autoconf, :automake, :bsdmake, :libtool <ide> output_deprecation(spec, tags) <ide> def parse_symbol_spec(spec, tags) <ide> output_deprecation("libtool", tags) <ide> Dependency.new("libtool", tags) <ide> when :apr <del> # output_deprecation(spec, tags, "apr-util") <add> output_deprecation(spec, tags, "apr-util") <ide> Dependency.new("apr-util", tags) <ide> when :fortran <ide> # output_deprecation(spec, tags, "gcc") <ide><path>Library/Homebrew/compat/fails_with_llvm.rb <ide> class Formula <ide> def fails_with_llvm(_msg = nil, _data = nil) <del> odeprecated "Formula#fails_with_llvm in install" <add> odisabled "Formula#fails_with_llvm in install" <ide> end <ide> <ide> def self.fails_with_llvm(_msg = nil, _data = {}) <del> odeprecated "Formula.fails_with_llvm" <add> odisabled "Formula.fails_with_llvm" <ide> end <ide> end <ide><path>Library/Homebrew/compat/formula.rb <ide> module FormulaCompat <ide> def x11_installed? <del> odeprecated "Formula#x11_installed?", "MacOS::X11.installed?" <del> MacOS::X11.installed? <add> odisabled "Formula#x11_installed?", "MacOS::X11.installed?" <ide> end <ide> <ide> def snow_leopard_64? <del> odeprecated "Formula#snow_leopard_64?", "MacOS.prefer_64_bit?" <del> MacOS.prefer_64_bit? <add> odisabled "Formula#snow_leopard_64?", "MacOS.prefer_64_bit?" 
<ide> end <ide> end <ide> <ide> class Formula <ide> extend FormulaCompat <ide> <ide> def std_cmake_parameters <del> odeprecated "Formula#std_cmake_parameters", "Formula#std_cmake_args" <del> "-DCMAKE_INSTALL_PREFIX='#{prefix}' -DCMAKE_BUILD_TYPE=None -DCMAKE_FIND_FRAMEWORK=LAST -Wno-dev" <add> odisabled "Formula#std_cmake_parameters", "Formula#std_cmake_args" <ide> end <ide> <del> def cxxstdlib_check(check_type) <del> odeprecated "Formula#cxxstdlib_check in install", <del> "Formula.cxxstdlib_check outside install" <del> self.class.cxxstdlib_check check_type <add> def cxxstdlib_check(_) <add> odisabled "Formula#cxxstdlib_check in install", <add> "Formula.cxxstdlib_check outside install" <ide> end <ide> <ide> def self.bottle_sha1(*) <del> odeprecated "Formula.bottle_sha1" <add> odisabled "Formula.bottle_sha1" <ide> end <ide> <ide> def self.all <del> odeprecated "Formula.all", "Formula.map" <del> map <add> odisabled "Formula.all", "Formula.map" <ide> end <ide> <del> def self.canonical_name(name) <del> odeprecated "Formula.canonical_name", "Formulary.canonical_name" <del> Formulary.canonical_name(name) <add> def self.canonical_name(_) <add> odisabled "Formula.canonical_name", "Formulary.canonical_name" <ide> end <ide> <del> def self.class_s(name) <del> odeprecated "Formula.class_s", "Formulary.class_s" <del> Formulary.class_s(name) <add> def self.class_s(_) <add> odisabled "Formula.class_s", "Formulary.class_s" <ide> end <ide> <del> def self.factory(name) <del> odeprecated "Formula.factory", "Formulary.factory" <del> Formulary.factory(name) <add> def self.factory(_) <add> odisabled "Formula.factory", "Formulary.factory" <ide> end <ide> <ide> def self.require_universal_deps <del> odeprecated "Formula.require_universal_deps" <del> define_method(:require_universal_deps?) { true } <add> odisabled "Formula.require_universal_deps" <ide> end <ide> <del> def self.path(name) <del> odeprecated "Formula.path", "Formulary.core_path" <del> Formulary.core_path(name) <add> def self.path(_) <add> odisabled "Formula.path", "Formulary.core_path" <ide> end <ide> <ide> DATA = :DATA <ide> def patches <ide> {} <ide> end <ide> <del> def python(_options = {}, &_block) <del> odeprecated "Formula#python" <del> yield if block_given? <del> PythonRequirement.new <add> def python(_options = {}, &_) <add> odisabled "Formula#python" <ide> end <ide> alias python2 python <ide> alias python3 python <ide> <ide> def startup_plist <del> odeprecated "Formula#startup_plist", "Formula#plist" <add> odisabled "Formula#startup_plist", "Formula#plist" <ide> end <ide> <ide> def rake(*args) <del> # odeprecated "FileUtils#rake", "system \"rake\"" <add> odeprecated "FileUtils#rake", "system \"rake\"" <ide> system "rake", *args <ide> end <ide> end <ide><path>Library/Homebrew/compat/formula_specialties.rb <ide> class ScriptFileFormula < Formula <ide> def install <del> odeprecated "ScriptFileFormula#install", "Formula#install" <del> bin.install Dir["*"] <add> odisabled "ScriptFileFormula#install", "Formula#install" <ide> end <ide> end <ide> <ide> class GithubGistFormula < ScriptFileFormula <del> def self.url(val) <del> odeprecated "GithubGistFormula.url", "Formula.url" <del> super <del> version File.basename(File.dirname(val))[0, 6] <add> def self.url(_val) <add> odisabled "GithubGistFormula.url", "Formula.url" <ide> end <ide> end <ide> <del># This formula serves as the base class for several very similar <del># formulae for Amazon Web Services related tools. 
<ide> class AmazonWebServicesFormula < Formula <del> # Use this method to perform a standard install for Java-based tools, <del> # keeping the .jars out of HOMEBREW_PREFIX/lib <ide> def install <del> odeprecated "AmazonWebServicesFormula#install", "Formula#install" <del> <del> rm Dir["bin/*.cmd"] # Remove Windows versions <del> libexec.install Dir["*"] <del> bin.install_symlink Dir["#{libexec}/bin/*"] - ["#{libexec}/bin/service"] <add> odisabled "AmazonWebServicesFormula#install", "Formula#install" <ide> end <ide> alias standard_install install <ide> <ide> # Use this method to generate standard caveats. <del> def standard_instructions(home_name, home_value = libexec) <del> odeprecated "AmazonWebServicesFormula#standard_instructions", "Formula#caveats" <del> <del> <<~EOS <del> Before you can use these tools you must export some variables to your $SHELL. <del> <del> To export the needed variables, add them to your dotfiles. <del> * On Bash, add them to `~/.bash_profile`. <del> * On Zsh, add them to `~/.zprofile` instead. <del> <del> export JAVA_HOME="$(/usr/libexec/java_home)" <del> export AWS_ACCESS_KEY="<Your AWS Access ID>" <del> export AWS_SECRET_KEY="<Your AWS Secret Key>" <del> export #{home_name}="#{home_value}" <del> EOS <add> def standard_instructions(_, _) <add> odisabled "AmazonWebServicesFormula#standard_instructions", "Formula#caveats" <ide> end <ide> end <ide><path>Library/Homebrew/compat/global.rb <ide> module Homebrew <ide> <ide> def method_missing(method, *args, &block) <ide> if instance_methods.include?(method) <del> odeprecated "#{self}##{method}", "'module_function' or 'def self.#{method}' to convert it to a class method" <del> return instance_method(method).bind(self).call(*args, &block) <add> odisabled "#{self}##{method}", "'module_function' or 'def self.#{method}' to convert it to a class method" <ide> end <ide> super <ide> end <ide><path>Library/Homebrew/compat/gpg.rb <ide> module Gpg <ide> module_function <ide> <ide> def executable <add> odeprecated "Gpg.executable", 'which "gpg"' <ide> which "gpg" <ide> end <ide> <ide> def available? <add> odeprecated "Gpg.available?", 'which "gpg"' <ide> File.executable?(executable.to_s) <ide> end <ide> <del> def create_test_key(path) <del> odie "No GPG present to test against!" unless available? <del> <del> (path/"batch.gpg").write <<~EOS <del> Key-Type: RSA <del> Key-Length: 2048 <del> Subkey-Type: RSA <del> Subkey-Length: 2048 <del> Name-Real: Testing <del> Name-Email: testing@foo.bar <del> Expire-Date: 1d <del> %no-protection <del> %commit <del> EOS <del> system executable, "--batch", "--gen-key", "batch.gpg" <add> def create_test_key(_) <add> odeprecated "Gpg.create_test_key" <ide> end <ide> <ide> def cleanup_test_processes! <del> odie "No GPG present to test against!" unless available? <del> <del> gpgconf = Pathname.new(executable).parent/"gpgconf" <del> <del> system gpgconf, "--kill", "gpg-agent" <del> system gpgconf, "--homedir", "keyrings/live", "--kill", <del> "gpg-agent" <add> odeprecated "Gpg.cleanup_test_processes!" <ide> end <ide> <del> def test(path) <del> create_test_key(path) <del> begin <del> yield <del> ensure <del> cleanup_test_processes! <del> end <add> def test(_) <add> odeprecated "Gpg.test" <ide> end <ide> end <ide><path>Library/Homebrew/compat/hardware.rb <ide> module Hardware <ide> class << self <ide> def is_32_bit? <del> odeprecated "Hardware.is_32_bit?", "Hardware::CPU.is_32_bit?" <del> !CPU.is_64_bit? <add> odisabled "Hardware.is_32_bit?", "Hardware::CPU.is_32_bit?" <ide> end <ide> <ide> def is_64_bit? 
<del> odeprecated "Hardware.is_64_bit?", "Hardware::CPU.is_64_bit?" <del> CPU.is_64_bit? <add> odisabled "Hardware.is_64_bit?", "Hardware::CPU.is_64_bit?" <ide> end <ide> <ide> def bits <del> odeprecated "Hardware.bits", "Hardware::CPU.bits" <del> Hardware::CPU.bits <add> odisabled "Hardware.bits", "Hardware::CPU.bits" <ide> end <ide> <ide> def cpu_type <del> odeprecated "Hardware.cpu_type", "Hardware::CPU.type" <del> Hardware::CPU.type <add> odisabled "Hardware.cpu_type", "Hardware::CPU.type" <ide> end <ide> <ide> def cpu_family <del> odeprecated "Hardware.cpu_family", "Hardware::CPU.family" <del> Hardware::CPU.family <add> odisabled "Hardware.cpu_family", "Hardware::CPU.family" <ide> end <ide> <ide> def intel_family <del> odeprecated "Hardware.intel_family", "Hardware::CPU.family" <del> Hardware::CPU.family <add> odisabled "Hardware.intel_family", "Hardware::CPU.family" <ide> end <ide> <ide> def ppc_family <del> odeprecated "Hardware.ppc_family", "Hardware::CPU.family" <del> Hardware::CPU.family <add> odisabled "Hardware.ppc_family", "Hardware::CPU.family" <ide> end <ide> <ide> def processor_count <del> odeprecated "Hardware.processor_count", "Hardware::CPU.cores" <del> Hardware::CPU.cores <add> odisabled "Hardware.processor_count", "Hardware::CPU.cores" <ide> end <ide> end <ide> end <ide><path>Library/Homebrew/compat/json.rb <ide> module Utils <ide> module JSON <ide> module_function <ide> <del> Error = Class.new(StandardError) <del> <del> def load(str) <del> odeprecated "Utils::JSON.load", "JSON.parse" <del> ::JSON.parse(str) <del> rescue ::JSON::ParserError => e <del> raise Error, e.message <add> def load(_) <add> odisabled "Utils::JSON.load", "JSON.parse" <ide> end <ide> <del> def dump(obj) <del> odeprecated "Utils::JSON.dump", "JSON.generate" <del> ::JSON.generate(obj) <add> def dump(_) <add> odisabled "Utils::JSON.dump", "JSON.generate" <ide> end <ide> <del> def stringify_keys(obj) <del> odeprecated "Utils::JSON.stringify_keys" <del> case obj <del> when Array <del> obj.map { |val| stringify_keys(val) } <del> when Hash <del> obj.inject({}) do |result, (key, val)| <del> key = key.respond_to?(:to_s) ? key.to_s : key <del> val = stringify_keys(val) <del> result.merge!(key => val) <del> end <del> else <del> obj <del> end <add> def stringify_keys(_) <add> odisabled "Utils::JSON.stringify_keys" <ide> end <ide> end <ide> end <ide><path>Library/Homebrew/compat/keg.rb <ide> class Keg <ide> def fname <del> odeprecated "Keg#fname", "Keg#name" <del> name <add> odisabled "Keg#fname", "Keg#name" <ide> end <ide> end <ide><path>Library/Homebrew/compat/macos.rb <ide> module Mac <ide> module_function <ide> <ide> def xcode_folder <del> odeprecated "MacOS.xcode_folder", "MacOS::Xcode.folder" <del> Xcode.folder <add> odisabled "MacOS.xcode_folder", "MacOS::Xcode.folder" <ide> end <ide> <ide> def xcode_prefix <del> odeprecated "MacOS.xcode_prefix", "MacOS::Xcode.prefix" <del> Xcode.prefix <add> odisabled "MacOS.xcode_prefix", "MacOS::Xcode.prefix" <ide> end <ide> <ide> def xcode_installed? <del> odeprecated "MacOS.xcode_installed?", "MacOS::Xcode.installed?" <del> Xcode.installed? <add> odisabled "MacOS.xcode_installed?", "MacOS::Xcode.installed?" <ide> end <ide> <ide> def xcode_version <del> odeprecated "MacOS.xcode_version", "MacOS::Xcode.version" <del> Xcode.version <add> odisabled "MacOS.xcode_version", "MacOS::Xcode.version" <ide> end <ide> <ide> def clt_installed? <del> odeprecated "MacOS.clt_installed?", "MacOS::CLT.installed?" <del> CLT.installed? 
<add> odisabled "MacOS.clt_installed?", "MacOS::CLT.installed?" <ide> end <ide> <ide> def clt_version? <del> odeprecated "MacOS.clt_version?", "MacOS::CLT.version" <del> CLT.version <add> odisabled "MacOS.clt_version?", "MacOS::CLT.version" <ide> end <ide> <ide> def x11_installed? <del> odeprecated "MacOS.x11_installed?", "MacOS::X11.installed?" <del> X11.installed? <add> odisabled "MacOS.x11_installed?", "MacOS::X11.installed?" <ide> end <ide> <ide> def x11_prefix <del> odeprecated "MacOS.x11_prefix", "MacOS::X11.prefix" <del> X11.prefix <add> odisabled "MacOS.x11_prefix", "MacOS::X11.prefix" <ide> end <ide> <ide> def leopard? <del> odeprecated "MacOS.leopard?", "'MacOS.version == :leopard'" <del> version == :leopard <add> odisabled "MacOS.leopard?", "'MacOS.version == :leopard'" <ide> end <ide> <ide> def snow_leopard? <del> odeprecated "MacOS.snow_leopard?", "'MacOS.version >= :snow_leopard'" <del> version >= :snow_leopard <add> odisabled "MacOS.snow_leopard?", "'MacOS.version >= :snow_leopard'" <ide> end <ide> <ide> def snow_leopard_or_newer? <del> odeprecated "MacOS.snow_leopard_or_newer?", "'MacOS.version >= :snow_leopard'" <del> version >= :snow_leopard <add> odisabled "MacOS.snow_leopard_or_newer?", "'MacOS.version >= :snow_leopard'" <ide> end <ide> <ide> def lion? <del> odeprecated "MacOS.lion?", "'MacOS.version >= :lion'" <del> version >= :lion <add> odisabled "MacOS.lion?", "'MacOS.version >= :lion'" <ide> end <ide> <ide> def lion_or_newer? <del> odeprecated "MacOS.lion_or_newer?", "'MacOS.version >= :lion'" <del> version >= :lion <add> odisabled "MacOS.lion_or_newer?", "'MacOS.version >= :lion'" <ide> end <ide> <ide> def mountain_lion? <del> odeprecated "MacOS.mountain_lion?", "'MacOS.version >= :mountain_lion'" <del> version >= :mountain_lion <add> odisabled "MacOS.mountain_lion?", "'MacOS.version >= :mountain_lion'" <ide> end <ide> <ide> def mountain_lion_or_newer? <del> odeprecated "MacOS.mountain_lion_or_newer?", "'MacOS.version >= :mountain_lion'" <del> version >= :mountain_lion <add> odisabled "MacOS.mountain_lion_or_newer?", "'MacOS.version >= :mountain_lion'" <ide> end <ide> <ide> def macports_or_fink_installed? <del> odeprecated "MacOS.macports_or_fink_installed?", "!MacOS.macports_or_fink.empty?" <del> !macports_or_fink.empty? <add> odisabled "MacOS.macports_or_fink_installed?", "!MacOS.macports_or_fink.empty?" 
<ide> end <ide> <del> def locate(tool) <del> odeprecated "MacOS.locate", "DevelopmentTools.locate" <del> DevelopmentTools.locate(tool) <add> def locate(_) <add> odisabled "MacOS.locate", "DevelopmentTools.locate" <ide> end <ide> <ide> def default_cc <del> odeprecated "MacOS.default_cc", "DevelopmentTools.default_cc" <del> DevelopmentTools.default_cc <add> odisabled "MacOS.default_cc", "DevelopmentTools.default_cc" <ide> end <ide> <ide> def default_compiler <del> odeprecated "MacOS.default_compiler", "DevelopmentTools.default_compiler" <del> DevelopmentTools.default_compiler <add> odisabled "MacOS.default_compiler", "DevelopmentTools.default_compiler" <ide> end <ide> <ide> def gcc_40_build_version <del> odeprecated "MacOS.gcc_40_build_version", "DevelopmentTools.gcc_4_0_build_version" <del> DevelopmentTools.gcc_4_0_build_version <add> odisabled "MacOS.gcc_40_build_version", "DevelopmentTools.gcc_4_0_build_version" <ide> end <ide> <ide> def gcc_4_0_build_version <del> odeprecated "MacOS.gcc_4_0_build_version", "DevelopmentTools.gcc_4_0_build_version" <del> DevelopmentTools.gcc_4_0_build_version <add> odisabled "MacOS.gcc_4_0_build_version", "DevelopmentTools.gcc_4_0_build_version" <ide> end <ide> <ide> def gcc_42_build_version <del> odeprecated "MacOS.gcc_42_build_version", "DevelopmentTools.gcc_4_2_build_version" <del> DevelopmentTools.gcc_4_2_build_version <add> odisabled "MacOS.gcc_42_build_version", "DevelopmentTools.gcc_4_2_build_version" <ide> end <ide> <ide> def gcc_build_version <del> odeprecated "MacOS.gcc_build_version", "DevelopmentTools.gcc_4_2_build_version" <del> DevelopmentTools.gcc_4_2_build_version <add> odisabled "MacOS.gcc_build_version", "DevelopmentTools.gcc_4_2_build_version" <ide> end <ide> <ide> def llvm_build_version <del> odeprecated "MacOS.llvm_build_version" <add> odisabled "MacOS.llvm_build_version" <ide> end <ide> <ide> def clang_version <del> odeprecated "MacOS.clang_version", "DevelopmentTools.clang_version" <del> DevelopmentTools.clang_version <add> odisabled "MacOS.clang_version", "DevelopmentTools.clang_version" <ide> end <ide> <ide> def clang_build_version <del> odeprecated "MacOS.clang_build_version", "DevelopmentTools.clang_build_version" <del> DevelopmentTools.clang_build_version <add> odisabled "MacOS.clang_build_version", "DevelopmentTools.clang_build_version" <ide> end <ide> <ide> def has_apple_developer_tools? <del> odeprecated "MacOS.has_apple_developer_tools?", "DevelopmentTools.installed?" <del> DevelopmentTools.installed? <add> odisabled "MacOS.has_apple_developer_tools?", "DevelopmentTools.installed?" <ide> end <ide> <ide> def release <del> odeprecated "MacOS.release", "MacOS.version" <del> version <add> odisabled "MacOS.release", "MacOS.version" <ide> end <ide> end <ide> end <ide><path>Library/Homebrew/compat/pathname.rb <ide> class Pathname <del> def cp(dst) <del> odeprecated "Pathname#cp", "FileUtils.cp" <del> if file? 
<del> FileUtils.cp to_s, dst <del> else <del> FileUtils.cp_r to_s, dst <del> end <del> dst <add> def cp(_) <add> odisabled "Pathname#cp", "FileUtils.cp" <ide> end <ide> <del> def chmod_R(perms) <del> odeprecated "Pathname#chmod_R", "FileUtils.chmod_R" <del> require "fileutils" <del> FileUtils.chmod_R perms, to_s <add> def chmod_R(_) <add> odisabled "Pathname#chmod_R", "FileUtils.chmod_R" <ide> end <ide> end <ide><path>Library/Homebrew/compat/software_spec.rb <ide> class BottleSpecification <del> def revision(*args) <del> odeprecated "BottleSpecification.revision", "BottleSpecification.rebuild" <del> rebuild(*args) <add> def revision(*) <add> odisabled "BottleSpecification.revision", "BottleSpecification.rebuild" <ide> end <ide> end <ide><path>Library/Homebrew/compat/tab.rb <ide> class Tab < OpenStruct <ide> def build_32_bit? <del> odeprecated "Tab.build_32_bit?" <del> include?("32-bit") <add> odisabled "Tab.build_32_bit?" <ide> end <ide> end <ide><path>Library/Homebrew/compat/tap.rb <ide> <ide> class Tap <ide> def core_formula_repository? <del> odeprecated "Tap#core_formula_repository?", "Tap#core_tap?" <del> core_tap? <add> odisabled "Tap#core_formula_repository?", "Tap#core_tap?" <ide> end <ide> end <ide><path>Library/Homebrew/compat/utils.rb <ide> module Tty <ide> module_function <ide> <ide> def white <del> odeprecated "Tty.white", "Tty.reset.bold" <del> reset.bold <add> odisabled "Tty.white", "Tty.reset.bold" <ide> end <ide> end <ide> <del>def puts_columns(items) <del> odeprecated "puts_columns", "puts Formatter.columns" <del> puts Formatter.columns(items) <add>def puts_columns(_) <add> odisabled "puts_columns", "puts Formatter.columns" <ide> end <ide> <del>def plural(n, s = "s") <del> odeprecated "#plural", "Formatter.pluralize" <del> (n == 1) ? "" : s <add>def plural(_, _) <add> odisabled "#plural", "Formatter.pluralize" <ide> end <ide><path>Library/Homebrew/compat/version.rb <ide> class Version <del> def slice(*args) <del> odeprecated "Version#slice", "Version#to_s.slice" <del> to_s.slice(*args) <add> def slice(*) <add> odisabled "Version#slice", "Version#to_s.slice" <ide> end <ide> end <ide><path>Library/Homebrew/compat/xcode.rb <ide> module Xcode <ide> module_function <ide> <ide> def provides_autotools? <del> odeprecated "OS::Mac::Xcode.provides_autotools?" <del> version < "4.3" <add> odisabled "OS::Mac::Xcode.provides_autotools?" <ide> end <ide> end <ide> end
20
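The Homebrew diff above moves a set of compatibility shims from `odeprecated` (warn, then delegate to the replacement) to `odisabled` (refuse to run at all), which is why the delegating method bodies are deleted. As a rough illustration of that two-stage lifecycle — not Homebrew's actual Ruby helpers — here is a minimal Python sketch; the helper and class names are invented for the example.

```python
import warnings


def odeprecated(old, new=None):
    """Stage 1: warn that `old` is deprecated, but let the caller keep working."""
    hint = f"; use {new} instead" if new else ""
    warnings.warn(f"{old} is deprecated{hint}", DeprecationWarning, stacklevel=3)


def odisabled(old, new=None):
    """Stage 2: the shim is disabled outright, so raise instead of delegating."""
    hint = f"; use {new} instead" if new else ""
    raise RuntimeError(f"{old} has been disabled{hint}")


class CPU:
    @staticmethod
    def is_64_bit():
        return True


class Hardware:
    @staticmethod
    def is_64_bit():
        # Previously: odeprecated(...) followed by a delegation to CPU.is_64_bit().
        # After the change there is nothing to delegate to; the call fails loudly.
        odisabled("Hardware.is_64_bit?", "Hardware::CPU.is_64_bit?")
```

The design point mirrored here is that a disabled shim deliberately drops the delegation, so stale call sites break immediately instead of silently working forever.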
PHP
PHP
add class_exists() check
ac2004180235a6f3f43281fc1f071f1a60ee367d
<ide><path>src/Core/ObjectRegistry.php <ide> public function load(string $objectName, array $config = []) <ide> $className = $objectName; <ide> if (is_string($objectName)) { <ide> $className = $this->_resolveClassName($objectName); <del> if ($className === null) { <add> if ($className === null || !class_exists($className)) { <ide> [$plugin, $objectName] = pluginSplit($objectName); <ide> $this->_throwMissingClassError($objectName, $plugin); <ide> }
1
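The one-line CakePHP fix above treats a resolved class name that does not actually exist (`!class_exists($className)`) the same as a failed resolution, so the registry raises its "missing class" error instead of trying to instantiate a nonexistent class. A hedged Python sketch of the same defensive-loading idea follows; `load_class` and `MissingClassError` are invented names for illustration, not part of CakePHP.

```python
import importlib


class MissingClassError(RuntimeError):
    """Raised when a name resolves but no such class can actually be loaded."""


def load_class(dotted_name: str):
    module_name, _, class_name = dotted_name.rpartition(".")
    try:
        module = importlib.import_module(module_name)
    except ImportError as exc:
        raise MissingClassError(f"could not import module for {dotted_name!r}") from exc
    cls = getattr(module, class_name, None)
    # The extra guard from the diff: successful name resolution is not enough,
    # the resolved class must really exist before the registry tries to use it.
    if cls is None:
        raise MissingClassError(f"class {dotted_name!r} was not found")
    return cls


print(load_class("collections.OrderedDict"))   # works
# load_class("collections.NoSuchThing")        # raises MissingClassError
```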
Text
Text
update core ideas.md
74d5988ec4cf13fd4d00a7e9348255b6d62f4471
<ide><path>docs/Basics/Core Ideas.md <add> <add>Redux can be described in three fundamental principles: <add> <add>* **The whole state of your app is stored in an object tree inside a single *store*.** This makes it easy to create universal apps. The state from the server can be serialized and hydrated into the client with no extra coding effort. You can also persist your app’s state in development for a faster development cycle. And of course, with a single state tree, you get the previously difficult functionality like Undo/Redo for free. <add> <add>* **The only way to mutate the state is to emit an *action*, an object describing what happened.** This ensures that the views or the network callbacks never write directly to the state, and instead express the intent to mutate. Because all mutations are centralized and happen one by one in a strict order, there are no subtle race conditions to watch out for. Actions are just plain objects, so they can be logged, serialized, stored, and later replayed for debugging or testing purposes. <add> <add>* **To specify how the state tree is transformed by the actions, you write pure *reducers*.** Reducers are just pure functions that take the previous state and the action, and return the next state. You can start with a single reducer, but as your app grows, you can split it into smaller reducers that manage the specific parts of the state tree. Because reducers are just functions, you can control the order in which they are called, pass additional data, or even make reusable reducers for common tasks such as pagination.
1
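The reducer contract described in the Core Ideas text above — a pure function of `(previousState, action)` returning the next state without mutating anything — is language-agnostic, so here is a small sketch of it in Python rather than Redux's JavaScript API. The todo-list state shape and action types are made up for the example.

```python
def todos(state, action):
    """Pure reducer: same inputs always yield the same next state, with no mutation."""
    if action["type"] == "ADD_TODO":
        return state + [{"text": action["text"], "done": False}]
    if action["type"] == "TOGGLE_TODO":
        return [
            {**todo, "done": not todo["done"]} if i == action["index"] else todo
            for i, todo in enumerate(state)
        ]
    return state  # unknown actions must leave the state unchanged


state = []
state = todos(state, {"type": "ADD_TODO", "text": "read the docs"})
state = todos(state, {"type": "TOGGLE_TODO", "index": 0})
assert state == [{"text": "read the docs", "done": True}]
```

Because the reducer never writes to its arguments, replaying the same sequence of actions always reproduces the same state tree, which is what makes the logging, serialization, and undo/redo claims in the text hold.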
Javascript
Javascript
allow override of when/expect definitions
477626d846b4de65d1d5c7071e6a94361395ff42
<ide><path>src/ngMock/angular-mocks.js <ide> angular.mock.dump = function(object) { <ide> ```js <ide> // testing controller <ide> describe('MyController', function() { <del> var $httpBackend, $rootScope, createController; <add> var $httpBackend, $rootScope, createController, authRequestHandler; <ide> <ide> beforeEach(inject(function($injector) { <ide> // Set up the mock http service responses <ide> $httpBackend = $injector.get('$httpBackend'); <ide> // backend definition common for all tests <del> $httpBackend.when('GET', '/auth.py').respond({userId: 'userX'}, {'A-Token': 'xxx'}); <add> authRequestHandler = $httpBackend.when('GET', '/auth.py') <add> .respond({userId: 'userX'}, {'A-Token': 'xxx'}); <ide> <ide> // Get hold of a scope (i.e. the root scope) <ide> $rootScope = $injector.get('$rootScope'); <ide> angular.mock.dump = function(object) { <ide> }); <ide> <ide> <add> it('should fail authentication', function() { <add> <add> // Notice how you can change the response even after it was set <add> authRequestHandler.respond(401, ''); <add> <add> $httpBackend.expectGET('/auth.py'); <add> var controller = createController(); <add> $httpBackend.flush(); <add> expect($rootScope.status).toBe('Failed...'); <add> }); <add> <add> <ide> it('should send msg to server', function() { <ide> var controller = createController(); <ide> $httpBackend.flush(); <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * @param {(Object|function(Object))=} headers HTTP headers or function that receives http header <ide> * object and returns true if the headers match the current definition. <ide> * @returns {requestHandler} Returns an object with `respond` method that controls how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. <ide> * <ide> * - respond – <ide> * `{function([status,] data[, headers, statusText]) <ide> * | function(function(method, url, data, headers)}` <ide> * – The respond method takes a set of static data to be returned or a function that can <ide> * return an array containing response status (number), response data (string), response <del> * headers (Object), and the text for the status (string). <add> * headers (Object), and the text for the status (string). The respond method returns the <add> * `requestHandler` object for possible overrides. <ide> */ <ide> $httpBackend.when = function(method, url, data, headers) { <ide> var definition = new MockHttpExpectation(method, url, data, headers), <ide> chain = { <ide> respond: function(status, data, headers, statusText) { <add> definition.passThrough = undefined; <ide> definition.response = createResponse(status, data, headers, statusText); <add> return chain; <ide> } <ide> }; <ide> <ide> if ($browser) { <ide> chain.passThrough = function() { <add> definition.response = undefined; <ide> definition.passThrough = true; <add> return chain; <ide> }; <ide> } <ide> <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * and returns true if the url match the current definition. <ide> * @param {(Object|function(Object))=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. 
<ide> */ <ide> <ide> /** <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * and returns true if the url match the current definition. <ide> * @param {(Object|function(Object))=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * and returns true if the url match the current definition. <ide> * @param {(Object|function(Object))=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * data string and returns true if the data is as expected. <ide> * @param {(Object|function(Object))=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * data string and returns true if the data is as expected. <ide> * @param {(Object|function(Object))=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * @param {string|RegExp|function(string)} url HTTP url or function that receives the url <ide> * and returns true if the url match the current definition. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. <ide> */ <ide> createShortMethods('when'); <ide> <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * @param {(Object|function(Object))=} headers HTTP headers or function that receives http header <ide> * object and returns true if the headers match the current expectation. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. 
<ide> * <ide> * - respond – <ide> * `{function([status,] data[, headers, statusText]) <ide> * | function(function(method, url, data, headers)}` <ide> * – The respond method takes a set of static data to be returned or a function that can <ide> * return an array containing response status (number), response data (string), response <del> * headers (Object), and the text for the status (string). <add> * headers (Object), and the text for the status (string). The respond method returns the <add> * `requestHandler` object for possible overrides. <ide> */ <ide> $httpBackend.expect = function(method, url, data, headers) { <del> var expectation = new MockHttpExpectation(method, url, data, headers); <add> var expectation = new MockHttpExpectation(method, url, data, headers), <add> chain = { <add> respond: function (status, data, headers, statusText) { <add> expectation.response = createResponse(status, data, headers, statusText); <add> return chain; <add> } <add> }; <add> <ide> expectations.push(expectation); <del> return { <del> respond: function (status, data, headers, statusText) { <del> expectation.response = createResponse(status, data, headers, statusText); <del> } <del> }; <add> return chain; <ide> }; <ide> <ide> <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * and returns true if the url match the current definition. <ide> * @param {Object=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. See #expect for more info. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. See #expect for more info. <ide> */ <ide> <ide> /** <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * and returns true if the url match the current definition. <ide> * @param {Object=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * and returns true if the url match the current definition. <ide> * @param {Object=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * is in JSON format. <ide> * @param {Object=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * is in JSON format. <ide> * @param {Object=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. 
You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * is in JSON format. <ide> * @param {Object=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> function createHttpBackendMock($rootScope, $delegate, $browser) { <ide> * @param {string|RegExp|function(string)} url HTTP url or function that receives the url <ide> * and returns true if the url match the current definition. <ide> * @returns {requestHandler} Returns an object with `respond` method that control how a matched <del> * request is handled. <add> * request is handled. You can save this object for later use and invoke `respond` again in <add> * order to change how a matched request is handled. <ide> */ <ide> createShortMethods('expect'); <ide> <ide> angular.module('ngMockE2E', ['ng']).config(['$provide', function($provide) { <ide> * @param {(Object|function(Object))=} headers HTTP headers or function that receives http header <ide> * object and returns true if the headers match the current definition. <ide> * @returns {requestHandler} Returns an object with `respond` and `passThrough` methods that <del> * control how a matched request is handled. <add> * control how a matched request is handled. You can save this object for later use and invoke <add> * `respond` or `passThrough` again in order to change how a matched request is handled. <ide> * <ide> * - respond – <ide> * `{function([status,] data[, headers, statusText]) <ide> angular.module('ngMockE2E', ['ng']).config(['$provide', function($provide) { <ide> * - passThrough – `{function()}` – Any request matching a backend definition with <ide> * `passThrough` handler will be passed through to the real backend (an XHR request will be made <ide> * to the server.) <add> * - Both methods return the `requestHandler` object for possible overrides. <ide> */ <ide> <ide> /** <ide> angular.module('ngMockE2E', ['ng']).config(['$provide', function($provide) { <ide> * and returns true if the url match the current definition. <ide> * @param {(Object|function(Object))=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` and `passThrough` methods that <del> * control how a matched request is handled. <add> * control how a matched request is handled. You can save this object for later use and invoke <add> * `respond` or `passThrough` again in order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> angular.module('ngMockE2E', ['ng']).config(['$provide', function($provide) { <ide> * and returns true if the url match the current definition. <ide> * @param {(Object|function(Object))=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` and `passThrough` methods that <del> * control how a matched request is handled. <add> * control how a matched request is handled. You can save this object for later use and invoke <add> * `respond` or `passThrough` again in order to change how a matched request is handled. 
<ide> */ <ide> <ide> /** <ide> angular.module('ngMockE2E', ['ng']).config(['$provide', function($provide) { <ide> * and returns true if the url match the current definition. <ide> * @param {(Object|function(Object))=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` and `passThrough` methods that <del> * control how a matched request is handled. <add> * control how a matched request is handled. You can save this object for later use and invoke <add> * `respond` or `passThrough` again in order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> angular.module('ngMockE2E', ['ng']).config(['$provide', function($provide) { <ide> * @param {(string|RegExp)=} data HTTP request body. <ide> * @param {(Object|function(Object))=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` and `passThrough` methods that <del> * control how a matched request is handled. <add> * control how a matched request is handled. You can save this object for later use and invoke <add> * `respond` or `passThrough` again in order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> angular.module('ngMockE2E', ['ng']).config(['$provide', function($provide) { <ide> * @param {(string|RegExp)=} data HTTP request body. <ide> * @param {(Object|function(Object))=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` and `passThrough` methods that <del> * control how a matched request is handled. <add> * control how a matched request is handled. You can save this object for later use and invoke <add> * `respond` or `passThrough` again in order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> angular.module('ngMockE2E', ['ng']).config(['$provide', function($provide) { <ide> * @param {(string|RegExp)=} data HTTP request body. <ide> * @param {(Object|function(Object))=} headers HTTP headers. <ide> * @returns {requestHandler} Returns an object with `respond` and `passThrough` methods that <del> * control how a matched request is handled. <add> * control how a matched request is handled. You can save this object for later use and invoke <add> * `respond` or `passThrough` again in order to change how a matched request is handled. <ide> */ <ide> <ide> /** <ide> angular.module('ngMockE2E', ['ng']).config(['$provide', function($provide) { <ide> * @param {string|RegExp|function(string)} url HTTP url or function that receives the url <ide> * and returns true if the url match the current definition. <ide> * @returns {requestHandler} Returns an object with `respond` and `passThrough` methods that <del> * control how a matched request is handled. <add> * control how a matched request is handled. You can save this object for later use and invoke <add> * `respond` or `passThrough` again in order to change how a matched request is handled. 
<ide> */ <ide> angular.mock.e2e = {}; <ide> angular.mock.e2e.$httpBackendDecorator = <ide><path>test/ngMock/angular-mocksSpec.js <ide> describe('ngMock', function() { <ide> expect(callback.argsForCall[0]).toEqual([200, 'first', '', '']); <ide> expect(callback.argsForCall[1]).toEqual([200, 'second', '', '']); <ide> }); <add> <add> it('should be able to override response of expect definition', function() { <add> var definition = hb.expect('GET', '/url1'); <add> definition.respond('first'); <add> definition.respond('second'); <add> <add> hb('GET', '/url1', null, callback); <add> hb.flush(); <add> expect(callback).toHaveBeenCalledOnceWith(200, 'second', '', ''); <add> }); <add> <add> it('should be able to override response of when definition', function() { <add> var definition = hb.when('GET', '/url1'); <add> definition.respond('first'); <add> definition.respond('second'); <add> <add> hb('GET', '/url1', null, callback); <add> hb.flush(); <add> expect(callback).toHaveBeenCalledOnceWith(200, 'second', '', ''); <add> }); <add> <add> it('should be able to override response of expect definition with chaining', function() { <add> var definition = hb.expect('GET', '/url1').respond('first'); <add> definition.respond('second'); <add> <add> hb('GET', '/url1', null, callback); <add> hb.flush(); <add> expect(callback).toHaveBeenCalledOnceWith(200, 'second', '', ''); <add> }); <add> <add> it('should be able to override response of when definition with chaining', function() { <add> var definition = hb.when('GET', '/url1').respond('first'); <add> definition.respond('second'); <add> <add> hb('GET', '/url1', null, callback); <add> hb.flush(); <add> expect(callback).toHaveBeenCalledOnceWith(200, 'second', '', ''); <add> }); <ide> }); <ide> <ide> <ide> describe('ngMockE2E', function() { <ide> expect(realHttpBackend).toHaveBeenCalledOnceWith( <ide> 'GET', '/passThrough/23', null, callback, {}, null, true); <ide> }); <add> <add> it('should be able to override a respond definition with passThrough', function() { <add> var definition = hb.when('GET', /\/passThrough\/.*/).respond('override me'); <add> definition.passThrough(); <add> hb('GET', '/passThrough/23', null, callback, {}, null, true); <add> <add> expect(realHttpBackend).toHaveBeenCalledOnceWith( <add> 'GET', '/passThrough/23', null, callback, {}, null, true); <add> }); <add> <add> it('should be able to override a respond definition with passThrough', inject(function($browser) { <add> var definition = hb.when('GET', /\/passThrough\/.*/).passThrough(); <add> definition.respond('passThrough override'); <add> hb('GET', '/passThrough/23', null, callback, {}, null, true); <add> $browser.defer.flush(); <add> <add> expect(realHttpBackend).not.toHaveBeenCalled(); <add> expect(callback).toHaveBeenCalledOnceWith(200, 'passThrough override', '', ''); <add> })); <ide> }); <ide> <ide>
2
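The angular-mocks change above makes both `when()` and `expect()` hand back a handler whose `respond()` (and `passThrough()`) return the handler itself, so a saved definition can be reconfigured later in a test. The Python sketch below shows only that fluent, overridable-handler pattern — it is not Angular's `$httpBackend`, and the class and method names are invented.

```python
class RequestHandler:
    def __init__(self, method, url):
        self.method, self.url = method, url
        self.response = None
        self.pass_through = False

    def respond(self, status, body):
        self.pass_through = False      # a canned response cancels pass-through
        self.response = (status, body)
        return self                    # returning self allows chaining and later overrides

    def pass_through_to_real_backend(self):
        self.response = None           # pass-through cancels any canned response
        self.pass_through = True
        return self


auth = RequestHandler("GET", "/auth.py").respond(200, {"userId": "userX"})
auth.respond(401, "")                  # override later, e.g. to exercise the failure path
assert auth.response == (401, "")
```

The mutual reset between the two methods is the detail the new tests check: whichever of respond/passThrough was called last wins.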
Python
Python
fix regression in regression
469384a777131e8c6d6bf7851ab0f77f2fbe3cb0
<ide><path>src/transformers/models/albert/modeling_albert.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/bert/modeling_bert.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/big_bird/modeling_big_bird.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/convbert/modeling_convbert.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/distilbert/modeling_distilbert.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/electra/modeling_electra.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/funnel/modeling_funnel.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> 
loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/longformer/modeling_longformer.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/mobilebert/modeling_mobilebert.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/reformer/modeling_reformer.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/roberta/modeling_roberta.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/squeezebert/modeling_squeezebert.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/xlm/modeling_xlm.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, 
self.num_labels), labels.view(-1)) <ide><path>src/transformers/models/xlnet/modeling_xlnet.py <ide> def forward( <ide> <ide> if self.config.problem_type == "regression": <ide> loss_fct = MSELoss() <del> loss = loss_fct(logits.view(-1, self.num_labels), labels) <add> if self.num_labels == 1: <add> loss = loss_fct(logits.squeeze(), labels.squeeze()) <add> else: <add> loss = loss_fct(logits, labels) <ide> elif self.config.problem_type == "single_label_classification": <ide> loss_fct = CrossEntropyLoss() <ide> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <ide><path>tests/test_modeling_common.py <ide> import random <ide> import tempfile <ide> import unittest <add>import warnings <ide> from typing import List, Tuple <ide> <ide> from huggingface_hub import HfApi <ide> def test_problem_types(self): <ide> <ide> inputs["labels"] = inputs["labels"].to(problem_type["dtype"]) <ide> <del> loss = model(**inputs).loss <add> # This tests that we do not trigger the warning form PyTorch "Using a target size that is different <add> # to the input size. This will likely lead to incorrect results due to broadcasting. Please ensure <add> # they have the same size." which is a symptom something in wrong for the regression problem. <add> # See https://github.com/huggingface/transformers/issues/11780 <add> with warnings.catch_warnings(record=True) as warning_list: <add> loss = model(**inputs).loss <add> self.assertListEqual(warning_list, []) <add> <ide> loss.backward() <ide> <ide>
15
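The `num_labels == 1` branch added throughout the diff above exists because `logits.view(-1, num_labels)` leaves single-label regression logits shaped `(batch, 1)` while the labels are a flat `(batch,)` tensor; `nn.MSELoss` then broadcasts the pair to `(batch, batch)`, emits the PyTorch size-mismatch warning referenced in the new test, and returns a wrong loss. A small standalone reproduction is sketched below, assuming PyTorch is installed; the tensor sizes are arbitrary.

```python
import torch
from torch import nn

torch.manual_seed(0)
loss_fct = nn.MSELoss()
logits = torch.randn(4, 1)   # regression head output: one value per example
labels = torch.randn(4)      # targets stored as a flat (batch,) tensor

# Old path: (4, 1) vs (4,) broadcasts to (4, 4) inside MSELoss,
# triggering the UserWarning and silently computing the wrong quantity.
broken = loss_fct(logits.view(-1, 1), labels)

# Fixed path from the commit: squeeze both sides so the shapes match exactly.
fixed = loss_fct(logits.squeeze(), labels.squeeze())

print(broken.item(), fixed.item())   # the two values generally differ
```

The new test in `test_modeling_common.py` captures warnings around the forward pass and asserts the list is empty, which is exactly the symptom this snippet reproduces.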
Python
Python
remove print statement
9cb2aef5877b342ef44cd77386328ee91039088e
<ide><path>spacy/lemmatizer.py <ide> def __call__(self, string, univ_pos, morphology=None): <ide> elif univ_pos == PUNCT: <ide> univ_pos = 'punct' <ide> # See Issue #435 for example of where this logic is requied. <del> print("Check base form", string) <ide> if self.is_base_form(univ_pos, morphology): <ide> return set([string.lower()]) <ide> lemmas = lemmatize(string, self.index.get(univ_pos, {}),
1
PHP
PHP
add formprotector class
980e6367f970ad51780803485e843cf707af85bf
<ide><path>src/Form/FormProtector.php <add><?php <add>declare(strict_types=1); <add> <add>/** <add> * CakePHP(tm) : Rapid Development Framework (https://cakephp.org) <add> * Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org) <add> * <add> * Licensed under The MIT License <add> * For full copyright and license information, please see the LICENSE.txt <add> * Redistributions of files must retain the above copyright notice. <add> * <add> * @copyright Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org) <add> * @link https://cakephp.org CakePHP(tm) Project <add> * @since 4.0.0 <add> * @license https://opensource.org/licenses/mit-license.php MIT License <add> */ <add>namespace Cake\Form; <add> <add>use Cake\Controller\Exception\FormProtectionException; <add>use Cake\Core\Configure; <add>use Cake\Utility\Hash; <add>use Cake\Utility\Security; <add> <add>/** <add> * Protects against form tampering. It ensures that: <add> * <add> * - Form's action (URL) is not modified. <add> * - Unknown / extra fields are not added to the form. <add> * - Existing fields have not been removed from the form. <add> * - Values of hidden inputs have not been changed. <add> */ <add>class FormProtector <add>{ <add> /** <add> * Fields list. <add> * <add> * @var array <add> */ <add> protected $fields = []; <add> <add> /** <add> * Unlocked fields. <add> * <add> * @var array <add> */ <add> protected $unlockedFields = []; <add> <add> /** <add> * Form URL <add> * <add> * @var string <add> */ <add> protected $url; <add> <add> /** <add> * Session Id <add> * <add> * @var string <add> */ <add> protected $sessionId; <add> <add> /** <add> * Error message providing detail for failed validation. <add> * <add> * @var string|null <add> */ <add> protected $debugMessage; <add> <add> /** <add> * Construct. <add> * <add> * @param string $url Form URL. <add> * @param string $sessionId Session Id. <add> * @param array $data Data array, can contain key `unlockedFields` with list of unlocked fields. <add> */ <add> public function __construct(?string $url = null, ?string $sessionId = null, array $data = []) <add> { <add> $this->url = $url; <add> $this->sessionId = $sessionId; <add> <add> if (!empty($data['unlockedFields'])) { <add> $this->unlockedFields = $data['unlockedFields']; <add> } <add> } <add> <add> /** <add> * Validate submitted form data. <add> * <add> * @param mixed $formData Form data. <add> * @param string $url URL form was POSTed to. <add> * @param string $sessionId Session id for hash generation. <add> * @return bool <add> * @throws \Cake\Controller\Exception\FormProtectionException <add> */ <add> public function validate($formData, string $url, string $sessionId): bool <add> { <add> $this->debugMessage = null; <add> <add> $extractedToken = $this->extractToken($formData); <add> if (empty($extractedToken)) { <add> return false; <add> } <add> <add> $hashParts = $this->extractHashParts($formData); <add> $generatedToken = $this->generateHash( <add> $hashParts['fields'], <add> $hashParts['unlockedFields'], <add> $url, <add> $sessionId <add> ); <add> <add> if (hash_equals($generatedToken, $extractedToken)) { <add> return true; <add> } <add> <add> if (Configure::read('debug')) { <add> $debugMessage = $this->debugTokenNotMatching($formData, $hashParts + compact('url', 'sessionId')); <add> if ($debugMessage) { <add> $this->debugMessage = $debugMessage; <add> } <add> } <add> <add> return false; <add> } <add> <add> /** <add> * Get validation error message. 
<add> * <add> * @return string|null <add> */ <add> public function getError(): ?string <add> { <add> return $this->debugMessage; <add> } <add> <add> /** <add> * Throws a 400 - Bad request exception or calls custom callback. <add> * <add> * If `validationFailureCallback` config is specified, it will use this <add> * callback by executing the method passing the argument as exception. <add> * <add> * @param \Cake\Controller\Exception\FormProtectionException $exception Exception. <add> * @return \Cake\Http\Response|null If specified, validationFailureCallback's response, or no return otherwise. <add> * @throws \Cake\Http\Exception\BadRequestException <add> */ <add> protected function validationFailure(FormProtectionException $exception): ?Response <add> { <add> if ($this->_config['validationFailureCallback']) { <add> return $this->executeCallback($this->_config['validationFailureCallback'], [$exception]); <add> } <add> <add> $this->throwException($exception); <add> <add> return null; <add> } <add> <add> /** <add> * Extract token from data. <add> * <add> * @param mixed $formData Data to validate. <add> * @return string|null Fields token on success, null on failure. <add> */ <add> protected function extractToken($formData): ?string <add> { <add> if (!is_array($formData)) { <add> $this->debugMessage = 'Request data is not an array.'; <add> <add> return null; <add> } <add> <add> $message = '\'%s\' was not found in request data.'; <add> if (!isset($formData['_Token'])) { <add> $this->debugMessage = sprintf($message, '_Token'); <add> <add> return null; <add> } <add> if (!isset($formData['_Token']['fields'])) { <add> $this->debugMessage = sprintf($message, '_Token.fields'); <add> <add> return null; <add> } <add> if (!isset($formData['_Token']['unlocked'])) { <add> $this->debugMessage = sprintf($message, '_Token.unlocked'); <add> <add> return null; <add> } <add> if (Configure::read('debug') && !isset($formData['_Token']['debug'])) { <add> $this->debugMessage = sprintf($message, '_Token.debug'); <add> <add> return null; <add> } <add> if (!Configure::read('debug') && isset($formData['_Token']['debug'])) { <add> $this->debugMessage = 'Unexpected \'_Token.debug\' found in request data'; <add> <add> return null; <add> } <add> <add> $token = urldecode($formData['_Token']['fields']); <add> if (strpos($token, ':')) { <add> [$token, ] = explode(':', $token, 2); <add> } <add> <add> return $token; <add> } <add> <add> /** <add> * Return hash parts for the token generation <add> * <add> * @param array $formData Form data. 
<add> * @return array <add> * @psalm-return array{fields: array, unlockedFields: array} <add> */ <add> protected function extractHashParts(array $formData): array <add> { <add> $fields = $this->extractFields($formData); <add> $unlockedFields = $this->sortedUnlockedFields($formData); <add> <add> return [ <add> 'fields' => $fields, <add> 'unlockedFields' => $unlockedFields, <add> ]; <add> } <add> <add> /** <add> * Return the fields list for the hash calculation <add> * <add> * @param array $formData Data array <add> * @return array <add> */ <add> protected function extractFields(array $formData): array <add> { <add> $locked = ''; <add> $token = urldecode($formData['_Token']['fields']); <add> $unlocked = urldecode($formData['_Token']['unlocked']); <add> <add> if (strpos($token, ':')) { <add> [$token, $locked] = explode(':', $token, 2); <add> } <add> unset($formData['_Token']); <add> <add> $locked = explode('|', $locked); <add> $unlocked = explode('|', $unlocked); <add> <add> $fields = Hash::flatten($formData); <add> $fieldList = array_keys($fields); <add> $multi = $lockedFields = []; <add> $isUnlocked = false; <add> <add> foreach ($fieldList as $i => $key) { <add> if (is_string($key) && preg_match('/(\.\d+){1,10}$/', $key)) { <add> $multi[$i] = preg_replace('/(\.\d+){1,10}$/', '', $key); <add> unset($fieldList[$i]); <add> } else { <add> $fieldList[$i] = (string)$key; <add> } <add> } <add> if (!empty($multi)) { <add> $fieldList += array_unique($multi); <add> } <add> <add> $unlockedFields = array_unique( <add> array_merge( <add> $this->unlockedFields, <add> $unlocked <add> ) <add> ); <add> <add> /** @var (string|int)[] $fieldList */ <add> foreach ($fieldList as $i => $key) { <add> $isLocked = in_array($key, $locked, true); <add> <add> if (!empty($unlockedFields)) { <add> foreach ($unlockedFields as $off) { <add> $off = explode('.', $off); <add> /** @psalm-suppress PossiblyInvalidArgument */ <add> $field = array_values(array_intersect(explode('.', $key), $off)); <add> $isUnlocked = ($field === $off); <add> if ($isUnlocked) { <add> break; <add> } <add> } <add> } <add> <add> if ($isUnlocked || $isLocked) { <add> unset($fieldList[$i]); <add> if ($isLocked) { <add> $lockedFields[$key] = $fields[$key]; <add> } <add> } <add> } <add> sort($fieldList, SORT_STRING); <add> ksort($lockedFields, SORT_STRING); <add> $fieldList += $lockedFields; <add> <add> return $fieldList; <add> } <add> <add> /** <add> * Get the sorted unlocked string <add> * <add> * @param array $formData Data array <add> * @return string[] <add> */ <add> protected function sortedUnlockedFields(array $formData): array <add> { <add> $unlocked = urldecode($formData['_Token']['unlocked']); <add> if (empty($unlocked)) { <add> return []; <add> } <add> <add> $unlocked = explode('|', $unlocked); <add> sort($unlocked, SORT_STRING); <add> <add> return $unlocked; <add> } <add> <add> /** <add> * Generate validation hash. <add> * <add> * @param array $fields Fields list. <add> * @param array $unlockedFields Unlocked fields. <add> * @param string $url Form URL. <add> * @param string $sessionId Session Id. 
<add> * @return string <add> */ <add> protected function generateHash(array $fields, array $unlockedFields, string $url, string $sessionId) <add> { <add> $hashParts = [ <add> $url, <add> serialize($fields), <add> implode('|', $unlockedFields), <add> $sessionId, <add> ]; <add> <add> return hash_hmac('sha1', implode('', $hashParts), Security::getSalt()); <add> } <add> <add> /** <add> * Create a message for humans to understand why Security token is not matching <add> * <add> * @param array $formData Data. <add> * @param array $hashParts Elements used to generate the Token hash <add> * @return string Message explaining why the tokens are not matching <add> */ <add> protected function debugTokenNotMatching(array $formData, array $hashParts): string <add> { <add> $messages = []; <add> if (!isset($formData['_Token']['debug'])) { <add> return 'Form protection debug token not found.'; <add> } <add> <add> $expectedParts = json_decode(urldecode($formData['_Token']['debug']), true); <add> if (!is_array($expectedParts) || count($expectedParts) !== 3) { <add> return 'Invalid form protection debug token.'; <add> } <add> $expectedUrl = Hash::get($expectedParts, 0); <add> $url = Hash::get($hashParts, 'url'); <add> if ($expectedUrl !== $url) { <add> $messages[] = sprintf('URL mismatch in POST data (expected \'%s\' but found \'%s\')', $expectedUrl, $url); <add> } <add> $expectedFields = Hash::get($expectedParts, 1); <add> $dataFields = Hash::get($hashParts, 'fields') ?: []; <add> $fieldsMessages = $this->debugCheckFields( <add> (array)$dataFields, <add> $expectedFields, <add> 'Unexpected field \'%s\' in POST data', <add> 'Tampered field \'%s\' in POST data (expected value \'%s\' but found \'%s\')', <add> 'Missing field \'%s\' in POST data' <add> ); <add> $expectedUnlockedFields = Hash::get($expectedParts, 2); <add> $dataUnlockedFields = Hash::get($hashParts, 'unlockedFields') ?: []; <add> $unlockFieldsMessages = $this->debugCheckFields( <add> (array)$dataUnlockedFields, <add> $expectedUnlockedFields, <add> 'Unexpected unlocked field \'%s\' in POST data', <add> '', <add> 'Missing unlocked field: \'%s\'' <add> ); <add> <add> $messages = array_merge($messages, $fieldsMessages, $unlockFieldsMessages); <add> <add> return implode(', ', $messages); <add> } <add> <add> /** <add> * Iterates data array to check against expected <add> * <add> * @param array $dataFields Fields array, containing the POST data fields <add> * @param array $expectedFields Fields array, containing the expected fields we should have in POST <add> * @param string $intKeyMessage Message string if unexpected found in data fields indexed by int (not protected) <add> * @param string $stringKeyMessage Message string if tampered found in <add> * data fields indexed by string (protected). 
<add> * @param string $missingMessage Message string if missing field <add> * @return string[] Messages <add> */ <add> protected function debugCheckFields( <add> array $dataFields, <add> array $expectedFields = [], <add> string $intKeyMessage = '', <add> string $stringKeyMessage = '', <add> string $missingMessage = '' <add> ): array { <add> $messages = $this->matchExistingFields($dataFields, $expectedFields, $intKeyMessage, $stringKeyMessage); <add> $expectedFieldsMessage = $this->debugExpectedFields($expectedFields, $missingMessage); <add> if ($expectedFieldsMessage !== null) { <add> $messages[] = $expectedFieldsMessage; <add> } <add> <add> return $messages; <add> } <add> <add> /** <add> * Generate array of messages for the existing fields in POST data, matching dataFields in $expectedFields <add> * will be unset <add> * <add> * @param array $dataFields Fields array, containing the POST data fields <add> * @param array $expectedFields Fields array, containing the expected fields we should have in POST <add> * @param string $intKeyMessage Message string if unexpected found in data fields indexed by int (not protected) <add> * @param string $stringKeyMessage Message string if tampered found in <add> * data fields indexed by string (protected) <add> * @return string[] Error messages <add> */ <add> protected function matchExistingFields( <add> array $dataFields, <add> array &$expectedFields, <add> string $intKeyMessage, <add> string $stringKeyMessage <add> ): array { <add> $messages = []; <add> foreach ($dataFields as $key => $value) { <add> if (is_int($key)) { <add> $foundKey = array_search($value, (array)$expectedFields, true); <add> if ($foundKey === false) { <add> $messages[] = sprintf($intKeyMessage, $value); <add> } else { <add> unset($expectedFields[$foundKey]); <add> } <add> } else { <add> if (isset($expectedFields[$key]) && $value !== $expectedFields[$key]) { <add> $messages[] = sprintf($stringKeyMessage, $key, $expectedFields[$key], $value); <add> } <add> unset($expectedFields[$key]); <add> } <add> } <add> <add> return $messages; <add> } <add> <add> /** <add> * Generate debug message for the expected fields <add> * <add> * @param array $expectedFields Expected fields <add> * @param string $missingMessage Message template <add> * @return string|null Error message about expected fields <add> */ <add> protected function debugExpectedFields(array $expectedFields = [], string $missingMessage = ''): ?string <add> { <add> if (count($expectedFields) === 0) { <add> return null; <add> } <add> <add> $expectedFieldNames = []; <add> foreach ((array)$expectedFields as $key => $expectedField) { <add> if (is_int($key)) { <add> $expectedFieldNames[] = $expectedField; <add> } else { <add> $expectedFieldNames[] = $key; <add> } <add> } <add> <add> return sprintf($missingMessage, implode(', ', $expectedFieldNames)); <add> } <add>} <ide><path>tests/TestCase/Form/FormProtectorTest.php <add><?php <add>declare(strict_types=1); <add> <add>/** <add> * CakePHP(tm) : Rapid Development Framework (https://cakephp.org) <add> * Copyright (c) Cake Software Foundation, Inc. (https://cakefoundation.org) <add> * <add> * Licensed under The MIT License <add> * For full copyright and license information, please see the LICENSE.txt <add> * Redistributions of files must retain the above copyright notice <add> * <add> * @copyright Copyright (c) Cake Software Foundation, Inc. 
(https://cakefoundation.org) <add> * @link https://cakephp.org CakePHP(tm) Project <add> * @since 4.0.0 <add> * @license https://opensource.org/licenses/mit-license.php MIT License <add> */ <add>namespace Cake\Test\TestCase\Form; <add> <add>use Cake\Core\Configure; <add>use Cake\Form\FormProtector; <add>use Cake\TestSuite\TestCase; <add>use Cake\Utility\Security; <add> <add>/** <add> * FormProtectorTest class <add> */ <add>class FormProtectorTest extends TestCase <add>{ <add> /** <add> * @var string <add> */ <add> protected $url = '/articles/index'; <add> <add> /** <add> * @var string <add> */ <add> protected $sessionId = 'cli'; <add> <add> public function setUp(): void <add> { <add> parent::setUp(); <add> <add> Security::setSalt('foo!'); <add> <add> // $this->protector = new FormProtector('http://localhost/articles/index', 'cli'); <add> } <add> <add> /** <add> * Helper function for validation. <add> * <add> * @param array $data <add> * @param string|null $errorMessage <add> * @return void <add> */ <add> public function validate($data, $errorMessage = null) <add> { <add> $protector = new FormProtector(); <add> $result = $protector->validate($data, $this->url, $this->sessionId); <add> <add> if ($errorMessage === null) { <add> $this->assertTrue($result); <add> } else { <add> $this->assertFalse($result); <add> $this->assertSame($errorMessage, $protector->getError()); <add> } <add> } <add> <add> /** <add> * testValidate method <add> * <add> * Simple hash validation test <add> * <add> * @return void <add> */ <add> public function testValidate(): void <add> { <add> $fields = '4697b45f7f430ff3ab73018c20f315eecb0ba5a6%3AModel.valid'; <add> $unlocked = ''; <add> $debug = ''; <add> <add> $data = [ <add> 'Model' => ['username' => 'nate', 'password' => 'foo', 'valid' => '0'], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateNoUnlockedInRequestData method <add> * <add> * Test that validate fails if you are missing unlocked in request data. <add> * <add> * @return void <add> */ <add> public function testValidateNoUnlockedInRequestData(): void <add> { <add> $fields = 'a5475372b40f6e3ccbf9f8af191f20e1642fd877%3AModel.valid'; <add> <add> $data = [ <add> 'Model' => ['username' => 'nate', 'password' => 'foo', 'valid' => '0'], <add> '_Token' => compact('fields'), <add> ]; <add> <add> $this->validate($data, '\'_Token.unlocked\' was not found in request data.'); <add> } <add> <add> /** <add> * testValidateFormHacking method <add> * <add> * Test that validate fails if any of its required fields are missing. <add> * <add> * @return void <add> */ <add> public function testValidateFormHacking(): void <add> { <add> $unlocked = ''; <add> <add> $data = [ <add> 'Model' => ['username' => 'nate', 'password' => 'foo', 'valid' => '0'], <add> '_Token' => compact('unlocked'), <add> ]; <add> <add> $this->validate($data, '\'_Token.fields\' was not found in request data.'); <add> } <add> <add> /** <add> * testValidateEmptyForm method <add> * <add> * Test that validate fails if empty form is submitted. <add> * <add> * @return void <add> */ <add> public function testValidateEmptyForm(): void <add> { <add> $this->validate([], '\'_Token\' was not found in request data.'); <add> } <add> <add> /** <add> * testValidateObjectDeserialize <add> * <add> * Test that objects can't be passed into the serialized string. This was a vector for RFI and LFI <add> * attacks. 
Thanks to Felix Wilhelm <add> * <add> * @return void <add> */ <add> public function testValidateObjectDeserialize(): void <add> { <add> $fields = 'a5475372b40f6e3ccbf9f8af191f20e1642fd877'; <add> $unlocked = ''; <add> $debug = urlencode(json_encode([ <add> '/articles/index', <add> ['Model.password', 'Model.username', 'Model.valid'], <add> [], <add> ])); <add> <add> // a corrupted serialized object, so we can see if it ever gets to deserialize <add> $attack = 'O:3:"App":1:{s:5:"__map";a:1:{s:3:"foo";s:7:"Hacked!";s:1:"fail"}}'; <add> $fields .= urlencode(':' . str_rot13($attack)); <add> <add> $data = [ <add> 'Model' => ['username' => 'mark', 'password' => 'foo', 'valid' => '0'], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> <add> $protector = new FormProtector(); <add> $result = $protector->validate($data, $this->url, $this->sessionId); <add> $this->assertFalse($result); <add> } <add> <add> /** <add> * testValidateArray method <add> * <add> * Tests validation of checkbox arrays. <add> * <add> * @return void <add> */ <add> public function testValidateArray(): void <add> { <add> $fields = 'f95b472a63f1d883b9eaacaf8a8e36e325e3fe82%3A'; <add> $unlocked = ''; <add> $debug = urlencode(json_encode([ <add> 'some-action', <add> [], <add> [], <add> ])); <add> <add> $data = [ <add> 'Model' => ['multi_field' => ['1', '3']], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> <add> $data = [ <add> 'Model' => ['multi_field' => [12 => '1', 20 => '3']], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateIntFieldName method <add> * <add> * Tests validation of integer field names. <add> * <add> * @return void <add> */ <add> public function testValidateIntFieldName(): void <add> { <add> $fields = '11f87a5962db9ac26405e460cd3063bb6ff76cf8%3A'; <add> $unlocked = ''; <add> $debug = urlencode(json_encode([ <add> 'some-action', <add> [], <add> [], <add> ])); <add> <add> $data = [ <add> 1 => 'value,', <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateNoModel method <add> * <add> * @return void <add> */ <add> public function testValidateNoModel(): void <add> { <add> $fields = 'a2a942f587deb20e90241c51b59d901d8a7f796b%3A'; <add> $unlocked = ''; <add> $debug = 'not used'; <add> <add> $data = [ <add> 'anything' => 'some_data', <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> <add> $this->validate($data); <add> } <add> <add> /** <add> * test validate uses full URL <add> * <add> * @return void <add> */ <add> public function testValidateSubdirectory(): void <add> { <add> $this->url = '/subdir' . $this->url; <add> <add> $fields = 'cc9b6af3f33147235ae8f8037b0a71399a2425f2%3A'; <add> $unlocked = ''; <add> $debug = ''; <add> <add> $data = [ <add> 'Model' => ['username' => '', 'password' => ''], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateComplex method <add> * <add> * Tests hash validation for multiple records, including locked fields. 
<add> * <add> * @return void <add> */ <add> public function testValidateComplex(): void <add> { <add> $fields = 'b00b7e5c2e3bf8bc474fb7cfde6f9c2aa06ab9bc%3AAddresses.0.id%7CAddresses.1.id'; <add> $unlocked = ''; <add> $debug = 'not used'; <add> <add> $data = [ <add> 'Addresses' => [ <add> '0' => [ <add> 'id' => '123456', 'title' => '', 'first_name' => '', 'last_name' => '', <add> 'address' => '', 'city' => '', 'phone' => '', 'primary' => '', <add> ], <add> '1' => [ <add> 'id' => '654321', 'title' => '', 'first_name' => '', 'last_name' => '', <add> 'address' => '', 'city' => '', 'phone' => '', 'primary' => '', <add> ], <add> ], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateMultipleSelect method <add> * <add> * Test ValidatePost with multiple select elements. <add> * <add> * @return void <add> */ <add> public function testValidateMultipleSelect(): void <add> { <add> $fields = '28dd05f0af314050784b18b3366857e8e8c78e73%3A'; <add> $unlocked = ''; <add> $debug = 'not used'; <add> <add> $data = [ <add> 'Tag' => ['Tag' => [1, 2]], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> <add> $data = [ <add> 'Tag' => ['Tag' => [1, 2, 3]], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> <add> $data = [ <add> 'Tag' => ['Tag' => [1, 2, 3, 4]], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> <add> $fields = '1e4c9269b64756e9b141d364497c5f037b428a37%3A'; <add> $data = [ <add> 'User.password' => 'bar', 'User.name' => 'foo', 'User.is_valid' => '1', <add> 'Tag' => ['Tag' => [1]], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateCheckbox method <add> * <add> * First block tests un-checked checkbox <add> * Second block tests checked checkbox <add> * <add> * @return void <add> */ <add> public function testValidateCheckbox(): void <add> { <add> $fields = '4697b45f7f430ff3ab73018c20f315eecb0ba5a6%3AModel.valid'; <add> $unlocked = ''; <add> $debug = 'not used'; <add> <add> $data = [ <add> 'Model' => ['username' => '', 'password' => '', 'valid' => '0'], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> <add> $fields = '3f368401f9a8610bcace7746039651066cdcdc38%3A'; <add> <add> $data = [ <add> 'Model' => ['username' => '', 'password' => '', 'valid' => '0'], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> <add> $data = [ <add> 'Model' => ['username' => '', 'password' => '', 'valid' => '0'], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateHidden method <add> * <add> * @return void <add> */ <add> public function testValidateHidden(): void <add> { <add> $fields = '96e61bded2b62b0c420116a0eb06a3b3acddb8f1%3AModel.hidden%7CModel.other_hidden'; <add> $unlocked = ''; <add> $debug = 'not used'; <add> <add> $data = [ <add> 'Model' => [ <add> 'username' => '', 'password' => '', 'hidden' => '0', <add> 'other_hidden' => 'some hidden value', <add> ], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateDisabledFieldsInData method <add> * <add> * Test validating post data with posted unlocked fields. 
<add> * <add> * @return void <add> */ <add> public function testValidateDisabledFieldsInData(): void <add> { <add> $unlocked = 'Model.username'; <add> $fields = ['Model.hidden', 'Model.password']; <add> $fields = urlencode( <add> hash_hmac('sha1', '/articles/index' . serialize($fields) . $unlocked . 'cli', Security::getSalt()) <add> ); <add> $debug = 'not used'; <add> <add> $data = [ <add> 'Model' => [ <add> 'username' => 'mark', <add> 'password' => 'sekret', <add> 'hidden' => '0', <add> ], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateFailNoDisabled method <add> * <add> * Test that missing 'unlocked' input causes failure. <add> * <add> * @return void <add> */ <add> public function testValidateFailNoDisabled(): void <add> { <add> $fields = ['Model.hidden', 'Model.password', 'Model.username']; <add> $fields = urlencode(Security::hash(serialize($fields) . Security::getSalt())); <add> <add> $data = [ <add> 'Model' => [ <add> 'username' => 'mark', <add> 'password' => 'sekret', <add> 'hidden' => '0', <add> ], <add> '_Token' => compact('fields'), <add> ]; <add> <add> $this->validate($data, '\'_Token.unlocked\' was not found in request data.'); <add> } <add> <add> /** <add> * testValidateFailNoDebug method <add> * <add> * Test that missing 'debug' input causes failure. <add> * <add> * @return void <add> */ <add> public function testValidateFailNoDebug(): void <add> { <add> $fields = ['Model.hidden', 'Model.password', 'Model.username']; <add> $fields = urlencode(Security::hash(serialize($fields) . Security::getSalt())); <add> $unlocked = ''; <add> <add> $data = [ <add> 'Model' => [ <add> 'username' => 'mark', <add> 'password' => 'sekret', <add> 'hidden' => '0', <add> ], <add> '_Token' => compact('fields', 'unlocked'), <add> ]; <add> <add> $this->validate($data, '\'_Token.debug\' was not found in request data.'); <add> } <add> <add> /** <add> * testValidateFailNoDebugMode method <add> * <add> * Test that missing 'debug' input is not the problem when debug mode disabled. <add> * <add> * @return void <add> */ <add> public function testValidateFailNoDebugMode(): void <add> { <add> $fields = ['Model.hidden', 'Model.password', 'Model.username']; <add> $fields = urlencode(Security::hash(serialize($fields) . Security::getSalt())); <add> $unlocked = ''; <add> <add> $data = [ <add> 'Model' => [ <add> 'username' => 'mark', <add> 'password' => 'sekret', <add> 'hidden' => '0', <add> ], <add> '_Token' => compact('fields', 'unlocked'), <add> ]; <add> Configure::write('debug', false); <add> $protector = new FormProtector(); <add> $result = $protector->validate($data, $this->url, $this->sessionId); <add> $this->assertFalse($result); <add> } <add> <add> /** <add> * testValidateFailDisabledFieldTampering method <add> * <add> * Test that validate fails when unlocked fields are changed. <add> * <add> * @return void <add> */ <add> public function testValidateFailDisabledFieldTampering(): void <add> { <add> $unlocked = 'Model.username'; <add> $fields = ['Model.hidden', 'Model.password']; <add> $fields = urlencode(Security::hash(serialize($fields) . $unlocked . Security::getSalt())); <add> $debug = urlencode(json_encode([ <add> '/articles/index', <add> ['Model.hidden', 'Model.password'], <add> ['Model.username'], <add> ])); <add> <add> // Tamper the values. 
<add> $unlocked = 'Model.username|Model.password'; <add> <add> $data = [ <add> 'Model' => [ <add> 'username' => 'mark', <add> 'password' => 'sekret', <add> 'hidden' => '0', <add> ], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> <add> $this->validate($data, 'Missing field \'Model.password\' in POST data, Unexpected unlocked field \'Model.password\' in POST data'); <add> } <add> <add> /** <add> * testValidateHiddenMultipleModel method <add> * <add> * @return void <add> */ <add> public function testValidateHiddenMultipleModel(): void <add> { <add> $fields = '642b7a6db3b848fab88952b86ea36c572f93df40%3AModel.valid%7CModel2.valid%7CModel3.valid'; <add> $unlocked = ''; <add> $debug = 'not used'; <add> <add> $data = [ <add> 'Model' => ['username' => '', 'password' => '', 'valid' => '0'], <add> 'Model2' => ['valid' => '0'], <add> 'Model3' => ['valid' => '0'], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateHasManyModel method <add> * <add> * @return void <add> */ <add> public function testValidateHasManyModel(): void <add> { <add> $fields = '792324c8a374772ad82acfb28f0e77e70f8ed3af%3AModel.0.hidden%7CModel.0.valid'; <add> $fields .= '%7CModel.1.hidden%7CModel.1.valid'; <add> $unlocked = ''; <add> $debug = 'not used'; <add> <add> $data = [ <add> 'Model' => [ <add> [ <add> 'username' => 'username', 'password' => 'password', <add> 'hidden' => 'value', 'valid' => '0', <add> ], <add> [ <add> 'username' => 'username', 'password' => 'password', <add> 'hidden' => 'value', 'valid' => '0', <add> ], <add> ], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateHasManyRecordsPass method <add> * <add> * @return void <add> */ <add> public function testValidateHasManyRecordsPass(): void <add> { <add> $fields = '7f4bff67558e25ebeea44c84ea4befa8d50b080c%3AAddress.0.id%7CAddress.0.primary%7C'; <add> $fields .= 'Address.1.id%7CAddress.1.primary'; <add> $unlocked = ''; <add> $debug = 'not used'; <add> <add> $data = [ <add> 'Address' => [ <add> 0 => [ <add> 'id' => '123', <add> 'title' => 'home', <add> 'first_name' => 'Bilbo', <add> 'last_name' => 'Baggins', <add> 'address' => '23 Bag end way', <add> 'city' => 'the shire', <add> 'phone' => 'N/A', <add> 'primary' => '1', <add> ], <add> 1 => [ <add> 'id' => '124', <add> 'title' => 'home', <add> 'first_name' => 'Frodo', <add> 'last_name' => 'Baggins', <add> 'address' => '50 Bag end way', <add> 'city' => 'the shire', <add> 'phone' => 'N/A', <add> 'primary' => '1', <add> ], <add> ], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateHasManyRecords method <add> * <add> * validate should fail, hidden fields have been changed. 
<add> * <add> * @return void <add> */ <add> public function testValidateHasManyRecordsFail(): void <add> { <add> $fields = '7a203edb3d345bbf38fe0dccae960da8842e11d7%3AAddress.0.id%7CAddress.0.primary%7C'; <add> $fields .= 'Address.1.id%7CAddress.1.primary'; <add> $unlocked = ''; <add> $debug = urlencode(json_encode([ <add> '/articles/index', <add> [ <add> 'Address.0.address', <add> 'Address.0.city', <add> 'Address.0.first_name', <add> 'Address.0.last_name', <add> 'Address.0.phone', <add> 'Address.0.title', <add> 'Address.1.address', <add> 'Address.1.city', <add> 'Address.1.first_name', <add> 'Address.1.last_name', <add> 'Address.1.phone', <add> 'Address.1.title', <add> 'Address.0.id' => '123', <add> 'Address.0.primary' => '5', <add> 'Address.1.id' => '124', <add> 'Address.1.primary' => '1', <add> ], <add> [], <add> ])); <add> <add> $data = [ <add> 'Address' => [ <add> 0 => [ <add> 'id' => '123', <add> 'title' => 'home', <add> 'first_name' => 'Bilbo', <add> 'last_name' => 'Baggins', <add> 'address' => '23 Bag end way', <add> 'city' => 'the shire', <add> 'phone' => 'N/A', <add> 'primary' => '5', <add> ], <add> 1 => [ <add> 'id' => '124', <add> 'title' => 'home', <add> 'first_name' => 'Frodo', <add> 'last_name' => 'Baggins', <add> 'address' => '50 Bag end way', <add> 'city' => 'the shire', <add> 'phone' => 'N/A', <add> 'primary' => '1', <add> ], <add> ], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> <add> $protector = new FormProtector(); <add> $result = $protector->validate($data, $this->url, $this->sessionId); <add> $this->assertFalse($result); <add> } <add> <add> /** <add> * testValidateRadio method <add> * <add> * Test validate with radio buttons. <add> * <add> * @return void <add> * @triggers Controller.startup $this->Controller <add> */ <add> public function testValidateRadio(): void <add> { <add> $fields = 'a709dfdee0a0cce52c4c964a1b8a56159bb081b4%3An%3A0%3A%7B%7D'; <add> $unlocked = ''; <add> $debug = urlencode(json_encode([ <add> '/articles/index', <add> [], <add> [], <add> ])); <add> <add> $data = [ <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $protector = new FormProtector(); <add> $result = $protector->validate($data, $this->url, $this->sessionId); <add> $this->assertFalse($result); <add> <add> $data = [ <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> 'Test' => ['test' => ''], <add> ]; <add> $this->validate($data); <add> <add> $data = [ <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> 'Test' => ['test' => '1'], <add> ]; <add> $this->validate($data); <add> <add> $data = [ <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> 'Test' => ['test' => '2'], <add> ]; <add> $this->validate($data); <add> } <add> <add> /** <add> * testValidateUrlAsHashInput method <add> * <add> * Test validate uses here() as a hash input. 
<add> * <add> * @return void <add> */ <add> public function testValidateUrlAsHashInput(): void <add> { <add> $fields = 'de2ca3670dd06c29558dd98482c8739e86da2c7c%3A'; <add> $unlocked = ''; <add> $debug = urlencode(json_encode([ <add> 'another-url', <add> ['Model.username', 'Model.password'], <add> [], <add> ])); <add> <add> $data = [ <add> 'Model' => ['username' => '', 'password' => ''], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> $this->validate($data); <add> <add> $this->url = '/posts/index?page=1'; <add> $this->validate( <add> $data, <add> 'URL mismatch in POST data (expected \'another-url\' but found \'/posts/index?page=1\')' <add> ); <add> <add> $this->url = '/posts/edit/1'; <add> $this->validate( <add> $data, <add> 'URL mismatch in POST data (expected \'another-url\' but found \'/posts/edit/1\')' <add> ); <add> } <add> <add> /** <add> * testValidateDebugFormat method <add> * <add> * Test that debug token format is right. <add> * <add> * @return void <add> */ <add> public function testValidateDebugFormat(): void <add> { <add> $unlocked = 'Model.username'; <add> $fields = ['Model.hidden', 'Model.password']; <add> $fields = urlencode(Security::hash(serialize($fields) . $unlocked . Security::getSalt())); <add> $debug = urlencode(json_encode([ <add> '/articles/index', <add> ['Model.hidden', 'Model.password'], <add> ['Model.username'], <add> ['not expected'], <add> ])); <add> <add> $data = [ <add> 'Model' => [ <add> 'username' => 'mark', <add> 'password' => 'sekret', <add> 'hidden' => '0', <add> ], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> <add> $this->validate($data, 'Invalid form protection debug token.'); <add> <add> $debug = urlencode(json_encode('not an array')); <add> $this->validate($data, 'Invalid form protection debug token.'); <add> } <add> <add> /** <add> * testValidateFailTampering method <add> * <add> * Test that validate fails with tampered fields and explanation. <add> * <add> * @return void <add> */ <add> public function testValidateFailTampering(): void <add> { <add> $unlocked = ''; <add> $fields = ['Model.hidden' => 'value', 'Model.id' => '1']; <add> $debug = urlencode(json_encode([ <add> '/articles/index', <add> $fields, <add> [], <add> ])); <add> $fields = urlencode(Security::hash(serialize($fields) . $unlocked . Security::getSalt())); <add> $fields .= urlencode(':Model.hidden|Model.id'); <add> $data = [ <add> 'Model' => [ <add> 'hidden' => 'tampered', <add> 'id' => '1', <add> ], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> <add> $this->validate($data, 'Tampered field \'Model.hidden\' in POST data (expected value \'value\' but found \'tampered\')'); <add> } <add> <add> /** <add> * testValidateFailTamperingMutatedIntoArray method <add> * <add> * Test that validate fails with tampered fields and explanation. <add> * <add> * @return void <add> */ <add> public function testValidateFailTamperingMutatedIntoArray(): void <add> { <add> $unlocked = ''; <add> $fields = ['Model.hidden' => 'value', 'Model.id' => '1']; <add> $debug = urlencode(json_encode([ <add> '/articles/index', <add> $fields, <add> [], <add> ])); <add> $fields = urlencode(Security::hash(serialize($fields) . $unlocked . 
Security::getSalt())); <add> $fields .= urlencode(':Model.hidden|Model.id'); <add> $data = [ <add> 'Model' => [ <add> 'hidden' => ['some-key' => 'some-value'], <add> 'id' => '1', <add> ], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> <add> $this->validate( <add> $data, <add> 'Unexpected field \'Model.hidden.some-key\' in POST data, Missing field \'Model.hidden\' in POST data' <add> ); <add> } <add> <add> /** <add> * testValidateUnexpectedDebugToken method <add> * <add> * Test that debug token should not be sent if debug is disabled. <add> * <add> * @return void <add> */ <add> public function testValidateUnexpectedDebugToken(): void <add> { <add> $unlocked = ''; <add> $fields = ['Model.hidden' => 'value', 'Model.id' => '1']; <add> $debug = urlencode(json_encode([ <add> '/articles/index', <add> $fields, <add> [], <add> ])); <add> $fields = urlencode(Security::hash(serialize($fields) . $unlocked . Security::getSalt())); <add> $fields .= urlencode(':Model.hidden|Model.id'); <add> $data = [ <add> 'Model' => [ <add> 'hidden' => ['some-key' => 'some-value'], <add> 'id' => '1', <add> ], <add> '_Token' => compact('fields', 'unlocked', 'debug'), <add> ]; <add> Configure::write('debug', false); <add> $this->validate($data, 'Unexpected \'_Token.debug\' found in request data'); <add> } <add>}
2
Python
Python
add dataflow operations to azure datafactory hook
24d88e8feedcb11edc316f0d3f20f4ea54dc23b8
<ide><path>airflow/providers/microsoft/azure/hooks/data_factory.py <ide> PipelineRun <ide> TriggerResource <ide> datafactory <add> DataFlow <ide> mgmt <ide> """ <ide> from __future__ import annotations <ide> from azure.mgmt.datafactory import DataFactoryManagementClient <ide> from azure.mgmt.datafactory.models import ( <ide> CreateRunResponse, <add> DataFlow, <ide> DatasetResource, <ide> Factory, <ide> LinkedServiceResource, <ide> def delete_dataset( <ide> Delete the dataset. <ide> <ide> :param dataset_name: The dataset name. <del> :param resource_group_name: The dataset name. <add> :param resource_group_name: The resource group name. <ide> :param factory_name: The factory name. <ide> :param config: Extra parameters for the ADF client. <ide> """ <ide> self.get_conn().datasets.delete(resource_group_name, factory_name, dataset_name, **config) <ide> <add> @provide_targeted_factory <add> def get_dataflow( <add> self, <add> dataflow_name: str, <add> resource_group_name: str | None = None, <add> factory_name: str | None = None, <add> **config: Any, <add> ) -> DataFlow: <add> """ <add> Get the dataflow. <add> <add> :param dataflow_name: The dataflow name. <add> :param resource_group_name: The resource group name. <add> :param factory_name: The factory name. <add> :param config: Extra parameters for the ADF client. <add> :return: The dataflow. <add> """ <add> return self.get_conn().data_flows.get(resource_group_name, factory_name, dataflow_name, **config) <add> <add> def _dataflow_exists( <add> self, <add> dataflow_name: str, <add> resource_group_name: str | None = None, <add> factory_name: str | None = None, <add> ) -> bool: <add> """Return whether the dataflow already exists.""" <add> dataflows = { <add> dataflow.name <add> for dataflow in self.get_conn().data_flows.list_by_factory(resource_group_name, factory_name) <add> } <add> <add> return dataflow_name in dataflows <add> <add> @provide_targeted_factory <add> def update_dataflow( <add> self, <add> dataflow_name: str, <add> dataflow: DataFlow, <add> resource_group_name: str | None = None, <add> factory_name: str | None = None, <add> **config: Any, <add> ) -> DataFlow: <add> """ <add> Update the dataflow. <add> <add> :param dataflow_name: The dataflow name. <add> :param dataflow: The dataflow resource definition. <add> :param resource_group_name: The resource group name. <add> :param factory_name: The factory name. <add> :param config: Extra parameters for the ADF client. <add> :raise AirflowException: If the dataset does not exist. <add> :return: The dataflow. <add> """ <add> if not self._dataflow_exists( <add> dataflow_name, <add> resource_group_name, <add> factory_name, <add> ): <add> raise AirflowException(f"Dataflow {dataflow_name!r} does not exist.") <add> <add> return self.get_conn().data_flows.create_or_update( <add> resource_group_name, factory_name, dataflow_name, dataflow, **config <add> ) <add> <add> @provide_targeted_factory <add> def create_dataflow( <add> self, <add> dataflow_name: str, <add> dataflow: DataFlow, <add> resource_group_name: str | None = None, <add> factory_name: str | None = None, <add> **config: Any, <add> ) -> DataFlow: <add> """ <add> Create the dataflow. <add> <add> :param dataflow_name: The dataflow name. <add> :param dataflow: The dataflow resource definition. <add> :param resource_group_name: The resource group name. <add> :param factory_name: The factory name. <add> :param config: Extra parameters for the ADF client. <add> :raise AirflowException: If the dataset already exists. <add> :return: The dataset. 
<add> """ <add> if self._dataflow_exists(dataflow_name, resource_group_name, factory_name): <add> raise AirflowException(f"Dataflow {dataflow_name!r} already exists.") <add> <add> return self.get_conn().data_flows.create_or_update( <add> resource_group_name, factory_name, dataflow_name, dataflow, **config <add> ) <add> <add> @provide_targeted_factory <add> def delete_dataflow( <add> self, <add> dataflow_name: str, <add> resource_group_name: str | None = None, <add> factory_name: str | None = None, <add> **config: Any, <add> ) -> None: <add> """ <add> Delete the dataflow. <add> <add> :param dataflow_name: The dataflow name. <add> :param resource_group_name: The resource group name. <add> :param factory_name: The factory name. <add> :param config: Extra parameters for the ADF client. <add> """ <add> self.get_conn().data_flows.delete(resource_group_name, factory_name, dataflow_name, **config) <add> <ide> @provide_targeted_factory <ide> def get_pipeline( <ide> self, <ide><path>tests/providers/microsoft/azure/hooks/test_azure_data_factory.py <ide> def hook(): <ide> "pipeline_runs", <ide> "triggers", <ide> "trigger_runs", <add> "data_flows", <ide> ] <ide> ) <ide> <ide> def test_delete_dataset(hook: AzureDataFactoryHook, user_args, sdk_args): <ide> hook._conn.datasets.delete.assert_called_with(*sdk_args) <ide> <ide> <add>@parametrize( <add> explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), <add> implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)), <add>) <add>def test_get_dataflow(hook: AzureDataFactoryHook, user_args, sdk_args): <add> hook.get_dataflow(*user_args) <add> <add> hook._conn.data_flows.get.assert_called_with(*sdk_args) <add> <add> <add>@parametrize( <add> explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), <add> implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), <add>) <add>def test_create_dataflow(hook: AzureDataFactoryHook, user_args, sdk_args): <add> hook.create_dataflow(*user_args) <add> <add> hook._conn.data_flows.create_or_update.assert_called_with(*sdk_args) <add> <add> <add>@parametrize( <add> explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), <add> implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), <add>) <add>def test_update_dataflow(hook: AzureDataFactoryHook, user_args, sdk_args): <add> with patch.object(hook, "_dataflow_exists") as mock_dataflow_exists: <add> mock_dataflow_exists.return_value = True <add> hook.update_dataflow(*user_args) <add> <add> hook._conn.data_flows.create_or_update.assert_called_with(*sdk_args) <add> <add> <add>@parametrize( <add> explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), <add> implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), <add>) <add>def test_update_dataflow_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args): <add> with patch.object(hook, "_dataflow_exists") as mock_dataflow_exists: <add> mock_dataflow_exists.return_value = False <add> <add> with pytest.raises(AirflowException, match=r"Dataflow .+ does not exist"): <add> hook.update_dataflow(*user_args) <add> <add> <add>@parametrize( <add> explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), <add> implicit_factory=( <add> (NAME,), <add> ( <add> DEFAULT_RESOURCE_GROUP, <add> DEFAULT_FACTORY, <add> NAME, <add> ), <add> ), <add>) 
<add>def test_delete_dataflow(hook: AzureDataFactoryHook, user_args, sdk_args): <add> hook.delete_dataflow(*user_args) <add> <add> hook._conn.data_flows.delete.assert_called_with(*sdk_args) <add> <add> <ide> @parametrize( <ide> explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), <ide> implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)),
2
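A minimal usage sketch of the dataflow methods added in the patch above, assuming an Airflow deployment with an Azure Data Factory connection configured; the connection id, resource group, factory, and dataflow names below are placeholders, not values from the commit.

from airflow.providers.microsoft.azure.hooks.data_factory import AzureDataFactoryHook

hook = AzureDataFactoryHook(azure_data_factory_conn_id="azure_data_factory_default")  # placeholder conn id

# Fetch an existing dataflow; resource_group_name and factory_name may be omitted,
# in which case @provide_targeted_factory falls back to the connection defaults.
dataflow = hook.get_dataflow("my_dataflow", resource_group_name="my-rg", factory_name="my-factory")

# create_dataflow raises AirflowException if the dataflow already exists and
# update_dataflow raises if it does not; both wrap data_flows.create_or_update.
hook.update_dataflow("my_dataflow", dataflow, resource_group_name="my-rg", factory_name="my-factory")

# Remove it again when no longer needed.
hook.delete_dataflow("my_dataflow", resource_group_name="my-rg", factory_name="my-factory")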
Python
Python
add lots of extra variables to the ec2 driver
b871ac8ce741f0f47b55cce66dabc0a35421804a
<ide><path>libcloud/drivers/ec2.py <ide> def _fixxpath(self, xpath): <ide> def _findattr(self, element, xpath): <ide> return element.findtext(self._fixxpath(xpath)) <ide> <add> def _findall(self, element, xpath): <add> return element.findall(self._fixxpath(xpath)) <add> <ide> def _pathlist(self, key, arr): <ide> """Converts a key and an array of values into AWS query param <ide> format.""" <ide> def _to_node(self, element): <ide> state=state, <ide> public_ip=[self._findtext(element, 'dnsName')], <ide> private_ip=[self._findtext(element, 'privateDnsName')], <del> driver=self.connection.driver) <add> driver=self.connection.driver, <add> extra = { <add> 'dns_name': self._findattr(element, "dnsName"), <add> 'instanceId': self._findattr(element, "instanceId"), <add> 'imageId': self._findattr(element, "imageId"), <add> 'private_dns': self._findattr(element, "privateDnsName"), <add> 'status': self._findattr(element, "instanceState/name"), <add> 'keyname': self._findattr(element, "keyName"), <add> 'launchindex': self._findattr(element, "amiLaunchIndex"), <add> 'productcode': [p.text for p in self._findall(element, "productCodesSet/item/productCode")], <add> 'instancetype': self._findattr(element, "instanceType"), <add> 'launchdatetime': self._findattr(element, "launchTime"), <add> 'availability': self._findattr(element, "placement/availabilityZone"), <add> 'kernelid': self._findattr(element, "kernelId"), <add> 'ramdiskid': self._findattr(element, "ramdiskId") <add> }) <ide> return n <ide> <ide> def _to_images(self, object):
1
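A short sketch of reading the new extra attributes from nodes returned by this driver. The import path follows the patched module; the driver class name, credentials, and printed keys are taken on the assumption that this is the standard EC2 driver of that libcloud era.

from libcloud.drivers.ec2 import EC2NodeDriver  # module path as in the patch above

driver = EC2NodeDriver("ACCESS_KEY_ID", "SECRET_KEY")  # placeholder credentials

for node in driver.list_nodes():
    # _to_node() now populates node.extra with per-instance metadata.
    print(node.name, node.extra["instancetype"], node.extra["availability"])
    print("launched:", node.extra["launchdatetime"], "keypair:", node.extra["keyname"])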
Javascript
Javascript
increase time limit in qtest
af9c7ac3cee034dbc5542058c9dd3ff36610a924
<ide><path>tasks/qtest.js <ide> module.exports = function (grunt) { <ide> testrunner.options.log.tests = false; <ide> testrunner.options.log.summary = false; <ide> testrunner.options.log.testing = false; <del> testrunner.options.maxBlockDuration = 5000; <add> testrunner.options.maxBlockDuration = 10000; <ide> <ide> testrunner.run({ <ide> code: "build/umd/moment.js",
1
Python
Python
remove pos arg from perceiver's pre/postprocessors
408b5e307b495b2c14e9c83ff1c62b944ff366af
<ide><path>src/transformers/models/perceiver/modeling_perceiver.py <ide> def num_channels(self) -> int: <ide> <ide> return inp_dim + pos_dim <ide> <del> def _build_network_inputs(self, inputs: torch.Tensor, pos: torch.Tensor, network_input_is_1d: bool = True): <add> def _build_network_inputs(self, inputs: torch.Tensor, network_input_is_1d: bool = True): <ide> """ <ide> Construct the final input, including position encoding. <ide> <ide> def forward(self, inputs: torch.Tensor, pos: Optional[torch.Tensor] = None, netw <ide> else: <ide> raise ValueError("Unsupported data format for conv1x1.") <ide> <del> inputs, inputs_without_pos = self._build_network_inputs(inputs, pos, network_input_is_1d) <add> inputs, inputs_without_pos = self._build_network_inputs(inputs, network_input_is_1d) <ide> modality_sizes = None # Size for each modality, only needed for multimodal <ide> <ide> return inputs, modality_sizes, inputs_without_pos <ide> def num_channels(self) -> int: <ide> return pos_dim <ide> return self.samples_per_patch + pos_dim <ide> <del> def _build_network_inputs(self, inputs, pos): <add> def _build_network_inputs(self, inputs): <ide> """Construct the final input, including position encoding.""" <ide> batch_size = inputs.shape[0] <ide> index_dims = inputs.shape[1:-1] <ide> def _build_network_inputs(self, inputs, pos): <ide> def forward(self, inputs: torch.Tensor, pos: Optional[torch.Tensor] = None, network_input_is_1d: bool = True): <ide> inputs = torch.reshape(inputs, [inputs.shape[0], -1, self.samples_per_patch]) <ide> <del> inputs, inputs_without_pos = self._build_network_inputs(inputs, pos) <add> inputs, inputs_without_pos = self._build_network_inputs(inputs) <ide> modality_sizes = None # Size for each modality, only needed for multimodal <ide> <ide> return inputs, modality_sizes, inputs_without_pos
1
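A conceptual sketch (not the actual transformers code) of why the pos argument could be dropped: inside _build_network_inputs the position features are generated from the inputs' own batch size and index dimensions, so the parameter was never consumed. All names below are illustrative stand-ins.

import torch

def build_network_inputs(inputs, position_encoder, concat_or_add_pos="concat"):
    # Position features depend only on batch size and the inputs' index dims,
    # which is why the removed `pos` parameter was unused.
    batch_size = inputs.shape[0]
    index_dims = inputs.shape[1:-1]
    pos_enc = position_encoder(index_dims, batch_size)
    if concat_or_add_pos == "concat":
        return torch.cat([inputs, pos_enc], dim=-1), inputs
    return inputs + pos_enc, inputs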
Python
Python
update esmfold conversion script
6f257bb3c241eabf7eb970240a92fdbf6ff2de49
<ide><path>src/transformers/models/esm/convert_esm.py <ide> import torch <ide> <ide> import esm as esm_module <add>from esm.esmfold.v1.misc import batch_encode_sequences as esmfold_encode_sequences <ide> from esm.esmfold.v1.pretrained import esmfold_v1 <ide> from transformers.models.esm.configuration_esm import EsmConfig, EsmFoldConfig <ide> from transformers.models.esm.modeling_esm import ( <ide> logger = logging.get_logger(__name__) <ide> <ide> SAMPLE_DATA = [ <del> ("protein1", "MKTVRQERLKSIVRILERSKEPVSGAQLAEELSVSRQVIVQDIAYLRSLGYNIVATPRGYVLAGG"), <add> ( <add> "protein1", <add> "MNGTEGPNFYVPFSNATGVVRSPFEYPQYYLAEPWQFSMLAAYMFLLIVLGFPINFLTLYVTVQHKKLRTPLNYILLNLAVADLFMVLGGFTSTLYTSLHGYFVFGPTGCNLEGFFATLGGEIALWSLVVLAIERYVVVCKPMSNFRFGENHAIMGVAFTWVMALACAAPPLAGWSRYIPEGLQCSCGIDYYTLKPEVNNESFVIYMFVVHFTIPMIIIFFCYGQLVFTVKEAAAQQQESATTQKAEKEVTRMVIIMVIAFLICWVPYASVAFYIFTHQGSNFGPIFMTIPAFFAKSAAIYNPVIYIMMNKQFRNCMLTTICCGKNPLGDDEASATVSKTETSQVAPA", <add> ), <ide> ("protein2", "MKTVRQERLKSIVRILERSKEPVSGAQLAEELSVSRQVIVQDIAYLRSLGYNIVATPRGYVLA"), <ide> ("protein3", "MKTVRQERLKSI<mask>RILERSKEPVSGAQLAEELS<mask>SRQVIVQDIAYLRSLGYN<mask>VATPRGYVLAGG"), <ide> ("protein4", "MKTVRQERLKSI<mask>RILERSKEPVSGAQLAEELS<mask>SRQVIVQDIAYLRSLGYN<mask>VATPRGYVLA"), <ide> "esmfold_v1": esmfold_v1, <ide> } <ide> <add>restypes = list("ARNDCQEGHILKMFPSTWYV") <add> <add>restypes_with_x = restypes + ["X"] <add>restypes_with_extras = restypes_with_x + ["<pad>", "<mask>", "<cls>", "<sep>", "<eos>"] <add> <add> <add>def get_esmfold_tokenizer(): <add> with TemporaryDirectory() as tempdir: <add> vocab = "\n".join(restypes_with_extras) <add> vocab_file = Path(tempdir) / "vocab.txt" <add> vocab_file.write_text(vocab) <add> hf_tokenizer = EsmTokenizer(vocab_file=str(vocab_file)) <add> hf_tokenizer.pad_token_id = 0 # Overlaps with 'A' but that seems to be what they want <add> return hf_tokenizer <add> <ide> <ide> def transfer_and_check_weights(original_module, our_module): <ide> status = our_module.load_state_dict(original_module.state_dict()) <ide> def convert_esm_checkpoint_to_pytorch( <ide> """ <ide> if model.startswith("esmfold"): <ide> esm = MODEL_MAPPING[model]() <del> alphabet = esm.esm.alphabet <ide> else: <ide> esm, alphabet = MODEL_MAPPING[model]() <ide> esm.eval() # disable dropout <ide> def convert_esm_checkpoint_to_pytorch( <ide> is_folding_model = False <ide> esmfold_config = None <ide> <add> if is_folding_model: <add> alphabet = esm.esm.alphabet <ide> vocab_list = tuple(alphabet.all_toks) <add> mask_token_id = alphabet.mask_idx <add> pad_token_id = alphabet.padding_idx <ide> <ide> if is_folding_model: <ide> original_esm_model = esm.esm <ide> def convert_esm_checkpoint_to_pytorch( <ide> <ide> config = EsmConfig( <ide> vocab_size=original_esm_model.embed_tokens.num_embeddings, <del> mask_token_id=alphabet.mask_idx, <add> mask_token_id=mask_token_id, <ide> hidden_size=embed_dim, <ide> num_hidden_layers=num_layers, <ide> num_attention_heads=num_attention_heads, <ide> def convert_esm_checkpoint_to_pytorch( <ide> layer_norm_eps=1e-5, # PyTorch default used in fairseq <ide> attention_probs_dropout_prob=0.0, <ide> hidden_dropout_prob=0.0, <del> pad_token_id=alphabet.padding_idx, <add> pad_token_id=pad_token_id, <ide> emb_layer_norm_before=emb_layer_norm_before, <ide> token_dropout=token_dropout, <ide> position_embedding_type=position_embedding_type, <ide> def convert_esm_checkpoint_to_pytorch( <ide> <ide> if is_folding_model: <ide> model.esm_s_combine.data = esm.esm_s_combine.data <add> model.af2_to_esm.data = esm.af2_to_esm.data <ide> 
transfer_and_check_weights(esm.embedding, model.embedding) <ide> transfer_and_check_weights(esm.esm_s_mlp, model.esm_s_mlp) <ide> transfer_and_check_weights(esm.trunk, model.trunk) <ide> def convert_esm_checkpoint_to_pytorch( <ide> model.lm_head.decoder.weight = esm.lm_head.weight <ide> model.lm_head.bias = esm.lm_head.bias <ide> <del> # Let's check that we get the same results. <del> batch_converter = alphabet.get_batch_converter() <del> <ide> # Prepare data (first 2 sequences from ESMStructuralSplitDataset superfamily / 4) <ide> if is_folding_model: <ide> # Folding models aren't trained on masked inputs and don't like mask tokens. <ide> sample_data = SAMPLE_DATA[:2] <ide> else: <ide> sample_data = SAMPLE_DATA <ide> <del> batch_labels, batch_strs, batch_tokens = batch_converter(sample_data) <del> # Prepare tokenizer and make sure it matches <del> with TemporaryDirectory() as tempdir: <del> vocab = "\n".join(alphabet.all_toks) <del> vocab_file = Path(tempdir) / "vocab.txt" <del> vocab_file.write_text(vocab) <del> hf_tokenizer = EsmTokenizer(vocab_file=str(vocab_file)) <add> if is_folding_model: <add> hf_tokenizer = get_esmfold_tokenizer() <add> hf_tokens = hf_tokenizer( <add> [row[1] for row in sample_data], return_tensors="pt", padding=True, add_special_tokens=False <add> ) <add> esmfold_aas, esmfold_mask, _, _, _ = esmfold_encode_sequences([row[1] for row in sample_data]) <add> success = torch.all(hf_tokens["input_ids"] == esmfold_aas) and torch.all( <add> hf_tokens["attention_mask"] == esmfold_mask <add> ) <add> else: <add> # Let's check that we get the same results. <add> batch_converter = alphabet.get_batch_converter() <add> batch_labels, batch_strs, batch_tokens = batch_converter(sample_data) <add> # Prepare tokenizer and make sure it matches <add> with TemporaryDirectory() as tempdir: <add> vocab = "\n".join(alphabet.all_toks) <add> vocab_file = Path(tempdir) / "vocab.txt" <add> vocab_file.write_text(vocab) <add> hf_tokenizer = EsmTokenizer(vocab_file=str(vocab_file)) <add> <add> hf_tokens = hf_tokenizer([row[1] for row in sample_data], return_tensors="pt", padding=True) <add> success = torch.all(hf_tokens["input_ids"] == batch_tokens) <ide> <del> hf_tokens = hf_tokenizer([row[1] for row in sample_data], return_tensors="pt", padding=True) <del> success = torch.all(hf_tokens["input_ids"] == batch_tokens) <ide> print("Do both models tokenizers output the same tokens?", "🔥" if success else "💩") <ide> if not success: <ide> raise Exception("Tokenization does not match!") <ide> def convert_esm_checkpoint_to_pytorch( <ide> # that don't exist on CPU. Therefore, to test it we need to run it on GPU. However, <ide> # ESMFold is what we in the community call a "big boy" and so we desperately avoid putting both the <ide> # original and the converted model on the GPU at the same time. 
<add> their_output = esm.cuda().infer([row[1] for row in sample_data]) <ide> our_output = model.cuda()( <ide> input_ids=hf_tokens["input_ids"].cuda(), attention_mask=hf_tokens["attention_mask"].cuda() <ide> ) <del> their_output = esm.cuda()(hf_tokens["input_ids"].cuda(), hf_tokens["attention_mask"].cuda()) <ide> else: <ide> our_output = model(**hf_tokens, output_hidden_states=True) <ide> our_output = our_output["logits"] <ide> def convert_esm_checkpoint_to_pytorch( <ide> print(f"Saving model to {pytorch_dump_folder_path}") <ide> model.save_pretrained(pytorch_dump_folder_path) <ide> <del> reloaded = model_class.from_pretrained(pytorch_dump_folder_path).cuda() <del> reloaded_output = reloaded( <del> input_ids=hf_tokens["input_ids"].cuda(), attention_mask=hf_tokens["attention_mask"].cuda() <del> ) <del> <del> if is_folding_model: <del> max_absolute_diff = torch.max(torch.abs(our_output["positions"] - reloaded_output["positions"])).item() <del> success = torch.allclose(our_output["positions"], their_output["positions"], atol=1e-6) <del> else: <del> max_absolute_diff = torch.max(torch.abs(our_output - reloaded_output["logits"])).item() <del> success = torch.allclose(our_output, reloaded_output["logits"], atol=1e-6) <del> <del> print(f"max_absolute_diff = {max_absolute_diff}") <del> print("Does the model output the same tensors after reloading?", "🔥" if success else "💩") <del> <del> if not success: <del> raise Exception("Something went wRoNg") <add> del esm # Free up some memory before continuing <ide> <ide> print(f"Saving tokenizer to {pytorch_dump_folder_path}") <ide> hf_tokenizer.save_pretrained(pytorch_dump_folder_path) <ide><path>src/transformers/models/esm/modeling_esmfold.py <ide> def forward( <ide> <ide> >>> model = EsmForProteinFolding.from_pretrained("facebook/esmfold_v1") <ide> >>> tokenizer = AutoTokenizer.from_pretrained("facebook/esmfold_v1") <del> >>> inputs = tokenizer(["MLKNVQVQLV"], return_tensors="pt") # A tiny random peptide <add> >>> inputs = tokenizer(["MLKNVQVQLV"], return_tensors="pt", add_special_tokens=False) # A tiny random peptide <ide> >>> outputs = model(**inputs) <ide> >>> folded_positions = outputs.positions <ide> ```
2
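A usage sketch for the converted checkpoint, mirroring the docstring example in the patch; the "facebook/esmfold_v1" repo id comes from that example, while everything else (the peptide, running on CPU without autocast) is an assumption.

import torch
from transformers import AutoTokenizer, EsmForProteinFolding

tokenizer = AutoTokenizer.from_pretrained("facebook/esmfold_v1")
model = EsmForProteinFolding.from_pretrained("facebook/esmfold_v1")

# ESMFold is not trained with special tokens, so they are skipped here,
# matching the add_special_tokens=False calls in the conversion script.
inputs = tokenizer(["MLKNVQVQLV"], return_tensors="pt", add_special_tokens=False)

with torch.no_grad():
    outputs = model(**inputs)

folded_positions = outputs.positions  # predicted structure coordinates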
Java
Java
add onclosed to sockjssessionsupport sub-classes
2794224b28430190c53c7cd284ccfe33c8eb5d1c
<ide><path>spring-websocket/src/main/java/org/springframework/sockjs/SockJsSession.java <ide> */ <ide> public interface SockJsSession { <ide> <add> String getId(); <add> <ide> void sendMessage(String text) throws IOException; <ide> <ide> void close(); <ide><path>spring-websocket/src/main/java/org/springframework/sockjs/SockJsSessionSupport.java <ide> public void delegateException(Throwable ex) { <ide> this.sockJsHandler.handleException(this, ex); <ide> } <ide> <add> public void connectionClosed() { <add> this.state = State.CLOSED; <add> this.sockJsHandler.sessionClosed(this); <add> } <add> <ide> public void close() { <ide> this.state = State.CLOSED; <ide> this.sockJsHandler.sessionClosed(this); <ide><path>spring-websocket/src/main/java/org/springframework/sockjs/server/AbstractServerSession.java <ide> public final synchronized void sendMessage(String message) throws IOException { <ide> <ide> protected abstract void sendMessageInternal(String message) throws IOException; <ide> <add> <add> @Override <add> public void connectionClosed() { <add> logger.debug("Session closed"); <add> super.close(); <add> cancelHeartbeat(); <add> } <add> <add> @Override <ide> public final synchronized void close() { <ide> if (!isClosed()) { <ide> logger.debug("Closing session"); <del> <ide> if (isActive()) { <ide> // deliver messages "in flight" before sending close frame <ide> try { <ide> public final synchronized void close() { <ide> // ignore <ide> } <ide> } <del> <ide> super.close(); <del> <ide> cancelHeartbeat(); <ide> closeInternal(); <ide> } <ide><path>spring-websocket/src/main/java/org/springframework/sockjs/server/AbstractSockJsService.java <ide> import org.springframework.util.DigestUtils; <ide> import org.springframework.util.ObjectUtils; <ide> import org.springframework.util.StringUtils; <del>import org.springframework.web.util.UriUtils; <ide> <ide> <ide> /** <ide> private static final int ONE_YEAR = 365 * 24 * 60 * 60; <ide> <ide> <del> private final String prefix; <add> private String name = getClass().getSimpleName() + "@" + Integer.toHexString(hashCode()); <ide> <ide> private String clientLibraryUrl = "https://d1fxtkz8shb9d2.cloudfront.net/sockjs-0.3.4.min.js"; <ide> <ide> private final TaskSchedulerHolder heartbeatSchedulerHolder; <ide> <ide> <del> /** <del> * Class constructor... <del> * <del> * @param prefix the path prefix for the SockJS service. All requests with a path <del> * that begins with the specified prefix will be handled by this service. In a <del> * Servlet container this is the path within the current servlet mapping. <del> */ <del> public AbstractSockJsService(String prefix) { <del> Assert.hasText(prefix, "prefix is required"); <del> this.prefix = prefix; <add> <add> public AbstractSockJsService() { <ide> this.heartbeatSchedulerHolder = new TaskSchedulerHolder("SockJs-heartbeat-"); <ide> } <ide> <del> public AbstractSockJsService(String prefix, TaskScheduler heartbeatScheduler) { <del> Assert.hasText(prefix, "prefix is required"); <add> public AbstractSockJsService(TaskScheduler heartbeatScheduler) { <ide> Assert.notNull(heartbeatScheduler, "heartbeatScheduler is required"); <del> this.prefix = prefix; <ide> this.heartbeatSchedulerHolder = new TaskSchedulerHolder(heartbeatScheduler); <ide> } <ide> <ide> /** <del> * The path prefix to which the SockJS service is mapped. <add> * A unique name for the service mainly for logging purposes. 
<ide> */ <del> public String getPrefix() { <del> return this.prefix; <add> public void setName(String name) { <add> this.name = name; <add> } <add> <add> public String getName() { <add> return this.name; <ide> } <ide> <ide> /** <ide> public final void handleRequest(ServerHttpRequest request, ServerHttpResponse re <ide> // Ignore invalid Content-Type (TODO) <ide> } <ide> <del> String path = UriUtils.decode(request.getURI().getPath(), "URF-8"); <del> int index = path.indexOf(this.prefix); <del> sockJsPath = path.substring(index + this.prefix.length()); <del> <ide> try { <ide> if (sockJsPath.equals("") || sockJsPath.equals("/")) { <ide> response.getHeaders().setContentType(new MediaType("text", "plain", Charset.forName("UTF-8"))); <ide><path>spring-websocket/src/main/java/org/springframework/sockjs/server/SockJsService.java <ide> */ <ide> public interface SockJsService { <ide> <del> String getPrefix(); <del> <ide> /** <ide> * Pre-register {@link SockJsHandler} instances so they can be adapted to <ide> * {@link WebSocketHandler} and hence re-used at runtime when <ide><path>spring-websocket/src/main/java/org/springframework/sockjs/server/support/DefaultSockJsService.java <ide> public class DefaultSockJsService extends AbstractSockJsService implements Initi <ide> private final Map<SockJsHandler, WebSocketHandler> sockJsHandlers = new HashMap<SockJsHandler, WebSocketHandler>(); <ide> <ide> <del> public DefaultSockJsService(String prefix) { <del> super(prefix); <add> public DefaultSockJsService() { <ide> this.sessionTimeoutSchedulerHolder = new TaskSchedulerHolder("SockJs-sessionTimeout-"); <ide> } <ide> <del> public DefaultSockJsService(String prefix, TaskScheduler heartbeatScheduler, TaskScheduler sessionTimeoutScheduler) { <del> super(prefix, heartbeatScheduler); <add> public DefaultSockJsService(TaskScheduler heartbeatScheduler, TaskScheduler sessionTimeoutScheduler) { <ide> Assert.notNull(sessionTimeoutScheduler, "sessionTimeoutScheduler is required"); <ide> this.sessionTimeoutSchedulerHolder = new TaskSchedulerHolder(sessionTimeoutScheduler); <ide> } <ide> public void run() { <ide> try { <ide> int count = sessions.size(); <ide> if (logger.isTraceEnabled() && (count != 0)) { <del> logger.trace("Checking " + count + " session(s) for timeouts [" + getPrefix() + "]"); <add> logger.trace("Checking " + count + " session(s) for timeouts [" + getName() + "]"); <ide> } <ide> for (SockJsSessionSupport session : sessions.values()) { <ide> if (session.getTimeSinceLastActive() > getDisconnectDelay()) { <ide> if (logger.isTraceEnabled()) { <del> logger.trace("Removing " + session + " for [" + getPrefix() + "]"); <add> logger.trace("Removing " + session + " for [" + getName() + "]"); <ide> } <ide> session.close(); <ide> sessions.remove(session.getId()); <ide> } <ide> } <ide> if (logger.isTraceEnabled() && (count != 0)) { <del> logger.trace(sessions.size() + " remaining session(s) [" + getPrefix() + "]"); <add> logger.trace(sessions.size() + " remaining session(s) [" + getName() + "]"); <ide> } <ide> } <ide> catch (Throwable t) { <del> logger.error("Failed to complete session timeout checks for [" + getPrefix() + "]", t); <add> logger.error("Failed to complete session timeout checks for [" + getName() + "]", t); <ide> } <ide> } <ide> }, getDisconnectDelay()); <ide><path>spring-websocket/src/main/java/org/springframework/sockjs/server/support/SockJsHttpRequestHandler.java <ide> */ <ide> public class SockJsHttpRequestHandler implements HttpRequestHandler, BeanFactoryAware { <ide> <add> private final String 
prefix; <add> <ide> private final SockJsService sockJsService; <ide> <ide> private final HandlerProvider<SockJsHandler> handlerProvider; <ide> <ide> private final UrlPathHelper urlPathHelper = new UrlPathHelper(); <ide> <ide> <del> public SockJsHttpRequestHandler(SockJsService sockJsService, SockJsHandler sockJsHandler) { <add> /** <add> * Class constructor with {@link SockJsHandler} instance ... <add> * <add> * @param prefix the path prefix for the SockJS service. All requests with a path <add> * that begins with the specified prefix will be handled by this service. In a <add> * Servlet container this is the path within the current servlet mapping. <add> */ <add> public SockJsHttpRequestHandler(String prefix, SockJsService sockJsService, SockJsHandler sockJsHandler) { <add> <add> Assert.hasText(prefix, "prefix is required"); <ide> Assert.notNull(sockJsService, "sockJsService is required"); <ide> Assert.notNull(sockJsHandler, "sockJsHandler is required"); <add> <add> this.prefix = prefix; <ide> this.sockJsService = sockJsService; <ide> this.sockJsService.registerSockJsHandlers(Collections.singleton(sockJsHandler)); <ide> this.handlerProvider = new HandlerProvider<SockJsHandler>(sockJsHandler); <ide> } <ide> <del> public SockJsHttpRequestHandler(SockJsService sockJsService, Class<? extends SockJsHandler> sockJsHandlerClass) { <add> /** <add> * Class constructor with {@link SockJsHandler} type (per request) ... <add> * <add> * @param prefix the path prefix for the SockJS service. All requests with a path <add> * that begins with the specified prefix will be handled by this service. In a <add> * Servlet container this is the path within the current servlet mapping. <add> */ <add> public SockJsHttpRequestHandler(String prefix, SockJsService sockJsService, <add> Class<? 
extends SockJsHandler> sockJsHandlerClass) { <add> <add> Assert.hasText(prefix, "prefix is required"); <ide> Assert.notNull(sockJsService, "sockJsService is required"); <ide> Assert.notNull(sockJsHandlerClass, "sockJsHandlerClass is required"); <add> <add> this.prefix = prefix; <ide> this.sockJsService = sockJsService; <ide> this.handlerProvider = new HandlerProvider<SockJsHandler>(sockJsHandlerClass); <ide> } <ide> <del> public String getMappingPattern() { <del> return this.sockJsService.getPrefix() + "/**"; <add> public String getPrefix() { <add> return this.prefix; <add> } <add> <add> public String getPattern() { <add> return this.prefix + "/**"; <ide> } <ide> <ide> @Override <ide> public void handleRequest(HttpServletRequest request, HttpServletResponse respon <ide> throws ServletException, IOException { <ide> <ide> String lookupPath = this.urlPathHelper.getLookupPathForRequest(request); <del> String prefix = this.sockJsService.getPrefix(); <ide> <del> Assert.isTrue(lookupPath.startsWith(prefix), <del> "Request path does not match the prefix of the SockJsService " + prefix); <add> Assert.isTrue(lookupPath.startsWith(this.prefix), <add> "Request path does not match the prefix of the SockJsService " + this.prefix); <ide> <ide> String sockJsPath = lookupPath.substring(prefix.length()); <ide> <ide><path>spring-websocket/src/main/java/org/springframework/sockjs/server/transport/AbstractHttpServerSession.java <ide> private void tryFlushCache() throws IOException { <ide> */ <ide> protected abstract void flushCache() throws IOException; <ide> <add> @Override <add> public void connectionClosed() { <add> super.connectionClosed(); <add> resetRequest(); <add> } <add> <add> @Override <ide> protected void closeInternal() { <ide> resetRequest(); <ide> } <ide><path>spring-websocket/src/main/java/org/springframework/sockjs/server/transport/AbstractSockJsWebSocketHandler.java <del>/* <del> * Copyright 2002-2013 the original author or authors. <del> * <del> * Licensed under the Apache License, Version 2.0 (the "License"); <del> * you may not use this file except in compliance with the License. <del> * You may obtain a copy of the License at <del> * <del> * http://www.apache.org/licenses/LICENSE-2.0 <del> * <del> * Unless required by applicable law or agreed to in writing, software <del> * distributed under the License is distributed on an "AS IS" BASIS, <del> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <del> * See the License for the specific language governing permissions and <del> * limitations under the License. 
<del> */ <del> <del>package org.springframework.sockjs.server.transport; <del> <del>import java.io.InputStream; <del>import java.util.Map; <del>import java.util.concurrent.ConcurrentHashMap; <del> <del>import org.apache.commons.logging.Log; <del>import org.apache.commons.logging.LogFactory; <del>import org.springframework.sockjs.SockJsHandler; <del>import org.springframework.sockjs.SockJsSessionSupport; <del>import org.springframework.sockjs.server.SockJsConfiguration; <del>import org.springframework.util.Assert; <del>import org.springframework.websocket.WebSocketHandler; <del>import org.springframework.websocket.WebSocketSession; <del> <del> <del>/** <del> * <del> * @author Rossen Stoyanchev <del> * @since 4.0 <del> */ <del>public abstract class AbstractSockJsWebSocketHandler implements WebSocketHandler { <del> <del> protected final Log logger = LogFactory.getLog(getClass()); <del> <del> private final SockJsConfiguration sockJsConfig; <del> <del> private final SockJsHandler sockJsHandler; <del> <del> private final Map<WebSocketSession, SockJsSessionSupport> sessions = <del> new ConcurrentHashMap<WebSocketSession, SockJsSessionSupport>(); <del> <del> <del> public AbstractSockJsWebSocketHandler(SockJsConfiguration sockJsConfig, SockJsHandler sockJsHandler) { <del> Assert.notNull(sockJsConfig, "sockJsConfig is required"); <del> Assert.notNull(sockJsHandler, "sockJsHandler is required"); <del> this.sockJsConfig = sockJsConfig; <del> this.sockJsHandler = sockJsHandler; <del> } <del> <del> protected SockJsConfiguration getSockJsConfig() { <del> return this.sockJsConfig; <del> } <del> <del> protected SockJsHandler getSockJsHandler() { <del> return this.sockJsHandler; <del> } <del> <del> protected SockJsSessionSupport getSockJsSession(WebSocketSession wsSession) { <del> return this.sessions.get(wsSession); <del> } <del> <del> @Override <del> public void newSession(WebSocketSession wsSession) throws Exception { <del> if (logger.isDebugEnabled()) { <del> logger.debug("New session: " + wsSession); <del> } <del> SockJsSessionSupport session = createSockJsSession(wsSession); <del> this.sessions.put(wsSession, session); <del> } <del> <del> protected abstract SockJsSessionSupport createSockJsSession(WebSocketSession wsSession) throws Exception; <del> <del> @Override <del> public void handleTextMessage(WebSocketSession wsSession, String message) throws Exception { <del> if (logger.isTraceEnabled()) { <del> logger.trace("Received payload " + message); <del> } <del> SockJsSessionSupport session = getSockJsSession(wsSession); <del> session.delegateMessages(message); <del> } <del> <del> @Override <del> public void handleBinaryMessage(WebSocketSession session, InputStream message) throws Exception { <del> // should not happen <del> throw new UnsupportedOperationException(); <del> } <del> <del> @Override <del> public void handleException(WebSocketSession webSocketSession, Throwable exception) { <del> SockJsSessionSupport session = getSockJsSession(webSocketSession); <del> session.delegateException(exception); <del> } <del> <del> @Override <del> public void sessionClosed(WebSocketSession webSocketSession, int statusCode, String reason) throws Exception { <del> logger.debug("WebSocket connection closed " + webSocketSession); <del> SockJsSessionSupport session = this.sessions.remove(webSocketSession); <del> session.close(); <del> } <del> <del>} <ide><path>spring-websocket/src/main/java/org/springframework/sockjs/server/transport/SockJsWebSocketHandler.java <ide> package 
org.springframework.sockjs.server.transport; <ide> <ide> import java.io.IOException; <add>import java.io.InputStream; <add>import java.util.Map; <add>import java.util.concurrent.ConcurrentHashMap; <ide> <add>import org.apache.commons.logging.Log; <add>import org.apache.commons.logging.LogFactory; <ide> import org.springframework.sockjs.SockJsHandler; <ide> import org.springframework.sockjs.SockJsSessionSupport; <ide> import org.springframework.sockjs.server.AbstractServerSession; <ide> import org.springframework.sockjs.server.SockJsConfiguration; <ide> import org.springframework.sockjs.server.SockJsFrame; <add>import org.springframework.util.Assert; <ide> import org.springframework.util.StringUtils; <ide> import org.springframework.websocket.WebSocketHandler; <ide> import org.springframework.websocket.WebSocketSession; <ide> * @author Rossen Stoyanchev <ide> * @since 4.0 <ide> */ <del>public class SockJsWebSocketHandler extends AbstractSockJsWebSocketHandler { <add>public class SockJsWebSocketHandler implements WebSocketHandler { <add> <add> private static final Log logger = LogFactory.getLog(SockJsWebSocketHandler.class); <add> <add> private final SockJsConfiguration sockJsConfig; <add> <add> private final SockJsHandler sockJsHandler; <add> <add> private final Map<WebSocketSession, SockJsSessionSupport> sessions = <add> new ConcurrentHashMap<WebSocketSession, SockJsSessionSupport>(); <ide> <ide> // TODO: JSON library used must be configurable <ide> private final ObjectMapper objectMapper = new ObjectMapper(); <ide> <ide> <ide> public SockJsWebSocketHandler(SockJsConfiguration sockJsConfig, SockJsHandler sockJsHandler) { <del> super(sockJsConfig, sockJsHandler); <add> Assert.notNull(sockJsConfig, "sockJsConfig is required"); <add> Assert.notNull(sockJsHandler, "sockJsHandler is required"); <add> this.sockJsConfig = sockJsConfig; <add> this.sockJsHandler = sockJsHandler; <add> } <add> <add> protected SockJsConfiguration getSockJsConfig() { <add> return this.sockJsConfig; <add> } <add> <add> protected SockJsHandler getSockJsHandler() { <add> return this.sockJsHandler; <add> } <add> <add> protected SockJsSessionSupport getSockJsSession(WebSocketSession wsSession) { <add> return this.sessions.get(wsSession); <ide> } <ide> <ide> @Override <del> protected SockJsSessionSupport createSockJsSession(WebSocketSession wsSession) throws Exception { <del> return new WebSocketServerSession(wsSession, getSockJsConfig()); <add> public void newSession(WebSocketSession wsSession) throws Exception { <add> if (logger.isDebugEnabled()) { <add> logger.debug("New session: " + wsSession); <add> } <add> SockJsSessionSupport session = new WebSocketServerSession(wsSession, getSockJsConfig()); <add> this.sessions.put(wsSession, session); <ide> } <ide> <ide> @Override <ide> public void handleTextMessage(WebSocketSession wsSession, String message) throws <ide> } <ide> } <ide> <add> @Override <add> public void handleBinaryMessage(WebSocketSession session, InputStream message) throws Exception { <add> // should not happen <add> throw new UnsupportedOperationException(); <add> } <add> <add> @Override <add> public void handleException(WebSocketSession webSocketSession, Throwable exception) { <add> SockJsSessionSupport session = getSockJsSession(webSocketSession); <add> session.delegateException(exception); <add> } <add> <add> @Override <add> public void sessionClosed(WebSocketSession webSocketSession, int statusCode, String reason) throws Exception { <add> logger.debug("WebSocket session closed " + webSocketSession); <add> 
SockJsSessionSupport session = this.sessions.remove(webSocketSession); <add> session.connectionClosed(); <add> } <add> <ide> <ide> private class WebSocketServerSession extends AbstractServerSession { <ide> <ide> protected void writeFrameInternal(SockJsFrame frame) throws IOException { <ide> } <ide> <ide> @Override <del> public void closeInternal() { <del> this.webSocketSession.close(); <add> public void connectionClosed() { <add> super.connectionClosed(); <ide> this.webSocketSession = null; <add> } <add> <add> @Override <add> public void closeInternal() { <add> deactivate(); <ide> updateLastActiveTime(); <ide> } <ide> <ide> @Override <ide> protected void deactivate() { <del> this.webSocketSession.close(); <add> if (this.webSocketSession != null) { <add> this.webSocketSession.close(); <add> this.webSocketSession = null; <add> } <ide> } <ide> } <ide> <ide><path>spring-websocket/src/main/java/org/springframework/sockjs/server/transport/WebSocketSockJsHandlerAdapter.java <ide> package org.springframework.sockjs.server.transport; <ide> <ide> import java.io.IOException; <add>import java.io.InputStream; <add>import java.util.Map; <add>import java.util.concurrent.ConcurrentHashMap; <ide> <add>import org.apache.commons.logging.Log; <add>import org.apache.commons.logging.LogFactory; <ide> import org.springframework.sockjs.SockJsHandler; <ide> import org.springframework.sockjs.SockJsSessionSupport; <ide> import org.springframework.sockjs.server.SockJsConfiguration; <add>import org.springframework.util.Assert; <ide> import org.springframework.websocket.WebSocketHandler; <ide> import org.springframework.websocket.WebSocketSession; <ide> <ide> * @author Rossen Stoyanchev <ide> * @since 4.0 <ide> */ <del>public class WebSocketSockJsHandlerAdapter extends AbstractSockJsWebSocketHandler { <add>public class WebSocketSockJsHandlerAdapter implements WebSocketHandler { <add> <add> private static final Log logger = LogFactory.getLog(WebSocketSockJsHandlerAdapter.class); <add> <add> private final SockJsConfiguration sockJsConfig; <add> <add> private final SockJsHandler sockJsHandler; <add> <add> private final Map<WebSocketSession, SockJsSessionSupport> sessions = <add> new ConcurrentHashMap<WebSocketSession, SockJsSessionSupport>(); <ide> <ide> <ide> public WebSocketSockJsHandlerAdapter(SockJsConfiguration sockJsConfig, SockJsHandler sockJsHandler) { <del> super(sockJsConfig, sockJsHandler); <add> Assert.notNull(sockJsConfig, "sockJsConfig is required"); <add> Assert.notNull(sockJsHandler, "sockJsHandler is required"); <add> this.sockJsConfig = sockJsConfig; <add> this.sockJsHandler = sockJsHandler; <add> } <add> <add> protected SockJsConfiguration getSockJsConfig() { <add> return this.sockJsConfig; <add> } <add> <add> protected SockJsHandler getSockJsHandler() { <add> return this.sockJsHandler; <add> } <add> <add> protected SockJsSessionSupport getSockJsSession(WebSocketSession wsSession) { <add> return this.sessions.get(wsSession); <add> } <add> <add> @Override <add> public void newSession(WebSocketSession wsSession) throws Exception { <add> if (logger.isDebugEnabled()) { <add> logger.debug("New session: " + wsSession); <add> } <add> SockJsSessionSupport session = new WebSocketSessionAdapter(wsSession); <add> this.sessions.put(wsSession, session); <ide> } <ide> <ide> @Override <del> protected SockJsSessionSupport createSockJsSession(WebSocketSession wsSession) throws Exception { <del> return new WebSocketSessionAdapter(wsSession); <add> public void handleTextMessage(WebSocketSession wsSession, String message) 
throws Exception { <add> if (logger.isTraceEnabled()) { <add> logger.trace("Received payload " + message); <add> } <add> SockJsSessionSupport session = getSockJsSession(wsSession); <add> session.delegateMessages(message); <add> } <add> <add> @Override <add> public void handleBinaryMessage(WebSocketSession session, InputStream message) throws Exception { <add> // should not happen <add> throw new UnsupportedOperationException(); <add> } <add> <add> @Override <add> public void handleException(WebSocketSession webSocketSession, Throwable exception) { <add> SockJsSessionSupport session = getSockJsSession(webSocketSession); <add> session.delegateException(exception); <add> } <add> <add> @Override <add> public void sessionClosed(WebSocketSession webSocketSession, int statusCode, String reason) throws Exception { <add> logger.debug("WebSocket session closed " + webSocketSession); <add> SockJsSessionSupport session = this.sessions.remove(webSocketSession); <add> session.connectionClosed(); <ide> } <ide> <ide> <ide> private class WebSocketSessionAdapter extends SockJsSessionSupport { <ide> <del> private final WebSocketSession wsSession; <add> private WebSocketSession wsSession; <ide> <ide> <ide> public WebSocketSessionAdapter(WebSocketSession wsSession) throws Exception { <ide> public void sendMessage(String message) throws IOException { <ide> this.wsSession.sendText(message); <ide> } <ide> <add> @Override <add> public void connectionClosed() { <add> logger.debug("Session closed"); <add> super.connectionClosed(); <add> this.wsSession = null; <add> } <add> <add> @Override <ide> public void close() { <ide> if (!isClosed()) { <ide> logger.debug("Closing session"); <ide> super.close(); <ide> this.wsSession.close(); <add> this.wsSession = null; <ide> } <ide> } <ide> } <ide><path>spring-websocket/src/main/java/org/springframework/websocket/WebSocketSession.java <ide> */ <ide> public interface WebSocketSession { <ide> <add> String getId(); <add> <ide> boolean isOpen(); <ide> <ide> void sendText(String text) throws IOException; <ide><path>spring-websocket/src/main/java/org/springframework/websocket/endpoint/StandardWebSocketSession.java <ide> public StandardWebSocketSession(javax.websocket.Session session) { <ide> this.session = session; <ide> } <ide> <add> @Override <add> public String getId() { <add> return this.session.getId(); <add> } <add> <ide> @Override <ide> public boolean isOpen() { <ide> return ((this.session != null) && this.session.isOpen()); <ide><path>spring-websocket/src/main/java/org/springframework/websocket/endpoint/WebSocketHandlerEndpoint.java <ide> public void onOpen(javax.websocket.Session session, EndpointConfig config) { <ide> try { <ide> WebSocketSession webSocketSession = new StandardWebSocketSession(session); <ide> this.sessions.put(session.getId(), webSocketSession); <del> session.addMessageHandler(new StandardMessageHandler(session.getId())); <add> session.addMessageHandler(new StandardMessageHandler(session)); <ide> this.webSocketHandler.newSession(webSocketSession); <ide> } <ide> catch (Throwable ex) { <ide> public void onOpen(javax.websocket.Session session, EndpointConfig config) { <ide> <ide> @Override <ide> public void onClose(javax.websocket.Session session, CloseReason closeReason) { <del> String id = session.getId(); <ide> if (logger.isDebugEnabled()) { <del> logger.debug("Closing session: " + session + ", " + closeReason); <add> logger.debug("Session closed: " + session + ", " + closeReason); <ide> } <ide> try { <del> WebSocketSession webSocketSession = 
getSession(id); <del> this.sessions.remove(id); <del> int code = closeReason.getCloseCode().getCode(); <del> String reason = closeReason.getReasonPhrase(); <del> this.webSocketHandler.sessionClosed(webSocketSession, code, reason); <add> WebSocketSession wsSession = this.sessions.remove(session.getId()); <add> if (wsSession != null) { <add> int code = closeReason.getCloseCode().getCode(); <add> String reason = closeReason.getReasonPhrase(); <add> this.webSocketHandler.sessionClosed(wsSession, code, reason); <add> } <add> else { <add> Assert.notNull(wsSession, "No WebSocket session"); <add> } <ide> } <ide> catch (Throwable ex) { <ide> // TODO <ide> public void onClose(javax.websocket.Session session, CloseReason closeReason) { <ide> public void onError(javax.websocket.Session session, Throwable exception) { <ide> logger.error("Error for WebSocket session: " + session.getId(), exception); <ide> try { <del> WebSocketSession webSocketSession = getSession(session.getId()); <del> this.webSocketHandler.handleException(webSocketSession, exception); <add> WebSocketSession wsSession = getWebSocketSession(session); <add> if (wsSession != null) { <add> this.webSocketHandler.handleException(wsSession, exception); <add> } <add> else { <add> logger.warn("WebSocketSession not found. Perhaps onError was called after onClose?"); <add> } <ide> } <ide> catch (Throwable ex) { <ide> // TODO <ide> logger.error("Failed to handle error", ex); <ide> } <ide> } <ide> <del> private WebSocketSession getSession(String sourceSessionId) { <del> WebSocketSession webSocketSession = this.sessions.get(sourceSessionId); <del> Assert.notNull(webSocketSession, "No session"); <del> return webSocketSession; <add> private WebSocketSession getWebSocketSession(javax.websocket.Session session) { <add> return this.sessions.get(session.getId()); <ide> } <ide> <ide> <ide> private class StandardMessageHandler implements MessageHandler.Whole<String> { <ide> <del> private final String sessionId; <add> private final javax.websocket.Session session; <ide> <del> public StandardMessageHandler(String sessionId) { <del> this.sessionId = sessionId; <add> public StandardMessageHandler(javax.websocket.Session session) { <add> this.session = session; <ide> } <ide> <ide> @Override <ide> public void onMessage(String message) { <ide> if (logger.isTraceEnabled()) { <del> logger.trace("Message for session [" + this.sessionId + "]: " + message); <add> logger.trace("Message for session [" + this.session + "]: " + message); <ide> } <add> WebSocketSession wsSession = getWebSocketSession(this.session); <add> Assert.notNull(wsSession, "WebSocketSession not found"); <ide> try { <del> WebSocketSession session = getSession(this.sessionId); <del> WebSocketHandlerEndpoint.this.webSocketHandler.handleTextMessage(session, message); <add> WebSocketHandlerEndpoint.this.webSocketHandler.handleTextMessage(wsSession, message); <ide> } <ide> catch (Throwable ex) { <ide> // TODO
14
Ruby
Ruby
avoid instance_exec for controller callbacks
b3024484abade42ebf0ec70c7957f5cfe7c102d9
<ide><path>actionpack/lib/abstract_controller/callbacks.rb <ide> module Callbacks <ide> skip_after_callbacks_if_terminated: true <ide> end <ide> <add> class ActionFilter <add> def initialize(actions) <add> @actions = Array(actions).map(&:to_s).to_set <add> end <add> <add> def match?(controller) <add> @actions.include?(controller.action_name) <add> end <add> <add> alias after match? <add> alias before match? <add> alias around match? <add> end <add> <ide> module ClassMethods <ide> # If +:only+ or +:except+ are used, convert the options into the <ide> # +:if+ and +:unless+ options of ActiveSupport::Callbacks. <ide> def _normalize_callback_options(options) <ide> <ide> def _normalize_callback_option(options, from, to) # :nodoc: <ide> if from = options.delete(from) <del> _from = Array(from).map(&:to_s).to_set <del> from = proc { |c| _from.include? c.action_name } <add> from = ActionFilter.new(from) <ide> options[to] = Array(options[to]).unshift(from) <ide> end <ide> end
1
Ruby
Ruby
use homebrew_logs more consistently
37e2005e792a9f6d3e7597e06eb4dc9bb822b5b9
<ide><path>Library/Homebrew/cmd/doctor.rb <ide> def check_access_#{d.sub("/", "_")} <ide> end <ide> <ide> def check_access_logs <del> folder = Pathname.new('~/Library/Logs/Homebrew') <del> if folder.exist? and not folder.writable_real? <add> if HOMEBREW_LOGS.exist? and not HOMEBREW_LOGS.writable_real? <ide> <<-EOS.undent <ide> #{folder} isn't writable. <ide> This can happen if you "sudo make install" software that isn't managed <ide><path>Library/Homebrew/exceptions.rb <ide> def dump <ide> Homebrew.dump_build_env(env) <ide> puts <ide> onoe "#{formula.name} did not build" <del> unless (logs = Dir["#{ENV['HOME']}/Library/Logs/Homebrew/#{formula}/*"]).empty? <add> unless (logs = Dir["#{HOMEBREW_LOGS}/#{formula}/*"]).empty? <ide> puts "Logs:" <ide> puts logs.map{|fn| " #{fn}"}.join("\n") <ide> end
2
Ruby
Ruby
fix update_all api doc [ci-skip]
e2d9747123b5b85904d6accdd182885d3af5564a
<ide><path>activerecord/lib/active_record/relation.rb <ide> def _exec_scope(...) # :nodoc: <ide> # ==== Parameters <ide> # <ide> # * +updates+ - A string, array, or hash representing the SET part of an SQL statement. Any strings provided will <del> # be type cast, unless you use `Arel.sql`. (Don't pass user-provided values to `Arel.sql`.) <add> # be type cast, unless you use +Arel.sql+. (Don't pass user-provided values to +Arel.sql+.) <ide> # <ide> # ==== Examples <ide> #
1
Javascript
Javascript
fix test-tls-server-verify.js on windows ci
1ecd2be2e65d8d33ed6f110422b6795cdf066f01
<ide><path>test/parallel/test-tls-server-verify.js <ide> function runTest(port, testIndex) { <ide> } else { <ide> server.close(); <ide> successfulTests++; <del> runTest(port, nextTest++); <add> runTest(0, nextTest++); <ide> } <ide> } <ide> <ide> function runTest(port, testIndex) { <ide> if (clientsCompleted === tcase.clients.length) { <ide> server.close(); <ide> successfulTests++; <del> runTest(port, nextTest++); <add> runTest(0, nextTest++); <ide> } <ide> }); <ide> } <ide> function runTest(port, testIndex) { <ide> <ide> let nextTest = 0; <ide> runTest(0, nextTest++); <del>runTest(0, nextTest++); <ide> <ide> <ide> process.on('exit', function() {
1
Ruby
Ruby
bring generators tests back to life
b0947bf97c0ac313799f6f1ca739b5666f5fe19f
<ide><path>railties/test/generators/generators_test_helper.rb <ide> def self.inherited(base) <ide> rescue <ide> # Do nothing. <ide> end <add> <add> def test_truth <add> # Don't cry test/unit <add> end <ide> end <ide>\ No newline at end of file
1
Javascript
Javascript
correct readystate on connecting
69d20f535262dec0be74e521c32d151cb16694ab
<ide><path>lib/net_uv.js <ide> function initSocketHandle(self) { <ide> self._writeRequests = []; <ide> <ide> self._flags = 0; <del> <add> self._connectQueueSize = 0; <ide> self.destroyed = false; <ide> } <ide> <ide> Object.defineProperty(Socket.prototype, 'readyState', { <ide> <ide> Object.defineProperty(Socket.prototype, 'bufferSize', { <ide> get: function() { <del> return this._handle.writeQueueSize; <add> return this._handle.writeQueueSize + this._connectQueueSize; <ide> } <ide> }); <ide> <ide> Socket.prototype.destroySoon = function() { <ide> }; <ide> <ide> <add>Socket.prototype._connectQueueCleanUp = function(exception) { <add> this._connecting = false; <add> this._connectQueueSize = 0; <add> this._connectQueue = null; <add>}; <add> <add> <ide> Socket.prototype.destroy = function(exception) { <ide> var self = this; <ide> <add> self._connectQueueCleanUp(); <add> <ide> debug('destroy ' + this.fd); <ide> <ide> this.readable = this.writable = false; <ide> Socket.prototype.write = function(data /* [encoding], [fd], [cb] */) { <ide> data = new Buffer(data, encoding); <ide> } <ide> <add> // If we are still connecting, then buffer this for later. <add> if (this._connecting) { <add> this._connectQueueSize += data.length; <add> if (this._connectQueue) { <add> this._connectQueue.push([data, null, fd, cb]); <add> } else { <add> this._connectQueue = [ [data, null, fd, cb] ]; <add> } <add> return false; <add> } <add> <add> <ide> var writeReq = this._handle.write(data); <ide> writeReq.oncomplete = afterWrite; <ide> writeReq.cb = cb; <ide> function connectip(self, port, ip) { <ide> // TODO return promise from Socket.prototype.connect which <ide> // wraps _connectReq. <ide> <del> assert.ok(!self._connecting); <add> assert.ok(self._connecting); <ide> <ide> var connectReq = self._handle.connect(ip, port); <ide> <ide> if (connectReq) { <del> self._connecting = true; <ide> connectReq.oncomplete = afterConnect; <ide> } else { <ide> self.destroy(errnoException(errno, 'connect')); <ide> Socket.prototype.connect = function(port, host /* [cb] */) { <ide> <ide> timers.active(this); <ide> <del> if (typeof host == 'undefined') { <del> connectip(self, port, '127.0.0.1'); <del> } else { <add> self._connecting = true; <add> <add> if (typeof host == 'string') { <add> debug("connect: find host " + host); <ide> require('dns').lookup(host, function(err, ip, addressType) { <ide> if (err) { <ide> self.emit('error', err); <ide> Socket.prototype.connect = function(port, host /* [cb] */) { <ide> connectip(self, port, ip || '127.0.0.1'); <ide> } <ide> }); <add> } else { <add> debug("connect: missing host"); <add> connectip(self, port, '127.0.0.1'); <ide> } <ide> }; <ide> <ide> function afterConnect(status, handle, req) { <ide> var self = handle.socket; <ide> assert.equal(handle, self._handle); <ide> <add> debug("afterConnect"); <add> <ide> assert.ok(self._connecting); <ide> self._connecting = false; <ide> <ide> if (status == 0) { <ide> self.readable = self.writable = true; <ide> timers.active(self); <add> <ide> handle.readStart(); <add> <add> if (self._connectQueue) { <add> debug('Drain the connect queue'); <add> for (var i = 0; i < self._connectQueue.length; i++) { <add> self.write.apply(self, self._connectQueue[i]); <add> } <add> self._connectQueueCleanUp() <add> } <add> <ide> self.emit('connect'); <ide> } else { <add> self._connectQueueCleanUp() <ide> self.destroy(errnoException(errno, 'connect')); <ide> } <ide> } <ide> function onconnection(clientHandle) { <ide> var handle = this; <ide> var self = handle.socket; <ide> 
<add> debug("onconnection"); <add> <ide> var socket = new Socket({ <ide> handle: clientHandle, <ide> allowHalfOpen: self.allowHalfOpen <ide><path>test/simple/test-net-connect-buffer.js <ide> var tcp = net.Server(function(s) { <ide> tcp.listen(common.PORT, function () { <ide> var socket = net.Stream(); <ide> <del> console.log('Connecting to socket'); <add> console.log('Connecting to socket '); <ide> <ide> socket.connect(tcpPort, function() { <ide> console.log('socket connected'); <ide> connectHappened = true; <ide> }); <ide> <add> console.log('_connecting = ' + socket._connecting); <add> <ide> assert.equal('opening', socket.readyState); <ide> <ide> var r = socket.write('foo', function () {
2
PHP
PHP
set morphs directly
669c5356e65f0712d4f71f2cd8c6345838401ccb
<ide><path>src/Illuminate/Database/Eloquent/Relations/MorphOneOrMany.php <ide> public function save(Model $model) <ide> */ <ide> public function create(array $attributes) <ide> { <del> $foreign = $this->getForeignAttributesForCreate(); <add> $instance = $this->related->newInstance($attributes); <ide> <ide> // When saving a polymorphic relationship, we need to set not only the foreign <ide> // key, but also the foreign key type, which is typically the class name of <ide> // the parent model. This makes the polymorphic item unique in the table. <del> $attributes = array_merge($attributes, $foreign); <del> <del> $instance = $this->related->newInstance($attributes); <add> $this->setForeignAttributesForCreate($instance); <ide> <ide> $instance->save(); <ide> <ide> return $instance; <ide> } <ide> <ide> /** <del> * Get the foreign ID and type for creating a related model. <add> * Set the foreign ID and type for creating a related model. <ide> * <del> * @return array <add> * @param \Illuminate\Database\Eloquent\Model $model <add> * @return void <ide> */ <del> protected function getForeignAttributesForCreate() <add> protected function setForeignAttributesForCreate(Model $model) <ide> { <del> $foreign = array($this->getPlainForeignKey() => $this->getParentKey()); <del> <del> $foreign[last(explode('.', $this->morphType))] = $this->morphClass; <add> $model->{$this->getPlainForeignKey()} = $this->getParentKey(); <ide> <del> return $foreign; <add> $model->{last(explode('.', $this->morphType))} = $this->morphClass; <ide> } <ide> <ide> /** <ide><path>tests/Database/DatabaseEloquentMorphTest.php <ide> public function testCreateFunctionOnMorph() <ide> { <ide> // Doesn't matter which relation type we use since they share the code... <ide> $relation = $this->getOneRelation(); <del> $created = m::mock('stdClass'); <del> $relation->getRelated()->shouldReceive('newInstance')->once()->with(array('name' => 'taylor', 'morph_id' => 1, 'morph_type' => get_class($relation->getParent())))->andReturn($created); <add> $created = m::mock('Illuminate\Database\Eloquent\Model'); <add> $created->shouldReceive('setAttribute')->once()->with('morph_id', 1); <add> $created->shouldReceive('setAttribute')->once()->with('morph_type', get_class($relation->getParent())); <add> $relation->getRelated()->shouldReceive('newInstance')->once()->with(array('name' => 'taylor'))->andReturn($created); <ide> $created->shouldReceive('save')->once()->andReturn(true); <ide> <ide> $this->assertEquals($created, $relation->create(array('name' => 'taylor')));
2
Python
Python
add test for record array clipping
ef392181a8ed3e9f2646745b19aedbc3d1145c95
<ide><path>numpy/core/tests/test_multiarray.py <ide> def check_basic(self): <ide> #x = self._check_type('uint',1024,-120,100,expected_min=0) <ide> x = self._clip_type('uint',1024,0,0) <ide> <add> # XXX fixme <add> def check_record_array(self,level=2): <add> rec = N.array([(-5, 2.0, 3.0), (5.0, 4.0, 3.0)], <add> dtype=[('x', '<f8'), ('y', '<f8'), ('z', '<f8')]) <add> rec['x'].clip(-0.3,0.5) <add> self._check_range(rec['x'],-0.3,0.5) <add> <ide> # Import tests from unicode <ide> set_local_path() <ide> from test_unicode import *
1
PHP
PHP
apply fixes from styleci
a1611c76ffd9221978378f361af00fac2fe0b4aa
<ide><path>tests/Mail/MailMailableTest.php <ide> public function build() <ide> <ide> public function testAssertHasSubject() <ide> { <del> <ide> } <ide> } <ide>
1
Mixed
Ruby
fix regression on `.select_*` methods
b7fcad8ff04411a8d00f85094b172b6b99402190
<ide><path>activerecord/CHANGELOG.md <add>* Fix regressions on `select_*` methods. <add> When `select_*` methods receive a `Relation` object, they should be able to get the arel/binds from it. <add> Also fix regressions on select_rows that was ignoring the binds. <add> <add> Fixes #7538, #12017, #13731, #12056. <add> <add> *arthurnn* <add> <ide> * Active Record objects can now be correctly dumped, loaded and dumped again without issues. <ide> <ide> Previously, if you did `YAML.dump`, `YAML.load` and then `YAML.dump` again <ide><path>activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb <ide> def to_sql(arel, binds = []) <ide> <ide> # Returns an ActiveRecord::Result instance. <ide> def select_all(arel, name = nil, binds = []) <add> if arel.is_a?(Relation) <add> relation = arel <add> arel = relation.arel <add> if !binds || binds.empty? <add> binds = relation.bind_values <add> end <add> end <add> <ide> select(to_sql(arel, binds), name, binds) <ide> end <ide> <ide> def select_value(arel, name = nil, binds = []) <ide> # Returns an array of the values of the first column in a select: <ide> # select_values("SELECT id FROM companies LIMIT 3") => [1,2,3] <ide> def select_values(arel, name = nil) <del> select_rows(to_sql(arel, []), name) <del> .map { |v| v[0] } <add> binds = [] <add> if arel.is_a?(Relation) <add> arel, binds = arel.arel, arel.bind_values <add> end <add> select_rows(to_sql(arel, binds), name, binds).map(&:first) <ide> end <ide> <ide> # Returns an array of arrays containing the field values. <ide> # Order is the same as that returned by +columns+. <del> def select_rows(sql, name = nil) <add> def select_rows(sql, name = nil, binds = []) <ide> end <ide> undef_method :select_rows <ide> <ide><path>activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb <ide> def build_footer(nrows, elapsed) <ide> <ide> # Returns an array of arrays containing the field values. <ide> # Order is the same as that returned by +columns+. <del> def select_rows(sql, name = nil) <add> def select_rows(sql, name = nil, binds = []) <ide> execute(sql, name).to_a <ide> end <ide> <ide><path>activerecord/lib/active_record/connection_adapters/mysql_adapter.rb <ide> def reset! <ide> <ide> # DATABASE STATEMENTS ====================================== <ide> <del> def select_rows(sql, name = nil) <add> def select_rows(sql, name = nil, binds = []) <ide> @connection.query_with_result = true <del> rows = exec_query(sql, name).rows <add> rows = exec_query(sql, name, binds).rows <ide> @connection.more_results && @connection.next_result # invoking stored procedures with CLIENT_MULTI_RESULTS requires this to tidy up else connection will be dropped <ide> rows <ide> end <ide><path>activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb <ide> def pp(result) <ide> <ide> # Executes a SELECT query and returns an array of rows. Each row is an <ide> # array of field values. 
<del> def select_rows(sql, name = nil) <del> select_raw(sql, name).last <add> def select_rows(sql, name = nil, binds = []) <add> exec_query(sql, name, binds).rows <ide> end <ide> <ide> # Executes an INSERT query and returns the new record's ID <ide><path>activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb <ide> def select(sql, name = nil, binds = []) <ide> exec_query(sql, name, binds) <ide> end <ide> <del> def select_raw(sql, name = nil) <del> res = execute(sql, name) <del> results = result_as_array(res) <del> fields = res.fields <del> res.clear <del> return fields, results <del> end <del> <ide> # Returns the list of a table's column names, data types, and default values. <ide> # <ide> # The underlying query is roughly: <ide><path>activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb <ide> def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) # <ide> end <ide> alias :create :insert_sql <ide> <del> def select_rows(sql, name = nil) <del> exec_query(sql, name).rows <add> def select_rows(sql, name = nil, binds = []) <add> exec_query(sql, name, binds).rows <ide> end <ide> <ide> def begin_db_transaction #:nodoc: <ide><path>activerecord/test/cases/adapter_test.rb <ide> require "cases/helper" <ide> require "models/book" <add>require "models/post" <add>require "models/author" <ide> <ide> module ActiveRecord <ide> class AdapterTest < ActiveRecord::TestCase <ide> def test_select_all_always_return_activerecord_result <ide> assert result.is_a?(ActiveRecord::Result) <ide> end <ide> <add> def test_select_methods_passing_a_association_relation <add> author = Author.create!(name: 'john') <add> Post.create!(author: author, title: 'foo', body: 'bar') <add> query = author.posts.select(:title) <add> assert_equal({"title" => "foo"}, @connection.select_one(query.arel, nil, query.bind_values)) <add> assert_equal({"title" => "foo"}, @connection.select_one(query)) <add> assert @connection.select_all(query).is_a?(ActiveRecord::Result) <add> assert_equal "foo", @connection.select_value(query) <add> assert_equal ["foo"], @connection.select_values(query) <add> end <add> <add> def test_select_methods_passing_a_relation <add> Post.create!(title: 'foo', body: 'bar') <add> query = Post.where(title: 'foo').select(:title) <add> assert_equal({"title" => "foo"}, @connection.select_one(query.arel, nil, query.bind_values)) <add> assert_equal({"title" => "foo"}, @connection.select_one(query)) <add> assert @connection.select_all(query).is_a?(ActiveRecord::Result) <add> assert_equal "foo", @connection.select_value(query) <add> assert_equal ["foo"], @connection.select_values(query) <add> end <add> <ide> test "type_to_sql returns a String for unmapped types" do <ide> assert_equal "special_db_type", @connection.type_to_sql(:special_db_type) <ide> end
8
Javascript
Javascript
improve dx on deprecation messages
407528ba48992fa538634d9f6c9bc748db55e597
<ide><path>lib/NormalModuleFactory.js <ide> const needCalls = (times, callback) => { <ide> }; <ide> <ide> // TODO webpack 6 remove <del>const deprecationChangedHookMessage = name => <del> `NormalModuleFactory.${name} is no longer a waterfall hook, but a bailing hook instead. ` + <del> "Do not return the passed object, but modify it instead. " + <del> "Returning false will ignore the request and results in no module created."; <add>const deprecationChangedHookMessage = (name, hook) => { <add> const names = hook.taps <add> .map(tapped => { <add> return tapped.name; <add> }) <add> .join(","); <add> <add> return ( <add> `NormalModuleFactory.${name} (${names}) is no longer a waterfall hook, but a bailing hook instead. ` + <add> "Do not return the passed object, but modify it instead. " + <add> "Returning false will ignore the request and results in no module created." <add> ); <add>}; <ide> <ide> const dependencyCache = new WeakMap(); <ide> <ide> class NormalModuleFactory extends ModuleFactory { <ide> <ide> if (typeof result === "object") <ide> throw new Error( <del> deprecationChangedHookMessage("resolve") + <add> deprecationChangedHookMessage( <add> "resolve", <add> this.hooks.afterResolve <add> ) + <ide> " Returning a Module object will result in this module used as result." <ide> ); <ide> <ide> this.hooks.afterResolve.callAsync(resolveData, (err, result) => { <ide> if (err) return callback(err); <ide> <ide> if (typeof result === "object") <del> throw new Error(deprecationChangedHookMessage("afterResolve")); <add> throw new Error( <add> deprecationChangedHookMessage( <add> "afterResolve", <add> this.hooks.afterResolve <add> ) <add> ); <ide> <ide> // Ignored <ide> if (result === false) return callback(); <ide> class NormalModuleFactory extends ModuleFactory { <ide> } <ide> <ide> if (typeof result === "object") <del> throw new Error(deprecationChangedHookMessage("beforeResolve")); <add> throw new Error( <add> deprecationChangedHookMessage( <add> "beforeResolve", <add> this.hooks.beforeResolve <add> ) <add> ); <ide> <ide> this.hooks.factorize.callAsync(resolveData, (err, module) => { <ide> if (err) {
1
Ruby
Ruby
add missing require
7e006057e082c4eab7424618b24f330e47be8744
<ide><path>activesupport/lib/active_support/core_ext/securerandom.rb <add>require 'securerandom' <add> <ide> module SecureRandom <ide> BASE58_ALPHABET = ('0'..'9').to_a + ('A'..'Z').to_a + ('a'..'z').to_a - ['0', 'O', 'I', 'l'] <ide> # SecureRandom.base58 generates a random base58 string.
1
Ruby
Ruby
remove redundant sentence
9bce115e9a0f2c4b5759073e03ef8f6d4f749850
<ide><path>Library/Homebrew/cmd/doctor.rb <ide> def check_usr_bin_ruby <ide> def check_homebrew_prefix <ide> unless HOMEBREW_PREFIX.to_s == '/usr/local' <ide> puts <<-EOS.undent <del> You can install Homebrew anywhere you want, but some brews may only work <ide> You can install Homebrew anywhere you want, but some brews may only build <ide> correctly if you install to /usr/local. <ide>
1
Ruby
Ruby
add transactiontimeout for mysql error code 1205
c5edd97721aeaa4ceaa11e66327842a0aba1fdf7
<ide><path>activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb <ide> def add_options_for_index_columns(quoted_columns, **options) <ide> ER_LOCK_DEADLOCK = 1213 <ide> ER_CANNOT_ADD_FOREIGN = 1215 <ide> ER_CANNOT_CREATE_TABLE = 1005 <add> ER_LOCK_WAIT_TIMEOUT = 1205 <ide> <ide> def translate_exception(exception, message) <ide> case error_number(exception) <ide> def translate_exception(exception, message) <ide> NotNullViolation.new(message) <ide> when ER_LOCK_DEADLOCK <ide> Deadlocked.new(message) <add> when ER_LOCK_WAIT_TIMEOUT <add> TransactionTimeout.new(message) <ide> else <ide> super <ide> end <ide><path>activerecord/lib/active_record/errors.rb <ide> class Deadlocked < TransactionRollbackError <ide> # +reverse_order+ to automatically reverse. <ide> class IrreversibleOrderError < ActiveRecordError <ide> end <add> <add> # TransactionTimeout will be raised when lock wait timeout expires. <add> # Wait time value is set by innodb_lock_wait_timeout. <add> class TransactionTimeout < StatementInvalid <add> end <ide> end <ide><path>activerecord/test/cases/adapters/mysql2/transaction_test.rb <ide> class Sample < ActiveRecord::Base <ide> end <ide> end <ide> end <add> <add> test "raises TransactionTimeout when mysql raises ER_LOCK_WAIT_TIMEOUT" do <add> assert_raises(ActiveRecord::TransactionTimeout) do <add> ActiveRecord::Base.connection.execute("SIGNAL SQLSTATE 'HY000' SET MESSAGE_TEXT = 'Testing error', MYSQL_ERRNO = 1205;") <add> end <add> end <ide> end <ide> end
3
Javascript
Javascript
add missing semicolon in math3node
683502300442112213a6d857cc83fc74cfaeea4e
<ide><path>examples/js/nodes/math/Math3Node.js <ide> THREE.Math3Node.prototype.generate = function( builder, output ) { <ide> var a, b, c, <ide> al = builder.getFormatLength( this.a.getType( builder ) ), <ide> bl = builder.getFormatLength( this.b.getType( builder ) ), <del> cl = builder.getFormatLength( this.c.getType( builder ) ) <add> cl = builder.getFormatLength( this.c.getType( builder ) ); <ide> <ide> // optimzer <ide>
1
Python
Python
use startswith instead
7a305199fb54a7185db9b172811118f7fec5ae8d
<ide><path>airflow/hooks/hive_hooks.py <ide> def test_hql(self, hql): <ide> """ <ide> create, insert, other = [], [], [] <ide> for query in hql.split(';'): # naive <del> query = query.lower() <del> if 'create table' in query: <add> query = query.lower().strip() <add> if query.startswith('create table'): <ide> create.append(query) <del> elif 'set' in query or 'add jar' in query or 'temporary' in query: <add> elif query.startswith(('set', 'add jar', 'temporary')): <ide> other.append(query) <del> elif 'select' in query: <add> elif query.startswith('insert'): <ide> insert.append(query) <ide> other = ';'.join(other) <ide> for query_set in [create, insert]:
1
Python
Python
add unicode declaration
3a3cb2c90ce2591e04806d3e9ccfd2d1fbdc722d
<ide><path>spacy/tests/tokenizer/test_urls.py <add># coding: utf-8 <ide> from __future__ import unicode_literals <ide> <ide> import pytest <ide> <add> <ide> URLS = [ <ide> u"http://www.nytimes.com/2016/04/20/us/politics/new-york-primary-preview.html?hp&action=click&pgtype=Homepage&clickSource=story-heading&module=a-lede-package-region&region=top-news&WT.nav=top-news&_r=0", <ide> u"www.google.com?q=google",
1
Text
Text
add role of \n in explanation of the code
8230f82dd1d9eb555a2a9fb50e46c7abd17d8377
<ide><path>guide/english/c/hello-world/index.md <ide> To write on console you can use the function `printf()` contained in the library <ide> * If you use printf() function without writing #include <stdio.h>, the program will not be compiled. <ide> * The execution of a C program starts from the main() function. <ide> * The printf() is a library function to send formatted output to the screen. In this program, the printf() displays Hello, World! text on the screen. <add> * The \n in printf creates a new line for the forthcoming text. <ide> * The return 0; statement is the "Exit status" of the program. In simple terms, program ends with this statement <ide> <ide> ## Output:
1
Go
Go
check testing code for runconfig and volume
b3e5137856ffd07e179d977eec74ee0566f1bb26
<ide><path>runconfig/hostconfig_test.go <ide> func TestDecodeHostConfig(t *testing.T) { <ide> } <ide> <ide> if len(c.CapDrop) != 1 && c.CapDrop[0] != "NET_ADMIN" { <del> t.Fatalf("Expected CapDrop MKNOD, got %v", c.CapDrop) <add> t.Fatalf("Expected CapDrop NET_ADMIN, got %v", c.CapDrop) <ide> } <ide> } <ide> } <ide><path>volume/store/store_test.go <ide> func TestFilterByUsed(t *testing.T) { <ide> <ide> dangling := s.FilterByUsed(vols, false) <ide> if len(dangling) != 1 { <del> t.Fatalf("expected 1 danging volume, got %v", len(dangling)) <add> t.Fatalf("expected 1 dangling volume, got %v", len(dangling)) <ide> } <ide> if dangling[0].Name() != "fake2" { <del> t.Fatalf("expected danging volume fake2, got %s", dangling[0].Name()) <add> t.Fatalf("expected dangling volume fake2, got %s", dangling[0].Name()) <ide> } <ide> <ide> used := s.FilterByUsed(vols, true)
2
Javascript
Javascript
fix grabquestion on mobile
2277cde61bf7722d61f783ee70d38d7c274ee323
<ide><path>common/app/routes/Hikes/flux/Actions.js <ide> export default Actions({ <ide> }, <ide> <ide> grabQuestion(e) { <del> const { pageX, pageY } = e; <add> let { pageX, pageY, touches } = e; <add> if (touches) { <add> e.preventDefault(); <add> // these re-assigns the values of pageX, pageY from touches <add> ({ pageX, pageY } = touches[0]); <add> } <ide> const delta = [pageX, pageY]; <del> const mouse = getMouse(e, delta); <add> const mouse = [0, 0]; <ide> <ide> return { <ide> transform(state) {
1
Go
Go
add missing error check
a1c5f268e5465e0b3cdb4e2a47c0ade3de7988c0
<ide><path>api/client/commands.go <ide> func (cli *DockerCli) CmdBuild(args ...string) error { <ide> } <ide> } <ide> context, err = archive.TarWithOptions(root, options) <add> if err != nil { <add> return err <add> } <ide> } <ide> var body io.Reader <ide> // Setup an upload progress bar
1
Java
Java
add missing java files to rn fbjni sync
f0e4a6cd2c2f74d85d8141054c8c1b5bea78cd46
<ide><path>ReactAndroid/src/main/java/com/facebook/jni/CpuCapabilitiesJni.java <add>// Copyright 2004-present Facebook. All Rights Reserved. <add> <add>package com.facebook.jni; <add> <add>import com.facebook.proguard.annotations.DoNotStrip; <add>import com.facebook.soloader.SoLoader; <add> <add>/** <add> * Utility class to determine CPU capabilities <add> */ <add>@DoNotStrip <add>public class CpuCapabilitiesJni { <add> <add> static { <add> SoLoader.loadLibrary("fb"); <add> } <add> <add> @DoNotStrip <add> public static native boolean nativeDeviceSupportsNeon(); <add> <add> @DoNotStrip <add> public static native boolean nativeDeviceSupportsVFPFP16(); <add> <add> @DoNotStrip <add> public static native boolean nativeDeviceSupportsX86(); <add> <add>} <ide><path>ReactAndroid/src/main/java/com/facebook/jni/DestructorThread.java <add>// Copyright 2004-present Facebook. All Rights Reserved. <add> <add>package com.facebook.jni; <add> <add>import java.lang.ref.PhantomReference; <add>import java.lang.ref.ReferenceQueue; <add>import java.util.concurrent.atomic.AtomicReference; <add> <add>/** <add> * A thread which invokes the "destruct" routine for objects after they have been garbage collected. <add> * <add> * An object which needs to be destructed should create a static subclass of {@link Destructor}. <add> * Once the referent object is garbage collected, the DestructorThread will callback to the <add> * {@link Destructor#destruct()} method. <add> * <add> * The underlying thread in DestructorThread starts when the first Destructor is constructed <add> * and then runs indefinitely. <add> */ <add>public class DestructorThread { <add> <add> /** <add> * N.B The Destructor <b>SHOULD NOT</b> refer back to its referent object either explicitly or <add> * implicitly (for example, as a non-static inner class). This will create a reference cycle where <add> * the referent object will never be garbage collected. <add> */ <add> public abstract static class Destructor extends PhantomReference<Object> { <add> <add> private Destructor next; <add> private Destructor previous; <add> <add> Destructor(Object referent) { <add> super(referent, sReferenceQueue); <add> sDestructorStack.push(this); <add> } <add> <add> private Destructor() { <add> super(null, sReferenceQueue); <add> } <add> <add> /** Callback which is invoked when the original object has been garbage collected. */ <add> abstract void destruct(); <add> } <add> <add> /** A list to keep all active Destructors in memory confined to the Destructor thread. */ <add> private static DestructorList sDestructorList; <add> /** A thread safe stack where new Destructors are placed before being add to sDestructorList. */ <add> private static DestructorStack sDestructorStack; <add> private static ReferenceQueue sReferenceQueue; <add> private static Thread sThread; <add> <add> static { <add> sDestructorStack = new DestructorStack(); <add> sReferenceQueue = new ReferenceQueue(); <add> sDestructorList = new DestructorList(); <add> sThread = new Thread("HybridData DestructorThread") { <add> @Override <add> public void run() { <add> while (true) { <add> try { <add> Destructor current = (Destructor) sReferenceQueue.remove(); <add> current.destruct(); <add> <add> // If current is in the sDestructorStack, <add> // transfer all the Destructors in the stack to the list. <add> if (current.previous == null) { <add> sDestructorStack.transferAllToList(); <add> } <add> <add> DestructorList.drop(current); <add> } catch (InterruptedException e) { <add> // Continue. 
This thread should never be terminated. <add> } <add> } <add> } <add> }; <add> <add> sThread.start(); <add> } <add> <add> private static class Terminus extends Destructor { <add> @Override <add> void destruct() { <add> throw new IllegalStateException("Cannot destroy Terminus Destructor."); <add> } <add> } <add> <add> /** This is a thread safe, lock-free Treiber-like Stack of Destructors. */ <add> private static class DestructorStack { <add> private AtomicReference<Destructor> mHead = new AtomicReference<>(); <add> <add> public void push(Destructor newHead) { <add> Destructor oldHead; <add> do { <add> oldHead = mHead.get(); <add> newHead.next = oldHead; <add> } while (!mHead.compareAndSet(oldHead, newHead)); <add> } <add> <add> public void transferAllToList() { <add> Destructor current = mHead.getAndSet(null); <add> while (current != null) { <add> Destructor next = current.next; <add> sDestructorList.enqueue(current); <add> current = next; <add> } <add> } <add> } <add> <add> /** A doubly-linked list of Destructors. */ <add> private static class DestructorList { <add> private Destructor mHead; <add> <add> public DestructorList() { <add> mHead = new Terminus(); <add> mHead.next = new Terminus(); <add> mHead.next.previous = mHead; <add> } <add> <add> public void enqueue(Destructor current) { <add> current.next = mHead.next; <add> mHead.next = current; <add> <add> current.next.previous = current; <add> current.previous = mHead; <add> } <add> <add> private static void drop(Destructor current) { <add> current.next.previous = current.previous; <add> current.previous.next = current.next; <add> } <add> } <add>} <ide><path>ReactAndroid/src/main/java/com/facebook/jni/HybridClassBase.java <add>// Copyright 2004-present Facebook. All Rights Reserved. <add> <add>package com.facebook.jni; <add>import com.facebook.proguard.annotations.DoNotStrip; <add> <add>@DoNotStrip <add>public abstract class HybridClassBase extends HybridData { <add>} <ide><path>ReactAndroid/src/main/java/com/facebook/jni/HybridData.java <ide> <ide> package com.facebook.jni; <ide> <add>import android.util.Log; <add> <ide> import com.facebook.proguard.annotations.DoNotStrip; <ide> import com.facebook.soloader.SoLoader; <ide> <ide> * <ide> * NB: THREAD SAFETY <ide> * <del> * {@link #dispose} deletes the corresponding native object on whatever thread <del> * the method is called on. In the common case when this is called by <del> * HybridData#finalize(), this will be called on the system finalizer <del> * thread. If you manually call resetNative() on the Java object, the C++ <del> * object will be deleted synchronously on that thread. <add> * {@link #resetNative} deletes the corresponding native object synchronously on whatever thread <add> * the method is called on. Otherwise, deletion will occur on the {@link DestructorThread} <add> * thread. <ide> */ <ide> @DoNotStrip <ide> public class HybridData { <ide> public class HybridData { <ide> SoLoader.loadLibrary("fb"); <ide> } <ide> <del> // Private C++ instance <ide> @DoNotStrip <del> private long mNativePointer = 0; <add> private Destructor mDestructor = new Destructor(this); <ide> <ide> /** <ide> * To explicitly delete the instance, call resetNative(). If the C++ <ide> * instance is referenced after this is called, a NullPointerException will <ide> * be thrown. resetNative() may be called multiple times safely. 
Because <del> * {@link #finalize} calls resetNative, the instance will not leak if this is <add> * the {@link DestructorThread} also calls resetNative, the instance will not leak if this is <ide> * not called, but timing of deletion and the thread the C++ dtor is called <ide> * on will be at the whim of the Java GC. If you want to control the thread <ide> * and timing of the destructor, you should call resetNative() explicitly. <ide> */ <del> public native void resetNative(); <del> <del> protected void finalize() throws Throwable { <del> resetNative(); <del> super.finalize(); <add> public synchronized void resetNative() { <add> mDestructor.destruct(); <ide> } <ide> <add> /** <add> * N.B. Thread safety. <add> * If you call isValid from a different thread than {@link #resetNative()} then be sure to <add> * do so while synchronizing on the hybrid. For example: <add> * <pre><code> <add> * synchronized(hybrid) { <add> * if (hybrid.isValid) { <add> * // Do stuff. <add> * } <add> * } <add> * </code></pre> <add> */ <ide> public boolean isValid() { <del> return mNativePointer != 0; <add> return mDestructor.mNativePointer != 0; <add> } <add> <add> public static class Destructor extends DestructorThread.Destructor { <add> <add> // Private C++ instance <add> @DoNotStrip <add> private long mNativePointer; <add> <add> Destructor(Object referent) { <add> super(referent); <add> } <add> <add> @Override <add> void destruct() { <add> // When invoked from the DestructorThread instead of resetNative, <add> // the DestructorThread has exclusive ownership of the HybridData <add> // so synchronization is not necessary. <add> deleteNative(mNativePointer); <add> mNativePointer = 0; <add> } <add> <add> static native void deleteNative(long pointer); <ide> } <ide> } <ide><path>ReactAndroid/src/main/java/com/facebook/jni/JniTerminateHandler.java <add>// Copyright 2004-present Facebook. All Rights Reserved. <add> <add>package com.facebook.jni; <add> <add>public class JniTerminateHandler { <add> public static void handleTerminate(Throwable t) throws Throwable { <add> Thread.UncaughtExceptionHandler h = Thread.getDefaultUncaughtExceptionHandler(); <add> if (h == null) { <add> // Odd. Let the default std::terminate_handler deal with it. <add> return; <add> } <add> h.uncaughtException(Thread.currentThread(), t); <add> // That should exit. If it doesn't, let the default handler deal with it. <add> } <add>}
5
Text
Text
add redux-mock-store to ecosystem docs
2d8c771c6b07748a329a43b743c7d740b794dd36
<ide><path>docs/introduction/Ecosystem.md <ide> On this page we will only feature a few of them that the Redux maintainers have <ide> * [redux-transducers](https://github.com/acdlite/redux-transducers) — Transducer utilities for Redux <ide> * [redux-immutablejs](https://github.com/indexiatech/redux-immutablejs) — Integration tools between Redux and [Immutable](https://github.com/facebook/immutable-js/) <ide> * [redux-tcomb](https://github.com/gcanti/redux-tcomb) — Immutable and type-checked state and actions for Redux <add>* [redux-mock-store](https://github.com/arnaudbenard/redux-mock-store) - Mock redux store for testing your app <ide> <ide> ## Developer Tools <ide>
1
Javascript
Javascript
name anonymous functions in image.js
11098d66dc6868759940b99eb54ac90b463f175e
<ide><path>src/image.js <ide> var PDFImage = (function pdfImage() { <ide> return constructor; <ide> })(); <ide> <del>var JpegImage = (function() { <add>var JpegImage = (function jpegImage() { <ide> function JpegImage(objId, imageData, objs) { <ide> var src = 'data:image/jpeg;base64,' + window.btoa(imageData); <ide> <ide> var img = new Image(); <del> img.onload = (function() { <add> img.onload = (function jpegImageOnload() { <ide> this.loaded = true; <ide> <ide> objs.resolve(objId, this); <ide> var JpegImage = (function() { <ide> } <ide> <ide> JpegImage.prototype = { <del> getImage: function() { <add> getImage: function jpegImageGetImage() { <ide> return this.domImage; <ide> } <ide> };
1
Python
Python
remove special-casing for proxy/unmanaged models
45e5eedea99ed5aaa1df8ab505527566097e2328
<ide><path>django/db/backends/schema.py <ide> def effective_default(self, field): <ide> <ide> # Actions <ide> <del> def create_model(self, model, force=False): <add> def create_model(self, model): <ide> """ <ide> Takes a model and creates a table for it in the database. <ide> Will also create any accompanying indexes or unique constraints. <ide> """ <del> # Do nothing if this is an unmanaged or proxy model <del> if not force and (not model._meta.managed or model._meta.proxy): <del> return <ide> # Create column SQL, add FK deferreds if needed <ide> column_sqls = [] <ide> params = [] <ide> def create_model(self, model, force=False): <ide> self.execute(sql, params) <ide> # Make M2M tables <ide> for field in model._meta.local_many_to_many: <del> self.create_model(field.rel.through, force=True) <add> self.create_model(field.rel.through) <ide> <del> def delete_model(self, model, force=False): <add> def delete_model(self, model): <ide> """ <ide> Deletes a model from the database. <ide> """ <del> # Do nothing if this is an unmanaged or proxy model <del> if not force and (not model._meta.managed or model._meta.proxy): <del> return <ide> # Delete the table <ide> self.execute(self.sql_delete_table % { <ide> "table": self.quote_name(model._meta.db_table), <ide> def create_field(self, model, field, keep_default=False): <ide> """ <ide> # Special-case implicit M2M tables <ide> if isinstance(field, ManyToManyField) and field.rel.through._meta.auto_created: <del> return self.create_model(field.rel.through, force=True) <add> return self.create_model(field.rel.through) <ide> # Get the column's definition <ide> definition, params = self.column_sql(model, field, include_default=True) <ide> # It might not actually have a column behind it
1
Javascript
Javascript
use capital letters in comments
dc1737881e66acbbfc9cce71a5008e1a2838b0c7
<ide><path>lib/internal/bootstrap/node.js <ide> <ide> setupProcessObject(); <ide> <del> // do this good and early, since it handles errors. <add> // Do this good and early, since it handles errors. <ide> setupProcessFatal(); <ide> <ide> setupV8(); <ide> }); <ide> process.argv[0] = process.execPath; <ide> <del> // Handle `--debug*` deprecation and invalidation <add> // Handle `--debug*` deprecation and invalidation. <ide> if (process._invalidDebug) { <ide> process.emitWarning( <ide> '`node --debug` and `node --debug-brk` are invalid. ' + <ide> 'DeprecationWarning', 'DEP0068'); <ide> } <ide> <del> // Start the debugger agent <add> // Start the debugger agent. <ide> process.nextTick(function() { <ide> NativeModule.require('internal/deps/node-inspect/lib/_inspect').start(); <ide> }); <ide> NativeModule.require('internal/v8_prof_processor'); <ide> <ide> } else { <del> // There is user code to be run <add> // There is user code to be run. <ide> <ide> // If this is a worker in cluster mode, start up the communication <ide> // channel. This needs to be done before any user code gets executed <ide> perf.markMilestone(NODE_PERFORMANCE_MILESTONE_MODULE_LOAD_START); <ide> perf.markMilestone(NODE_PERFORMANCE_MILESTONE_MODULE_LOAD_END); <ide> // User passed '-e' or '--eval' arguments to Node without '-i' or <del> // '--interactive' <add> // '--interactive'. <ide> <ide> perf.markMilestone( <ide> NODE_PERFORMANCE_MILESTONE_PRELOAD_MODULE_LOAD_START); <ide> evalScript('[eval]'); <ide> } else if (process.argv[1] && process.argv[1] !== '-') { <ide> perf.markMilestone(NODE_PERFORMANCE_MILESTONE_MODULE_LOAD_START); <del> // make process.argv[1] into a full path <add> // Make process.argv[1] into a full path. <ide> const path = NativeModule.require('path'); <ide> process.argv[1] = path.resolve(process.argv[1]); <ide> <ide> preloadModules(); <ide> perf.markMilestone( <ide> NODE_PERFORMANCE_MILESTONE_PRELOAD_MODULE_LOAD_END); <del> // check if user passed `-c` or `--check` arguments to Node. <add> // Check if user passed `-c` or `--check` arguments to Node. <ide> if (process._syntax_check_only != null) { <ide> const fs = NativeModule.require('fs'); <del> // read the source <add> // Read the source. <ide> const filename = CJSModule._resolveFilename(process.argv[1]); <ide> const source = fs.readFileSync(filename, 'utf-8'); <ide> checkScriptSyntax(source, filename); <ide> function setupGlobalConsole() { <ide> const originalConsole = global.console; <ide> const CJSModule = NativeModule.require('internal/modules/cjs/loader'); <del> // Setup Node.js global.console <add> // Setup Node.js global.console. <ide> const wrappedConsole = NativeModule.require('console'); <ide> Object.defineProperty(global, 'console', { <ide> configurable: true, <ide> return; <ide> } <ide> const { addCommandLineAPI, consoleCall } = process.binding('inspector'); <del> // Setup inspector command line API <add> // Setup inspector command line API. <ide> const { makeRequireFunction } = <ide> NativeModule.require('internal/modules/cjs/helpers'); <ide> const path = NativeModule.require('path'); <ide> exceptionHandlerState.captureFn(er); <ide> } else if (!process.emit('uncaughtException', er)) { <ide> // If someone handled it, then great. otherwise, die in C++ land <del> // since that means that we'll exit the process, emit the 'exit' event <add> // since that means that we'll exit the process, emit the 'exit' event. 
<ide> try { <ide> if (!process._exiting) { <ide> process._exiting = true; <ide> process.emit('exit', 1); <ide> } <del> } catch (er) { <del> // nothing to be done about it at this point. <add> } catch { <add> // Nothing to be done about it at this point. <ide> } <ide> try { <ide> const { kExpandStackSymbol } = NativeModule.require('internal/util'); <ide> } <ide> <ide> // If we handled an error, then make sure any ticks get processed <del> // by ensuring that the next Immediate cycle isn't empty <add> // by ensuring that the next Immediate cycle isn't empty. <ide> NativeModule.require('timers').setImmediate(noop); <ide> <ide> // Emit the after() hooks now that the exception has been handled. <ide> process._tickCallback(); <ide> } <ide> <del> // Load preload modules <add> // Load preload modules. <ide> function preloadModules() { <ide> if (process._preload_modules) { <ide> const { <ide> stripShebang, stripBOM <ide> } = NativeModule.require('internal/modules/cjs/helpers'); <ide> <del> // remove Shebang <add> // Remove Shebang. <ide> source = stripShebang(source); <del> // remove BOM <add> // Remove BOM. <ide> source = stripBOM(source); <del> // wrap it <add> // Wrap it. <ide> source = CJSModule.wrap(source); <del> // compile the script, this will throw if it fails <add> // Compile the script, this will throw if it fails. <ide> new vm.Script(source, { displayErrors: true, filename }); <ide> } <ide>
1
PHP
PHP
fix cs error
7fc4cfe3ae7d4c523331a44e2862bab5c8f44f1e
<ide><path>tests/TestCase/ORM/QueryTest.php <ide> public function testCleanCopyBeforeFind() { <ide> $table = TableRegistry::get('Articles'); <ide> $table->hasMany('Comments'); <ide> $table->eventManager() <del> ->attach(function($event, $query) { <add> ->attach(function ($event, $query) { <ide> $query <ide> ->limit(5) <ide> ->order(['Articles.title' => 'DESC']);
1
Text
Text
fix code blocks
e2119f3050b225ccaf25990e5d6714dae2de7afe
<ide><path>docs/sources/installation/google.md <ide> page_keywords: Docker, Docker documentation, installation, google, Google Comput <ide> 2. Download and configure the [Google Cloud SDK][3] to use your <ide> project with the following commands: <ide> <del> ``` <del> $ curl https://dl.google.com/dl/cloudsdk/release/install_google_cloud_sdk.bash | bash <del> $ gcloud auth login <del> Enter a cloud project id (or leave blank to not set): <google-cloud-project-id> <del> ... <del> ``` <add> $ curl https://dl.google.com/dl/cloudsdk/release/install_google_cloud_sdk.bash | bash <add> $ gcloud auth login <add> Enter a cloud project id (or leave blank to not set): <google-cloud-project-id> <add> ... <ide> <ide> 3. Start a new instance using the latest [Container-optimized image][4]: <ide> (select a zone close to you and the desired instance size) <ide> <del> ``` <del> $ gcloud compute instances create docker-playground \ <del> --image projects/google-containers/global/images/container-vm-v20140522 \ <del> --zone us-central1-a \ <del> --machine-type f1-micro <del> ``` <add> $ gcloud compute instances create docker-playground \ <add> --image projects/google-containers/global/images/container-vm-v20140522 \ <add> --zone us-central1-a \ <add> --machine-type f1-micro <ide> <ide> 4. Connect to the instance using SSH: <ide> <del> ``` <del> $ gcloud compute ssh --zone us-central1-a docker-playground <del> ``` <del> ``` <del> docker-playground:~$ sudo docker run busybox echo 'docker on GCE \o/' <del> docker on GCE \o/ <del> ``` <add> $ gcloud compute ssh --zone us-central1-a docker-playground <add> docker-playground:~$ sudo docker run busybox echo 'docker on GCE \o/' <add> docker on GCE \o/ <ide> <ide> Read more about [deploying Containers on Google Cloud Platform][5]. <ide>
1
PHP
PHP
remove unnecessary methods calls
5a43435325e53560408543c81c8849bda87365e4
<ide><path>src/Controller/Controller.php <ide> public function __construct(ServerRequest $request = null, Response $response = <ide> } <ide> <ide> $this->setRequest($request ?: new ServerRequest()); <del> $this->setResponse($response ?: new Response()); <add> $this->response = $response ?: new Response(); <ide> <ide> if ($eventManager !== null) { <ide> $this->setEventManager($eventManager); <ide> public function redirect($url, $status = 302) <ide> */ <ide> public function setAction($action, ...$args) <ide> { <del> $this->setRequest($this->getRequest()->withParam('action', $action)); <add> $this->setRequest($this->request->withParam('action', $action)); <ide> <ide> return $this->$action(...$args); <ide> } <ide> public function render($view = null, $layout = null) <ide> $builder->setTemplatePath($this->_viewPath()); <ide> } <ide> <del> if ($this->getRequest()->getParam('bare')) { <add> if ($this->request->getParam('bare')) { <ide> $builder->enableAutoLayout(false); <ide> } <ide> $this->autoRender = false; <ide> public function render($view = null, $layout = null) <ide> return $event->getResult(); <ide> } <ide> if ($event->isStopped()) { <del> return $this->getResponse(); <add> return $this->response; <ide> } <ide> <del> if ($builder->getTemplate() === null && $this->getRequest()->getParam('action')) { <del> $builder->setTemplate($this->getRequest()->getParam('action')); <add> if ($builder->getTemplate() === null && $this->request->getParam('action')) { <add> $builder->setTemplate($this->request->getParam('action')); <ide> } <ide> <ide> $this->View = $this->createView(); <ide> $contents = $this->View->render($view, $layout); <del> $this->setResponse($this->View->response->withStringBody($contents)); <add> $this->response = $this->View->response->withStringBody($contents); <ide> <del> return $this->getResponse(); <add> return $this->response; <ide> } <ide> <ide> /** <ide> public function render($view = null, $layout = null) <ide> protected function _viewPath() <ide> { <ide> $viewPath = $this->name; <del> if ($this->getRequest()->getParam('prefix')) { <add> if ($this->request->getParam('prefix')) { <ide> $prefixes = array_map( <ide> 'Cake\Utility\Inflector::camelize', <del> explode('/', $this->getRequest()->getParam('prefix')) <add> explode('/', $this->request->getParam('prefix')) <ide> ); <ide> $viewPath = implode(DIRECTORY_SEPARATOR, $prefixes) . DIRECTORY_SEPARATOR . $viewPath; <ide> } <ide> protected function _viewPath() <ide> */ <ide> public function referer($default = null, $local = false) <ide> { <del> if (!$this->getRequest()) { <add> if (!$this->request) { <ide> return Router::url($default, !$local); <ide> } <ide> <del> $referer = $this->getRequest()->referer($local); <add> $referer = $this->request->referer($local); <ide> if ($referer === '/' && $default && $default !== $referer) { <ide> $url = Router::url($default, !$local); <del> $base = $this->getRequest()->getAttribute('base'); <add> $base = $this->request->getAttribute('base'); <ide> if ($local && $base && strpos($url, $base) === 0) { <ide> $url = substr($url, strlen($base)); <ide> if ($url[0] !== '/') {
1
Java
Java
simplify hashmap declaration in test fixture
aaa10e9060d274b346ca9875864733d2e83de094
<ide><path>spring-aop/src/testFixtures/java/org/springframework/aop/testfixture/advice/MethodCounter.java <ide> import java.io.Serializable; <ide> import java.lang.reflect.Method; <ide> import java.util.HashMap; <add>import java.util.Map; <ide> <ide> /** <ide> * Abstract superclass for counting advices etc. <ide> public class MethodCounter implements Serializable { <ide> <ide> /** Method name --> count, does not understand overloading */ <del> private HashMap<String, Integer> map = new HashMap<>(); <add> private Map<String, Integer> map = new HashMap<>(); <ide> <ide> private int allCount; <ide>
1
Text
Text
add @shogunpanda to collaborators
6a51306213cf8f6693d5a817fbd6f14361d84aac
<ide><path>README.md <ide> For information about the governance of the Node.js project, see <ide> **Santiago Gimeno** <<santiago.gimeno@gmail.com>> <ide> * [shisama](https://github.com/shisama) - <ide> **Masashi Hirano** <<shisama07@gmail.com>> (he/him) <add>* [ShogunPanda](https://github.com/ShogunPanda) - <add> **Paolo Insogna** <<paolo@cowtech.it>> (he/him) <ide> * [srl295](https://github.com/srl295) - <ide> **Steven R Loomis** <<srloomis@us.ibm.com>> <ide> * [starkwang](https://github.com/starkwang) -
1
Go
Go
set timeout on splunk batch send
24087399d95d60be4184b9ed3eba56466878b4e1
<ide><path>daemon/logger/splunk/splunk.go <ide> package splunk <ide> import ( <ide> "bytes" <ide> "compress/gzip" <add> "context" <ide> "crypto/tls" <ide> "crypto/x509" <ide> "encoding/json" <ide> const ( <ide> envVarStreamChannelSize = "SPLUNK_LOGGING_DRIVER_CHANNEL_SIZE" <ide> ) <ide> <add>var batchSendTimeout = 30 * time.Second <add> <ide> type splunkLoggerInterface interface { <ide> logger.Logger <ide> worker() <ide> func (l *splunkLogger) worker() { <ide> <ide> func (l *splunkLogger) postMessages(messages []*splunkMessage, lastChance bool) []*splunkMessage { <ide> messagesLen := len(messages) <add> <add> ctx, cancel := context.WithTimeout(context.Background(), batchSendTimeout) <add> defer cancel() <add> <ide> for i := 0; i < messagesLen; i += l.postMessagesBatchSize { <ide> upperBound := i + l.postMessagesBatchSize <ide> if upperBound > messagesLen { <ide> upperBound = messagesLen <ide> } <del> if err := l.tryPostMessages(messages[i:upperBound]); err != nil { <del> logrus.Error(err) <add> <add> if err := l.tryPostMessages(ctx, messages[i:upperBound]); err != nil { <add> logrus.WithError(err).WithField("module", "logger/splunk").Warn("Error while sending logs") <ide> if messagesLen-i >= l.bufferMaximum || lastChance { <ide> // If this is last chance - print them all to the daemon log <ide> if lastChance { <ide> func (l *splunkLogger) postMessages(messages []*splunkMessage, lastChance bool) <ide> return messages[:0] <ide> } <ide> <del>func (l *splunkLogger) tryPostMessages(messages []*splunkMessage) error { <add>func (l *splunkLogger) tryPostMessages(ctx context.Context, messages []*splunkMessage) error { <ide> if len(messages) == 0 { <ide> return nil <ide> } <ide> func (l *splunkLogger) tryPostMessages(messages []*splunkMessage) error { <ide> if err != nil { <ide> return err <ide> } <add> req = req.WithContext(ctx) <ide> req.Header.Set("Authorization", l.auth) <ide> // Tell if we are sending gzip compressed body <ide> if l.gzipCompression { <ide><path>daemon/logger/splunk/splunk_test.go <ide> package splunk <ide> <ide> import ( <ide> "compress/gzip" <add> "context" <ide> "fmt" <ide> "os" <add> "runtime" <ide> "testing" <ide> "time" <ide> <ide> func TestSkipVerify(t *testing.T) { <ide> t.Fatal("No messages should be accepted at this point") <ide> } <ide> <del> hec.simulateServerError = false <add> hec.simulateErr(false) <ide> <ide> for i := defaultStreamChannelSize * 2; i < defaultStreamChannelSize*4; i++ { <ide> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil { <ide> func TestBufferMaximum(t *testing.T) { <ide> } <ide> <ide> hec := NewHTTPEventCollectorMock(t) <del> hec.simulateServerError = true <add> hec.simulateErr(true) <ide> go hec.Serve() <ide> <ide> info := logger.Info{ <ide> func TestCannotSendAfterClose(t *testing.T) { <ide> t.Fatal(err) <ide> } <ide> } <add> <add>func TestDeadlockOnBlockedEndpoint(t *testing.T) { <add> hec := NewHTTPEventCollectorMock(t) <add> go hec.Serve() <add> info := logger.Info{ <add> Config: map[string]string{ <add> splunkURLKey: hec.URL(), <add> splunkTokenKey: hec.token, <add> }, <add> ContainerID: "containeriid", <add> ContainerName: "/container_name", <add> ContainerImageID: "contaimageid", <add> ContainerImageName: "container_image_name", <add> } <add> <add> l, err := New(info) <add> if err != nil { <add> t.Fatal(err) <add> } <add> <add> ctx, unblock := context.WithCancel(context.Background()) <add> hec.withBlock(ctx) <add> defer unblock() <add> <add> 
batchSendTimeout = 1 * time.Second <add> <add> if err := l.Log(&logger.Message{}); err != nil { <add> t.Fatal(err) <add> } <add> <add> done := make(chan struct{}) <add> go func() { <add> l.Close() <add> close(done) <add> }() <add> <add> select { <add> case <-time.After(60 * time.Second): <add> buf := make([]byte, 1e6) <add> buf = buf[:runtime.Stack(buf, true)] <add> t.Logf("STACK DUMP: \n\n%s\n\n", string(buf)) <add> t.Fatal("timeout waiting for close to finish") <add> case <-done: <add> } <add>} <ide><path>daemon/logger/splunk/splunkhecmock_test.go <ide> package splunk <ide> <ide> import ( <ide> "compress/gzip" <add> "context" <ide> "encoding/json" <ide> "fmt" <ide> "io" <ide> "io/ioutil" <ide> "net" <ide> "net/http" <add> "sync" <ide> "testing" <ide> ) <ide> <ide> type HTTPEventCollectorMock struct { <ide> tcpAddr *net.TCPAddr <ide> tcpListener *net.TCPListener <ide> <add> mu sync.Mutex <ide> token string <ide> simulateServerError bool <add> blockingCtx context.Context <ide> <ide> test *testing.T <ide> <ide> func NewHTTPEventCollectorMock(t *testing.T) *HTTPEventCollectorMock { <ide> connectionVerified: false} <ide> } <ide> <add>func (hec *HTTPEventCollectorMock) simulateErr(b bool) { <add> hec.mu.Lock() <add> hec.simulateServerError = b <add> hec.mu.Unlock() <add>} <add> <add>func (hec *HTTPEventCollectorMock) withBlock(ctx context.Context) { <add> hec.mu.Lock() <add> hec.blockingCtx = ctx <add> hec.mu.Unlock() <add>} <add> <ide> func (hec *HTTPEventCollectorMock) URL() string { <ide> return "http://" + hec.tcpListener.Addr().String() <ide> } <ide> func (hec *HTTPEventCollectorMock) ServeHTTP(writer http.ResponseWriter, request <ide> <ide> hec.numOfRequests++ <ide> <del> if hec.simulateServerError { <add> hec.mu.Lock() <add> simErr := hec.simulateServerError <add> ctx := hec.blockingCtx <add> hec.mu.Unlock() <add> <add> if ctx != nil { <add> <-hec.blockingCtx.Done() <add> } <add> <add> if simErr { <ide> if request.Body != nil { <ide> defer request.Body.Close() <ide> }
3
Text
Text
avoid incomplete sentence in cluster docs
0172d1d48cb6942e82cf4be98a1fa048ab87454e
<ide><path>doc/api/cluster.md <ide> added: v0.7.0 <ide> <ide> * {Object} <ide> <del>A hash that stores the active worker objects, keyed by `id` field. Makes it <add>A hash that stores the active worker objects, keyed by `id` field. This makes it <ide> easy to loop through all the workers. It is only available in the primary <ide> process. <ide>
1
PHP
PHP
fix more tests in sqlserver
7695ddf08893a8a2bdad85819a66fc047178cc35
<ide><path>tests/TestCase/Http/Session/DatabaseSessionTest.php <ide> public function setUp(): void <ide> parent::setUp(); <ide> static::setAppNamespace(); <ide> $this->storage = new DatabaseSession(); <add> <add> // With metadata caching on SQLServer/windows tests fail. <add> ConnectionManager::get('test')->cacheMetadata(false); <ide> } <ide> <ide> /** <ide><path>tests/TestCase/ORM/ColumnSchemaAwareTypeIntegrationTest.php <ide> public function testCustomTypeReceivesAllColumnDefinitionKeys() <ide> }); <ide> <ide> TypeFactory::set('text', $type); <add> TypeFactory::set('nvarchar', $type); <ide> <ide> $table->getSchema()->getColumn('val'); <ide> }
2
Javascript
Javascript
fix incorrect var usage in replacewith
2eccb0a87af11e4ebbded6d0aa41c20ebf8c1f85
<ide><path>packages/ember-routing/lib/system/route.js <ide> Ember.Route = Ember.Object.extend(Ember.ActionHandler, { <ide> */ <ide> replaceWith: function() { <ide> var router = this.router; <del> return this.router.replaceWith.apply(this.router, arguments); <add> return router.replaceWith.apply(router, arguments); <ide> }, <ide> <ide> /**
1
Go
Go
close the returned io.readcloser
91a496055c3e45ab3fba8e643475adb618581e1f
<ide><path>api/client/attach.go <ide> func (cli *DockerCli) CmdAttach(args ...string) error { <ide> return err <ide> } <ide> <add> defer stream.Close() <add> <ide> var c types.ContainerJSON <ide> if err := json.NewDecoder(stream).Decode(&c); err != nil { <ide> return err <ide><path>api/client/commit.go <ide> func (cli *DockerCli) CmdCommit(args ...string) error { <ide> return err <ide> } <ide> <add> defer stream.Close() <add> <ide> if err := json.NewDecoder(stream).Decode(&response); err != nil { <ide> return err <ide> } <ide><path>api/client/create.go <ide> func (cli *DockerCli) createContainer(config *runconfig.Config, hostConfig *runc <ide> return nil, err <ide> } <ide> <add> defer stream.Close() <add> <ide> var response types.ContainerCreateResponse <ide> if err := json.NewDecoder(stream).Decode(&response); err != nil { <ide> return nil, err <ide><path>api/client/diff.go <ide> func (cli *DockerCli) CmdDiff(args ...string) error { <ide> return err <ide> } <ide> <add> defer rdr.Close() <add> <ide> changes := []types.ContainerChange{} <ide> if err := json.NewDecoder(rdr).Decode(&changes); err != nil { <ide> return err <ide><path>api/client/exec.go <ide> func (cli *DockerCli) CmdExec(args ...string) error { <ide> return err <ide> } <ide> <add> defer stream.Close() <add> <ide> var response types.ContainerExecCreateResponse <ide> if err := json.NewDecoder(stream).Decode(&response); err != nil { <ide> return err <ide><path>api/client/history.go <ide> func (cli *DockerCli) CmdHistory(args ...string) error { <ide> return err <ide> } <ide> <add> defer rdr.Close() <add> <ide> history := []types.ImageHistory{} <ide> if err := json.NewDecoder(rdr).Decode(&history); err != nil { <ide> return err <ide><path>api/client/images.go <ide> func (cli *DockerCli) CmdImages(args ...string) error { <ide> return err <ide> } <ide> <add> defer rdr.Close() <add> <ide> images := []types.Image{} <ide> if err := json.NewDecoder(rdr).Decode(&images); err != nil { <ide> return err <ide><path>api/client/info.go <ide> func (cli *DockerCli) CmdInfo(args ...string) error { <ide> return err <ide> } <ide> <add> defer rdr.Close() <add> <ide> info := &types.Info{} <ide> if err := json.NewDecoder(rdr).Decode(info); err != nil { <ide> return fmt.Errorf("Error reading remote info: %v", err) <ide><path>api/client/login.go <ide> func (cli *DockerCli) CmdLogin(args ...string) error { <ide> return err <ide> } <ide> <add> defer stream.Close() <add> <ide> var response types.AuthResponse <ide> if err := json.NewDecoder(stream).Decode(&response); err != nil { <ide> // Upon error, remove entry <ide><path>api/client/port.go <ide> func (cli *DockerCli) CmdPort(args ...string) error { <ide> return err <ide> } <ide> <add> defer stream.Close() <add> <ide> var c struct { <ide> NetworkSettings struct { <ide> Ports nat.PortMap <ide><path>api/client/ps.go <ide> func (cli *DockerCli) CmdPs(args ...string) error { <ide> return err <ide> } <ide> <add> defer rdr.Close() <add> <ide> containers := []types.Container{} <ide> if err := json.NewDecoder(rdr).Decode(&containers); err != nil { <ide> return err <ide><path>api/client/rmi.go <ide> func (cli *DockerCli) CmdRmi(args ...string) error { <ide> fmt.Fprintf(cli.err, "%s\n", err) <ide> errNames = append(errNames, name) <ide> } else { <add> defer rdr.Close() <add> <ide> dels := []types.ImageDelete{} <ide> if err := json.NewDecoder(rdr).Decode(&dels); err != nil { <ide> fmt.Fprintf(cli.err, "%s\n", err) <ide><path>api/client/search.go <ide> func (cli *DockerCli) CmdSearch(args ...string) error { <ide> 
return err <ide> } <ide> <add> defer rdr.Close() <add> <ide> results := ByStars{} <ide> if err := json.NewDecoder(rdr).Decode(&results); err != nil { <ide> return err <ide><path>api/client/start.go <ide> func (cli *DockerCli) CmdStart(args ...string) error { <ide> return err <ide> } <ide> <add> defer stream.Close() <add> <ide> var c types.ContainerJSON <ide> if err := json.NewDecoder(stream).Decode(&c); err != nil { <ide> return err <ide><path>api/client/stats.go <ide> func (s *containerStats) Collect(cli *DockerCli, streamStats bool) { <ide> s.mu.Unlock() <ide> return <ide> } <add> <ide> defer stream.Close() <add> <ide> var ( <ide> previousCPU uint64 <ide> previousSystem uint64 <ide><path>api/client/top.go <ide> func (cli *DockerCli) CmdTop(args ...string) error { <ide> return err <ide> } <ide> <add> defer stream.Close() <add> <ide> procList := types.ContainerProcessList{} <ide> if err := json.NewDecoder(stream).Decode(&procList); err != nil { <ide> return err <ide><path>api/client/utils.go <ide> func waitForExit(cli *DockerCli, containerID string) (int, error) { <ide> return -1, err <ide> } <ide> <add> defer stream.Close() <add> <ide> var res types.ContainerWaitResponse <ide> if err := json.NewDecoder(stream).Decode(&res); err != nil { <ide> return -1, err <ide> func getExitCode(cli *DockerCli, containerID string) (bool, int, error) { <ide> return false, -1, nil <ide> } <ide> <add> defer stream.Close() <add> <ide> var c types.ContainerJSON <ide> if err := json.NewDecoder(stream).Decode(&c); err != nil { <ide> return false, -1, err <ide> func getExecExitCode(cli *DockerCli, execID string) (bool, int, error) { <ide> return false, -1, nil <ide> } <ide> <add> defer stream.Close() <add> <ide> //TODO: Should we reconsider having a type in api/types? <ide> //this is a response to exex/id/json not container <ide> var c struct { <ide><path>api/client/version.go <ide> func (cli *DockerCli) CmdVersion(args ...string) error { <ide> return err <ide> } <ide> <add> defer stream.Close() <add> <ide> var v types.Version <ide> if err := json.NewDecoder(stream).Decode(&v); err != nil { <ide> fmt.Fprintf(cli.err, "Error reading remote version: %s\n", err)
18
Java
Java
fix onaccessibilityaction on fabric
006527fea70e0d1ef64eee05c7ee9f98dfda8a4d
<ide><path>ReactAndroid/src/main/java/com/facebook/react/uimanager/ReactAccessibilityDelegate.java <ide> import com.facebook.react.bridge.UIManager; <ide> import com.facebook.react.bridge.WritableMap; <ide> import com.facebook.react.uimanager.ReactAccessibilityDelegate.AccessibilityRole; <add>import com.facebook.react.uimanager.common.ViewUtil; <ide> import com.facebook.react.uimanager.events.Event; <ide> import com.facebook.react.uimanager.events.EventDispatcher; <ide> import com.facebook.react.uimanager.util.ReactFindViewUtil; <ide> public boolean performAccessibilityAction(View host, int action, Bundle args) { <ide> if (reactContext.hasActiveReactInstance()) { <ide> final int reactTag = host.getId(); <ide> final int surfaceId = UIManagerHelper.getSurfaceId(reactContext); <del> UIManager uiManager = UIManagerHelper.getUIManager(reactContext, reactTag); <add> UIManager uiManager = <add> UIManagerHelper.getUIManager(reactContext, ViewUtil.getUIManagerType(reactTag)); <ide> if (uiManager != null) { <ide> uiManager <ide> .<EventDispatcher>getEventDispatcher()
1
Javascript
Javascript
fix tests that don't cleanup application instances
bbc901d90dc1aca1f926579deecbcca05b930214
<ide><path>packages/ember-application/tests/system/application_test.js <ide> QUnit.test("you cannot make two default applications without a rootElement error <ide> <ide> QUnit.test("acts like a namespace", function() { <ide> var lookup = Ember.lookup = {}; <del> var app; <ide> <ide> run(function() { <ide> app = lookup.TestApp = Application.create({ rootElement: '#two', router: false }); <ide><path>packages/ember-application/tests/system/initializers_test.js <ide> QUnit.test("initializers set on Application subclasses should not be shared betw <ide> var firstInitializerRunCount = 0; <ide> var secondInitializerRunCount = 0; <ide> var FirstApp = Application.extend(); <add> var firstApp, secondApp; <ide> FirstApp.initializer({ <ide> name: 'first', <ide> initialize(registry) { <ide> QUnit.test("initializers set on Application subclasses should not be shared betw <ide> }); <ide> jQuery('#qunit-fixture').html('<div id="first"></div><div id="second"></div>'); <ide> run(function() { <del> FirstApp.create({ <add> firstApp = FirstApp.create({ <ide> router: false, <ide> rootElement: '#qunit-fixture #first' <ide> }); <ide> }); <ide> equal(firstInitializerRunCount, 1, 'first initializer only was run'); <ide> equal(secondInitializerRunCount, 0, 'first initializer only was run'); <ide> run(function() { <del> SecondApp.create({ <add> secondApp = SecondApp.create({ <ide> router: false, <ide> rootElement: '#qunit-fixture #second' <ide> }); <ide> }); <ide> equal(firstInitializerRunCount, 1, 'second initializer only was run'); <ide> equal(secondInitializerRunCount, 1, 'second initializer only was run'); <add> run(function() { <add> firstApp.destroy(); <add> secondApp.destroy(); <add> }); <ide> }); <ide> <ide> QUnit.test("initializers are concatenated", function() { <ide> var firstInitializerRunCount = 0; <ide> var secondInitializerRunCount = 0; <ide> var FirstApp = Application.extend(); <add> var firstApp, secondApp; <ide> FirstApp.initializer({ <ide> name: 'first', <ide> initialize(registry) { <ide> QUnit.test("initializers are concatenated", function() { <ide> <ide> jQuery('#qunit-fixture').html('<div id="first"></div><div id="second"></div>'); <ide> run(function() { <del> FirstApp.create({ <add> firstApp = FirstApp.create({ <ide> router: false, <ide> rootElement: '#qunit-fixture #first' <ide> }); <ide> QUnit.test("initializers are concatenated", function() { <ide> equal(secondInitializerRunCount, 0, 'first initializer only was run when base class created'); <ide> firstInitializerRunCount = 0; <ide> run(function() { <del> SecondApp.create({ <add> secondApp = SecondApp.create({ <ide> router: false, <ide> rootElement: '#qunit-fixture #second' <ide> }); <ide> }); <ide> equal(firstInitializerRunCount, 1, 'first initializer was run when subclass created'); <ide> equal(secondInitializerRunCount, 1, 'second initializers was run when subclass created'); <add> run(function() { <add> firstApp.destroy(); <add> secondApp.destroy(); <add> }); <ide> }); <ide> <ide> QUnit.test("initializers are per-app", function() { <ide><path>packages/ember-application/tests/system/instance_initializers_test.js <ide> if (Ember.FEATURES.isEnabled('ember-application-instance-initializers')) { <ide> var firstInitializerRunCount = 0; <ide> var secondInitializerRunCount = 0; <ide> var FirstApp = Application.extend(); <add> var firstApp, secondApp; <add> <ide> FirstApp.instanceInitializer({ <ide> name: 'first', <ide> initialize(registry) { <ide> if (Ember.FEATURES.isEnabled('ember-application-instance-initializers')) { <ide> }); <ide> 
jQuery('#qunit-fixture').html('<div id="first"></div><div id="second"></div>'); <ide> run(function() { <del> FirstApp.create({ <add> firstApp = FirstApp.create({ <ide> router: false, <ide> rootElement: '#qunit-fixture #first' <ide> }); <ide> }); <ide> equal(firstInitializerRunCount, 1, 'first initializer only was run'); <ide> equal(secondInitializerRunCount, 0, 'first initializer only was run'); <ide> run(function() { <del> SecondApp.create({ <add> secondApp = SecondApp.create({ <ide> router: false, <ide> rootElement: '#qunit-fixture #second' <ide> }); <ide> }); <ide> equal(firstInitializerRunCount, 1, 'second initializer only was run'); <ide> equal(secondInitializerRunCount, 1, 'second initializer only was run'); <add> run(function() { <add> firstApp.destroy(); <add> secondApp.destroy(); <add> }); <add> <ide> }); <ide> <ide> QUnit.test("initializers are concatenated", function() { <ide> var firstInitializerRunCount = 0; <ide> var secondInitializerRunCount = 0; <ide> var FirstApp = Application.extend(); <add> var firstApp, secondApp; <add> <ide> FirstApp.instanceInitializer({ <ide> name: 'first', <ide> initialize(registry) { <ide> if (Ember.FEATURES.isEnabled('ember-application-instance-initializers')) { <ide> <ide> jQuery('#qunit-fixture').html('<div id="first"></div><div id="second"></div>'); <ide> run(function() { <del> FirstApp.create({ <add> firstApp = FirstApp.create({ <ide> router: false, <ide> rootElement: '#qunit-fixture #first' <ide> }); <ide> if (Ember.FEATURES.isEnabled('ember-application-instance-initializers')) { <ide> equal(secondInitializerRunCount, 0, 'first initializer only was run when base class created'); <ide> firstInitializerRunCount = 0; <ide> run(function() { <del> SecondApp.create({ <add> secondApp = SecondApp.create({ <ide> router: false, <ide> rootElement: '#qunit-fixture #second' <ide> }); <ide> }); <ide> equal(firstInitializerRunCount, 1, 'first initializer was run when subclass created'); <ide> equal(secondInitializerRunCount, 1, 'second initializers was run when subclass created'); <add> run(function() { <add> firstApp.destroy(); <add> secondApp.destroy(); <add> }); <ide> }); <ide> <ide> QUnit.test("initializers are per-app", function() {
3
Text
Text
use serial comma in net docs
737ca9a3e665426ddebf1199981301a7b3b94483
<ide><path>doc/api/net.md <ide> sockets on other operating systems. <ide> <ide> ### Identifying paths for IPC connections <ide> <del>[`net.connect()`][], [`net.createConnection()`][], [`server.listen()`][] and <add>[`net.connect()`][], [`net.createConnection()`][], [`server.listen()`][], and <ide> [`socket.connect()`][] take a `path` parameter to identify IPC endpoints. <ide> <ide> On Unix, the local domain is also known as the Unix domain. The path is a
1
Javascript
Javascript
throw error in $onchanges immediately
983e27b628fd1eab653e2b3966d90a270f27cc93
<ide><path>src/ng/compile.js <ide> function $CompileProvider($provide, $$sanitizeUriProvider) { <ide> } <ide> // We must run this hook in an apply since the $$postDigest runs outside apply <ide> $rootScope.$apply(function() { <del> var errors = []; <ide> for (var i = 0, ii = onChangesQueue.length; i < ii; ++i) { <ide> try { <ide> onChangesQueue[i](); <ide> } catch (e) { <del> errors.push(e); <add> $exceptionHandler(e); <ide> } <ide> } <ide> // Reset the queue to trigger a new schedule next time there is a change <ide> onChangesQueue = undefined; <del> if (errors.length) { <del> throw errors; <del> } <ide> }); <ide> } finally { <ide> onChangesTtl++; <ide><path>test/ng/compileSpec.js <ide> describe('$compile', function() { <ide> $rootScope.$apply('a = 42'); <ide> <ide> // The first component's error should be logged <del> var errors = $exceptionHandler.errors.pop(); <del> expect(errors[0]).toEqual(new Error('bad hook')); <add> expect($exceptionHandler.errors.pop()).toEqual(new Error('bad hook')); <ide> <ide> // The second component's changes should still be called <ide> expect($log.info.logs.pop()).toEqual(['onChange']); <ide> }); <ide> }); <ide> <ide> <del> it('should collect up all `$onChanges` errors into one throw', function() { <add> it('should throw `$onChanges` errors immediately', function() { <ide> function ThrowingController() { <ide> this.$onChanges = function(change) { <ide> throw new Error('bad hook: ' + this.prop); <ide> describe('$compile', function() { <ide> <ide> $rootScope.$apply('a = 42'); <ide> <del> // Both component's error should be logged <del> var errors = $exceptionHandler.errors.pop(); <del> expect(errors.pop()).toEqual(new Error('bad hook: 84')); <del> expect(errors.pop()).toEqual(new Error('bad hook: 42')); <add> // Both component's error should be logged individually <add> expect($exceptionHandler.errors.pop()).toEqual(new Error('bad hook: 84')); <add> expect($exceptionHandler.errors.pop()).toEqual(new Error('bad hook: 42')); <ide> }); <ide> }); <ide> });
2
Python
Python
release version in documentation
294edfd83d591f3ae841c993662fac6ec7924515
<ide><path>docs/source/conf.py <ide> # The short X.Y version <ide> version = u'' <ide> # The full version, including alpha/beta/rc tags <del>release = u'1.2.0' <add>release = u'2.0.0' <ide> <ide> <ide> # -- General configuration ---------------------------------------------------
1
Go
Go
support hairpin nat
95a400e6e1a3b5da68431e64f9902a3fac218360
<ide><path>pkg/iptables/iptables.go <ide> func (c *Chain) Forward(action Action, ip net.IP, port int, proto, dest_addr str <ide> "-p", proto, <ide> "-d", daddr, <ide> "--dport", strconv.Itoa(port), <del> "!", "-i", c.Bridge, <ide> "-j", "DNAT", <ide> "--to-destination", net.JoinHostPort(dest_addr, strconv.Itoa(dest_port))); err != nil { <ide> return err <ide> func (c *Chain) Forward(action Action, ip net.IP, port int, proto, dest_addr str <ide> return fmt.Errorf("Error iptables forward: %s", output) <ide> } <ide> <add> if output, err := Raw("-t", "nat", string(fAction), "POSTROUTING", <add> "-p", proto, <add> "-s", dest_addr, <add> "-d", dest_addr, <add> "--dport", strconv.Itoa(dest_port), <add> "-j", "MASQUERADE"); err != nil { <add> return err <add> } else if len(output) != 0 { <add> return fmt.Errorf("Error iptables forward: %s", output) <add> } <add> <ide> return nil <ide> } <ide>
1
Text
Text
add issues and requests guidelines
e9c09052a4bf8531bdb94c1789f4138986ebe630
<ide><path>CONTRIBUTING.md <add># How to contribute to transformers? <add> <add>Everyone is welcome to contribute, and we value everybody's contribution. Code <add>is thus not the only way to contribute. Answering questions, helping others, <add>reaching out and improving the documentations are immensely valuable to the <add>community. <add> <add>It also helps us if you spread the word: reference the library from blog posts <add>on the awesome projects it made possible, shout out on twitter every time it has <add>helped you, or simply star the repo to say "thank you". <add> <add>## You can contribute in so many ways! <add> <add>There are 4 ways you can contribute to transformers: <add>* Fixing outstanding issues with the existing code; <add>* Implementing new models; <add>* Contributing to the examples, or to the documentation; <add>* Submitting issues related to bugs or desired new features. <add> <add>*All are equally valuable to the community.* <add> <add>## Submitting a new issue or feature request <add> <add>Do your best to follow these guidelines when submitting an issue or a feature <add>request. It will make it easier for us to come back to you quickly and with good <add>feedback. <add> <add>### Did you find a bug? <add> <add>The transformers are robust and reliable thanks to the users who notify us of <add>the problems they encounter. <add> <add>So thank you for reporting an issue. First, we would really appreciate it if you <add>could **make sure the bug was not already reported** (use the search bar on <add>Github under Issues). <add> <add>Did not find it? :( So we can act quickly on it, please follow these steps: <add> <add>* Include your **OS type and version**, the versions of **Python**, **PyTorch** and <add> **Tensorflow** when applicable; <add>* A short, self-contained, code snippet that allows us to reproduce the bug in <add> less than 30s. <add>* Provide the *full* traceback if an exception is raised. <add> <add>To get the OS and software versions, execute the following code and copy-paste <add>the output: <add> <add>``` <add>import platform; print("Platform", platform.platform()) <add>import sys; print("Python", sys.version) <add>import torch; print("PyTorch", torch.__version__) <add>import tensorflow; print("Tensorflow", tensorflow.__version__) <add>``` <add> <add>### Do you want to implement a new model? <add> <add>Please provide the following: <add> <add>* Short description of the model and link to the paper <add>* Link to the implementation if open-source <add>* Link to the model weights if they are available <add> <add>Let us know if you are willing to contribute so we can best guide you. <add> <add>### Do you want a new feature (that is not a model)? <add> <add>A world-class feature request addresses the following points: <add> <add>1. Motivation first: <add> * Is it related to a problem/frustration with the library? If so, please explain <add> why. Providing a code snippet that demonstrates the problem is best. <add> * Is it related to something you would need for a project? We'd love to hear <add> about it! <add> * Is it something you worked on and think could benefit the community? <add> Awesome! Tell us what problem it solved for you. <add>2. Write a *full paragraph* describing the feature. <add>3. Provide a **code snippet** that demonstrates its future use. <add>4. In case this is related to a paper, please provide a link <add>5. Attach any additional information (drawings, screenshots, etc.) you think may help. 
<add> <add>If your issue is well-written we're already 80% of the way there by the time you <add>post it. <add> <add>## Contributing code <add> <add>## Contributing examples
1
Ruby
Ruby
add tests for hardcoded compilers in env
65ae6bacd8c92d718b259f7efd50fc3fe9f0838b
<ide><path>Library/Homebrew/rubocops/lines_cop.rb <ide> def audit_formula(_node, _class_node, _parent_class_node, body_node) <ide> problem "Use \"\#{ENV.cxx}\" instead of hard-coding \"#{match[2]}\"" <ide> end <ide> end <del> # <del> # find_instance_method_call(body_node, :ENV, :[]=) do |m| <del> # param = parameters(m)[1] <del> # if match = regex_match_group(param, %r{(/usr/bin/)?(gcc|llvm-gcc|clang)\s?}) <del> # problem "Use \"\#{ENV.cc}\" instead of hard-coding \"#{match[3]}\"" <del> # elsif match = regex_match_group(param, %r{(/usr/bin/)?((g|llvm-g|clang)\+\+)\s?}) <del> # problem "Use \"\#{ENV.cxx}\" instead of hard-coding \"#{match[3]}\"" <del> # end <del> # end <del> # <add> <add> find_instance_method_call(body_node, "ENV", :[]=) do |m| <add> param = parameters(m)[1] <add> if match = regex_match_group(param, %r{(/usr/bin/)?(gcc|llvm-gcc|clang)\s?}) <add> problem "Use \"\#{ENV.cc}\" instead of hard-coding \"#{match[2]}\"" <add> elsif match = regex_match_group(param, %r{(/usr/bin/)?((g|llvm-g|clang)\+\+)\s?}) <add> problem "Use \"\#{ENV.cxx}\" instead of hard-coding \"#{match[2]}\"" <add> end <add> end <add> <ide> # # Prefer formula path shortcuts in strings <ide> # formula_path_strings(body_node, :prefix) do |p| <ide> # next unless match = regex_match_group(p, %r{(/(man))[/'"]}) <ide><path>Library/Homebrew/test/rubocops/lines_cop_spec.rb <ide> def post_install <ide> class Foo < Formula <ide> desc "foo" <ide> url 'http://example.com/foo-1.0.tgz' <del> def test <add> def install <ide> verbose = ARGV.verbose? <ide> end <ide> end <ide> def test <ide> class Foo < Formula <ide> desc "foo" <ide> url 'http://example.com/foo-1.0.tgz' <del> def test <add> def install <ide> man1.install man+"man8" => "faad.1" <ide> end <ide> end <ide> def test <ide> class Foo < Formula <ide> desc "foo" <ide> url 'http://example.com/foo-1.0.tgz' <del> def test <add> def install <ide> system "/usr/bin/gcc", "foo" <ide> end <ide> end <ide> def test <ide> class Foo < Formula <ide> desc "foo" <ide> url 'http://example.com/foo-1.0.tgz' <del> def test <add> def install <ide> system "/usr/bin/g++", "-o", "foo", "foo.cc" <ide> end <ide> end <ide> def test <ide> expect_offense(expected, actual) <ide> end <ide> end <add> <add> it "with hardcoded compiler 3 " do <add> source = <<-EOS.undent <add> class Foo < Formula <add> desc "foo" <add> url 'http://example.com/foo-1.0.tgz' <add> def install <add> ENV["COMPILER_PATH"] = "/usr/bin/llvm-g++" <add> end <add> end <add> EOS <add> <add> expected_offenses = [{ message: "Use \"\#{ENV.cxx}\" instead of hard-coding \"llvm-g++\"", <add> severity: :convention, <add> line: 5, <add> column: 28, <add> source: source }] <add> <add> inspect_source(cop, source) <add> <add> expected_offenses.zip(cop.offenses).each do |expected, actual| <add> expect_offense(expected, actual) <add> end <add> end <add> <add> it "with hardcoded compiler 4 " do <add> source = <<-EOS.undent <add> class Foo < Formula <add> desc "foo" <add> url 'http://example.com/foo-1.0.tgz' <add> def install <add> ENV["COMPILER_PATH"] = "/usr/bin/gcc" <add> end <add> end <add> EOS <add> <add> expected_offenses = [{ message: "Use \"\#{ENV.cc}\" instead of hard-coding \"gcc\"", <add> severity: :convention, <add> line: 5, <add> column: 28, <add> source: source }] <add> <add> inspect_source(cop, source) <add> <add> expected_offenses.zip(cop.offenses).each do |expected, actual| <add> expect_offense(expected, actual) <add> end <add> end <ide> end <ide> def expect_offense(expected, actual) <ide> expect(actual.message).to 
eq(expected[:message])
2
Python
Python
fix tf.name_scope support for keras nested layers
25709a205dd97d1979b3949b81833d728d15b89d
<ide><path>keras/engine/base_layer.py <ide> import tensorflow.compat.v2 as tf <ide> <ide> import collections <add>import contextlib <ide> import copy <ide> import functools <ide> import itertools <ide> <ide> _is_name_scope_on_model_declaration_enabled = False <ide> <add>_name_scope_unnester_stack = threading.local() <add> <add> <add>@contextlib.contextmanager <add>def _name_scope_unnester(full_name_scope): <add> """Helper to get relative name scope from fully specified nested name scopes. <add> <add> Args: <add> full_name_scope: full(absolute) name scope path. <add> <add> Yields: <add> Relative name scope path from the parent `_name_scope_unnester` context <add> manager. <add> <add> Example: <add> ``` <add> with _name_scope_unnester('a') as name1: # name1 == 'a' <add> with _name_scope_unnester('a/b') as name2: # name2 == 'b' <add> with _name_scope_unnester('a/b/c') as name3: # name3 == 'c' <add> pass <add> ``` <add> """ <add> if not getattr(_name_scope_unnester_stack, 'value', None): <add> _name_scope_unnester_stack.value = [''] <add> <add> _name_scope_unnester_stack.value.append(full_name_scope) <add> <add> try: <add> full_name_scope = _name_scope_unnester_stack.value[-1] <add> outer_name_scope = _name_scope_unnester_stack.value[-2] <add> relative_name_scope = full_name_scope.lstrip(outer_name_scope) <add> relative_name_scope = relative_name_scope.lstrip('/') <add> yield relative_name_scope <add> finally: <add> _name_scope_unnester_stack.value.pop() <add> <ide> <ide> @keras_export('keras.layers.Layer') <ide> class Layer(tf.Module, version_utils.LayerVersionSelector): <ide> def __init__(self, <ide> <ide> # Save outer name scope at layer declaration so that it is preserved at <ide> # the actual layer construction. <del> self._outer_name_scope = tf.get_current_name_scope() <add> self._name_scope_on_declaration = tf.get_current_name_scope() <ide> <ide> @tf.__internal__.tracking.no_automatic_dependency_tracking <ide> @generic_utils.default <ide> def __call__(self, *args, **kwargs): <ide> training=training_mode): <ide> <ide> input_spec.assert_input_compatibility(self.input_spec, inputs, self.name) <add> <ide> if eager: <ide> call_fn = self.call <ide> name_scope = self._name <ide> else: <del> name_scope = self._name_scope() # Avoid autoincrementing. # pylint: disable=not-callable <add> name_scope = self._get_unnested_name_scope() <ide> call_fn = self._autographed_call() <add> <ide> call_fn = traceback_utils.inject_argument_info_in_traceback( <ide> call_fn, <ide> object_name=f'layer "{self.name}" (type {self.__class__.__name__})') <add> with contextlib.ExitStack() as namescope_stack: <add> if _is_name_scope_on_model_declaration_enabled: <add> namescope_stack.enter_context(_name_scope_unnester( <add> self._name_scope_on_declaration)) <add> namescope_stack.enter_context(tf.name_scope(name_scope)) <ide> <del> with tf.name_scope(name_scope): <ide> if not self.built: <ide> self._maybe_build(inputs) <ide> <ide> def __call__(self, *args, **kwargs): <ide> <ide> return outputs <ide> <add> def _get_unnested_name_scope(self): <add> if _is_name_scope_on_model_declaration_enabled: <add> with _name_scope_unnester(self._name_scope_on_declaration <add> ) as relative_name_scope_on_declaration: <add> # To avoid `tf.name_scope` autoincrement, use absolute path. 
<add> relative_name_scope = filter( <add> None, <add> [tf.get_current_name_scope(), relative_name_scope_on_declaration]) <add> current_name_scope = '/'.join(relative_name_scope) + '/' <add> if current_name_scope == '/': <add> current_name_scope = self._name_scope_on_declaration <add> with tf.name_scope(current_name_scope): <add> name_scope = self._name_scope() # Avoid autoincrementing. # pylint: disable=not-callable <add> else: <add> name_scope = self._name_scope() <add> <add> return name_scope <add> <ide> @property <ide> def dtype(self): <ide> """The dtype of the layer weights. <ide> def _name_scope(self): # pylint: disable=method-hidden <ide> if not tf.__internal__.tf2.enabled(): <ide> return self.name <ide> name_scope = self.name <del> if _is_name_scope_on_model_declaration_enabled and self._outer_name_scope: <del> name_scope = self._outer_name_scope + '/' + name_scope <ide> current_name_scope = tf.__internal__.get_name_scope() <ide> if current_name_scope: <ide> name_scope = current_name_scope + '/' + name_scope <ide><path>keras/engine/base_layer_test.py <ide> def test_apply_name_scope_on_model_declaration(self): <ide> ]) <ide> base_layer._apply_name_scope_on_model_declaration(False) <ide> <add> @test_utils.run_v2_only <add> def test_apply_name_scope_on_nested_layer_model_declaration(self): <add> if not tf.executing_eagerly(): <add> self.skipTest('`apply_name_scope_on_model_declaration` API is supported' <add> ' only for V2 eager') <add> <add> base_layer._apply_name_scope_on_model_declaration(True) <add> <add> class ThreeDenses(layers.Layer): <add> <add> def __init__(self, name='ThreeDenses', **kwargs): <add> super().__init__(name=name, **kwargs) <add> self.inner_dense_1 = layers.Dense(10, name='NestedDense1') <add> with tf.name_scope('inner1/inner2'): <add> self.inner_dense_2 = layers.Dense(20, name='NestedDense2') <add> self.inner_dense_3 = layers.Dense(30, name='NestedDense3') <add> <add> def call(self, x): <add> x = self.inner_dense_1(x) <add> x = self.inner_dense_2(x) <add> x = self.inner_dense_3(x) <add> return x <add> <add> inputs = input_layer.Input((3,)) <add> with tf.name_scope('outer'): <add> x = ThreeDenses()(inputs) <add> outputs = layers.Dense(10, name='OuterDense')(x) <add> <add> model = training_lib.Model(inputs, outputs) <add> node_names = self._get_model_node_names(model, np.random.random((1, 3)), <add> 'call_scope') <add> <add> self.assertListEqual(node_names, [ <add> 'call_scope/Const', 'call_scope/model/Cast', <add> 'call_scope/model/outer/ThreeDenses/NestedDense1/MatMul/ReadVariableOp/resource', <add> 'call_scope/model/outer/ThreeDenses/NestedDense1/MatMul/ReadVariableOp', <add> 'call_scope/model/outer/ThreeDenses/NestedDense1/MatMul', <add> 'call_scope/model/outer/ThreeDenses/NestedDense1/BiasAdd/ReadVariableOp/resource', <add> 'call_scope/model/outer/ThreeDenses/NestedDense1/BiasAdd/ReadVariableOp', <add> 'call_scope/model/outer/ThreeDenses/NestedDense1/BiasAdd', <add> 'call_scope/model/outer/ThreeDenses/inner1/inner2/NestedDense2/MatMul/ReadVariableOp/resource', <add> 'call_scope/model/outer/ThreeDenses/inner1/inner2/NestedDense2/MatMul/ReadVariableOp', <add> 'call_scope/model/outer/ThreeDenses/inner1/inner2/NestedDense2/MatMul', <add> 'call_scope/model/outer/ThreeDenses/inner1/inner2/NestedDense2/BiasAdd/ReadVariableOp/resource', <add> 'call_scope/model/outer/ThreeDenses/inner1/inner2/NestedDense2/BiasAdd/ReadVariableOp', <add> 'call_scope/model/outer/ThreeDenses/inner1/inner2/NestedDense2/BiasAdd', <add> 
'call_scope/model/outer/ThreeDenses/NestedDense3/MatMul/ReadVariableOp/resource', <add> 'call_scope/model/outer/ThreeDenses/NestedDense3/MatMul/ReadVariableOp', <add> 'call_scope/model/outer/ThreeDenses/NestedDense3/MatMul', <add> 'call_scope/model/outer/ThreeDenses/NestedDense3/BiasAdd/ReadVariableOp/resource', <add> 'call_scope/model/outer/ThreeDenses/NestedDense3/BiasAdd/ReadVariableOp', <add> 'call_scope/model/outer/ThreeDenses/NestedDense3/BiasAdd', <add> 'call_scope/model/OuterDense/MatMul/ReadVariableOp/resource', <add> 'call_scope/model/OuterDense/MatMul/ReadVariableOp', <add> 'call_scope/model/OuterDense/MatMul', <add> 'call_scope/model/OuterDense/BiasAdd/ReadVariableOp/resource', <add> 'call_scope/model/OuterDense/BiasAdd/ReadVariableOp', <add> 'call_scope/model/OuterDense/BiasAdd', 'Identity', 'NoOp' <add> ]) <add> base_layer._apply_name_scope_on_model_declaration(False) <add> <ide> def _get_model_node_names(self, model, inputs, call_name_scope): <ide> """Returns a list of model's node names.""" <ide>
2
Javascript
Javascript
compare asynclistener instances, not uids
bf08ac462e2c706aa4a5da7e8363f6e66c5d18c9
<ide><path>src/node.js <ide> var inQueue = false; <ide> // The asyncQueue will be small. Probably always <= 3 items. <ide> for (var i = 0; i < asyncQueue.length; i++) { <del> if (callbacks.uid === asyncQueue[i].uid) { <add> if (callbacks === asyncQueue[i]) { <ide> inQueue = true; <ide> break; <ide> } <ide> <ide> if (asyncQueue) { <ide> for (i = 0; i < asyncQueue.length; i++) { <del> if (obj.uid === asyncQueue[i].uid) { <add> if (obj === asyncQueue[i]) { <ide> asyncQueue.splice(i, 1); <ide> break; <ide> } <ide> if (asyncStack[i] === undefined) <ide> continue; <ide> for (j = 0; j < asyncStack[i].length; j++) { <del> if (obj.uid === asyncStack[i][j].uid) { <add> if (obj === asyncStack[i][j]) { <ide> asyncStack[i].splice(j, 1); <ide> break; <ide> }
1
Java
Java
add conversionservice support for bytebuffers
9dba73dfc90978a98e6d8d214fec107ff71018f4
<ide><path>spring-core/src/main/java/org/springframework/core/convert/support/ByteBufferConverter.java <add>/* <add> * Copyright 2002-2013 the original author or authors. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. <add> */ <add> <add>package org.springframework.core.convert.support; <add> <add>import java.nio.ByteBuffer; <add>import java.util.Collections; <add>import java.util.HashSet; <add>import java.util.Set; <add> <add>import org.springframework.core.convert.ConversionService; <add>import org.springframework.core.convert.TypeDescriptor; <add>import org.springframework.core.convert.converter.ConditionalGenericConverter; <add> <add>/** <add> * Converts a {@link ByteBuffer} directly to and from {@code byte[]}s and indirectly to <add> * any type that the {@link ConversionService} support via {@code byte[]}. <add> * <add> * @author Phillip Webb <add> */ <add>public class ByteBufferConverter implements ConditionalGenericConverter { <add> <add> private static final TypeDescriptor BYTE_BUFFER_TYPE = TypeDescriptor.valueOf(ByteBuffer.class); <add> <add> private static final TypeDescriptor BYTE_ARRAY_TYPE = TypeDescriptor.valueOf(byte[].class); <add> <add> private static final Set<ConvertiblePair> CONVERTIBLE_PAIRS; <add> static { <add> Set<ConvertiblePair> convertiblePairs = new HashSet<ConvertiblePair>(); <add> convertiblePairs.add(new ConvertiblePair(ByteBuffer.class, Object.class)); <add> convertiblePairs.add(new ConvertiblePair(Object.class, ByteBuffer.class)); <add> CONVERTIBLE_PAIRS = Collections.unmodifiableSet(convertiblePairs); <add> } <add> <add> <add> private ConversionService conversionService; <add> <add> <add> public ByteBufferConverter(ConversionService conversionService) { <add> this.conversionService = conversionService; <add> } <add> <add> <add> @Override <add> public Set<ConvertiblePair> getConvertibleTypes() { <add> return CONVERTIBLE_PAIRS; <add> } <add> <add> @Override <add> public boolean matches(TypeDescriptor sourceType, TypeDescriptor targetType) { <add> if (sourceType.isAssignableTo(BYTE_BUFFER_TYPE)) { <add> return matchesFromByteBuffer(targetType); <add> } <add> if (targetType.isAssignableTo(BYTE_BUFFER_TYPE)) { <add> return matchesToByteBuffer(sourceType); <add> } <add> return false; <add> } <add> <add> private boolean matchesFromByteBuffer(TypeDescriptor targetType) { <add> return (targetType.isAssignableTo(BYTE_ARRAY_TYPE) || this.conversionService.canConvert( <add> BYTE_ARRAY_TYPE, targetType)); <add> } <add> <add> private boolean matchesToByteBuffer(TypeDescriptor sourceType) { <add> return (sourceType.isAssignableTo(BYTE_ARRAY_TYPE) || this.conversionService.canConvert( <add> sourceType, BYTE_ARRAY_TYPE)); <add> } <add> <add> @Override <add> public Object convert(Object source, TypeDescriptor sourceType, <add> TypeDescriptor targetType) { <add> if (sourceType.isAssignableTo(BYTE_BUFFER_TYPE)) { <add> return convertFromByteBuffer((ByteBuffer) source, targetType); <add> } <add> if 
(targetType.isAssignableTo(BYTE_BUFFER_TYPE)) { <add> return convertToByteBuffer(source, sourceType); <add> } <add> // Should not happen <add> throw new IllegalStateException("Unexpected source/target types"); <add> } <add> <add> private Object convertFromByteBuffer(ByteBuffer source, TypeDescriptor targetType) { <add> byte[] bytes = new byte[source.remaining()]; <add> source.get(bytes); <add> if (targetType.isAssignableTo(BYTE_ARRAY_TYPE)) { <add> return bytes; <add> } <add> return this.conversionService.convert(bytes, BYTE_ARRAY_TYPE, targetType); <add> } <add> <add> private Object convertToByteBuffer(Object source, TypeDescriptor sourceType) { <add> byte[] bytes = (byte[]) (source instanceof byte[] ? source <add> : this.conversionService.convert(source, sourceType, BYTE_ARRAY_TYPE)); <add> ByteBuffer byteBuffer = ByteBuffer.allocate(bytes.length); <add> byteBuffer.put(bytes); <add> byteBuffer.rewind(); <add> return byteBuffer; <add> } <add> <add>} <ide><path>spring-core/src/main/java/org/springframework/core/convert/support/DefaultConversionService.java <ide> public DefaultConversionService() { <ide> public static void addDefaultConverters(ConverterRegistry converterRegistry) { <ide> addScalarConverters(converterRegistry); <ide> addCollectionConverters(converterRegistry); <add> addBinaryConverters(converterRegistry); <ide> addFallbackConverters(converterRegistry); <ide> } <ide> <ide> private static void addCollectionConverters(ConverterRegistry converterRegistry) <ide> converterRegistry.addConverter(new ObjectToCollectionConverter(conversionService)); <ide> } <ide> <add> private static void addBinaryConverters(ConverterRegistry converterRegistry) { <add> ConversionService conversionService = (ConversionService) converterRegistry; <add> converterRegistry.addConverter(new ByteBufferConverter(conversionService)); <add> } <add> <ide> private static void addFallbackConverters(ConverterRegistry converterRegistry) { <ide> ConversionService conversionService = (ConversionService) converterRegistry; <ide> converterRegistry.addConverter(new ObjectToObjectConverter()); <ide><path>spring-core/src/test/java/org/springframework/core/convert/support/ByteBufferConverterTests.java <add>/* <add> * Copyright 2002-2013 the original author or authors. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. <add> */ <add> <add>package org.springframework.core.convert.support; <add> <add>import java.nio.ByteBuffer; <add> <add>import org.junit.Before; <add>import org.junit.Test; <add>import org.springframework.core.convert.converter.Converter; <add> <add>import static org.hamcrest.Matchers.*; <add>import static org.junit.Assert.*; <add> <add>/** <add> * Tests for {@link ByteBufferConverter}. 
<add> * <add> * @author Phillip Webb <add> */ <add>public class ByteBufferConverterTests { <add> <add> private GenericConversionService conversionService; <add> <add> @Before <add> public void setup() { <add> this.conversionService = new GenericConversionService(); <add> this.conversionService.addConverter(new ByteBufferConverter(conversionService)); <add> this.conversionService.addConverter(new ByteArrayToOtherTypeConverter()); <add> this.conversionService.addConverter(new OtherTypeToByteArrayConverter()); <add> } <add> <add> @Test <add> public void byteArrayToByteBuffer() throws Exception { <add> byte[] bytes = new byte[] { 1, 2, 3 }; <add> ByteBuffer convert = this.conversionService.convert(bytes, ByteBuffer.class); <add> assertThat(bytes, not(sameInstance(convert.array()))); <add> assertThat(bytes, equalTo(convert.array())); <add> } <add> <add> @Test <add> public void byteBufferToByteArray() throws Exception { <add> byte[] bytes = new byte[] { 1, 2, 3 }; <add> ByteBuffer byteBuffer = ByteBuffer.wrap(bytes); <add> byte[] convert = this.conversionService.convert(byteBuffer, byte[].class); <add> assertThat(bytes, not(sameInstance(convert))); <add> assertThat(bytes, equalTo(convert)); <add> } <add> <add> @Test <add> public void byteBufferToOtherType() throws Exception { <add> byte[] bytes = new byte[] { 1, 2, 3 }; <add> ByteBuffer byteBuffer = ByteBuffer.wrap(bytes); <add> OtherType convert = this.conversionService.convert(byteBuffer, OtherType.class); <add> assertThat(bytes, not(sameInstance(convert.bytes))); <add> assertThat(bytes, equalTo(convert.bytes)); <add> } <add> <add> @Test <add> public void otherTypeToByteBuffer() throws Exception { <add> byte[] bytes = new byte[] { 1, 2, 3 }; <add> OtherType otherType = new OtherType(bytes); <add> ByteBuffer convert = this.conversionService.convert(otherType, ByteBuffer.class); <add> assertThat(bytes, not(sameInstance(convert.array()))); <add> assertThat(bytes, equalTo(convert.array())); <add> } <add> <add> private static class OtherType { <add> <add> private byte[] bytes; <add> <add> public OtherType(byte[] bytes) { <add> this.bytes = bytes; <add> } <add> <add> } <add> <add> private static class ByteArrayToOtherTypeConverter implements <add> Converter<byte[], OtherType> { <add> <add> @Override <add> public OtherType convert(byte[] source) { <add> return new OtherType(source); <add> } <add> } <add> <add> private static class OtherTypeToByteArrayConverter implements <add> Converter<OtherType, byte[]> { <add> <add> @Override <add> public byte[] convert(OtherType source) { <add> return source.bytes; <add> } <add> <add> } <add> <add>}
3
Python
Python
fix couchdb keys
7e87cb1dbf02b1798384cc35918dea63ef5bf08d
<ide><path>celery/backends/couchdb.py <ide> """CouchDB result store backend.""" <ide> from __future__ import absolute_import, unicode_literals <ide> <add>from kombu.utils.encoding import bytes_to_str <ide> from kombu.utils.url import _parse_url <ide> <ide> from celery.exceptions import ImproperlyConfigured <ide> def get(self, key): <ide> return None <ide> <ide> def set(self, key, value): <add> key = bytes_to_str(key) <ide> data = {'_id': key, 'value': value} <ide> try: <ide> self.connection.save(data) <ide><path>t/unit/backends/test_couchdb.py <ide> def test_get(self): <ide> <ide> CouchBackend.get should return and take two params <ide> db conn to couchdb is mocked. <del> TODO Should test on key not exists <del> <ide> """ <ide> x = CouchBackend(app=self.app) <ide> x._connection = Mock() <ide> get = x._connection.get = MagicMock() <del> # should return None <ide> assert x.get('1f3fab') == get.return_value['value'] <ide> x._connection.get.assert_called_once_with('1f3fab') <ide> <add> def test_get_non_existent_key(self): <add> x = CouchBackend(app=self.app) <add> x._connection = Mock() <add> get = x._connection.get = MagicMock() <add> get.side_effect = pycouchdb.exceptions.NotFound <add> assert x.get('1f3fab') is None <add> x._connection.get.assert_called_once_with('1f3fab') <add> <add> @pytest.mark.parametrize("key", ['1f3fab', b'1f3fab']) <add> def test_set(self, key): <add> x = CouchBackend(app=self.app) <add> x._connection = Mock() <add> <add> x.set(key, 'value') <add> <add> x._connection.save.assert_called_once_with({'_id': '1f3fab', <add> 'value': 'value'}) <add> <add> @pytest.mark.parametrize("key", ['1f3fab', b'1f3fab']) <add> def test_set_with_conflict(self, key): <add> x = CouchBackend(app=self.app) <add> x._connection = Mock() <add> x._connection.save.side_effect = (pycouchdb.exceptions.Conflict, None) <add> get = x._connection.get = MagicMock() <add> <add> x.set(key, 'value') <add> <add> x._connection.get.assert_called_once_with('1f3fab') <add> x._connection.get('1f3fab').__setitem__.assert_called_once_with( <add> 'value', 'value') <add> x._connection.save.assert_called_with(get('1f3fab')) <add> assert x._connection.save.call_count == 2 <add> <ide> def test_delete(self): <ide> """test_delete <ide>
2
Mixed
Go
add statsformat to the config.json file
12cae3a590a92f0d908c03c172aec974957b3785
<ide><path>cli/command/container/stats.go <ide> func runStats(dockerCli *command.DockerCli, opts *statsOptions) error { <ide> <ide> // before print to screen, make sure each container get at least one valid stat data <ide> waitFirst.Wait() <del> f := "table" <del> if len(opts.format) > 0 { <del> f = opts.format <add> format := opts.format <add> if len(format) == 0 { <add> if len(dockerCli.ConfigFile().StatsFormat) > 0 { <add> format = dockerCli.ConfigFile().StatsFormat <add> } else { <add> format = formatter.TableFormatKey <add> } <ide> } <ide> statsCtx := formatter.Context{ <ide> Output: dockerCli.Out(), <del> Format: formatter.NewStatsFormat(f, daemonOSType), <add> Format: formatter.NewStatsFormat(format, daemonOSType), <ide> } <ide> cleanScreen := func() { <ide> if !opts.noStream { <ide><path>cliconfig/configfile/file.go <ide> type ConfigFile struct { <ide> ImagesFormat string `json:"imagesFormat,omitempty"` <ide> NetworksFormat string `json:"networksFormat,omitempty"` <ide> VolumesFormat string `json:"volumesFormat,omitempty"` <add> StatsFormat string `json:"statsFormat,omitempty"` <ide> DetachKeys string `json:"detachKeys,omitempty"` <ide> CredentialsStore string `json:"credsStore,omitempty"` <ide> Filename string `json:"-"` // Note: for internal use only <ide><path>docs/reference/commandline/cli.md <ide> falls back to the default table format. For a list of supported formatting <ide> directives, see the <ide> [**Formatting** section in the `docker ps` documentation](ps.md) <ide> <add>The property `imagesFormat` specifies the default format for `docker images` output. <add>When the `--format` flag is not provided with the `docker images` command, <add>Docker's client uses this property. If this property is not set, the client <add>falls back to the default table format. For a list of supported formatting <add>directives, see the [**Formatting** section in the `docker images` documentation](images.md) <add> <add>The property `serviceInspectFormat` specifies the default format for `docker <add>service inspect` output. When the `--format` flag is not provided with the <add>`docker service inspect` command, Docker's client uses this property. If this <add>property is not set, the client falls back to the default json format. For a <add>list of supported formatting directives, see the <add>[**Formatting** section in the `docker service inspect` documentation](service_inspect.md) <add> <add>The property `statsFormat` specifies the default format for `docker <add>stats` output. When the `--format` flag is not provided with the <add>`docker stats` command, Docker's client uses this property. If this <add>property is not set, the client falls back to the default table <add>format. For a list of supported formatting directives, see <add>[**Formatting** section in the `docker stats` documentation](stats.md) <add> <ide> Once attached to a container, users detach from it and leave it running using <ide> the using `CTRL-p CTRL-q` key sequence. This detach key sequence is customizable <ide> using the `detachKeys` property. Specify a `<sequence>` value for the <ide> Users can override your custom or the default key sequence on a per-container <ide> basis. To do this, the user specifies the `--detach-keys` flag with the `docker <ide> attach`, `docker exec`, `docker run` or `docker start` command. <ide> <del>The property `imagesFormat` specifies the default format for `docker images` output. 
<del>When the `--format` flag is not provided with the `docker images` command, <del>Docker's client uses this property. If this property is not set, the client <del>falls back to the default table format. For a list of supported formatting <del>directives, see the [**Formatting** section in the `docker images` documentation](images.md) <del> <del>The property `serviceInspectFormat` specifies the default format for `docker <del>service inspect` output. When the `--format` flag is not provided with the <del>`docker service inspect` command, Docker's client uses this property. If this <del>property is not set, the client falls back to the default json format. For a <del>list of supported formatting directives, see the <del>[**Formatting** section in the `docker service inspect` documentation](service_inspect.md) <del> <ide> Following is a sample `config.json` file: <ide> <ide> {% raw %} <ide> Following is a sample `config.json` file: <ide> }, <ide> "psFormat": "table {{.ID}}\\t{{.Image}}\\t{{.Command}}\\t{{.Labels}}", <ide> "imagesFormat": "table {{.ID}}\\t{{.Repository}}\\t{{.Tag}}\\t{{.CreatedAt}}", <add> "statsFormat": "table {{.Container}}\t{{.CPUPerc}}\t{{.MemUsage}}", <ide> "serviceInspectFormat": "pretty", <ide> "detachKeys": "ctrl-e,e" <ide> }
3
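For clarity, the format-selection order introduced by the `docker stats` change above can be re-expressed as a small Python sketch (illustrative only, not Docker's actual Go code): an explicit `--format` flag wins, otherwise the new `statsFormat` key from `config.json` is used, and only then does the client fall back to the default table format. The function name and the way the two inputs are passed are assumptions for illustration.

```python
# Sketch of the fallback order from the patch above (illustrative, not Docker's Go code).
def pick_stats_format(cli_flag: str, config_stats_format: str) -> str:
    if cli_flag:                  # --format on the command line wins
        return cli_flag
    if config_stats_format:       # then the statsFormat key in config.json
        return config_stats_format
    return "table"                # finally the default table format

assert pick_stats_format("", "") == "table"
assert pick_stats_format("", "table {{.Container}}\t{{.CPUPerc}}") == "table {{.Container}}\t{{.CPUPerc}}"
assert pick_stats_format("{{json .}}", "table {{.Container}}") == "{{json .}}"
```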
Python
Python
remove unused functions
6f779d3475371b74cce942972d0e00c3984caa4b
<ide><path>libcloud/compute/drivers/solusvm.py <ide> def list_nodes(self): <ide> for vm in response.object] <ide> return nodes <ide> <del> def list_locations(self): <add> def ex_list_vs_parameters(self, vttype): <ide> """ <del> List locations <del> """ <del> pass <add> Get List of VS Parameters <ide> <del> def list_sizes(self): <del> """ <del> List sizes <del> """ <del> pass <add> vttype can be one of openvz, xen, xenhvm, kvm <ide> <del> def list_images(self): <del> """ <del> List images <ide> """ <del> pass <add> response = self.connection.request("/api/virtual_machines/" <add> "createvm_params/%s" % vttype) <add> return response.object <ide> <ide> def _to_node(self, data): <ide> identifier = data['id']
1
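The patch above replaces three unused stub methods with a new extension call, `ex_list_vs_parameters(vttype)`, which queries the `/api/virtual_machines/createvm_params/<vttype>` endpoint. A minimal usage sketch follows; only the method name, its allowed `vttype` values, and the endpoint come from the patch itself, while the driver class name, constructor arguments, and host are assumptions made purely for illustration.

```python
# Hypothetical usage of the new extension method (class name and credentials assumed).
from libcloud.compute.drivers.solusvm import SolusVMNodeDriver  # class name assumed

driver = SolusVMNodeDriver(key="api-id", secret="api-key",
                           host="solusvm.example.com")  # constructor arguments assumed

# Per the docstring in the patch, vttype must be one of: openvz, xen, xenhvm, kvm
params = driver.ex_list_vs_parameters("kvm")
print(params)
```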
Javascript
Javascript
change line for linesegments in objloader
9889425cf33bf29796e9ffd1f80a1b8a7cc43304
<ide><path>examples/js/loaders/OBJLoader.js <ide> THREE.OBJLoader.prototype = { <ide> } <ide> <ide> var multiMaterial = new THREE.MultiMaterial( createdMaterials ); <del> mesh = ( ! isLine ? new THREE.Mesh( buffergeometry, multiMaterial ) : new THREE.Line( buffergeometry, multiMaterial ) ); <add> mesh = ( ! isLine ? new THREE.Mesh( buffergeometry, multiMaterial ) : new THREE.LineSegments( buffergeometry, multiMaterial ) ); <ide> <ide> } else { <ide> <del> mesh = ( ! isLine ? new THREE.Mesh( buffergeometry, createdMaterials[ 0 ] ) : new THREE.Line( buffergeometry, createdMaterials[ 0 ] ) ); <add> mesh = ( ! isLine ? new THREE.Mesh( buffergeometry, createdMaterials[ 0 ] ) : new THREE.LineSegments( buffergeometry, createdMaterials[ 0 ] ) ); <ide> } <ide> <ide> mesh.name = object.name;
1
Go
Go
fix solaris reference to config
335033e25fae0173217e70d4b8dfc5df682ea913
<ide><path>daemon/daemon_solaris.go <ide> import ( <ide> "github.com/docker/docker/api/types" <ide> containertypes "github.com/docker/docker/api/types/container" <ide> "github.com/docker/docker/container" <add> "github.com/docker/docker/daemon/config" <ide> "github.com/docker/docker/image" <del> "github.com/docker/docker/layer" <ide> "github.com/docker/docker/pkg/fileutils" <ide> "github.com/docker/docker/pkg/idtools" <ide> "github.com/docker/docker/pkg/parsers/kernel" <ide> "github.com/docker/docker/pkg/sysinfo" <del> refstore "github.com/docker/docker/reference" <ide> "github.com/docker/libnetwork" <ide> nwconfig "github.com/docker/libnetwork/config" <ide> "github.com/docker/libnetwork/drivers/solaris/bridge" <ide> func (daemon *Daemon) parseSecurityOpt(container *container.Container, hostConfi <ide> } <ide> <ide> func parseSecurityOpt(container *container.Container, config *containertypes.HostConfig) error { <del> //Since config.SecurityOpt is specifically defined as a "List of string values to <add> //Since hostConfig.SecurityOpt is specifically defined as a "List of string values to <ide> //customize labels for MLs systems, such as SELinux" <ide> //until we figure out how to map to Trusted Extensions <ide> //this is being disabled for now on Solaris <ide> func parseSecurityOpt(container *container.Container, config *containertypes.Hos <ide> return err <ide> } <ide> <del>func setupRemappedRoot(config *Config) ([]idtools.IDMap, []idtools.IDMap, error) { <del> return nil, nil, nil <add>func setupRemappedRoot(config *config.Config) (*idtools.IDMappings, error) { <add> return nil, nil <ide> } <ide> <del>func setupDaemonRoot(config *Config, rootDir string, rootUID, rootGID int) error { <add>func setupDaemonRoot(config *config.Config, rootDir string, rootIDs idtools.IDPair) error { <ide> return nil <ide> } <ide> <ide> func (daemon *Daemon) adaptContainerSettings(hostConfig *containertypes.HostConf <ide> } <ide> <ide> // UsingSystemd returns true if cli option includes native.cgroupdriver=systemd <del>func UsingSystemd(config *Config) bool { <add>func UsingSystemd(config *config.Config) bool { <ide> return false <ide> } <ide> <ide> func verifyPlatformContainerSettings(daemon *Daemon, hostConfig *containertypes. 
<ide> <ide> // reloadPlatform updates configuration with platform specific options <ide> // and updates the passed attributes <del>func (daemon *Daemon) reloadPlatform(config *Config, attributes map[string]string) { <add>func (daemon *Daemon) reloadPlatform(conf *config.Config, attributes map[string]string) { <ide> } <ide> <ide> // verifyDaemonSettings performs validation of daemon config struct <del>func verifyDaemonSettings(config *Config) error { <add>func verifyDaemonSettings(conf *config.Config) error { <ide> <del> if config.DefaultRuntime == "" { <del> config.DefaultRuntime = stockRuntimeName <add> if conf.DefaultRuntime == "" { <add> conf.DefaultRuntime = stockRuntimeName <ide> } <del> if config.Runtimes == nil { <del> config.Runtimes = make(map[string]types.Runtime) <add> if conf.Runtimes == nil { <add> conf.Runtimes = make(map[string]types.Runtime) <ide> } <ide> stockRuntimeOpts := []string{} <del> config.Runtimes[stockRuntimeName] = types.Runtime{Path: DefaultRuntimeBinary, Args: stockRuntimeOpts} <add> conf.Runtimes[stockRuntimeName] = types.Runtime{Path: DefaultRuntimeBinary, Args: stockRuntimeOpts} <ide> <del> // checkSystem validates platform-specific requirements <ide> return nil <ide> } <ide> <add>// checkSystem validates platform-specific requirements <ide> func checkSystem() error { <ide> // check OS version for compatibility, ensure running in global zone <ide> var err error <ide> func checkSystem() error { <ide> <ide> // configureMaxThreads sets the Go runtime max threads threshold <ide> // which is 90% of the kernel setting from /proc/sys/kernel/threads-max <del>func configureMaxThreads(config *Config) error { <add>func configureMaxThreads(config *config.Config) error { <ide> return nil <ide> } <ide> <del>// configureKernelSecuritySupport configures and validate security support for the kernel <add>// configureKernelSecuritySupport configures and validates security support for the kernel <ide> func configureKernelSecuritySupport(config *config.Config, driverNames []string) error { <ide> return nil <ide> } <ide> <del>func (daemon *Daemon) initNetworkController(config *Config, activeSandboxes map[string]interface{}) (libnetwork.NetworkController, error) { <add>func (daemon *Daemon) initNetworkController(config *config.Config, activeSandboxes map[string]interface{}) (libnetwork.NetworkController, error) { <ide> netOptions, err := daemon.networkOptions(config, daemon.PluginStore, activeSandboxes) <ide> if err != nil { <ide> return nil, err <ide> func (daemon *Daemon) initNetworkController(config *Config, activeSandboxes map[ <ide> return controller, nil <ide> } <ide> <del>func initBridgeDriver(controller libnetwork.NetworkController, config *Config) error { <add>func initBridgeDriver(controller libnetwork.NetworkController, config *config.Config) error { <ide> if n, err := controller.NetworkByName("bridge"); err == nil { <ide> if err = n.Delete(); err != nil { <ide> return fmt.Errorf("could not delete the default bridge network: %v", err) <ide> func (daemon *Daemon) conditionalUnmountOnCleanup(container *container.Container <ide> return daemon.Unmount(container) <ide> } <ide> <del>func restoreCustomImage(is image.Store, ls layer.Store, rs refstore.Store) error { <del> // Solaris has no custom images to register <del> return nil <del>} <del> <del>func driverOptions(config *Config) []nwconfig.Option { <add>func driverOptions(config *config.Config) []nwconfig.Option { <ide> return []nwconfig.Option{} <ide> } <ide> <ide> func rootFSToAPIType(rootfs *image.RootFS) types.RootFS 
{ <ide> return types.RootFS{} <ide> } <ide> <del>func setupDaemonProcess(config *Config) error { <add>func setupDaemonProcess(config *config.Config) error { <ide> return nil <ide> } <ide>
1
Python
Python
take advantage of rich comparisons for pypy too
7e70b36f91644f7f1f0e460e1f4343b67ff3541e
<ide><path>glances/compat.py <ide> import sys <ide> import unicodedata <ide> import types <del>import platform <ide> import subprocess <ide> <ide> from glances.logger import logger <ide> <del>PY_CYTHON = platform.python_implementation() == 'CPython' <del>PY_PYPY = platform.python_implementation() == 'PyPy' <del>PY_JYTHON = platform.python_implementation() == 'Jython' <del>PY_IRON = platform.python_implementation() == 'IronPython' <ide> PY3 = sys.version_info[0] == 3 <ide> <ide> try: <ide><path>unitest.py <ide> from glances import __version__ <ide> from glances.globals import WINDOWS, LINUX <ide> from glances.outputs.glances_bars import Bar <del>from glances.compat import PY_PYPY <ide> from glances.thresholds import GlancesThresholdOk <ide> from glances.thresholds import GlancesThresholdCareful <ide> from glances.thresholds import GlancesThresholdWarning <ide> def test_013_gpu(self): <ide> self.assertTrue(type(stats_grab) is list, msg='GPU stats is not a list') <ide> print('INFO: GPU stats: %s' % stats_grab) <ide> <del> @unittest.skipIf(PY_PYPY, True) <ide> def test_094_thresholds(self): <ide> """Test thresholds classes""" <ide> print('INFO: [TEST_094] Thresholds')
2
Ruby
Ruby
allow python versions with two digits
3b944434cfba22338758f0705de4813d076b444f
<ide><path>Library/Homebrew/rubocops/lines.rb <ide> def audit_formula(_node, _class_node, _parent_class_node, body_node) <ide> find_strings(body_node).each do |str| <ide> string_content = string_content(str) <ide> <del> next unless match = string_content.match(/^python(@)?(\d\.\d)$/) <add> next unless match = string_content.match(/^python(@)?(\d\.\d+)$/) <ide> next if python_version == match[2] <ide> <ide> @fix = if match[1] <ide><path>Library/Homebrew/test/rubocops/lines_spec.rb <ide> def install <ide> RUBY <ide> end <ide> <add> it "allow matching versions with two digits" do <add> expect_no_offenses(<<~RUBY) <add> class Foo < Formula <add> depends_on "python@3.10" <add> <add> def install <add> puts "python@3.10" <add> end <add> end <add> RUBY <add> end <add> <add> it "allow matching versions without `@` with two digits" do <add> expect_no_offenses(<<~RUBY) <add> class Foo < Formula <add> depends_on "python@3.10" <add> <add> def install <add> puts "python3.10" <add> end <add> end <add> RUBY <add> end <add> <ide> it "do not allow mismatching versions" do <ide> expect_offense(<<~RUBY) <ide> class Foo < Formula <ide> def install <ide> RUBY <ide> end <ide> <add> it "do not allow mismatching versions with two digits" do <add> expect_offense(<<~RUBY) <add> class Foo < Formula <add> depends_on "python@3.11" <add> <add> def install <add> puts "python@3.10" <add> ^^^^^^^^^^^^^ References to `python@3.10` should match the specified python dependency (`python@3.11`) <add> end <add> end <add> RUBY <add> end <add> <add> it "do not allow mismatching versions without `@` with two digits" do <add> expect_offense(<<~RUBY) <add> class Foo < Formula <add> depends_on "python@3.11" <add> <add> def install <add> puts "python3.10" <add> ^^^^^^^^^^^^ References to `python3.10` should match the specified python dependency (`python3.11`) <add> end <add> end <add> RUBY <add> end <add> <ide> it "autocorrects mismatching versions" do <ide> source = <<~RUBY <ide> class Foo < Formula <ide> def install <ide> new_source = autocorrect_source(source) <ide> expect(new_source).to eq(corrected_source) <ide> end <add> <add> it "autocorrects mismatching versions with two digits" do <add> source = <<~RUBY <add> class Foo < Formula <add> depends_on "python@3.10" <add> <add> def install <add> puts "python@3.9" <add> end <add> end <add> RUBY <add> <add> corrected_source = <<~RUBY <add> class Foo < Formula <add> depends_on "python@3.10" <add> <add> def install <add> puts "python@3.10" <add> end <add> end <add> RUBY <add> <add> new_source = autocorrect_source(source) <add> expect(new_source).to eq(corrected_source) <add> end <add> <add> it "autocorrects mismatching versions without `@` with two digits" do <add> source = <<~RUBY <add> class Foo < Formula <add> depends_on "python@3.11" <add> <add> def install <add> puts "python3.10" <add> end <add> end <add> RUBY <add> <add> corrected_source = <<~RUBY <add> class Foo < Formula <add> depends_on "python@3.11" <add> <add> def install <add> puts "python3.11" <add> end <add> end <add> RUBY <add> <add> new_source = autocorrect_source(source) <add> expect(new_source).to eq(corrected_source) <add> end <ide> end <ide> end <ide>
2
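The RuboCop change above only relaxes the version pattern from `\d\.\d` to `\d\.\d+`, so minors with two digits such as `3.10` are recognized. The behaviour of the new pattern can be checked with an equivalent regular expression in Python (used here purely for illustration; the cop itself is Ruby):

```python
# Re-expression of the relaxed pattern from the patch above, for a quick sanity check.
import re

pattern = re.compile(r"^python(@)?(\d\.\d+)$")
for name in ["python@3.9", "python@3.10", "python3.10", "python2"]:
    match = pattern.match(name)
    print(name, "->", match.group(2) if match else "no match")
# python@3.10 and python3.10 now match because \d+ accepts multi-digit minors;
# bare "python2" still does not match, since the pattern requires a dotted version.
```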
Python
Python
remove "pala" tokenizer exception for spanish
42364dcd9f7c243271416b068a7bc708f9ef6346
<ide><path>spacy/lang/es/tokenizer_exceptions.py <ide> <ide> _exc = { <ide> "pal": [{ORTH: "pa", LEMMA: "para"}, {ORTH: "l", LEMMA: "el", NORM: "el"}], <del> "pala": [{ORTH: "pa", LEMMA: "para"}, {ORTH: "la", LEMMA: "la", NORM: "la"}], <ide> } <ide> <ide>
1
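The effect of dropping the "pala" entry is that the Spanish tokenizer no longer splits the word into "pa" + "la"; only the "pal" contraction remains special-cased. A small sketch of the resulting behaviour, assuming a blank Spanish pipeline built directly from `spacy.lang.es`:

```python
# Sketch of the tokenizer behaviour once the "pala" exception above is removed.
from spacy.lang.es import Spanish

nlp = Spanish()  # blank Spanish pipeline, tokenizer only

print([t.text for t in nlp("pala")])  # now a single token: ['pala']
print([t.text for t in nlp("pal")])   # still split by the remaining exception: ['pa', 'l']
```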
Python
Python
use isort to format the imports
b8c48898a3f316dda43231a39c21fc4a61aed3b1
<ide><path>keras/__init__.py <ide> """ <ide> # pylint: disable=unused-import <ide> from tensorflow.python import tf2 <del>from keras import distribute <add>from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras import distribute <ide> from keras import models <del> <ide> from keras.engine.input_layer import Input <ide> from keras.engine.sequential import Sequential <ide> from keras.engine.training import Model <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> __version__ = "2.10.0" <ide> <ide> keras_export("keras.__version__").export_constant(__name__, "__version__") <ide><path>keras/activations.py <ide> import sys <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <ide> <del>from keras import backend <ide> import keras.layers.activation as activation_layers <add>from keras import backend <ide> from keras.utils import generic_utils <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> # b/123041942 <ide> # In TF 2.x, if the `tf.nn.softmax` is used as an activation function in Keras <ide><path>keras/activations_test.py <ide> # ============================================================================== <ide> """Tests for Keras activation functions.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <ide> <add>import keras.layers.activation as activation_layers <ide> from keras import activations <ide> from keras import backend <del>from keras.testing_infra import test_combinations <del>import keras.layers.activation as activation_layers <ide> from keras.layers import core <ide> from keras.layers import serialization <add>from keras.testing_infra import test_combinations <ide> <ide> <ide> def _ref_softmax(values): <ide> def gelu(x, approximate=False): <ide> ) <ide> else: <ide> from scipy.stats import ( <del> norm, <del> ) # pylint: disable=g-import-not-at-top <add> norm, # pylint: disable=g-import-not-at-top <add> ) <ide> <ide> return x * norm.cdf(x) <ide> <ide><path>keras/api/create_python_api_wrapper.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <del>import keras # pylint: disable=unused-import <ide> from tensorflow.python.tools.api.generator import ( <ide> create_python_api, <ide> ) <ide> <add>import keras # pylint: disable=unused-import <add> <ide> if __name__ == "__main__": <ide> create_python_api.main() <ide><path>keras/api/tests/api_compatibility_test.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <del>import tensorflow as tf <del> <ide> import argparse <ide> import os <ide> import re <ide> import sys <ide> <ide> import six <del> <add>import tensorflow as tf <ide> from google.protobuf import message <ide> from google.protobuf import text_format <del> <ide> from tensorflow.python.lib.io import file_io <ide> from tensorflow.python.platform import tf_logging as logging <ide> from tensorflow.tools.api.lib import api_objects_pb2 <ide> from tensorflow.tools.common import public_api <ide> from tensorflow.tools.common import traverse <ide> <del> <ide> # FLAGS defined at the bottom: <ide> FLAGS = None <ide> # DEFINE_boolean, update_goldens, default False: <ide><path>keras/applications/__init__.py <ide> """Keras Applications are premade architectures with pre-trained weights.""" <ide> # pylint: disable=g-bad-import-order <ide> <del>from keras.applications.convnext import 
ConvNeXtTiny <del>from keras.applications.convnext import ConvNeXtSmall <ide> from keras.applications.convnext import ConvNeXtBase <ide> from keras.applications.convnext import ConvNeXtLarge <add>from keras.applications.convnext import ConvNeXtSmall <add>from keras.applications.convnext import ConvNeXtTiny <ide> from keras.applications.convnext import ConvNeXtXLarge <del> <ide> from keras.applications.densenet import DenseNet121 <ide> from keras.applications.densenet import DenseNet169 <ide> from keras.applications.densenet import DenseNet201 <del> <ide> from keras.applications.efficientnet import EfficientNetB0 <ide> from keras.applications.efficientnet import EfficientNetB1 <ide> from keras.applications.efficientnet import EfficientNetB2 <ide> from keras.applications.efficientnet import EfficientNetB5 <ide> from keras.applications.efficientnet import EfficientNetB6 <ide> from keras.applications.efficientnet import EfficientNetB7 <del> <ide> from keras.applications.efficientnet_v2 import EfficientNetV2B0 <ide> from keras.applications.efficientnet_v2 import EfficientNetV2B1 <ide> from keras.applications.efficientnet_v2 import EfficientNetV2B2 <ide> from keras.applications.efficientnet_v2 import EfficientNetV2B3 <ide> from keras.applications.efficientnet_v2 import EfficientNetV2L <ide> from keras.applications.efficientnet_v2 import EfficientNetV2M <ide> from keras.applications.efficientnet_v2 import EfficientNetV2S <del> <ide> from keras.applications.inception_resnet_v2 import InceptionResNetV2 <ide> from keras.applications.inception_v3 import InceptionV3 <del> <ide> from keras.applications.mobilenet import MobileNet <ide> from keras.applications.mobilenet_v2 import MobileNetV2 <del>from keras.applications.mobilenet_v3 import MobileNetV3Small <ide> from keras.applications.mobilenet_v3 import MobileNetV3Large <del> <add>from keras.applications.mobilenet_v3 import MobileNetV3Small <ide> from keras.applications.nasnet import NASNetLarge <ide> from keras.applications.nasnet import NASNetMobile <del> <ide> from keras.applications.resnet import ResNet50 <ide> from keras.applications.resnet import ResNet101 <ide> from keras.applications.resnet import ResNet152 <del>from keras.applications.resnet_v2 import ResNet50V2 <del>from keras.applications.resnet_v2 import ResNet101V2 <del>from keras.applications.resnet_v2 import ResNet152V2 <del> <ide> from keras.applications.resnet_rs import ResNetRS50 <ide> from keras.applications.resnet_rs import ResNetRS101 <ide> from keras.applications.resnet_rs import ResNetRS152 <ide> from keras.applications.resnet_rs import ResNetRS200 <ide> from keras.applications.resnet_rs import ResNetRS270 <ide> from keras.applications.resnet_rs import ResNetRS350 <ide> from keras.applications.resnet_rs import ResNetRS420 <del> <add>from keras.applications.resnet_v2 import ResNet50V2 <add>from keras.applications.resnet_v2 import ResNet101V2 <add>from keras.applications.resnet_v2 import ResNet152V2 <ide> from keras.applications.vgg16 import VGG16 <ide> from keras.applications.vgg19 import VGG19 <del> <ide> from keras.applications.xception import Xception <ide><path>keras/applications/applications_load_weight_test.py <ide> # ============================================================================== <ide> """Integration tests for Keras applications.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl import flags <ide> from absl.testing import parameterized <del>import numpy as np <ide> <ide> from keras.applications import convnext 
<ide> from keras.applications import densenet <ide> from keras.utils import data_utils <ide> from keras.utils import image_utils <ide> <del> <ide> ARG_TO_MODEL = { <ide> "resnet": (resnet, [resnet.ResNet50, resnet.ResNet101, resnet.ResNet152]), <ide> "resnet_v2": ( <ide><path>keras/applications/applications_test.py <ide> # ============================================================================== <ide> """Integration tests for Keras applications.""" <ide> <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> from keras import backend <add>from keras import utils <ide> from keras.applications import convnext <ide> from keras.applications import densenet <ide> from keras.applications import efficientnet <ide> from keras.applications import vgg16 <ide> from keras.applications import vgg19 <ide> from keras.applications import xception <del>from keras import utils <del>import tensorflow.compat.v2 as tf <ide> <ide> MODEL_LIST_NO_NASNET = [ <ide> (resnet.ResNet50, 2048), <ide><path>keras/applications/convnext.py <ide> (CVPR 2022) <ide> """ <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras import layers <ide> from keras import utils <ide> from keras.applications import imagenet_utils <ide> from keras.engine import sequential <ide> from keras.engine import training as training_lib <del>import numpy as np <del> <del>import tensorflow.compat.v2 as tf <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> BASE_WEIGHTS_PATH = ( <ide> "https://storage.googleapis.com/tensorflow/keras-applications/convnext/" <ide><path>keras/applications/densenet.py <ide> """ <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.layers import VersionAwareLayers <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> BASE_WEIGHTS_PATH = ( <ide> "https://storage.googleapis.com/tensorflow/" "keras-applications/densenet/" <ide><path>keras/applications/efficientnet.py <ide> import copy <ide> import math <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.layers import VersionAwareLayers <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <ide> <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <del> <del> <ide> BASE_WEIGHTS_PATH = "https://storage.googleapis.com/keras-applications/" <ide> <ide> WEIGHTS_HASHES = { <ide><path>keras/applications/efficientnet_v2.py <ide> import copy <ide> import math <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras import layers <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import 
keras_export <ide> <ide> BASE_WEIGHTS_PATH = "https://storage.googleapis.com/tensorflow/keras-applications/efficientnet_v2/" <ide> <ide><path>keras/applications/efficientnet_weight_update_util.py <ide> import argparse <ide> import warnings <ide> <del>from keras.utils import io_utils <ide> import tensorflow.compat.v2 as tf <ide> from tensorflow.keras.applications import efficientnet <ide> <add>from keras.utils import io_utils <add> <ide> <ide> def write_ckpt_to_h5(path_h5, path_ckpt, keras_model, use_ema=True): <ide> """Map the weights in checkpoint file (tf) to h5 file (keras). <ide><path>keras/applications/imagenet_utils.py <ide> import warnings <ide> <ide> import numpy as np <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import activations <ide> from keras import backend <ide> from keras.utils import data_utils <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> CLASS_INDEX = None <ide> CLASS_INDEX_PATH = ( <ide><path>keras/applications/imagenet_utils_test.py <ide> # ============================================================================== <ide> """Tests for imagenet_utils.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <ide> <ide> import keras <del>from keras.testing_infra import test_combinations <ide> from keras.applications import imagenet_utils as utils <ide> from keras.mixed_precision.policy import set_global_policy <add>from keras.testing_infra import test_combinations <ide> <ide> <ide> class TestImageNetUtils(test_combinations.TestCase): <ide><path>keras/applications/inception_resnet_v2.py <ide> """ <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.layers import VersionAwareLayers <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> BASE_WEIGHT_URL = ( <ide> "https://storage.googleapis.com/tensorflow/" <ide><path>keras/applications/inception_v3.py <ide> """ <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.layers import VersionAwareLayers <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> WEIGHTS_PATH = ( <ide> "https://storage.googleapis.com/tensorflow/keras-applications/" <ide><path>keras/applications/mobilenet.py <ide> """ <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.layers import VersionAwareLayers <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> BASE_WEIGHT_PATH = ( <ide> "https://storage.googleapis.com/tensorflow/" 
"keras-applications/mobilenet/" <ide><path>keras/applications/mobilenet_v2.py <ide> https://arxiv.org/abs/1801.04381) (CVPR 2018) <ide> """ <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.layers import VersionAwareLayers <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>import tensorflow.compat.v2 as tf <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> BASE_WEIGHT_PATH = ( <ide> "https://storage.googleapis.com/tensorflow/" <ide><path>keras/applications/mobilenet_v3.py <ide> """MobileNet v3 models for Keras.""" <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras import models <ide> from keras.applications import imagenet_utils <ide> from keras.layers import VersionAwareLayers <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> # TODO(scottzhu): Change this to the GCS path. <ide> BASE_WEIGHT_PATH = ( <ide><path>keras/applications/nasnet.py <ide> """ <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.layers import VersionAwareLayers <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> BASE_WEIGHTS_PATH = ( <ide> "https://storage.googleapis.com/tensorflow/" "keras-applications/nasnet/" <ide><path>keras/applications/regnet.py <ide> (CVPR 2021) <ide> """ <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras import layers <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>import tensorflow.compat.v2 as tf <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> BASE_WEIGHTS_PATH = ( <ide> "https://storage.googleapis.com/tensorflow/keras-applications/regnet/" <ide><path>keras/applications/resnet.py <ide> """ <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.layers import VersionAwareLayers <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> BASE_WEIGHTS_PATH = ( <ide> "https://storage.googleapis.com/tensorflow/keras-applications/resnet/" 
<ide><path>keras/applications/resnet_rs.py <ide> https://arxiv.org/pdf/2103.07579.pdf) <ide> """ <ide> import sys <del>from typing import Callable, Dict, List, Union <add>from typing import Callable <add>from typing import Dict <add>from typing import List <add>from typing import Union <add> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras import layers <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>import tensorflow.compat.v2 as tf <del> <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> BASE_WEIGHTS_URL = ( <ide> "https://storage.googleapis.com/tensorflow/" "keras-applications/resnet_rs/" <ide><path>keras/applications/resnet_v2.py <ide> (https://arxiv.org/abs/1603.05027) (CVPR 2016) <ide> """ <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras.applications import imagenet_utils <ide> from keras.applications import resnet <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export( <ide><path>keras/applications/vgg16.py <ide> """ <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.layers import VersionAwareLayers <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> WEIGHTS_PATH = ( <ide> "https://storage.googleapis.com/tensorflow/keras-applications/" <ide><path>keras/applications/vgg19.py <ide> """ <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.layers import VersionAwareLayers <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> WEIGHTS_PATH = ( <ide> "https://storage.googleapis.com/tensorflow/keras-applications/" <ide><path>keras/applications/xception.py <ide> """ <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras.applications import imagenet_utils <ide> from keras.engine import training <ide> from keras.layers import VersionAwareLayers <ide> from keras.utils import data_utils <ide> from keras.utils import layer_utils <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> TF_WEIGHTS_PATH = ( <ide> "https://storage.googleapis.com/tensorflow/keras-applications/" <ide><path>keras/backend.py <ide> # pylint: disable=missing-function-docstring <ide> """Keras backend API.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import collections <ide> import itertools <ide> import json <ide> import weakref <ide> <ide> import numpy as np <del> <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.core.protobuf import config_pb2 <ide> from tensorflow.python.eager import context <ide> from tensorflow.python.eager.context import get_config <ide> from tensorflow.python.framework import 
config <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.util.tf_export import keras_export <add>from tensorflow.tools.docs import doc_controls <add> <ide> from keras import backend_config <ide> from keras.distribute import distribute_coordinator_utils as dc <ide> from keras.engine import keras_tensor <ide> from keras.utils import control_flow_util <ide> from keras.utils import object_identity <ide> from keras.utils import tf_contextlib <ide> from keras.utils import tf_inspect <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.util.tf_export import keras_export <del>from tensorflow.tools.docs import doc_controls <ide> <ide> py_all = all <ide> py_sum = sum <ide> def tensor_spec_to_placeholder(tensorspec): <ide> # when the placeholder is built in a top-level eager context <ide> # (intended to be used with keras.backend.function) <ide> from keras.engine import ( <del> input_layer, <del> ) # pylint: disable=g-import-not-at-top <add> input_layer, # pylint: disable=g-import-not-at-top <add> ) <ide> <ide> x = input_layer.Input(tensor=x) <ide> x._is_backend_placeholder = True <ide> class to walkaround this issue until it is resolved on TF side. <ide> self._generator = None <ide> elif self._rng_type == self.RNG_STATEFUL: <ide> from keras.utils import ( <del> tf_utils, <del> ) # pylint: disable=g-import-not-at-top <add> tf_utils, # pylint: disable=g-import-not-at-top <add> ) <ide> <ide> with tf_utils.maybe_init_scope(self): <ide> seed = self._create_seed(self._seed) <ide> def in_train_phase(x, alt, training=None): <ide> the `training` flag defaults to `K.learning_phase()`. <ide> """ <ide> from keras.engine import ( <del> base_layer_utils, <del> ) # pylint: disable=g-import-not-at-top <add> base_layer_utils, # pylint: disable=g-import-not-at-top <add> ) <ide> <ide> if training is None: <ide> training = base_layer_utils.call_context().training <ide><path>keras/backend_test.py <ide> # ============================================================================== <ide> """Tests for Keras backend.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import gc <ide> import warnings <ide> <del>from absl.testing import parameterized <ide> import numpy as np <ide> import scipy.sparse <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <ide> from tensorflow.python.eager import context <ide> from tensorflow.python.eager.context import get_config <ide> from tensorflow.python.framework import ( <ide> test_util as tf_test_utils, <ide> ) <add> <ide> from keras import activations <ide> from keras import backend <del>from keras.testing_infra import test_combinations <ide> from keras.engine import input_layer <ide> from keras.layers import activation <ide> from keras.layers.normalization import batch_normalization_v1 <add>from keras.testing_infra import test_combinations <ide> from keras.utils import tf_inspect <ide> from keras.utils import tf_utils <ide> <ide><path>keras/benchmarks/benchmark_util.py <ide> # ============================================================================== <ide> """Common utils for benchmarks.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import timeit <add> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> from keras.benchmarks import distribution_util <ide> <ide><path>keras/benchmarks/distribution_util.py <ide> https://github.com/tensorflow/models/blob/master/official/utils/misc/distribution_utils.py. 
<ide> """ <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import json <ide> import os <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> <ide> def _collective_communication(all_reduce_alg): <ide> """Return a CollectiveCommunication based on all_reduce_alg. <ide><path>keras/benchmarks/eager_microbenchmarks_test.py <ide> # ============================================================================== <ide> """Microbenchmarks for Keras components in eager mode.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import time <ide> <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.python.eager import context <ide> from tensorflow.python.eager.context import get_executor <add> <ide> from keras.utils import tf_inspect <ide> <ide> <ide><path>keras/benchmarks/keras_cpu_benchmark_test.py <ide> # ============================================================================== <ide> """Benchmark tests for CPU performance of Keras models.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> from keras.benchmarks import benchmark_util <ide> <ide><path>keras/benchmarks/keras_examples_benchmarks/mnist_conv_benchmark_test.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> from keras.benchmarks import benchmark_util <ide> <ide><path>keras/benchmarks/keras_examples_benchmarks/mnist_conv_custom_training_benchmark_test.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import timeit <add> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> from keras.benchmarks import benchmark_util <ide> from keras.benchmarks import distribution_util <ide><path>keras/benchmarks/keras_examples_benchmarks/reuters_mlp_benchmark_test.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> from keras.benchmarks import benchmark_util <ide> <ide><path>keras/benchmarks/layer_benchmarks/layer_benchmarks_test.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import functools <add> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add> <ide> from keras.benchmarks import benchmark_util <ide> from keras.benchmarks.layer_benchmarks import layer_benchmarks_test_base <ide> <ide><path>keras/benchmarks/layer_benchmarks/layer_benchmarks_test_base.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import time <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras.benchmarks.layer_benchmarks import run_xprof <ide> <ide> <ide><path>keras/benchmarks/metrics_memory_benchmark_test.py <ide> # ============================================================================== <ide> """Benchmark tests for Keras metrics memory consumption.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> try: <ide> import memory_profiler # pylint:disable=g-import-not-at-top <ide><path>keras/benchmarks/model_components_benchmarks_test.py <ide> # 
============================================================================== <ide> r"""Benchmarks on Keras components with different Keras model types.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import time <ide> <ide> import numpy as np <del> <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.python.eager import context <ide> from tensorflow.python.eager.context import get_executor <ide> <ide><path>keras/benchmarks/model_memory_profile.py <ide> 3. Add the model function to the dict `models`. <ide> """ <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl import app <ide> from absl import flags <del> <ide> from absl import logging <del>import numpy as np <ide> <ide> try: <ide> import memory_profiler # pylint:disable=g-import-not-at-top <ide><path>keras/benchmarks/optimizer_benchmarks_test.py <ide> """Benchmark tests for Keras optimizers.""" <ide> <ide> import tensorflow.compat.v2 as tf <del> <del>from keras.benchmarks import benchmark_util <del>from keras.optimizers.optimizer_v2 import adam <ide> from tensorflow.python.platform.benchmark import ( <ide> ParameterizedBenchmark, <ide> ) <ide> <add>from keras.benchmarks import benchmark_util <add>from keras.optimizers.optimizer_v2 import adam <add> <ide> <ide> def bidirect_imdb_lstm_config(): <ide> """Bidirectional LSTM model and IMDB data.""" <ide><path>keras/benchmarks/saved_model_benchmarks/densenet_benchmark_test.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util <ide> <ide> <ide><path>keras/benchmarks/saved_model_benchmarks/efficientnet_benchmark_test.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util <ide> <ide> <ide><path>keras/benchmarks/saved_model_benchmarks/inception_resnet_v2_benchmark_test.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util <ide> <ide> <ide><path>keras/benchmarks/saved_model_benchmarks/mobilenet_benchmark_test.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util <ide> <ide> <ide><path>keras/benchmarks/saved_model_benchmarks/nasnet_large_benchmark_test.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util <ide> <ide> <ide><path>keras/benchmarks/saved_model_benchmarks/resnet152_v2_benchmark_test.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util <ide> <ide> <ide><path>keras/benchmarks/saved_model_benchmarks/saved_model_benchmark_util.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import tempfile <ide> import time <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> <ide> def save_and_load_benchmark(app): <ide> """Util for saved model benchmarks.""" <ide><path>keras/benchmarks/saved_model_benchmarks/vgg_benchmark_test.py <ide> from __future__ import print_function <ide> <ide> 
import tensorflow.compat.v2 as tf <add> <ide> from keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util <ide> <ide> <ide><path>keras/benchmarks/saved_model_benchmarks/xception_benchmark_test.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras.benchmarks.saved_model_benchmarks import saved_model_benchmark_util <ide> <ide> <ide><path>keras/callbacks.py <ide> import sys <ide> import time <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.util import deprecation <add>from tensorflow.python.util.tf_export import keras_export <add>from tensorflow.tools.docs import doc_controls <ide> <ide> from keras import backend <ide> from keras.distribute import distributed_file_utils <ide> from keras.utils.data_utils import Sequence <ide> from keras.utils.generic_utils import Progbar <ide> from keras.utils.mode_keys import ModeKeys <del>import numpy as np <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.util import deprecation <del>from tensorflow.python.util.tf_export import keras_export <del>from tensorflow.tools.docs import doc_controls <ide> <ide> try: <ide> import requests <ide> def _configure_embeddings(self): <ide> """Configure the Projector for embeddings.""" <ide> # TODO(omalleyt): Add integration tests. <ide> from google.protobuf import text_format <add> <ide> from keras.layers import core <ide> from keras.protobuf import projector_config_pb2 <ide> <ide><path>keras/callbacks_test.py <ide> import unittest <ide> from unittest import mock <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add>from tensorflow.python.platform import tf_logging as logging <add> <ide> import keras <ide> from keras.callbacks import BackupAndRestore <ide> from keras.callbacks import BackupAndRestoreExperimental <ide> from keras.testing_infra import test_utils <ide> from keras.utils import io_utils <ide> from keras.utils import np_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <del>from tensorflow.python.platform import tf_logging as logging <ide> <ide> try: <ide> import h5py # pylint:disable=g-import-not-at-top <ide><path>keras/callbacks_v1.py <ide> # pylint: disable=g-classes-have-attributes <ide> """Callbacks: utilities called at certain points during model training.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import os <add> <ide> import numpy as np <del>from keras import backend <del>from keras import callbacks <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.python.platform import tf_logging as logging <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras import backend <add>from keras import callbacks <add> <ide> <ide> @keras_export(v1=["keras.callbacks.TensorBoard"]) <ide> class TensorBoard(callbacks.TensorBoard): <ide> def set_model(self, model): <ide> if self.embeddings_freq and self.embeddings_data is not None: <ide> # Avoid circular dependency. 
<ide> from keras.engine import ( <del> training_utils_v1, <del> ) # pylint: disable=g-import-not-at-top <add> training_utils_v1, # pylint: disable=g-import-not-at-top <add> ) <ide> <ide> self.embeddings_data = training_utils_v1.standardize_input_data( <ide> self.embeddings_data, model.input_names <ide><path>keras/callbacks_v1_test.py <ide> # ============================================================================== <ide> """Tests for Keras callbacks.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import os <ide> import shutil <ide> import tempfile <ide> <del>from absl.testing import parameterized <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <add> <ide> from keras import callbacks <ide> from keras import callbacks_v1 <del>from keras.testing_infra import test_combinations <ide> from keras import layers <del>from keras.testing_infra import test_utils <ide> from keras.engine import input_layer <ide> from keras.engine import sequential <ide> from keras.engine import training <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <ide> from keras.utils import np_utils <ide> <del> <ide> TRAIN_SAMPLES = 10 <ide> TEST_SAMPLES = 10 <ide> NUM_CLASSES = 2 <ide><path>keras/constraints.py <ide> """Constraints: functions that impose constraints on weight values.""" <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add>from tensorflow.tools.docs import doc_controls <add> <ide> from keras import backend <ide> from keras.utils.generic_utils import deserialize_keras_object <ide> from keras.utils.generic_utils import serialize_keras_object <del>from tensorflow.python.util.tf_export import keras_export <del>from tensorflow.tools.docs import doc_controls <ide> <ide> <ide> @keras_export("keras.constraints.Constraint") <ide><path>keras/constraints_test.py <ide> # ============================================================================== <ide> """Tests for Keras weights constraints.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import math <ide> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> from keras import backend <del>from keras.testing_infra import test_combinations <ide> from keras import constraints <add>from keras.testing_infra import test_combinations <ide> <ide> <ide> def get_test_values(): <ide><path>keras/datasets/boston_housing.py <ide> """Boston housing price regression dataset.""" <ide> <ide> import numpy as np <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras.utils.data_utils import get_file <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.datasets.boston_housing.load_data") <ide><path>keras/datasets/cifar10.py <ide> import os <ide> <ide> import numpy as np <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras.datasets.cifar import load_batch <ide> from keras.utils.data_utils import get_file <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.datasets.cifar10.load_data") <ide><path>keras/datasets/cifar100.py <ide> import os <ide> <ide> import numpy as np <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import backend <ide> from keras.datasets.cifar import load_batch <ide> from keras.utils.data_utils import get_file <del>from 
tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.datasets.cifar100.load_data") <ide><path>keras/datasets/fashion_mnist.py <ide> import os <ide> <ide> import numpy as np <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras.utils.data_utils import get_file <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.datasets.fashion_mnist.load_data") <ide><path>keras/datasets/imdb.py <ide> import json <ide> <ide> import numpy as np <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras.preprocessing.sequence import _remove_long_seq <ide> from keras.utils.data_utils import get_file <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.datasets.imdb.load_data") <ide><path>keras/datasets/mnist.py <ide> """MNIST handwritten digits dataset.""" <ide> <ide> import numpy as np <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras.utils.data_utils import get_file <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.datasets.mnist.load_data") <ide><path>keras/datasets/reuters.py <ide> import json <ide> <ide> import numpy as np <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras.preprocessing.sequence import _remove_long_seq <ide> from keras.utils.data_utils import get_file <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.datasets.reuters.load_data") <ide><path>keras/distribute/checkpointing_test.py <ide> import os <ide> <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <add> <ide> from keras.optimizers.optimizer_v2 import adam <ide> <ide> <ide><path>keras/distribute/collective_all_reduce_strategy_test.py <ide> """Tests for CollectiveAllReduceStrategy.""" <ide> <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <add> <ide> from keras import layers <del>from keras.testing_infra import test_utils <ide> from keras.engine import training <ide> from keras.optimizers.optimizer_v2 import ( <ide> gradient_descent as gradient_descent_keras, <ide> ) <add>from keras.testing_infra import test_utils <ide> <ide> <ide> @test_utils.run_v2_only <ide><path>keras/distribute/ctl_correctness_test.py <ide> # ============================================================================== <ide> """Custom Training Loop correctness test.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add>from tensorflow.python.ops.losses import losses_impl <add> <ide> import keras <ide> from keras import optimizers <ide> from keras.applications import resnet_v2 <ide> from keras.datasets import fashion_mnist <ide> from keras.distribute import optimizer_combinations <ide> from keras.distribute import strategy_combinations <ide> from keras.testing_infra import test_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.ops.losses import losses_impl <ide> <ide> _NUM_SAMPLES = 66 <ide> _BATCH_SIZE = 32 
<ide><path>keras/distribute/custom_training_loop_metrics_test.py <ide> # ============================================================================== <ide> """Tests for custom training loops.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <ide> from tensorflow.python.framework import ( <ide> test_util as tf_test_utils, <ide> ) <add> <ide> from keras import metrics <ide> from keras.distribute import strategy_combinations <ide> <ide><path>keras/distribute/custom_training_loop_models_test.py <ide> # ============================================================================== <ide> """Tests for custom training loops.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import os <ide> <del>from absl.testing import parameterized <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <ide> <ide> import keras <ide> from keras.distribute import strategy_combinations <ide><path>keras/distribute/custom_training_loop_optimizer_test.py <ide> """Tests for custom training loops that involves advanced optimizer usage.""" <ide> <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <ide> from tensorflow.python.distribute import values <add> <ide> from keras.distribute import ( <ide> strategy_combinations as keras_strategy_combinations, <ide> ) <ide><path>keras/distribute/dataset_creator_model_fit_ps_only_test.py <ide> # ============================================================================== <ide> """Tests for `DatasetCreator` with `Model.fit` across usages and strategies.""" <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import callbacks as callbacks_lib <ide> from keras.distribute import dataset_creator_model_fit_test_base as test_base <ide> from keras.distribute import strategy_combinations <ide> from keras.testing_infra import test_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_utils.run_v2_only <ide><path>keras/distribute/dataset_creator_model_fit_test.py <ide> # ============================================================================== <ide> """Tests for `DatasetCreator` with `Model.fit` across usages and strategies.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.python.framework import ( <ide> test_util as tf_test_utils, <ide> ) <del>from keras.testing_infra import test_utils <add> <ide> from keras.distribute import dataset_creator_model_fit_test_base as test_base <ide> from keras.distribute import strategy_combinations <add>from keras.testing_infra import test_utils <ide> from keras.utils import dataset_creator <ide> <ide> <ide><path>keras/distribute/dataset_creator_model_fit_test_base.py <ide> # ============================================================================== <ide> """Tests for `DatasetCreator` with `Model.fit` across usages and strategies.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import os <ide> <del>from absl.testing import parameterized <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <add>from tensorflow.python.platform import tf_logging as logging <ide> <ide> import keras <ide> from keras import callbacks as callbacks_lib <ide> from keras.layers.preprocessing import string_lookup <ide> from keras.optimizers.optimizer_v2 import gradient_descent <ide> from 
keras.utils import dataset_creator <del>from tensorflow.python.platform import tf_logging as logging <ide> <ide> <ide> class DatasetCreatorModelFitTestBase(tf.test.TestCase, parameterized.TestCase): <ide><path>keras/distribute/distribute_coordinator_utils.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import copy <ide> import json <ide> import os <ide> import threading <ide> import time <add> <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.core.protobuf import cluster_pb2 <ide> from tensorflow.python.platform import tf_logging as logging <ide> <ide><path>keras/distribute/distribute_strategy_test.py <ide> # ============================================================================== <ide> """Tests for tf.keras models using tf.distribute.Strategy.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import os <ide> <del>from absl.testing import parameterized <ide> import numpy as np <del> <del>import keras <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <ide> from tensorflow.python.distribute.cluster_resolver import ( <ide> SimpleClusterResolver, <ide> ) <add> <add>import keras <ide> from keras import backend <del>from keras.testing_infra import test_utils <ide> from keras.distribute import distributed_training_utils <ide> from keras.distribute import distributed_training_utils_v1 <ide> from keras.distribute import multi_worker_testing_utils <ide> from keras.optimizers.optimizer_v2 import ( <ide> gradient_descent as gradient_descent_keras, <ide> ) <add>from keras.testing_infra import test_utils <ide> from keras.utils import losses_utils <ide> from keras.utils import np_utils <ide> <ide><path>keras/distribute/distributed_file_utils.py <ide> Experimental. API is subject to change. 
<ide> """ <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import os <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> <ide> def _get_base_dirpath(strategy): <ide> task_id = strategy.extended._task_id # pylint: disable=protected-access <ide><path>keras/distribute/distributed_file_utils_test.py <ide> # ============================================================================== <ide> """Tests for distributed_file_utils.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import os <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras.distribute import distributed_file_utils <ide> <ide> <ide><path>keras/distribute/distributed_training_utils.py <ide> # ============================================================================== <ide> """Utilities related to distributed training.""" <ide> <add>import tensorflow.compat.v2 as tf <ide> from absl import flags <del>from keras import backend <ide> <del>import tensorflow.compat.v2 as tf <add>from keras import backend <ide> <ide> FLAGS = flags.FLAGS <ide> <ide><path>keras/distribute/distributed_training_utils_v1.py <ide> # ============================================================================== <ide> """Utilities related to distributed training.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <del># pylint:disable=protected-access <del> <ide> import functools <ide> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add> <ide> from keras import backend <ide> from keras import callbacks <ide> from keras import metrics as metrics_module <ide> from keras.optimizers.optimizer_v2 import optimizer_v2 <ide> from keras.utils import tf_contextlib <ide> from keras.utils.mode_keys import ModeKeys <del>from tensorflow.python.platform import tf_logging as logging <add> <add># pylint:disable=protected-access <ide> <ide> <ide> def set_weights(distribution_strategy, dist_model, weights): <ide><path>keras/distribute/keras_correctness_test_base.py <ide> # ============================================================================== <ide> """Correctness tests for tf.keras using DistributionStrategy.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import functools <del>from absl.testing import parameterized <add> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <ide> <ide> import keras <ide> from keras.distribute import distributed_training_utils <ide><path>keras/distribute/keras_dnn_correctness_test.py <ide> # ============================================================================== <ide> """Correctness tests for tf.keras DNN model using DistributionStrategy.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> import keras <ide> from keras import backend <del>from keras.testing_infra import test_utils <ide> from keras.distribute import keras_correctness_test_base <ide> from keras.distribute import strategy_combinations <ide> from keras.optimizers.optimizer_v2 import ( <ide> gradient_descent as gradient_descent_keras, <ide> ) <add>from keras.testing_infra import test_utils <ide> <ide> <ide> def all_strategy_combinations_with_eager_and_graph_modes(): <ide><path>keras/distribute/keras_embedding_model_correctness_test.py <ide> # ============================================================================== <ide> """Correctness test for tf.keras Embedding models using DistributionStrategy.""" 
<ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> import keras <ide> from keras.distribute import keras_correctness_test_base <ide><path>keras/distribute/keras_image_model_correctness_test.py <ide> # ============================================================================== <ide> """Correctness tests for tf.keras CNN models using DistributionStrategy.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <ide> <del>import numpy as np <ide> import keras <del>from keras.testing_infra import test_utils <ide> from keras.distribute import keras_correctness_test_base <ide> from keras.optimizers.optimizer_v2 import gradient_descent <add>from keras.testing_infra import test_utils <ide> <ide> <ide> @test_utils.run_all_without_tensor_float_32( <ide><path>keras/distribute/keras_metrics_test.py <ide> # ============================================================================== <ide> """Tests for Keras metrics.""" <ide> <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> from keras import metrics <ide> from keras.engine import base_layer <del>import tensorflow.compat.v2 as tf <ide> <ide> combinations = tf.__internal__.distribute.combinations <ide> <ide><path>keras/distribute/keras_models_test.py <ide> # ============================================================================== <ide> """Tests for Keras high level APIs, e.g. fit, evaluate and predict.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <ide> <ide> import keras <ide> from keras.distribute.strategy_combinations import all_strategies <ide><path>keras/distribute/keras_optimizer_v2_test.py <ide> # ============================================================================== <ide> """Tests that show that DistributionStrategy works with optimizer v2.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <ide> <ide> import keras <ide> from keras.optimizers.optimizer_v2 import adam <ide><path>keras/distribute/keras_premade_models_test.py <ide> # ============================================================================== <ide> """Tests for keras premade models using tf.distribute.Strategy.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <ide> <ide> from keras.engine import sequential <ide> from keras.premade_models import linear <ide> from keras.premade_models import wide_deep <ide> from keras.utils import dataset_creator <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> def strategy_combinations_eager_data_fn(): <ide><path>keras/distribute/keras_rnn_model_correctness_test.py <ide> # ============================================================================== <ide> """Correctness tests for tf.keras RNN models using DistributionStrategy.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> import keras <del>from keras.testing_infra import test_utils <ide> from keras.distribute import keras_correctness_test_base <ide> from keras.layers.rnn import gru <ide> from keras.layers.rnn import gru_v1 <ide> from keras.optimizers.optimizer_v2 import ( <ide> gradient_descent as gradient_descent_keras, <ide> ) <add>from keras.testing_infra import 
test_utils <ide> <ide> <ide> class _DistributionStrategyRnnModelCorrectnessTest( <ide><path>keras/distribute/keras_save_load_test.py <ide> # ============================================================================== <ide> """Tests for saving and loading using keras save/load APIs with DS.""" <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras.distribute import saved_model_test_base as test_base <ide> from keras.saving import save <ide> from keras.testing_infra import test_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_utils.run_all_without_tensor_float_32( <ide><path>keras/distribute/keras_stateful_lstm_model_correctness_test.py <ide> # ============================================================================== <ide> """Tests for stateful tf.keras LSTM models using DistributionStrategy.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> import keras <ide> from keras.distribute import keras_correctness_test_base <ide><path>keras/distribute/keras_utils_test.py <ide> # ============================================================================== <ide> """Tests for tf.keras models with callbacks, checkpointing with dist strategy.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import collections <ide> import tempfile <ide> <del>from absl.testing import parameterized <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <ide> <ide> import keras <ide> from keras import losses <ide><path>keras/distribute/minimize_loss_test.py <ide> """Tests for running legacy optimizer code with DistributionStrategy.""" <ide> <ide> <add>import numpy <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> from keras.distribute import optimizer_combinations <ide> from keras.distribute.test_example import batchnorm_example <ide> from keras.distribute.test_example import minimize_loss_example <ide> from keras.layers import core <ide> from keras.optimizers.optimizer_v2 import optimizer_v2 <del>import numpy <del>import tensorflow.compat.v2 as tf <del> <ide> <ide> VAR_MAP_V1 = { <ide> "GradientDescent": ("dense/kernel", "dense/bias"), <ide><path>keras/distribute/mirrored_strategy_test.py <ide> # ============================================================================== <ide> """Tests for MirroredStrategy.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <add>from tensorflow.python.eager import backprop <add>from tensorflow.python.training import ( <add> optimizer as optimizer_lib, <add>) <ide> <ide> import keras <del>from tensorflow.python.eager import backprop <ide> from keras.engine import training as keras_training <ide> from keras.layers import core as keras_core <ide> from keras.optimizers.optimizer_v2 import rmsprop <ide> from keras.utils import kpl_test_utils <del>from tensorflow.python.training import ( <del> optimizer as optimizer_lib, <del>) <ide> <ide> <ide> class MiniModel(keras_training.Model): <ide><path>keras/distribute/mirrored_variable_test.py <ide> """Test MirroredVariable in MirroredStrategy and MultiWorkerMirroredStrategy.""" <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras.distribute import distributed_training_utils <ide> from keras.layers import core <ide> <ide><path>keras/distribute/model_combinations.py <ide> """Strategy and optimizer combinations 
for combinations.combine().""" <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras.distribute import simple_models <ide> <ide> simple_functional_model = tf.__internal__.test.combinations.NamedObject( <ide><path>keras/distribute/multi_worker_callback_tf2_test.py <ide> # ============================================================================== <ide> """Tests for Keras callbacks in multi-worker training with TF2.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import json <ide> import os <ide> <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> from keras import callbacks <ide> from keras.distribute import distributed_file_utils <ide> from keras.distribute import multi_worker_testing_utils <ide><path>keras/distribute/multi_worker_test.py <ide> # ============================================================================== <ide> """Test multi-worker Keras.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import collections <ide> import copy <ide> import functools <ide> import sys <ide> import threading <ide> <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <ide> <del> <ide> import keras <ide> from keras import backend <ide> from keras import callbacks <ide> from keras import metrics as metrics_module <ide> from keras import models <del>from keras.optimizers import optimizer_v1 <ide> from keras.distribute import multi_worker_testing_utils <add>from keras.optimizers import optimizer_v1 <ide> from keras.optimizers.optimizer_v2 import rmsprop <ide> from keras.utils import kpl_test_utils <ide> <ide><path>keras/distribute/multi_worker_testing_utils.py <ide> # ============================================================================== <ide> """Utilities for testing multi-worker distribution strategies with Keras.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import threading <ide> import unittest <del>import keras <add> <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.python.distribute.cluster_resolver import ( <ide> SimpleClusterResolver, <ide> ) <del>from keras.optimizers.optimizer_v2 import gradient_descent <ide> from tensorflow.python.platform import tf_logging as logging <ide> from tensorflow.python.training.server_lib import ( <ide> ClusterSpec, <ide> ) <ide> <add>import keras <add>from keras.optimizers.optimizer_v2 import gradient_descent <ide> <ide> _portpicker_import_error = None <ide> try: <ide><path>keras/distribute/optimizer_combinations.py <ide> # ============================================================================== <ide> """Strategy and optimizer combinations for combinations.combine().""" <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras.optimizers.optimizer_experimental import adam as adam_experimental <ide> from keras.optimizers.optimizer_v2 import adadelta as adadelta_keras_v2 <ide> from keras.optimizers.optimizer_v2 import adagrad as adagrad_keras_v2 <ide> ) <ide> from keras.optimizers.optimizer_v2 import nadam as nadam_keras_v2 <ide> from keras.optimizers.optimizer_v2 import rmsprop as rmsprop_keras_v2 <del>import tensorflow.compat.v2 as tf <del> <ide> <ide> gradient_descent_optimizer_v1_fn = ( <ide> tf.__internal__.test.combinations.NamedObject( <ide><path>keras/distribute/parameter_server_evaluation_test.py <ide> <ide> import time <ide> <del>import keras <del>from keras.testing_infra import test_utils <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.distribute import ( <ide> 
multi_worker_test_base, <ide> ) <ide> ) <ide> from tensorflow.python.ops import resource_variable_ops <ide> <add>import keras <add>from keras.testing_infra import test_utils <add> <ide> <ide> # TODO(yuefengz): move the following implementation to Keras core. <ide> class MeanMetricSpec(tf.TypeSpec): <ide><path>keras/distribute/saved_model_mixed_api_test.py <ide> tf.saved_model.save(). <ide> """ <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras.distribute import saved_model_test_base as test_base <ide> from keras.saving import save <ide> from keras.testing_infra import test_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> _DEFAULT_FUNCTION_KEY = "serving_default" <ide> <ide><path>keras/distribute/saved_model_save_load_test.py <ide> # ============================================================================== <ide> """Tests for saving and loading using tf's saved_model APIs with DS.""" <ide> <add>import os <add> <ide> import tensorflow.compat.v2 as tf <ide> <del>import os <del>from keras.testing_infra import test_utils <ide> from keras.distribute import model_combinations <ide> from keras.distribute import saved_model_test_base as test_base <add>from keras.testing_infra import test_utils <ide> <ide> <ide> @test_utils.run_v2_only <ide><path>keras/distribute/saved_model_test_base.py <ide> <ide> import os <ide> <del>from absl.testing import parameterized <del>from keras.distribute import model_combinations <ide> import numpy as np <del> <ide> import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <add> <add>from keras.distribute import model_combinations <ide> <ide> _RANDOM_SEED = 1337 <ide> _DEFAULT_FUNCTION_KEY = "serving_default" <ide><path>keras/distribute/sharded_variable_test.py <ide> # ============================================================================== <ide> """Tests for ClusterCoordinator and Keras models.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <ide> <ide> import keras <ide> from keras.distribute import multi_worker_testing_utils <ide> from keras.distribute import strategy_combinations <ide> from keras.engine import base_layer <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class ShardedVariableTest(tf.test.TestCase, parameterized.TestCase): <ide><path>keras/distribute/sidecar_evaluator_test.py <ide> import threading <ide> import time <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add>from tensorflow.python.platform import tf_logging as logging <add> <ide> import keras <ide> from keras.distribute import sidecar_evaluator as sidecar_evaluator_lib <ide> from keras.optimizers.optimizer_v2 import gradient_descent <ide> from keras.testing_infra import test_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <del>from tensorflow.python.platform import tf_logging as logging <ide> <ide> _BATCH_SIZE = 32 <ide> <ide><path>keras/distribute/simple_models.py <ide> # ============================================================================== <ide> """A simple functional keras model with one layer.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> import keras <ide> from keras.distribute import model_collection_base <ide><path>keras/distribute/strategy_combinations.py <ide> <ide> import tensorflow.compat.v2 as tf <ide> <del> <ide> multidevice_strategies = [ <ide> 
tf.__internal__.distribute.combinations.mirrored_strategy_with_gpu_and_cpu, <ide> tf.__internal__.distribute.combinations.mirrored_strategy_with_two_gpus, <ide><path>keras/distribute/test_example.py <ide> # ============================================================================== <ide> """A simple network to use in tests and examples.""" <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras.legacy_tf_layers import core <ide> from keras.legacy_tf_layers import normalization <ide> from keras.optimizers.optimizer_v2 import optimizer_v2 <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> <ide> def minimize_loss_example(optimizer, use_bias=False, use_callable_loss=True): <ide> """Example of non-distribution-aware legacy code.""" <ide><path>keras/distribute/tpu_strategy_test_utils.py <ide> """Utility functions for tests using TPUStrategy.""" <ide> <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl import flags <ide> <ide> FLAGS = flags.FLAGS <ide><path>keras/distribute/worker_training_state.py <ide> # ============================================================================== <ide> """Training state management.""" <ide> <add>import os <add> <ide> import tensorflow.compat.v2 as tf <ide> <del>import os <ide> from keras import backend <ide> from keras.distribute import distributed_file_utils <ide> from keras.utils import mode_keys <ide><path>keras/distribute/worker_training_state_test.py <ide> # ============================================================================== <ide> """Tests of `worker_training_state.py` utilities.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import os <ide> import sys <ide> <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> from keras import callbacks <ide> from keras.distribute import multi_worker_testing_utils <ide> <ide><path>keras/dtensor/__init__.py <ide> <ide> # Conditional import the dtensor API, since it is currently broken in OSS. <ide> if _DTENSOR_API_ENABLED: <del> from tensorflow.compat.v2.experimental import ( <del> dtensor as dtensor_api, <del> ) # pylint: disable=g-import-not-at-top <add> from tensorflow.compat.v2.experimental import dtensor as dtensor_api <ide> else: <ide> # Leave it with a placeholder, so that the import line from other python file <ide> # will not break. 
<ide><path>keras/dtensor/initializers_test.py <ide> # ============================================================================== <ide> """Tests for initializers.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> from keras import backend <ide> from keras import initializers <ide> from keras.dtensor import dtensor_api as dtensor <ide> from keras.dtensor import test_util <ide> from keras.utils import tf_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class InitializersTest(test_util.DTensorBaseTest): <ide><path>keras/dtensor/integration_test_utils.py <ide> """ <ide> <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl import logging <add> <ide> from keras import layers <ide> from keras import losses <ide> from keras import models <ide> from keras.datasets import mnist <ide> from keras.dtensor import dtensor_api as dtensor <ide> from keras.dtensor import layout_map as layout_map_lib <ide> from keras.utils import np_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> # pylint: disable=missing-function-docstring <ide> <ide><path>keras/dtensor/layers_test.py <ide> # ============================================================================== <ide> """Tests for layers.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> from keras import backend <ide> from keras import layers <ide> from keras.dtensor import dtensor_api as dtensor <ide> from keras.dtensor import test_util <ide> from keras.utils import tf_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class LayersTest(test_util.DTensorBaseTest): <ide><path>keras/dtensor/layout_map.py <ide> import re <ide> import threading <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras.dtensor import dtensor_api as dtensor <ide> from keras.dtensor import lazy_variable <ide> from keras.dtensor import utils <ide> from keras.engine import base_layer <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> # pylint: disable=missing-class-docstring <ide> <ide> # We will skip the path for certain attributes when mapping the layout, e.g. 
<ide><path>keras/dtensor/layout_map_test.py <ide> # ============================================================================== <ide> """Tests for layout_map.""" <ide> <del>from keras import backend <del>from keras import layers <del>from keras.dtensor import dtensor_api as dtensor <del>from keras.dtensor import layout_map as layout_map_lib <del>from keras.utils import tf_utils <ide> import numpy as np <ide> import tensorflow.compat.v2 as tf <ide> <ide> # TODO(scottzhu): Fix the layout map test with keras/dtensor/test_util <ide> from keras.dtensor.tests import test_util <ide> <add>from keras import backend <add>from keras import layers <add>from keras.dtensor import dtensor_api as dtensor <add>from keras.dtensor import layout_map as layout_map_lib <add>from keras.utils import tf_utils <add> <ide> <ide> class LayoutMapTest(test_util.DTensorBaseTest): <ide> def setUp(self): <ide><path>keras/dtensor/lazy_variable.py <ide> <ide> import threading <ide> <del> <ide> from tensorflow.core.framework import attr_value_pb2 <ide> from tensorflow.python.eager import context <ide> from tensorflow.python.framework import ops <ide> from tensorflow.python.util import compat <ide> from tensorflow.python.util import tf_contextlib <ide> <del> <ide> _DISABLE_LAZY_VARIABLE_INIT = threading.local() <ide> <ide> <ide><path>keras/dtensor/metrics_test.py <ide> # ============================================================================== <ide> """Tests for metrics.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> from keras import metrics <ide> from keras.dtensor import dtensor_api as dtensor <ide> from keras.dtensor import test_util <ide> from keras.utils import tf_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class MetricsTest(test_util.DTensorBaseTest): <ide><path>keras/dtensor/mnist_model_test.py <ide> # ============================================================================== <ide> """E2E Tests for mnist_model.""" <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.dtensor.python import mesh_util <add>from tensorflow.dtensor.python import tpu_util <add> <ide> from keras import backend <ide> from keras.dtensor import dtensor_api as dtensor <ide> from keras.dtensor import integration_test_utils <ide> from keras.dtensor import optimizers as optimizer_lib <ide> from keras.dtensor import test_util <ide> from keras.utils import tf_utils <ide> <del>import tensorflow.compat.v2 as tf <del> <del> <del>from tensorflow.dtensor.python import mesh_util <del>from tensorflow.dtensor.python import tpu_util <del> <ide> <ide> class MnistTest(test_util.DTensorBaseTest): <ide> def test_mnist_training_cpu(self): <ide><path>keras/dtensor/optimizers.py <ide> # ============================================================================== <ide> """DTensor specific Keras optimizers.""" <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add>from tensorflow.tools.docs import doc_controls <add> <ide> from keras.dtensor import dtensor_api as dtensor <ide> from keras.optimizers.optimizer_experimental import adadelta <ide> from keras.optimizers.optimizer_experimental import adagrad <ide> from keras.optimizers.optimizer_experimental import sgd <ide> from keras.optimizers.schedules import learning_rate_schedule <ide> <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <del>from 
tensorflow.tools.docs import doc_controls <del> <ide> <ide> # pylint: disable=protected-access,missing-class-docstring <ide> class Optimizer(optimizer_lib._BaseOptimizer): <ide><path>keras/dtensor/optimizers_test.py <ide> # ============================================================================== <ide> """Tests for initializers.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> from keras.dtensor import dtensor_api as dtensor <ide> from keras.dtensor import optimizers <ide> from keras.dtensor import test_util <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class OptimizersTest(test_util.DTensorBaseTest): <ide><path>keras/dtensor/test_util.py <ide> # ============================================================================== <ide> """Keras utilities for DTensor unit test.""" <ide> <del>from absl.testing import parameterized <ide> import numpy as np <del> <ide> import tensorflow.compat.v2 as tf <del> <del> <add>from absl.testing import parameterized <ide> from tensorflow.dtensor.python import api as dtensor_api <ide> from tensorflow.python.eager import context <ide> <del> <ide> _DEFAULT_GPU_MEMORY_LIMIT = 200 # MB <ide> <ide> <ide><path>keras/dtensor/utils.py <ide> <ide> import inspect <ide> <del>from keras.dtensor import dtensor_api as dtensor <ide> import tensorflow.compat.v2 as tf <ide> <add>from keras.dtensor import dtensor_api as dtensor <ide> <ide> # All the variable names in the default keras layers. We will use those to map <ide> # against the args in the __init__ method to find corresponding layout args. <ide><path>keras/dtensor/utils_test.py <ide> # ============================================================================== <ide> """Tests for utils.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> from keras import layers <ide> from keras.dtensor import dtensor_api as dtensor <ide> from keras.dtensor import test_util <ide> from keras.dtensor import utils <ide> <del>import numpy as np <del>import tensorflow.compat.v2 as tf <del> <ide> <ide> class UtilsTest(test_util.DTensorBaseTest): <ide> def setUp(self): <ide><path>keras/engine/base_layer.py <ide> # pylint: disable=g-bad-import-order <ide> """Contains the base Layer class, from which all layers inherit.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import collections <ide> import contextlib <ide> import functools <ide> import weakref <ide> <ide> import numpy as np <del> <add>import tensorflow.compat.v2 as tf <ide> from google.protobuf import json_format <add>from tensorflow.python.platform import tf_logging <add>from tensorflow.python.util.tf_export import ( <add> get_canonical_name_for_symbol, <add>) <add>from tensorflow.python.util.tf_export import keras_export <add>from tensorflow.tools.docs import doc_controls <add> <ide> from keras import backend <ide> from keras import constraints <ide> from keras import initializers <ide> <ide> # A module that only depends on `keras.layers` import these from here. 
<ide> from keras.utils.generic_utils import ( <del> to_snake_case, <del>) # pylint: disable=unused-import <add> to_snake_case, # pylint: disable=unused-import <add>) <ide> from keras.utils.tf_utils import ( <del> is_tensor_or_tensor_list, <del>) # pylint: disable=unused-import <del>from tensorflow.python.platform import tf_logging <del>from tensorflow.python.util.tf_export import ( <del> get_canonical_name_for_symbol, <add> is_tensor_or_tensor_list, # pylint: disable=unused-import <ide> ) <del>from tensorflow.python.util.tf_export import keras_export <del>from tensorflow.tools.docs import doc_controls <ide> <ide> # pylint: disable=g-inconsistent-quotes <ide> metrics_mod = generic_utils.LazyLoader( <ide><path>keras/engine/base_layer_test.py <ide> # limitations under the License. <ide> # ============================================================================== <ide> """Tests for TensorFlow 2.0 layer behavior.""" <del># pylint: disable=g-bad-import-order <del>import tensorflow.compat.v2 as tf <del> <ide> import copy <ide> import os <ide> <ide> import numpy as np <add> <add># pylint: disable=g-bad-import-order <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import backend <del>from keras.testing_infra import test_combinations <ide> from keras import layers <ide> from keras import regularizers <del>from keras.testing_infra import test_utils <ide> from keras.engine import base_layer <ide> from keras.engine import input_layer <ide> from keras.engine import sequential <ide> from keras.engine import training as training_lib <ide> from keras.legacy_tf_layers import core as legacy_core <ide> from keras.optimizers.optimizer_v2 import rmsprop <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <ide> from keras.utils import control_flow_util <ide> <ide> <ide><path>keras/engine/base_layer_utils.py <ide> <ide> import functools <ide> import threading <add> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.dtensor import dtensor_api as dtensor <ide> from keras.utils import control_flow_util <ide> from keras.utils import tf_inspect <ide> from keras.utils import tf_utils <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> _call_context = threading.local() <ide> <ide> <ide> def create_mean_metric(value, name=None): <ide> # import keras will import base_layer and then this module, and metric <ide> # relies on base_layer, which result into a cyclic dependency. <del> from keras import ( <del> metrics as metrics_module, <del> ) # pylint: disable=g-import-not-at-top <add> from keras import metrics as metrics_module <ide> <ide> metric_obj = metrics_module.Mean(name=name, dtype=value.dtype) <ide> return metric_obj, metric_obj(value) <ide> def _create_keras_history_helper(tensors, processed_ops, created_layers): <ide> # Import of `base_layer` needed in order to create `TensorFlowOpLayer`. <ide> # Cannot be imported at top because of circular dependencies. <ide> # TODO(omalleyt): Resolve circular dependency. 
<del> from keras.engine import base_layer # pylint: disable=g-import-not-at-top <add> from keras.engine import base_layer <ide> <ide> tensor_list = tf.nest.flatten(tensors) <ide> sparse_ops = [] <ide><path>keras/engine/base_layer_utils_test.py <ide> # ============================================================================== <ide> <ide> import numpy as np <del> <ide> import tensorflow.compat.v2 as tf <ide> <ide> import keras <ide> from keras import backend <del>from keras.testing_infra import test_combinations <ide> from keras.engine import base_layer_utils <add>from keras.testing_infra import test_combinations <ide> <ide> <ide> @test_combinations.generate(test_combinations.combine(mode=["graph", "eager"])) <ide><path>keras/engine/base_layer_v1.py <ide> # pylint: disable=g-bad-import-order <ide> """Contains the base Layer class, from which all layers inherit.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import functools <ide> import itertools <ide> import threading <ide> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging <add>from tensorflow.tools.docs import doc_controls <add> <ide> from keras import backend <ide> from keras import constraints <ide> from keras import initializers <ide> <ide> # A module that only depends on `keras.layers` import these from here. <ide> from keras.utils.generic_utils import ( <del> to_snake_case, <del>) # pylint: disable=unused-import <add> to_snake_case, # pylint: disable=unused-import <add>) <ide> from keras.utils.tf_utils import ( <del> is_tensor_or_tensor_list, <del>) # pylint: disable=unused-import <del>from tensorflow.python.platform import tf_logging <del>from tensorflow.tools.docs import doc_controls <add> is_tensor_or_tensor_list, # pylint: disable=unused-import <add>) <ide> <ide> <ide> # pylint: disable=g-classes-have-attributes <ide> def __setattr__(self, name, value): <ide> pass <ide> <ide> # Keep track of metric instance created in subclassed layer. 
<del> from keras import ( <del> metrics as metrics_module, <del> ) # pylint: disable=g-import-not-at-top <add> from keras import metrics as metrics_module <ide> <ide> for val in tf.nest.flatten(value): <ide> if isinstance(val, metrics_module.Metric) and hasattr( <ide><path>keras/engine/base_preprocessing_layer.py <ide> <ide> import abc <ide> <del>from keras.engine import data_adapter <del>from keras.engine.base_layer import Layer <del>from keras.utils import version_utils <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.eager import context <ide> from tensorflow.python.util.tf_export import keras_export <ide> from tensorflow.tools.docs import doc_controls <ide> <add>from keras.engine import data_adapter <add>from keras.engine.base_layer import Layer <add>from keras.utils import version_utils <ide> <ide> keras_kpl_gauge = tf.__internal__.monitoring.BoolGauge( <ide> "/tensorflow/api/keras/layers/preprocessing", <ide><path>keras/engine/base_preprocessing_layer_test.py <ide> <ide> import os <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <add> <ide> import keras <ide> from keras.engine import base_preprocessing_layer <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> # Define a test-only implementation of BasePreprocessingLayer to validate <ide><path>keras/engine/compile_utils.py <ide> <ide> <ide> import copy <add> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import losses as losses_mod <ide> from keras import metrics as metrics_mod <ide> from keras.saving.experimental import saving_lib <ide> from keras.utils import generic_utils <ide> from keras.utils import losses_utils <ide> from keras.utils import tf_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class Container: <ide><path>keras/engine/compile_utils_test.py <ide> """Tests for compile utitilies.""" <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras import backend <del>from keras.testing_infra import test_combinations <ide> from keras import losses as losses_mod <ide> from keras import metrics as metrics_mod <ide> from keras.engine import compile_utils <add>from keras.testing_infra import test_combinations <ide> <ide> <ide> class LossesContainerTest(test_combinations.TestCase): <ide><path>keras/engine/control_flow_test.py <ide> # ============================================================================== <ide> """Tests for dynamic control flow behavior with Keras.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <ide> <ide> import keras <del>from keras.testing_infra import test_combinations <del>from keras.testing_infra import test_utils <ide> from keras.engine import base_layer <ide> from keras.optimizers.optimizer_v2 import rmsprop <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <ide> <ide> <ide> class ControlFlowLayer1(base_layer.Layer): <ide><path>keras/engine/correctness_test.py <ide> # ============================================================================== <ide> """Tests for numerical correctness.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <ide> <ide> import keras <ide> from keras.testing_infra import test_combinations 
<ide><path>keras/engine/data_adapter.py <ide> # ============================================================================== <ide> """Adapter module that convert different input data objects into tf.dataset.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import abc <ide> import contextlib <ide> import functools <ide> import random <ide> <ide> import numpy as np <del>from tensorflow.python.eager import context <del>from keras import backend <del>from keras.engine import training_utils <del>from keras.utils import data_utils <del>from keras.utils import dataset_creator <del>from keras.utils import tf_utils <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.python.distribute.input_lib import ( <ide> DistributedDataset, <ide> ) <add>from tensorflow.python.eager import context <ide> from tensorflow.python.framework import type_spec <ide> from tensorflow.python.platform import tf_logging as logging <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras import backend <add>from keras.engine import training_utils <add>from keras.utils import data_utils <add>from keras.utils import dataset_creator <add>from keras.utils import tf_utils <add> <ide> try: <ide> import pandas as pd # pylint: disable=g-import-not-at-top <ide> except ImportError: <ide><path>keras/engine/data_adapter_test.py <ide> # ============================================================================== <ide> """DataAdapter tests.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import math <ide> <del>from absl.testing import parameterized <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <add>from tensorflow.python.eager import context <ide> <ide> import keras <add>from keras.engine import data_adapter <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>from keras.engine import data_adapter <ide> from keras.utils import data_utils <del>from tensorflow.python.eager import context <ide> <ide> <ide> class DummyArrayLike: <ide><path>keras/engine/deferred_sequential_test.py <ide> # ============================================================================== <ide> """Tests specific to deferred-build `Sequential` models.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import os <ide> import unittest <add> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide><path>keras/engine/feature_columns_integration_test.py <ide> # ============================================================================== <ide> """Tests specific to Feature Columns integration.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> import keras <del>from keras.testing_infra import test_combinations <ide> from keras import metrics as metrics_module <del>from keras.testing_infra import test_utils <ide> from keras.feature_column import dense_features as df <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <ide> from keras.utils import np_utils <ide> <ide> <ide><path>keras/engine/functional.py <ide> import copy <ide> import itertools <ide> import warnings <add> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.tools.docs import doc_controls <add> <ide> from keras import backend 
<ide> from keras.dtensor import layout_map as layout_map_lib <ide> from keras.engine import base_layer <ide> from keras.utils import generic_utils <ide> from keras.utils import tf_inspect <ide> from keras.utils import tf_utils <del>import tensorflow.compat.v2 as tf <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.tools.docs import doc_controls <ide> <ide> <ide> # pylint: disable=g-classes-have-attributes <ide> def process_layer(layer_data): <ide> layer = created_layers[layer_name] <ide> else: <ide> # Instantiate layer. <del> from keras.layers import ( <del> deserialize as deserialize_layer, <del> ) # pylint: disable=g-import-not-at-top <add> from keras.layers import deserialize as deserialize_layer <ide> <ide> layer = deserialize_layer(layer_data, custom_objects=custom_objects) <ide> created_layers[layer_name] = layer <ide><path>keras/engine/functional_test.py <ide> <ide> import warnings <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.framework import extension_type <add>from tensorflow.python.training.tracking.util import ( <add> Checkpoint, <add>) <add> <ide> from keras import backend <ide> from keras import layers <ide> from keras import losses <ide> from keras.utils import layer_utils <ide> from keras.utils import tf_utils <ide> <del>import numpy as np <del>import tensorflow.compat.v2 as tf <del> <del> <del>from tensorflow.python.framework import extension_type <del>from tensorflow.python.training.tracking.util import ( <del> Checkpoint, <del>) <del> <ide> <ide> class NetworkConstructionTest(test_combinations.TestCase): <ide> def test_default_model_name(self): <ide><path>keras/engine/functional_utils.py <ide> # ============================================================================== <ide> """Utilities for keras functional model.""" <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import backend <ide> from keras.engine import input_layer as input_layer_module <ide> from keras.engine import keras_tensor <ide> from keras.engine import node as node_module <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> _KERAS_TENSOR_TYPE_CHECK_ERROR_MSG = ( <ide> "Found unexpected instance while processing input tensors for keras " <ide> "functional model. 
Expecting KerasTensor which is from tf.keras.Input() " <ide><path>keras/engine/functional_utils_test.py <ide> import collections <ide> import os <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import layers <ide> from keras import models <ide> from keras.engine import functional_utils <ide> from keras.engine import input_layer as input_layer_lib <ide> from keras.testing_infra import test_combinations <ide> <del>import numpy as np <del>import tensorflow.compat.v2 as tf <del> <ide> <ide> @test_combinations.run_all_keras_modes(always_skip_v1=True) <ide> class FunctionalModelSlideTest(test_combinations.TestCase): <ide><path>keras/engine/input_layer.py <ide> """Input layer code (`Input` and `InputLayer`).""" <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.distribute import distributed_training_utils <ide> from keras.engine import base_layer <ide> from keras.saving.saved_model import layer_serialization <ide> from keras.utils import tf_utils <ide> from keras.utils import traceback_utils <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> def _assert_other_arg_none(arg_name, arg): <ide><path>keras/engine/input_layer_test.py <ide> <ide> import tensorflow.compat.v2 as tf <ide> from tensorflow.python.framework import type_spec <add> <ide> from keras import backend <del>from keras.testing_infra import test_combinations <ide> from keras.engine import functional <ide> from keras.engine import input_layer as input_layer_lib <ide> from keras.layers import core <ide> from keras.saving import model_config <add>from keras.testing_infra import test_combinations <ide> <ide> <ide> class TwoTensors(tf.__internal__.CompositeTensor): <ide><path>keras/engine/input_spec.py <ide> """Contains the InputSpec class.""" <ide> <ide> import tensorflow.compat.v2 as tf <del>from keras import backend <ide> from tensorflow.python.util.tf_export import keras_export <ide> from tensorflow.python.util.tf_export import tf_export <ide> <add>from keras import backend <add> <ide> <ide> @keras_export( <ide> "keras.layers.InputSpec", <ide><path>keras/engine/keras_tensor.py <ide> # ============================================================================== <ide> """Keras Input Tensor used to track functional API Topology.""" <ide> <del>from keras.utils import object_identity <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.data.util import structure <ide> <add>from keras.utils import object_identity <add> <ide> # pylint: disable=g-classes-have-attributes <ide> <ide> <ide><path>keras/engine/keras_tensor_test.py <ide> # pylint: disable=g-bad-import-order <ide> <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>from keras.testing_infra import test_combinations <add> <ide> from keras import layers <del>from keras.testing_infra import test_utils <ide> from keras.engine import keras_tensor <ide> from keras.engine import training <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <ide> <ide> <ide> class CustomTypeSpec(tf.TypeSpec): <ide><path>keras/engine/node.py <ide> # pylint: disable=g-classes-have-attributes <ide> """Contains the `Node` class.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import collections <ide> import copy <ide> import json <add> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add> 
<ide> from keras import backend <ide> from keras.engine import base_layer_utils <ide> from keras.saving.saved_model import json_utils <ide><path>keras/engine/node_test.py <ide> # ,============================================================================ <ide> """Tests for layer graphs construction & handling.""" <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras.engine import base_layer <ide> from keras.engine import node as node_module <ide> from keras.testing_infra import test_combinations <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class DummyTensor(tf.__internal__.types.Tensor): <ide><path>keras/engine/partial_batch_padding_handler.py <ide> # ============================================================================== <ide> """Utility object to handler partial batches for TPUStrategy.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <ide> <del># pylint: disable=protected-access <del> <del>import numpy as np <ide> from keras import backend <ide> <add># pylint: disable=protected-access <add> <ide> <ide> class PartialBatchPaddingHandler: <ide> """A container that holds info about partial batches for `predict()`.""" <ide><path>keras/engine/ragged_keras_tensor_test.py <ide> # ============================================================================== <ide> """RaggedKerasTensor tests.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <del>from keras.testing_infra import test_combinations <add> <ide> from keras import layers <del>from keras.testing_infra import test_utils <ide> from keras.engine import training <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <ide> <ide> <ide> @test_utils.run_v2_only <ide><path>keras/engine/sequential.py <ide> # pylint: disable=protected-access <ide> """Home of the `Sequential` model.""" <ide> <add>import copy <add> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.util.tf_export import keras_export <ide> <del>import copy <ide> from keras import layers as layer_module <ide> from keras.engine import base_layer <ide> from keras.engine import functional <ide> from keras.utils import tf_inspect <ide> from keras.utils import tf_utils <ide> from keras.utils import traceback_utils <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> SINGLE_LAYER_OUTPUT_ERROR_MSG = ( <ide> "All layers in a Sequential model should have " <ide><path>keras/engine/sequential_test.py <ide> # ============================================================================== <ide> """Tests specific to `Sequential` model.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <del> <del>import keras <ide> from tensorflow.python.framework import ( <ide> test_util as tf_test_utils, <ide> ) <add> <add>import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <ide> <ide><path>keras/engine/training.py <ide> import warnings <ide> import weakref <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.eager import context <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.util.tf_export 
import keras_export <add>from tensorflow.tools.docs import doc_controls <add> <ide> from keras import backend <ide> from keras import callbacks as callbacks_module <ide> from keras import optimizers <ide> from keras.utils import version_utils <ide> from keras.utils.mode_keys import ModeKeys <ide> <del>import numpy as np <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.eager import context <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.util.tf_export import keras_export <del>from tensorflow.tools.docs import doc_controls <del> <del># pylint: disable=g-import-not-at-top <ide> try: <ide> import h5py <ide> except ImportError: <ide> h5py = None <del># pylint: enable=g-import-not-at-top <ide> <ide> <ide> @keras_export("keras.Model", "keras.models.Model") <ide> def __new__(cls, *args, **kwargs): <ide> # Signature detection <ide> if is_functional_model_init_params(args, kwargs) and cls == Model: <ide> # Functional model <del> from keras.engine import ( <del> functional, <del> ) # pylint: disable=g-import-not-at-top <add> from keras.engine import functional <ide> <ide> return functional.Functional(skip_init=True, *args, **kwargs) <ide> else: <ide> def __init__(self, *args, **kwargs): <ide> # Special case for Subclassed Functional Model, which we couldn't detect <ide> # when __new__ is called. We only realize it is a functional model when <ide> # it calls super.__init__ with input and output tensor. <del> from keras.engine import ( <del> functional, <del> ) # pylint: disable=g-import-not-at-top <add> from keras.engine import functional <ide> <ide> if is_functional_model_init_params(args, kwargs) and not isinstance( <ide> self, functional.Functional <ide> def _convert_to_graph_inputs(x): <ide> _convert_to_graph_inputs, copied_kwargs <ide> ) <ide> <del> # pylint: disable=g-import-not-at-top <ide> with layout_map_lib.layout_map_scope(self._layout_map): <ide> # We ignore the result here. <ide> super().__call__(inputs, *copied_args, **copied_kwargs) <ide> def _updated_config(self): <ide> Returns: <ide> Model config with Keras version information added. <ide> """ <del> from keras import ( <del> __version__ as keras_version, <del> ) # pylint: disable=g-import-not-at-top <add> from keras import __version__ as keras_version <ide> <ide> config = self.get_config() <ide> model_config = { <ide> def from_config(cls, config, custom_objects=None): <ide> # `Functional`. In the case that `cls` is meant to behave like a child <ide> # class of `Functional` but only inherits from the `Model` class, we <ide> # have to call `cls(...)` instead of `Functional.from_config`. 
<del> from keras.engine import ( <del> functional, <del> ) # pylint: disable=g-import-not-at-top <add> from keras.engine import functional <ide> <ide> with generic_utils.SharedObjectLoadingScope(): <ide> functional_model_keys = [ <ide> def _method_wrapper(self, *args, **kwargs): <ide> <ide> def inject_functional_model_class(cls): <ide> """Inject `Functional` into the hierarchy of this class if needed.""" <del> from keras.engine import functional # pylint: disable=g-import-not-at-top <del> from keras.engine import training_v1 # pylint: disable=g-import-not-at-top <add> from keras.engine import functional <add> from keras.engine import training_v1 <ide> <ide> if cls == Model or cls == training_v1.Model: <ide> return functional.Functional <ide><path>keras/engine/training_arrays_test.py <ide> # ============================================================================== <ide> """Tests for model.fit calls with a Dataset object passed as validation_data.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import io <ide> import sys <ide> from unittest import mock <ide> <del>from absl.testing import parameterized <ide> import numpy as np <del> <del>import keras <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <ide> from tensorflow.python.framework import ( <ide> test_util as tf_test_utils, <ide> ) <add> <add>import keras <ide> from keras.engine import data_adapter <add>from keras.layers import core <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>from keras.layers import core <ide> from keras.utils import io_utils <ide> <ide> <ide><path>keras/engine/training_arrays_v1.py <ide> # ============================================================================== <ide> """Part of the Keras training engine related to plain array data.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <del># pylint: disable=protected-access <del> <ide> import functools <ide> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add> <ide> from keras import backend <ide> from keras import callbacks as cbks <ide> from keras.distribute import distributed_training_utils_v1 <ide> from keras.engine import training_utils_v1 <add>from keras.utils import io_utils <ide> from keras.utils.generic_utils import make_batches <ide> from keras.utils.generic_utils import slice_arrays <del>from keras.utils import io_utils <ide> from keras.utils.mode_keys import ModeKeys <del>from tensorflow.python.platform import tf_logging as logging <add> <add># pylint: disable=protected-access <add> <ide> <ide> try: <ide> from scipy.sparse import issparse # pylint: disable=g-import-not-at-top <ide><path>keras/engine/training_dataset_test.py <ide> # ============================================================================== <ide> """Tests for training routines.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import io <ide> import sys <ide> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <ide> <ide> import keras <ide> from keras import callbacks <del>from keras.testing_infra import test_combinations <ide> from keras import metrics as metrics_module <add>from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <ide> from keras.utils import io_utils <del>from tensorflow.python.platform import tf_logging as logging <ide> <ide> <ide> class 
BatchCounterCallback(callbacks.Callback): <ide><path>keras/engine/training_distributed_v1.py <ide> # ============================================================================== <ide> """Part of the Keras training engine related to distributed training.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <del># pylint: disable=protected-access <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.python.distribute import input_lib <add>from tensorflow.python.platform import tf_logging as logging <add> <ide> from keras import backend <ide> from keras import callbacks as cbks <ide> from keras.distribute import distribute_coordinator_utils as dc <ide> from keras.engine import training_utils_v1 <ide> from keras.utils.generic_utils import Progbar <ide> from keras.utils.mode_keys import ModeKeys <del>from tensorflow.python.platform import tf_logging as logging <add> <add># pylint: disable=protected-access <ide> <ide> <ide> def _per_replica_execution_function(model, mode): <ide><path>keras/engine/training_eager_test.py <ide> # ============================================================================== <ide> """Tests for training routines.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <ide> <ide> import keras <del>from keras.testing_infra import test_combinations <ide> from keras import metrics as metrics_module <del>from keras.testing_infra import test_utils <ide> from keras.optimizers.optimizer_v2 import rmsprop <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <ide> <ide> <ide> class TrainingTest(test_combinations.TestCase): <ide><path>keras/engine/training_eager_v1.py <ide> # ============================================================================== <ide> """Keras training and evaluation routines for eager execution.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <del># pylint: disable=protected-access <del> <ide> import numpy as np <del> <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.python.eager.backprop import GradientTape <add>from tensorflow.python.platform import tf_logging as logging <add> <ide> from keras import backend <ide> from keras.engine import training_utils <ide> from keras.engine import training_utils_v1 <ide> from keras.mixed_precision import loss_scale_optimizer <ide> from keras.utils import losses_utils <del>from tensorflow.python.platform import tf_logging as logging <add> <add># pylint: disable=protected-access <ide> <ide> <ide> def _eager_loss_fn(outputs, targets, loss_fn, output_name): <ide><path>keras/engine/training_generator_test.py <ide> # ============================================================================== <ide> """Tests for training routines.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import itertools <ide> <del>from absl.testing import parameterized <ide> import numpy as np <del>from keras.testing_infra import test_combinations <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <add> <ide> from keras import layers as layers_module <ide> from keras import losses <ide> from keras import metrics as metrics_module <del>from keras.testing_infra import test_utils <ide> from keras.engine import input_layer <ide> from keras.engine import training <ide> from keras.engine import training_generator_v1 <ide> from keras.optimizers.optimizer_v2 import rmsprop <add>from keras.testing_infra import 
test_combinations <add>from keras.testing_infra import test_utils <ide> from keras.utils import data_utils <ide> <ide> <ide><path>keras/engine/training_generator_v1.py <ide> """Part of the Keras training engine related to Python generators of array data. <ide> """ <ide> <del>import tensorflow.compat.v2 as tf <del> <del># pylint: disable=protected-access <del> <ide> import functools <ide> import math <ide> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add> <ide> from keras import backend <ide> from keras import callbacks as cbks <ide> from keras.engine import training_utils <ide> from keras.engine import training_utils_v1 <ide> from keras.utils import data_utils <ide> from keras.utils import generic_utils <ide> from keras.utils.mode_keys import ModeKeys <del>from tensorflow.python.platform import tf_logging as logging <add> <add># pylint: disable=protected-access <ide> <ide> <ide> def model_iteration( <ide><path>keras/engine/training_gpu_test.py <ide> # ============================================================================== <ide> """Tests for training routines.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <add> <ide> from keras import backend <del>from keras.testing_infra import test_combinations <del>from keras.testing_infra import test_utils <ide> from keras.engine import input_layer <ide> from keras.engine import training <ide> from keras.layers.convolutional import Conv2D <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <ide> <ide> <ide> class TrainingGPUTest(tf.test.TestCase, parameterized.TestCase): <ide><path>keras/engine/training_integration_test.py <ide> # ============================================================================== <ide> """End-to-end tests for a variety of small models.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import collections <ide> import itertools <ide> <del>from absl.testing import parameterized <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <ide> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide><path>keras/engine/training_test.py <ide> import sys <ide> import tempfile <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add>from tensorflow.python.framework import ( <add> test_util as tf_test_utils, <add>) <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.training.rmsprop import ( <add> RMSPropOptimizer, <add>) <add> <ide> import keras <ide> from keras import backend <ide> from keras import layers as layers_module <ide> from keras.utils import data_utils <ide> from keras.utils import io_utils <ide> from keras.utils import np_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.framework import ( <del> test_util as tf_test_utils, <del>) <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.training.rmsprop import ( <del> RMSPropOptimizer, <del>) <ide> <ide> try: <ide> import scipy.sparse as scipy_sparse # pylint: disable=g-import-not-at-top <ide><path>keras/engine/training_utils.py <ide> # ============================================================================== <ide> """Training-related utilities.""" <ide> 
<add>import numpy as np <ide> import tensorflow.compat.v2 as tf <ide> <del>import numpy as np <ide> from keras.utils import generic_utils <ide> <ide> <ide><path>keras/engine/training_utils_v1.py <ide> # ============================================================================== <ide> """Training-related utilities.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import abc <ide> import atexit <ide> import collections <ide> import time <ide> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add> <ide> from keras import backend <ide> from keras import callbacks as cbks <ide> from keras import losses <ide> from keras.utils import generic_utils <ide> from keras.utils import losses_utils <ide> from keras.utils import tf_inspect <del>from tensorflow.python.platform import tf_logging as logging <ide> <ide> <ide> def is_composite_or_composite_value(tensor): <ide><path>keras/engine/training_utils_v1_test.py <ide> # ============================================================================== <ide> """Tests for training utility functions.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import functools <ide> import multiprocessing.pool <ide> import time <ide> <del>from absl.testing import parameterized <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <add>from tensorflow.python.platform import tf_logging as logging <add> <ide> from keras import backend <del>from keras.testing_infra import test_combinations <del>from keras.testing_infra import test_utils <ide> from keras.engine import keras_tensor <ide> from keras.engine import training_utils_v1 <del>from tensorflow.python.platform import tf_logging as logging <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <ide> <ide> <ide> class ModelInputsTest(tf.test.TestCase): <ide><path>keras/engine/training_v1.py <ide> # limitations under the License. 
<ide> # ============================================================================== <ide> """V1 Training-related part of the Keras engine.""" <del># pylint: disable=g-classes-have-attributes <del>import tensorflow.compat.v2 as tf <del> <ide> import collections <ide> import warnings <ide> <ide> import numpy as np <add> <add># pylint: disable=g-classes-have-attributes <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add> <ide> from keras import backend <ide> from keras import losses <ide> from keras import metrics as metrics_module <del>from keras.optimizers import optimizer_v1 <ide> from keras import optimizers <ide> from keras.distribute import distributed_training_utils <ide> from keras.distribute import distributed_training_utils_v1 <ide> from keras.engine import training_utils <ide> from keras.engine import training_utils_v1 <ide> from keras.mixed_precision import loss_scale_optimizer <add>from keras.optimizers import optimizer_v1 <ide> from keras.optimizers.optimizer_v2 import optimizer_v2 <ide> from keras.saving import saving_utils <ide> from keras.saving.saved_model import model_serialization <ide> from keras.utils import tf_inspect <ide> from keras.utils import tf_utils <ide> from keras.utils.mode_keys import ModeKeys <del>from tensorflow.python.platform import tf_logging as logging <ide> <ide> try: <ide> from scipy.sparse import issparse # pylint: disable=g-import-not-at-top <ide><path>keras/estimator/__init__.py <ide> """Keras estimator API.""" <ide> <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <ide> # Keras has undeclared dependency on tensorflow/estimator:estimator_py. <ide> def input_fn(): <ide> <ide> try: <ide> from tensorflow_estimator.python.estimator import ( <del> keras_lib, <del> ) # pylint: disable=g-import-not-at-top <add> keras_lib, # pylint: disable=g-import-not-at-top <add> ) <ide> except ImportError: <ide> raise NotImplementedError( <ide> "tf.keras.estimator.model_to_estimator function not available in your " <ide> def input_fn(): <ide> <ide> try: <ide> from tensorflow_estimator.python.estimator import ( <del> keras_lib, <del> ) # pylint: disable=g-import-not-at-top <add> keras_lib, # pylint: disable=g-import-not-at-top <add> ) <ide> except ImportError: <ide> raise NotImplementedError( <ide> "tf.keras.estimator.model_to_estimator function not available in your " <ide><path>keras/feature_column/base_feature_layer.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import collections <ide> import re <add> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras.engine.base_layer import Layer <ide> from keras.utils import generic_utils <ide> <ide><path>keras/feature_column/dense_features.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <add>import json <add> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <ide> <del>import json <ide> from keras import backend <ide> from keras.feature_column import base_feature_layer as kfc <ide> from keras.saving.saved_model import json_utils <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export(v1=["keras.layers.DenseFeatures"]) <ide><path>keras/feature_column/dense_features_test.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> 
<add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <ide> from tensorflow.python.eager import backprop <ide> from tensorflow.python.framework import ( <ide> test_util as tf_test_utils, <ide> ) <del>from keras.testing_infra import test_combinations <add> <ide> from keras.feature_column import dense_features as df <add>from keras.testing_infra import test_combinations <ide> <ide> <ide> def _initialized_session(config=None): <ide><path>keras/feature_column/dense_features_v2.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras.feature_column import base_feature_layer as kfc <ide> from keras.feature_column import dense_features <ide> from keras.utils import tf_contextlib <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.layers.DenseFeatures", v1=[]) <ide><path>keras/feature_column/dense_features_v2_test.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.python.eager import backprop <del>from keras.testing_infra import test_combinations <add> <ide> from keras.feature_column import dense_features_v2 as df <add>from keras.testing_infra import test_combinations <ide> <ide> <ide> def _initialized_session(config=None): <ide><path>keras/feature_column/sequence_feature_column.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.feature_column import base_feature_layer as kfc <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> # pylint: disable=protected-access <ide> <ide><path>keras/feature_column/sequence_feature_column_integration_test.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <del> <del> <ide> from google.protobuf import text_format <del> <ide> from tensorflow.core.example import example_pb2 <ide> from tensorflow.core.example import feature_pb2 <ide> from tensorflow.python.framework import ( <ide> test_util as tf_test_utils, <ide> ) <add> <ide> from keras import backend <ide> from keras.feature_column import dense_features <ide> from keras.feature_column import sequence_feature_column as ksfc <ide><path>keras/feature_column/sequence_feature_column_test.py <ide> from __future__ import division <ide> from __future__ import print_function <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <del> <ide> from absl.testing import parameterized <del>import numpy as np <ide> <ide> import keras <del>from keras.testing_infra import test_combinations <ide> from keras.feature_column import sequence_feature_column as ksfc <ide> from keras.saving import model_config <add>from keras.testing_infra import test_combinations <ide> <ide> <ide> def _initialized_session(config=None): <ide><path>keras/initializers/__init__.py <ide> # ============================================================================== <ide> """Keras initializer serialization / deserialization.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import threading <ide> <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.python import tf2 <add>from 
tensorflow.python.ops import init_ops <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras.initializers import initializers_v1 <ide> from keras.initializers import initializers_v2 <ide> from keras.utils import generic_utils <ide> from keras.utils import tf_inspect as inspect <del>from tensorflow.python.ops import init_ops <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> # LOCAL.ALL_OBJECTS is meant to be a global mutable. Hence we need to make it <ide> # thread-local to avoid concurrent mutations. <ide><path>keras/initializers/initializers_test.py <ide> # ============================================================================== <ide> """Tests for Keras initializers.""" <ide> <del>from absl.testing import parameterized <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <ide> <ide> from keras import backend <del>from keras.testing_infra import test_combinations <ide> from keras import initializers <ide> from keras import models <del>from keras.testing_infra import test_utils <ide> from keras.engine import input_layer <ide> from keras.layers import core <del> <del>import tensorflow.compat.v2 as tf <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <ide> <ide> <ide> def _compute_fans(shape): <ide><path>keras/initializers/initializers_v1.py <ide> import tensorflow.compat.v2 as tf <ide> from tensorflow.python.util.tf_export import keras_export <ide> <del> <ide> _v1_zeros_initializer = tf.compat.v1.zeros_initializer <ide> _v1_ones_initializer = tf.compat.v1.ones_initializer <ide> _v1_constant_initializer = tf.compat.v1.constant_initializer <ide><path>keras/initializers/initializers_v2.py <ide> <ide> import math <ide> <del>from keras import backend <del>from keras.dtensor import utils <del> <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras import backend <add>from keras.dtensor import utils <add> <ide> _PARTITION_SHAPE = "partition_shape" <ide> _PARTITION_OFFSET = "partition_offset" <ide> _LAYOUT = "layout" <ide><path>keras/integration_test/central_storage_strategy_test.py <ide> # ============================================================================== <ide> """Tests for KPL + CentralStorageStrategy.""" <ide> <del>from absl.testing import parameterized <ide> import tensorflow.compat.v2 as tf <del> <add>from absl.testing import parameterized <ide> from tensorflow.python.distribute import ( <ide> combinations as ds_combinations, <ide> ) <ide><path>keras/integration_test/custom_object_saving_test.py <ide> from __future__ import absolute_import <ide> from __future__ import division <ide> from __future__ import print_function <add> <ide> import os <ide> import sys <add> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> from keras.saving.experimental import saving_lib <ide> from keras.testing_infra import test_utils <ide> from keras.utils import generic_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> # `tf.print` message is only available in stderr in TF2, which this test checks. 
<ide><path>keras/integration_test/forwardprop_test.py <ide> <ide> import functools <ide> <del>from absl.testing import parameterized <ide> import numpy as np <ide> import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <ide> <ide> <ide> def _jvp(f, primals, tangents): <ide><path>keras/integration_test/gradient_checkpoint_test.py <ide> import gc <ide> <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.framework import ( <ide> test_util as tf_test_utils, <ide> ) <ide><path>keras/integration_test/multi_worker_tutorial_test.py <ide> import unittest <ide> import uuid <ide> import zipfile <del>from absl import logging <del>from absl.testing import parameterized <add> <ide> import numpy as np <ide> import tensorflow.compat.v2 as tf <add>from absl import logging <add>from absl.testing import parameterized <ide> <ide> PER_WORKER_BATCH_SIZE = 64 <ide> NUM_WORKERS = 2 <ide><path>keras/integration_test/mwms_multi_process_runner_test.py <ide> from __future__ import print_function <ide> <ide> import os <del>from absl import logging <add> <ide> import tensorflow.compat.v2 as tf <add>from absl import logging <ide> <ide> NUM_WORKERS = 2 <ide> NUM_EPOCHS = 2 <ide><path>keras/integration_test/parameter_server_custom_training_loop_test.py <ide> from __future__ import absolute_import <ide> from __future__ import division <ide> from __future__ import print_function <add> <ide> import multiprocessing <del>from absl import logging <add> <ide> import portpicker <ide> import tensorflow.compat.v2 as tf <add>from absl import logging <ide> <ide> NUM_EPOCHS = 10 <ide> NUM_STEPS = 100 <ide><path>keras/integration_test/parameter_server_keras_preprocessing_test.py <ide> import os <ide> import random <ide> import tempfile <del>from absl.testing import parameterized <del>from keras.testing_infra import test_utils <add> <ide> import numpy as np <ide> import portpicker <ide> import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <ide> <add>from keras.testing_infra import test_utils <ide> <ide> # These vocabularies usually come from TFT or a Beam pipeline. 
<ide> FEATURE_VOCAB = [ <ide><path>keras/integration_test/preprocessing_applied_in_dataset_creator_test.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras.integration_test import preprocessing_test_utils as utils <ide> <ide> ds_combinations = tf.__internal__.distribute.combinations <ide><path>keras/integration_test/preprocessing_applied_in_dataset_test.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras.integration_test import preprocessing_test_utils as utils <ide> <ide> ds_combinations = tf.__internal__.distribute.combinations <ide><path>keras/integration_test/preprocessing_applied_in_model_test.py <ide> from __future__ import print_function <ide> <ide> import tensorflow.compat.v2 as tf <add> <ide> from keras.integration_test import preprocessing_test_utils as utils <ide> <ide> ds_combinations = tf.__internal__.distribute.combinations <ide><path>keras/integration_test/saved_model_test.py <ide> import os <ide> import tempfile <ide> <del>from absl.testing import parameterized <del> <ide> import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <ide> <ide> <ide> def cycle(obj, cycles, signatures=None): <ide><path>keras/integration_test/tf_trt_test.py <ide> import os <ide> import tempfile <ide> <del>from absl import flags <del> <ide> import tensorflow.compat.v2 as tf <ide> import tensorflow_text as tf_text <add>from absl import flags <ide> <ide> <ide> class ConvertResource(tf.test.TestCase): <ide><path>keras/integration_test/tpu_strategy_test.py <ide> import random <ide> import tempfile <ide> <del>from absl import flags <del> <ide> import tensorflow.compat.v2 as tf <add>from absl import flags <ide> from tensorflow.python.framework import ( <ide> test_util as tf_test_utils, <ide> ) <ide><path>keras/layers/__init__.py <ide> <ide> from tensorflow.python import tf2 <ide> <add>from keras.engine.base_layer import Layer <add>from keras.engine.base_preprocessing_layer import PreprocessingLayer <add> <ide> # Generic layers. <ide> from keras.engine.input_layer import Input <ide> from keras.engine.input_layer import InputLayer <ide> from keras.engine.input_spec import InputSpec <del>from keras.engine.base_layer import Layer <del>from keras.engine.base_preprocessing_layer import PreprocessingLayer <del> <del># Image preprocessing layers. <del>from keras.layers.preprocessing.image_preprocessing import CenterCrop <del>from keras.layers.preprocessing.image_preprocessing import RandomCrop <del>from keras.layers.preprocessing.image_preprocessing import RandomFlip <del>from keras.layers.preprocessing.image_preprocessing import RandomContrast <del>from keras.layers.preprocessing.image_preprocessing import RandomHeight <del>from keras.layers.preprocessing.image_preprocessing import RandomRotation <del>from keras.layers.preprocessing.image_preprocessing import RandomTranslation <del>from keras.layers.preprocessing.image_preprocessing import RandomWidth <del>from keras.layers.preprocessing.image_preprocessing import RandomZoom <del>from keras.layers.preprocessing.image_preprocessing import Resizing <del>from keras.layers.preprocessing.image_preprocessing import Rescaling <del> <del># Preprocessing layers. 
<del>from keras.layers.preprocessing.category_encoding import CategoryEncoding <del>from keras.layers.preprocessing.discretization import Discretization <del>from keras.layers.preprocessing.hashing import Hashing <del>from keras.layers.preprocessing.hashed_crossing import HashedCrossing <del>from keras.layers.preprocessing.integer_lookup import IntegerLookup <del>from keras.layers.preprocessing.normalization import Normalization <del>from keras.layers.preprocessing.string_lookup import StringLookup <del>from keras.layers.preprocessing.text_vectorization import TextVectorization <add>from keras.layers.activation.elu import ELU <add>from keras.layers.activation.leaky_relu import LeakyReLU <add>from keras.layers.activation.prelu import PReLU <ide> <ide> # Activations layers. <ide> from keras.layers.activation.relu import ReLU <ide> from keras.layers.activation.softmax import Softmax <del>from keras.layers.activation.leaky_relu import LeakyReLU <del>from keras.layers.activation.prelu import PReLU <del>from keras.layers.activation.elu import ELU <ide> from keras.layers.activation.thresholded_relu import ThresholdedReLU <add>from keras.layers.attention.additive_attention import AdditiveAttention <add>from keras.layers.attention.attention import Attention <ide> <ide> # Attention layers. <ide> from keras.layers.attention.multi_head_attention import MultiHeadAttention <del>from keras.layers.attention.attention import Attention <del>from keras.layers.attention.additive_attention import AdditiveAttention <ide> <add># Convolution layer aliases. <ide> # Convolution layers. <ide> from keras.layers.convolutional.conv1d import Conv1D <del>from keras.layers.convolutional.conv2d import Conv2D <del>from keras.layers.convolutional.conv3d import Conv3D <add>from keras.layers.convolutional.conv1d import Convolution1D <ide> from keras.layers.convolutional.conv1d_transpose import Conv1DTranspose <add>from keras.layers.convolutional.conv1d_transpose import Convolution1DTranspose <add>from keras.layers.convolutional.conv2d import Conv2D <add>from keras.layers.convolutional.conv2d import Convolution2D <ide> from keras.layers.convolutional.conv2d_transpose import Conv2DTranspose <add>from keras.layers.convolutional.conv2d_transpose import Convolution2DTranspose <add>from keras.layers.convolutional.conv3d import Conv3D <add>from keras.layers.convolutional.conv3d import Convolution3D <ide> from keras.layers.convolutional.conv3d_transpose import Conv3DTranspose <add>from keras.layers.convolutional.conv3d_transpose import Convolution3DTranspose <ide> from keras.layers.convolutional.depthwise_conv1d import DepthwiseConv1D <ide> from keras.layers.convolutional.depthwise_conv2d import DepthwiseConv2D <ide> from keras.layers.convolutional.separable_conv1d import SeparableConv1D <del>from keras.layers.convolutional.separable_conv2d import SeparableConv2D <del> <del># Convolution layer aliases. 
<del>from keras.layers.convolutional.conv1d import Convolution1D <del>from keras.layers.convolutional.conv2d import Convolution2D <del>from keras.layers.convolutional.conv3d import Convolution3D <del>from keras.layers.convolutional.conv1d_transpose import Convolution1DTranspose <del>from keras.layers.convolutional.conv2d_transpose import Convolution2DTranspose <del>from keras.layers.convolutional.conv3d_transpose import Convolution3DTranspose <ide> from keras.layers.convolutional.separable_conv1d import SeparableConvolution1D <add>from keras.layers.convolutional.separable_conv2d import SeparableConv2D <ide> from keras.layers.convolutional.separable_conv2d import SeparableConvolution2D <ide> <del># Regularization layers. <del>from keras.layers.regularization.dropout import Dropout <del>from keras.layers.regularization.spatial_dropout1d import SpatialDropout1D <del>from keras.layers.regularization.spatial_dropout2d import SpatialDropout2D <del>from keras.layers.regularization.spatial_dropout3d import SpatialDropout3D <del>from keras.layers.regularization.gaussian_dropout import GaussianDropout <del>from keras.layers.regularization.gaussian_noise import GaussianNoise <del>from keras.layers.regularization.activity_regularization import ( <del> ActivityRegularization, <del>) <del>from keras.layers.regularization.alpha_dropout import AlphaDropout <del> <del># Reshaping layers. <del>from keras.layers.reshaping.cropping1d import Cropping1D <del>from keras.layers.reshaping.cropping2d import Cropping2D <del>from keras.layers.reshaping.cropping3d import Cropping3D <del>from keras.layers.reshaping.flatten import Flatten <del>from keras.layers.reshaping.permute import Permute <del>from keras.layers.reshaping.repeat_vector import RepeatVector <del>from keras.layers.reshaping.reshape import Reshape <del>from keras.layers.reshaping.up_sampling1d import UpSampling1D <del>from keras.layers.reshaping.up_sampling2d import UpSampling2D <del>from keras.layers.reshaping.up_sampling3d import UpSampling3D <del>from keras.layers.reshaping.zero_padding1d import ZeroPadding1D <del>from keras.layers.reshaping.zero_padding2d import ZeroPadding2D <del>from keras.layers.reshaping.zero_padding3d import ZeroPadding3D <del> <ide> # Core layers. <ide> from keras.layers.core.activation import Activation <ide> from keras.layers.core.dense import Dense <ide> LocallyConnected2D, <ide> ) <ide> <add># Merging functions. <ide> # Merging layers. <ide> from keras.layers.merging.add import Add <del>from keras.layers.merging.subtract import Subtract <del>from keras.layers.merging.multiply import Multiply <add>from keras.layers.merging.add import add <ide> from keras.layers.merging.average import Average <del>from keras.layers.merging.maximum import Maximum <del>from keras.layers.merging.minimum import Minimum <add>from keras.layers.merging.average import average <ide> from keras.layers.merging.concatenate import Concatenate <add>from keras.layers.merging.concatenate import concatenate <ide> from keras.layers.merging.dot import Dot <del> <del># Merging functions. 
<del>from keras.layers.merging.add import add <del>from keras.layers.merging.subtract import subtract <del>from keras.layers.merging.multiply import multiply <del>from keras.layers.merging.average import average <add>from keras.layers.merging.dot import dot <add>from keras.layers.merging.maximum import Maximum <ide> from keras.layers.merging.maximum import maximum <add>from keras.layers.merging.minimum import Minimum <ide> from keras.layers.merging.minimum import minimum <del>from keras.layers.merging.concatenate import concatenate <del>from keras.layers.merging.dot import dot <del> <del># Normalization layers. <del>from keras.layers.normalization.layer_normalization import LayerNormalization <add>from keras.layers.merging.multiply import Multiply <add>from keras.layers.merging.multiply import multiply <add>from keras.layers.merging.subtract import Subtract <add>from keras.layers.merging.subtract import subtract <ide> from keras.layers.normalization.batch_normalization import ( <ide> SyncBatchNormalization, <ide> ) <add> <add># Normalization layers. <add>from keras.layers.normalization.layer_normalization import LayerNormalization <ide> from keras.layers.normalization.unit_normalization import UnitNormalization <ide> <add># Preprocessing layers. <add>from keras.layers.preprocessing.category_encoding import CategoryEncoding <add>from keras.layers.preprocessing.discretization import Discretization <add>from keras.layers.preprocessing.hashed_crossing import HashedCrossing <add>from keras.layers.preprocessing.hashing import Hashing <add> <add># Image preprocessing layers. <add>from keras.layers.preprocessing.image_preprocessing import CenterCrop <add>from keras.layers.preprocessing.image_preprocessing import RandomContrast <add>from keras.layers.preprocessing.image_preprocessing import RandomCrop <add>from keras.layers.preprocessing.image_preprocessing import RandomFlip <add>from keras.layers.preprocessing.image_preprocessing import RandomHeight <add>from keras.layers.preprocessing.image_preprocessing import RandomRotation <add>from keras.layers.preprocessing.image_preprocessing import RandomTranslation <add>from keras.layers.preprocessing.image_preprocessing import RandomWidth <add>from keras.layers.preprocessing.image_preprocessing import RandomZoom <add>from keras.layers.preprocessing.image_preprocessing import Rescaling <add>from keras.layers.preprocessing.image_preprocessing import Resizing <add>from keras.layers.preprocessing.integer_lookup import IntegerLookup <add>from keras.layers.preprocessing.normalization import Normalization <add>from keras.layers.preprocessing.string_lookup import StringLookup <add>from keras.layers.preprocessing.text_vectorization import TextVectorization <add>from keras.layers.regularization.activity_regularization import ( <add> ActivityRegularization, <add>) <add>from keras.layers.regularization.alpha_dropout import AlphaDropout <add> <add># Regularization layers. <add>from keras.layers.regularization.dropout import Dropout <add>from keras.layers.regularization.gaussian_dropout import GaussianDropout <add>from keras.layers.regularization.gaussian_noise import GaussianNoise <add>from keras.layers.regularization.spatial_dropout1d import SpatialDropout1D <add>from keras.layers.regularization.spatial_dropout2d import SpatialDropout2D <add>from keras.layers.regularization.spatial_dropout3d import SpatialDropout3D <add> <add># Reshaping layers. 
<add>from keras.layers.reshaping.cropping1d import Cropping1D <add>from keras.layers.reshaping.cropping2d import Cropping2D <add>from keras.layers.reshaping.cropping3d import Cropping3D <add>from keras.layers.reshaping.flatten import Flatten <add>from keras.layers.reshaping.permute import Permute <add>from keras.layers.reshaping.repeat_vector import RepeatVector <add>from keras.layers.reshaping.reshape import Reshape <add>from keras.layers.reshaping.up_sampling1d import UpSampling1D <add>from keras.layers.reshaping.up_sampling2d import UpSampling2D <add>from keras.layers.reshaping.up_sampling3d import UpSampling3D <add>from keras.layers.reshaping.zero_padding1d import ZeroPadding1D <add>from keras.layers.reshaping.zero_padding2d import ZeroPadding2D <add>from keras.layers.reshaping.zero_padding3d import ZeroPadding3D <add> <ide> if tf.__internal__.tf2.enabled(): <ide> from keras.layers.normalization.batch_normalization import ( <ide> BatchNormalization, <ide> <ide> BatchNormalizationV2 = BatchNormalization <ide> else: <del> from keras.layers.normalization.batch_normalization_v1 import ( <del> BatchNormalization, <del> ) <ide> from keras.layers.normalization.batch_normalization import ( <ide> BatchNormalization as BatchNormalizationV2, <ide> ) <add> from keras.layers.normalization.batch_normalization_v1 import ( <add> BatchNormalization, <add> ) <ide> <ide> BatchNormalizationV1 = BatchNormalization <ide> <ide> # Kernelized layers. <ide> from keras.layers.kernelized import RandomFourierFeatures <ide> <add># Pooling layer aliases. <ide> # Pooling layers. <ide> from keras.layers.pooling.average_pooling1d import AveragePooling1D <add>from keras.layers.pooling.average_pooling1d import AvgPool1D <ide> from keras.layers.pooling.average_pooling2d import AveragePooling2D <add>from keras.layers.pooling.average_pooling2d import AvgPool2D <ide> from keras.layers.pooling.average_pooling3d import AveragePooling3D <del>from keras.layers.pooling.max_pooling1d import MaxPooling1D <del>from keras.layers.pooling.max_pooling2d import MaxPooling2D <del>from keras.layers.pooling.max_pooling3d import MaxPooling3D <add>from keras.layers.pooling.average_pooling3d import AvgPool3D <ide> from keras.layers.pooling.global_average_pooling1d import GlobalAveragePooling1D <add>from keras.layers.pooling.global_average_pooling1d import GlobalAvgPool1D <ide> from keras.layers.pooling.global_average_pooling2d import GlobalAveragePooling2D <add>from keras.layers.pooling.global_average_pooling2d import GlobalAvgPool2D <ide> from keras.layers.pooling.global_average_pooling3d import GlobalAveragePooling3D <add>from keras.layers.pooling.global_average_pooling3d import GlobalAvgPool3D <add>from keras.layers.pooling.global_max_pooling1d import GlobalMaxPool1D <ide> from keras.layers.pooling.global_max_pooling1d import GlobalMaxPooling1D <add>from keras.layers.pooling.global_max_pooling2d import GlobalMaxPool2D <ide> from keras.layers.pooling.global_max_pooling2d import GlobalMaxPooling2D <add>from keras.layers.pooling.global_max_pooling3d import GlobalMaxPool3D <ide> from keras.layers.pooling.global_max_pooling3d import GlobalMaxPooling3D <del> <del># Pooling layer aliases. 
<del>from keras.layers.pooling.average_pooling1d import AvgPool1D <del>from keras.layers.pooling.average_pooling2d import AvgPool2D <del>from keras.layers.pooling.average_pooling3d import AvgPool3D <ide> from keras.layers.pooling.max_pooling1d import MaxPool1D <add>from keras.layers.pooling.max_pooling1d import MaxPooling1D <ide> from keras.layers.pooling.max_pooling2d import MaxPool2D <add>from keras.layers.pooling.max_pooling2d import MaxPooling2D <ide> from keras.layers.pooling.max_pooling3d import MaxPool3D <del>from keras.layers.pooling.global_average_pooling1d import GlobalAvgPool1D <del>from keras.layers.pooling.global_average_pooling2d import GlobalAvgPool2D <del>from keras.layers.pooling.global_average_pooling3d import GlobalAvgPool3D <del>from keras.layers.pooling.global_max_pooling1d import GlobalMaxPool1D <del>from keras.layers.pooling.global_max_pooling2d import GlobalMaxPool2D <del>from keras.layers.pooling.global_max_pooling3d import GlobalMaxPool3D <add>from keras.layers.pooling.max_pooling3d import MaxPooling3D <add>from keras.layers.rnn.abstract_rnn_cell import AbstractRNNCell <ide> <ide> # Recurrent layers. <ide> from keras.layers.rnn.base_rnn import RNN <del>from keras.layers.rnn.abstract_rnn_cell import AbstractRNNCell <del>from keras.layers.rnn.stacked_rnn_cells import StackedRNNCells <del>from keras.layers.rnn.simple_rnn import SimpleRNNCell <ide> from keras.layers.rnn.simple_rnn import SimpleRNN <add>from keras.layers.rnn.simple_rnn import SimpleRNNCell <add>from keras.layers.rnn.stacked_rnn_cells import StackedRNNCells <ide> <ide> if tf.__internal__.tf2.enabled(): <ide> from keras.layers.rnn.gru import GRU <ide> from keras.layers.rnn.gru import GRUCell <del> from keras.layers.rnn.lstm import LSTM <del> from keras.layers.rnn.lstm import LSTMCell <ide> from keras.layers.rnn.gru_v1 import GRU as GRUV1 <ide> from keras.layers.rnn.gru_v1 import GRUCell as GRUCellV1 <add> from keras.layers.rnn.lstm import LSTM <add> from keras.layers.rnn.lstm import LSTMCell <ide> from keras.layers.rnn.lstm_v1 import LSTM as LSTMV1 <ide> from keras.layers.rnn.lstm_v1 import LSTMCell as LSTMCellV1 <ide> <ide> LSTMV2 = LSTM <ide> LSTMCellV2 = LSTMCell <ide> else: <del> from keras.layers.rnn.gru_v1 import GRU <del> from keras.layers.rnn.gru_v1 import GRUCell <del> from keras.layers.rnn.lstm_v1 import LSTM <del> from keras.layers.rnn.lstm_v1 import LSTMCell <ide> from keras.layers.rnn.gru import GRU as GRUV2 <ide> from keras.layers.rnn.gru import GRUCell as GRUCellV2 <add> from keras.layers.rnn.gru_v1 import GRU <add> from keras.layers.rnn.gru_v1 import GRUCell <ide> from keras.layers.rnn.lstm import LSTM as LSTMV2 <ide> from keras.layers.rnn.lstm import LSTMCell as LSTMCellV2 <add> from keras.layers.rnn.lstm_v1 import LSTM <add> from keras.layers.rnn.lstm_v1 import LSTMCell <ide> <ide> GRUV1 = GRU <ide> GRUCellV1 = GRUCell <ide> LSTMV1 = LSTM <ide> LSTMCellV1 = LSTMCell <ide> <del># Convolutional-recurrent layers. <del>from keras.layers.rnn.conv_lstm1d import ConvLSTM1D <del>from keras.layers.rnn.conv_lstm2d import ConvLSTM2D <del>from keras.layers.rnn.conv_lstm3d import ConvLSTM3D <del> <del># cuDNN recurrent layers. <del>from keras.layers.rnn.cudnn_lstm import CuDNNLSTM <del>from keras.layers.rnn.cudnn_gru import CuDNNGRU <add># Serialization functions. <add>from keras.layers import serialization <ide> <ide> # Wrapper functions. 
<ide> from keras.layers.rnn.base_wrapper import Wrapper <ide> from keras.layers.rnn.bidirectional import Bidirectional <del>from keras.layers.rnn.time_distributed import TimeDistributed <ide> <ide> # RNN Cell wrappers. <ide> from keras.layers.rnn.cell_wrappers import DeviceWrapper <ide> from keras.layers.rnn.cell_wrappers import DropoutWrapper <ide> from keras.layers.rnn.cell_wrappers import ResidualWrapper <ide> <del># Serialization functions. <del>from keras.layers import serialization <add># Convolutional-recurrent layers. <add>from keras.layers.rnn.conv_lstm1d import ConvLSTM1D <add>from keras.layers.rnn.conv_lstm2d import ConvLSTM2D <add>from keras.layers.rnn.conv_lstm3d import ConvLSTM3D <add>from keras.layers.rnn.cudnn_gru import CuDNNGRU <add> <add># cuDNN recurrent layers. <add>from keras.layers.rnn.cudnn_lstm import CuDNNLSTM <add>from keras.layers.rnn.time_distributed import TimeDistributed <ide> from keras.layers.serialization import deserialize <ide> from keras.layers.serialization import deserialize_from_json <del>from keras.layers.serialization import serialize <ide> from keras.layers.serialization import get_builtin_layer <add>from keras.layers.serialization import serialize <ide> <ide> <ide> class VersionAwareLayers: <ide><path>keras/layers/activation/__init__.py <ide> """Layers that act as activation functions.""" <ide> # pylint: disable=g-bad-import-order <ide> <del>from keras.layers.activation.relu import ReLU <del>from keras.layers.activation.softmax import Softmax <add>from keras.layers.activation.elu import ELU <ide> from keras.layers.activation.leaky_relu import LeakyReLU <ide> from keras.layers.activation.prelu import PReLU <del>from keras.layers.activation.elu import ELU <add>from keras.layers.activation.relu import ReLU <add>from keras.layers.activation.softmax import Softmax <ide> from keras.layers.activation.thresholded_relu import ThresholdedReLU <ide><path>keras/layers/activation/elu.py <ide> """Exponential Linear Unit activation layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.engine.base_layer import Layer <ide> from keras.utils import tf_utils <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.ELU") <ide> class ELU(Layer): <ide><path>keras/layers/activation/elu_test.py <ide> # ============================================================================== <ide> """Tests for ELU layer.""" <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.run_all_keras_modes <ide><path>keras/layers/activation/leaky_relu.py <ide> """Leaky version of a Rectified Linear Unit activation layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.engine.base_layer import Layer <ide> from keras.utils import tf_utils <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.LeakyReLU") <ide> class LeakyReLU(Layer): <ide><path>keras/layers/activation/leaky_relu_test.py <ide> # ============================================================================== <ide> 
"""Tests for LeakyReLU layer.""" <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.run_all_keras_modes <ide><path>keras/layers/activation/prelu.py <ide> """Parametric Rectified Linear Unit activation layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras import constraints <ide> from keras import initializers <ide> from keras.engine.input_spec import InputSpec <ide> from keras.utils import tf_utils <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.PReLU") <ide> class PReLU(Layer): <ide><path>keras/layers/activation/prelu_test.py <ide> # ============================================================================== <ide> """Tests for PReLU layer.""" <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.run_all_keras_modes <ide><path>keras/layers/activation/relu.py <ide> """Rectified Linear Unit activation layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.engine.base_layer import Layer <ide> from keras.utils import tf_utils <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.ReLU") <ide> class ReLU(Layer): <ide><path>keras/layers/activation/relu_test.py <ide> # ============================================================================== <ide> """Tests for ReLU layer.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <add> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.run_all_keras_modes <ide><path>keras/layers/activation/softmax.py <ide> """Softmax activation layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.engine.base_layer import Layer <ide> from keras.utils import tf_utils <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> def _large_compatible_negative(tensor_type): <ide><path>keras/layers/activation/softmax_test.py <ide> # ============================================================================== <ide> """Tests for Softmax layer.""" <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.run_all_keras_modes <ide><path>keras/layers/activation/thresholded_relu.py <ide> """Thresholded Rectified Linear Unit activation layer.""" <ide> # pylint: 
disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.engine.base_layer import Layer <ide> from keras.utils import tf_utils <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.layers.ThresholdedReLU") <ide><path>keras/layers/activation/thresholded_relu_test.py <ide> # ============================================================================== <ide> """Tests for ThresholdedReLU layer.""" <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.run_all_keras_modes <ide><path>keras/layers/attention/__init__.py <ide> """Keras attention layers.""" <ide> # pylint: disable=g-bad-import-order <ide> <del>from keras.layers.attention.multi_head_attention import MultiHeadAttention <del>from keras.layers.attention.attention import Attention <ide> from keras.layers.attention.additive_attention import AdditiveAttention <add>from keras.layers.attention.attention import Attention <add>from keras.layers.attention.multi_head_attention import MultiHeadAttention <ide><path>keras/layers/attention/additive_attention.py <ide> """ <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <del>from keras.layers.attention.base_dense_attention import BaseDenseAttention <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.layers.attention.base_dense_attention import BaseDenseAttention <add> <ide> <ide> @keras_export("keras.layers.AdditiveAttention") <ide> class AdditiveAttention(BaseDenseAttention): <ide><path>keras/layers/attention/additive_attention_test.py <ide> # ============================================================================== <ide> """Tests AdditiveAttention layer.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> import keras <ide> from keras.mixed_precision import policy <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.generate(test_combinations.combine(mode=["graph", "eager"])) <ide><path>keras/layers/attention/attention.py <ide> """ <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <del>from keras.layers.attention.base_dense_attention import BaseDenseAttention <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.layers.attention.base_dense_attention import BaseDenseAttention <add> <ide> <ide> @keras_export("keras.layers.Attention") <ide> class Attention(BaseDenseAttention): <ide><path>keras/layers/attention/attention_test.py <ide> # ============================================================================== <ide> """Tests Attention layer.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> import keras <ide> from keras.layers import core <ide> from keras.testing_infra import test_combinations <del>import numpy as np 
<del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.generate(test_combinations.combine(mode=["graph", "eager"])) <ide><path>keras/layers/attention/base_dense_attention.py <ide> """ <ide> # pylint: disable=g-classes-have-attributes <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import backend <ide> from keras.engine import base_layer <ide> from keras.utils import control_flow_util <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class BaseDenseAttention(base_layer.BaseRandomLayer): <ide><path>keras/layers/attention/base_dense_attention_test.py <ide> # ============================================================================== <ide> """Tests BaseDenseAttention layer.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <del>from keras.layers.attention.base_dense_attention import _lower_triangular_mask <add> <ide> from keras.layers.attention.base_dense_attention import BaseDenseAttention <add>from keras.layers.attention.base_dense_attention import _lower_triangular_mask <ide> from keras.testing_infra import test_combinations <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.generate(test_combinations.combine(mode=["graph", "eager"])) <ide><path>keras/layers/attention/multi_head_attention.py <ide> import math <ide> import string <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import constraints <ide> from keras import initializers <ide> from keras import regularizers <ide> from keras.layers import core <ide> from keras.layers import regularization <ide> from keras.utils import tf_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> _CHR_IDX = string.ascii_lowercase <ide> <ide><path>keras/layers/attention/multi_head_attention_test.py <ide> # ============================================================================== <ide> """Tests for the MultiHeadAttention layer.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> import keras <ide> from keras.testing_infra import test_combinations <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> # This decorator runs the test in V1, V2-Eager, and V2-Functional mode. It <ide><path>keras/layers/convolutional/__init__.py <ide> """Keras convolution layers.""" <ide> # pylint: disable=g-bad-import-order <ide> <add># Convolution layer aliases. <ide> # Convolution layers. 
<ide> from keras.layers.convolutional.conv1d import Conv1D <del>from keras.layers.convolutional.conv2d import Conv2D <del>from keras.layers.convolutional.conv3d import Conv3D <add>from keras.layers.convolutional.conv1d import Convolution1D <ide> from keras.layers.convolutional.conv1d_transpose import Conv1DTranspose <add>from keras.layers.convolutional.conv1d_transpose import Convolution1DTranspose <add>from keras.layers.convolutional.conv2d import Conv2D <add>from keras.layers.convolutional.conv2d import Convolution2D <ide> from keras.layers.convolutional.conv2d_transpose import Conv2DTranspose <add>from keras.layers.convolutional.conv2d_transpose import Convolution2DTranspose <add>from keras.layers.convolutional.conv3d import Conv3D <add>from keras.layers.convolutional.conv3d import Convolution3D <ide> from keras.layers.convolutional.conv3d_transpose import Conv3DTranspose <add>from keras.layers.convolutional.conv3d_transpose import Convolution3DTranspose <ide> from keras.layers.convolutional.depthwise_conv1d import DepthwiseConv1D <ide> from keras.layers.convolutional.depthwise_conv2d import DepthwiseConv2D <ide> from keras.layers.convolutional.separable_conv1d import SeparableConv1D <del>from keras.layers.convolutional.separable_conv2d import SeparableConv2D <del> <del># Convolution layer aliases. <del>from keras.layers.convolutional.conv1d import Convolution1D <del>from keras.layers.convolutional.conv2d import Convolution2D <del>from keras.layers.convolutional.conv3d import Convolution3D <del>from keras.layers.convolutional.conv1d_transpose import Convolution1DTranspose <del>from keras.layers.convolutional.conv2d_transpose import Convolution2DTranspose <del>from keras.layers.convolutional.conv3d_transpose import Convolution3DTranspose <ide> from keras.layers.convolutional.separable_conv1d import SeparableConvolution1D <add>from keras.layers.convolutional.separable_conv2d import SeparableConv2D <ide> from keras.layers.convolutional.separable_conv2d import SeparableConvolution2D <ide> <ide> # Pooling layers imported for backwards namespace compatibility. 
<ide><path>keras/layers/convolutional/base_conv.py <ide> """Keras base class for convolution layers.""" <ide> # pylint: disable=g-classes-have-attributes <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import activations <ide> from keras import constraints <ide> from keras import initializers <ide> from keras import regularizers <ide> from keras.engine.base_layer import Layer <ide> from keras.engine.input_spec import InputSpec <ide> from keras.utils import conv_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class Conv(Layer): <ide><path>keras/layers/convolutional/base_depthwise_conv.py <ide> """Keras abstract base for depthwise convolutions.""" <ide> # pylint: disable=g-classes-have-attributes <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import constraints <ide> from keras import initializers <ide> from keras import regularizers <ide> from keras.engine.input_spec import InputSpec <ide> from keras.layers.convolutional.base_conv import Conv <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class DepthwiseConv(Conv): <ide><path>keras/layers/convolutional/base_separable_conv.py <ide> """Keras abstract base layer for separable nD convolution.""" <ide> # pylint: disable=g-classes-have-attributes <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import activations <ide> from keras import constraints <ide> from keras import initializers <ide> from keras import regularizers <ide> from keras.engine.input_spec import InputSpec <ide> from keras.layers.convolutional.base_conv import Conv <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class SeparableConv(Conv): <ide><path>keras/layers/convolutional/conv1d.py <ide> """Keras 1D convolution layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras import constraints <ide> from keras import initializers <ide> from keras import regularizers <ide> from keras.dtensor import utils <ide> from keras.layers.convolutional.base_conv import Conv <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.Conv1D", "keras.layers.Convolution1D") <ide> class Conv1D(Conv): <ide><path>keras/layers/convolutional/conv1d_transpose.py <ide> """Keras 1D transposed convolution layer (sometimes called deconvolution).""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras import constraints <ide> from keras import initializers <ide> from keras.engine.input_spec import InputSpec <ide> from keras.layers.convolutional.conv1d import Conv1D <ide> from keras.utils import conv_utils <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export( <ide><path>keras/layers/convolutional/conv2d.py <ide> """Keras 2D convolution layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras import constraints <ide> from keras import initializers <ide> from keras import regularizers <ide> from keras.dtensor import utils <ide> from keras.layers.convolutional.base_conv import Conv 
<ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.Conv2D", "keras.layers.Convolution2D") <ide> class Conv2D(Conv): <ide><path>keras/layers/convolutional/conv2d_transpose.py <ide> """Keras 2D transposed convolution layer (sometimes called deconvolution).""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras import backend <ide> from keras import constraints <ide> from keras.engine.input_spec import InputSpec <ide> from keras.layers.convolutional.conv2d import Conv2D <ide> from keras.utils import conv_utils <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export( <ide><path>keras/layers/convolutional/conv3d.py <ide> """Keras 3D convolution layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras import constraints <ide> from keras import initializers <ide> from keras import regularizers <ide> from keras.dtensor import utils <ide> from keras.layers.convolutional.base_conv import Conv <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.Conv3D", "keras.layers.Convolution3D") <ide> class Conv3D(Conv): <ide><path>keras/layers/convolutional/conv3d_transpose.py <ide> """Keras 3D transposed convolution layer (sometimes called deconvolution).""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras import constraints <ide> from keras import initializers <ide> from keras.engine.input_spec import InputSpec <ide> from keras.layers.convolutional.conv3d import Conv3D <ide> from keras.utils import conv_utils <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export( <ide><path>keras/layers/convolutional/conv_test.py <ide> """Tests for convolutional layers.""" <ide> <ide> <del>from absl.testing import parameterized <del>import keras <del>from keras.testing_infra import test_combinations <del>from keras.testing_infra import test_utils <ide> import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <add>from absl.testing import parameterized <ide> from tensorflow.python.framework import ( <ide> test_util as tf_test_utils, <ide> ) <ide> <add>import keras <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <add> <ide> <ide> @test_combinations.run_all_keras_modes <ide> class Conv1DTest(test_combinations.TestCase): <ide><path>keras/layers/convolutional/conv_transpose_test.py <ide> # ============================================================================== <ide> """Tests for convolutional transpose layers.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> 
@test_combinations.run_all_keras_modes <ide><path>keras/layers/convolutional/depthwise_conv1d.py <ide> """Keras depthwise 1D convolution.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras.layers.convolutional.base_depthwise_conv import DepthwiseConv <ide> from keras.utils import conv_utils <ide> from keras.utils import tf_utils <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.layers.DepthwiseConv1D") <ide><path>keras/layers/convolutional/depthwise_conv2d.py <ide> """Keras depthwise 2D convolution.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.layers.convolutional.base_depthwise_conv import DepthwiseConv <ide> from keras.utils import conv_utils <ide> from keras.utils import tf_utils <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.DepthwiseConv2D") <ide> class DepthwiseConv2D(DepthwiseConv): <ide><path>keras/layers/convolutional/depthwise_conv_test.py <ide> # ============================================================================== <ide> """Tests for depthwise convolutional layers.""" <ide> <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.run_all_keras_modes <ide><path>keras/layers/convolutional/separable_conv1d.py <ide> """Keras depthwise separable 1D convolution.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras import constraints <ide> from keras import initializers <ide> from keras import regularizers <ide> from keras.layers.convolutional.base_separable_conv import SeparableConv <ide> from keras.utils import conv_utils <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export( <ide><path>keras/layers/convolutional/separable_conv2d.py <ide> """Keras depthwise separable 2D convolution.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras import constraints <ide> from keras import initializers <ide> from keras import regularizers <ide> from keras.layers.convolutional.base_separable_conv import SeparableConv <ide> from keras.utils import conv_utils <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export( <ide><path>keras/layers/convolutional/separable_conv_test.py <ide> # ============================================================================== <ide> """Tests for separable convolutional layers.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> 
import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.run_all_keras_modes <ide><path>keras/layers/core/__init__.py <ide> from keras.layers.core.masking import Masking <ide> <ide> # Required by third_party/py/tensorflow_gnn/keras/keras_tensors.py <del>from keras.layers.core.tf_op_layer import _delegate_method <del>from keras.layers.core.tf_op_layer import _delegate_property <ide> from keras.layers.core.tf_op_layer import ClassMethod <ide> from keras.layers.core.tf_op_layer import InstanceMethod <ide> from keras.layers.core.tf_op_layer import InstanceProperty <del> <ide> from keras.layers.core.tf_op_layer import SlicingOpLambda <ide> from keras.layers.core.tf_op_layer import TFOpLambda <add>from keras.layers.core.tf_op_layer import _delegate_method <add>from keras.layers.core.tf_op_layer import _delegate_property <ide> <ide> # Regularization layers imported for backwards namespace compatibility <ide> from keras.layers.regularization.activity_regularization import ( <ide><path>keras/layers/core/activation.py <ide> """Contains the Activation layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras.engine.base_layer import Layer <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.layers.Activation") <ide><path>keras/layers/core/core_test.py <ide> import os <ide> import textwrap <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <add> <ide> import keras <ide> from keras import initializers <ide> from keras.layers import core <ide> from keras.mixed_precision import policy <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import numpy as np <del> <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.run_all_keras_modes <ide><path>keras/layers/core/dense.py <ide> """Contains the Dense layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras import backend <ide> from keras import constraints <ide> from keras.dtensor import utils <ide> from keras.engine.base_layer import Layer <ide> from keras.engine.input_spec import InputSpec <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.layers.Dense") <ide><path>keras/layers/core/einsum_dense.py <ide> <ide> import re <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras import constraints <ide> from keras import initializers <ide> from keras import regularizers <ide> from keras.engine.base_layer import Layer <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export( <ide><path>keras/layers/core/einsum_dense_test.py <ide> """Tests for Keras-based einsum dense layer.""" <ide> <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> import keras <ide> 
from keras.layers.core import einsum_dense <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.run_all_keras_modes <ide><path>keras/layers/core/embedding.py <ide> """Embedding layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras import constraints <ide> from keras import initializers <ide> from keras.engine import base_layer_utils <ide> from keras.engine.base_layer import Layer <ide> from keras.utils import tf_utils <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.layers.Embedding") <ide><path>keras/layers/core/embedding_test.py <ide> # ============================================================================== <ide> """Tests for embedding layer.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <add> <ide> import keras <ide> from keras.mixed_precision import policy <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class EmbeddingTest(test_combinations.TestCase): <ide><path>keras/layers/core/lambda_layer.py <ide> import textwrap <ide> import types as python_types <ide> import warnings <del>from keras.engine.base_layer import Layer <del>from keras.utils import generic_utils <del>from keras.utils import tf_inspect <del>from keras.utils import tf_utils <add> <ide> import numpy as np <ide> import tensorflow.compat.v2 as tf <ide> from tensorflow.python.platform import tf_logging <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.engine.base_layer import Layer <add>from keras.utils import generic_utils <add>from keras.utils import tf_inspect <add>from keras.utils import tf_utils <add> <ide> <ide> @keras_export("keras.layers.Lambda") <ide> class Lambda(Layer): <ide><path>keras/layers/core/masking.py <ide> """Contains the Masking layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <del>from keras.engine.base_layer import Layer <ide> import tensorflow.compat.v2 as tf <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.engine.base_layer import Layer <add> <ide> <ide> @keras_export("keras.layers.Masking") <ide> class Masking(Layer): <ide><path>keras/layers/core/tf_op_layer.py <ide> # ============================================================================== <ide> """Contains the TFOpLambda layer.""" <ide> import tensorflow.compat.v2 as tf <del> <del># pylint: enable=g-bad-import-order <del> <del>from keras import backend <del>from keras.engine import keras_tensor <del>from keras.engine.base_layer import Layer <del> <ide> from tensorflow.python.platform import tf_logging <ide> from tensorflow.python.util.tf_export import ( <ide> get_canonical_name_for_symbol, <ide> get_symbol_from_name, <ide> ) <ide> <add>from keras import backend <add>from keras.engine import keras_tensor <add>from keras.engine.base_layer import Layer <add> <add># pylint: enable=g-bad-import-order <add> <ide> <ide> class ClassMethod(Layer): <ide> """Wraps a TF API Class's class method in a `Layer` object. 
<ide><path>keras/layers/kernelized.py <ide> # pylint: disable=g-classes-have-attributes <ide> """Keras layers that implement explicit (approximate) kernel feature maps.""" <ide> <add>import numpy as np <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <ide> <del>import numpy as np <ide> from keras import initializers <ide> from keras.engine import base_layer <ide> from keras.engine import input_spec <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> _SUPPORTED_RBF_KERNEL_TYPES = ["gaussian", "laplacian"] <ide> <ide><path>keras/layers/kernelized_test.py <ide> # ============================================================================== <ide> """Tests for kernelized.py.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import functools <ide> import math <ide> import os <ide> import shutil <ide> <del>from absl.testing import parameterized <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <add>from absl.testing import parameterized <ide> from tensorflow.python.framework import ( <ide> test_util as tf_test_utils, <ide> ) <add> <ide> from keras import backend as keras_backend <del>from keras.testing_infra import test_combinations <ide> from keras import initializers <del>from keras.testing_infra import test_utils <ide> from keras.engine import base_layer_utils <ide> from keras.engine import input_layer <ide> from keras.engine import training <ide> from keras.layers import kernelized as kernel_layers <ide> from keras.saving import save <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <ide> from keras.utils import kernelized_utils <ide> <ide> <ide><path>keras/layers/layers_test.py <ide> # pylint: disable=g-classes-have-attributes <ide> """Tests for layers.__init__.""" <ide> <del>from keras import layers <ide> import tensorflow.compat.v2 as tf <ide> <add>from keras import layers <add> <ide> <ide> class LayersTest(tf.test.TestCase): <ide> def test_keras_private_symbol(self): <ide><path>keras/layers/locally_connected/locally_connected1d.py <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> """Locally-connected layer for 1D input.""" <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras import backend <ide> from keras import constraints <ide> from keras.utils import conv_utils <ide> from keras.utils import tf_utils <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.LocallyConnected1D") <ide> class LocallyConnected1D(Layer): <ide><path>keras/layers/locally_connected/locally_connected2d.py <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> """Locally-connected layer for 2D input.""" <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import activations <ide> from keras import backend <ide> from keras import constraints <ide> from keras.utils import conv_utils <ide> from keras.utils import tf_utils <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.LocallyConnected2D") <ide> class LocallyConnected2D(Layer): <ide><path>keras/layers/locally_connected/locally_connected_test.py <ide> <ide> import os <ide> <del>from absl.testing import parameterized <del>import keras <del>from keras.layers.locally_connected import locally_connected_utils <del>from 
keras.optimizers.optimizer_v2 import rmsprop <del>from keras.testing_infra import test_combinations <del>from keras.testing_infra import test_utils <ide> import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <add>from absl.testing import parameterized <ide> from tensorflow.python.framework import ( <ide> test_util as tf_test_util, <ide> ) <ide> from tensorflow.python.training.rmsprop import ( <ide> RMSPropOptimizer, <ide> ) <ide> <add>import keras <add>from keras.layers.locally_connected import locally_connected_utils <add>from keras.optimizers.optimizer_v2 import rmsprop <add>from keras.testing_infra import test_combinations <add>from keras.testing_infra import test_utils <ide> <ide> _DATA_FORMAT_PADDING_IMPLEMENTATION = [ <ide> {"data_format": "channels_first", "padding": "valid", "implementation": 1}, <ide><path>keras/layers/locally_connected/locally_connected_utils.py <ide> # ============================================================================== <ide> """Private utilities for locally-connected layers.""" <ide> <del>from keras import backend <del>from keras.utils import conv_utils <ide> import numpy as np <ide> import tensorflow.compat.v2 as tf <ide> <add>from keras import backend <add>from keras.utils import conv_utils <add> <ide> <ide> def get_locallyconnected_mask( <ide> input_shape, kernel_shape, strides, padding, data_format <ide><path>keras/layers/merging/__init__.py <ide> """Keras merging layers.""" <ide> # pylint: disable=g-bad-import-order <ide> <add># Merging functions. <ide> # Merging layers. <ide> from keras.layers.merging.add import Add <del>from keras.layers.merging.subtract import Subtract <del>from keras.layers.merging.multiply import Multiply <add>from keras.layers.merging.add import add <ide> from keras.layers.merging.average import Average <del>from keras.layers.merging.maximum import Maximum <del>from keras.layers.merging.minimum import Minimum <add>from keras.layers.merging.average import average <ide> from keras.layers.merging.concatenate import Concatenate <add>from keras.layers.merging.concatenate import concatenate <ide> from keras.layers.merging.dot import Dot <del> <del># Merging functions. 
<del>from keras.layers.merging.add import add <del>from keras.layers.merging.subtract import subtract <del>from keras.layers.merging.multiply import multiply <del>from keras.layers.merging.average import average <add>from keras.layers.merging.dot import dot <add>from keras.layers.merging.maximum import Maximum <ide> from keras.layers.merging.maximum import maximum <add>from keras.layers.merging.minimum import Minimum <ide> from keras.layers.merging.minimum import minimum <del>from keras.layers.merging.concatenate import concatenate <del>from keras.layers.merging.dot import dot <add>from keras.layers.merging.multiply import Multiply <add>from keras.layers.merging.multiply import multiply <add>from keras.layers.merging.subtract import Subtract <add>from keras.layers.merging.subtract import subtract <ide><path>keras/layers/merging/add.py <ide> """Layer that adds several inputs.""" <ide> <ide> <del>from keras.layers.merging.base_merge import _Merge <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.layers.merging.base_merge import _Merge <add> <ide> <ide> @keras_export("keras.layers.Add") <ide> class Add(_Merge): <ide><path>keras/layers/merging/average.py <ide> """Layer that averages several inputs.""" <ide> <ide> <del>from keras.layers.merging.base_merge import _Merge <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.layers.merging.base_merge import _Merge <add> <ide> <ide> @keras_export("keras.layers.Average") <ide> class Average(_Merge): <ide><path>keras/layers/merging/base_merge.py <ide> # ============================================================================== <ide> """Private base class for layers that can merge several inputs into one.""" <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import backend <ide> from keras.engine.base_layer import Layer <ide> from keras.utils import tf_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class _Merge(Layer): <ide><path>keras/layers/merging/concatenate.py <ide> """Layer that concatenates several inputs.""" <ide> <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.layers.merging.base_merge import _Merge <ide> from keras.utils import tf_utils <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.layers.Concatenate") <ide><path>keras/layers/merging/dot.py <ide> """Layer that computes the dot product between two inputs.""" <ide> <ide> <add>import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.engine import base_layer_utils <ide> from keras.layers.merging.base_merge import _Merge <ide> from keras.utils import tf_utils <del>import tensorflow.compat.v2 as tf <del> <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> @keras_export("keras.layers.Dot") <ide><path>keras/layers/merging/maximum.py <ide> """Layer that computes the maximum (element-wise) of several inputs.""" <ide> <ide> <del>from keras.layers.merging.base_merge import _Merge <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.layers.merging.base_merge import _Merge <add> <ide> <ide> @keras_export("keras.layers.Maximum") <ide> class Maximum(_Merge): 
<ide><path>keras/layers/merging/merging_test.py <ide> # ============================================================================== <ide> """Tests for merging layers.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> import keras <ide> from keras import backend <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <ide> from keras.utils import tf_inspect <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.run_all_keras_modes <ide><path>keras/layers/merging/minimum.py <ide> """Layer that computes the minimum (element-wise) of several inputs.""" <ide> <ide> <del>from keras.layers.merging.base_merge import _Merge <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.layers.merging.base_merge import _Merge <add> <ide> <ide> @keras_export("keras.layers.Minimum") <ide> class Minimum(_Merge): <ide><path>keras/layers/merging/multiply.py <ide> """Layer that multiplies (element-wise) several inputs.""" <ide> <ide> <del>from keras.layers.merging.base_merge import _Merge <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.layers.merging.base_merge import _Merge <add> <ide> <ide> @keras_export("keras.layers.Multiply") <ide> class Multiply(_Merge): <ide><path>keras/layers/merging/subtract.py <ide> """Layer that subtracts two inputs.""" <ide> <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras.layers.merging.base_merge import _Merge <ide> from keras.utils import tf_utils <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.Subtract") <ide> class Subtract(_Merge): <ide><path>keras/layers/noise.py <ide> """Layers that operate regularization via the addition of noise.""" <ide> # pylint: disable=g-bad-import-order,unused-import <ide> <add>from keras.layers.regularization.alpha_dropout import AlphaDropout <add> <ide> # Regularization layers imported for backwards namespace compatibility <ide> from keras.layers.regularization.gaussian_dropout import GaussianDropout <ide> from keras.layers.regularization.gaussian_noise import GaussianNoise <del>from keras.layers.regularization.alpha_dropout import AlphaDropout <ide><path>keras/layers/normalization/batch_normalization.py <ide> """The V2 implementation of Normalization layers.""" <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.ops.control_flow_ops import ( <add> get_enclosing_xla_context, <add>) <add>from tensorflow.python.platform import tf_logging as logging <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras import constraints <ide> from keras import initializers <ide> from keras.engine.input_spec import InputSpec <ide> from keras.utils import control_flow_util <ide> from keras.utils import tf_utils <del>from tensorflow.python.ops.control_flow_ops import ( <del> get_enclosing_xla_context, <del>) <del>from tensorflow.python.platform import tf_logging as logging <del>from tensorflow.python.util.tf_export import keras_export <ide> <ide> <ide> class BatchNormalizationBase(Layer): <ide><path>keras/layers/normalization/batch_normalization_test.py <ide> # ============================================================================== <ide> """Tests for normalization layers.""" <ide> 
<add>import numpy as np <ide> import tensorflow.compat.v2 as tf <del> <ide> from absl.testing import parameterized <del>import numpy as np <ide> <ide> import keras <ide> from keras.layers.normalization import batch_normalization <ide><path>keras/layers/normalization/batch_normalization_v1.py <ide> """Batch Normalization V1 layer.""" <ide> # pylint: disable=g-classes-have-attributes <ide> <del>from keras.layers.normalization import batch_normalization <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.layers.normalization import batch_normalization <add> <ide> <ide> # pylint: disable=missing-docstring <ide> @keras_export(v1=["keras.layers.BatchNormalization"]) <ide><path>keras/layers/normalization/layer_normalization.py <ide> """Layer Normalization layer.""" <ide> <ide> import tensorflow.compat.v2 as tf <del> <del># pylint: disable=g-classes-have-attributes <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras import constraints <ide> from keras import initializers <ide> from keras.engine.base_layer import Layer <ide> from keras.utils import tf_utils <ide> <del>from tensorflow.python.util.tf_export import keras_export <add># pylint: disable=g-classes-have-attributes <ide> <ide> <ide> @keras_export("keras.layers.LayerNormalization") <ide><path>keras/layers/normalization/layer_normalization_test.py <ide> # ============================================================================== <ide> """Tests for normalization layers.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <add>import tensorflow.compat.v2 as tf <ide> <ide> import keras <add>from keras.layers.normalization import layer_normalization <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>from keras.layers.normalization import layer_normalization <ide> <ide> <ide> def _run_layernorm_correctness_test(layer, dtype="float32"): <ide><path>keras/layers/normalization/unit_normalization.py <ide> # pylint: disable=g-classes-have-attributes <ide> <ide> import tensorflow.compat.v2 as tf <add>from tensorflow.python.util.tf_export import keras_export <ide> <ide> from keras.engine import base_layer <ide> from keras.utils import tf_utils <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.UnitNormalization", v1=[]) <ide> class UnitNormalization(base_layer.Layer): <ide><path>keras/layers/pooling/__init__.py <ide> """Keras Pooling layers.""" <ide> # pylint: disable=g-bad-import-order <ide> <add># Pooling layer aliases. <ide> # Pooling layers. 
<ide> from keras.layers.pooling.average_pooling1d import AveragePooling1D <add>from keras.layers.pooling.average_pooling1d import AvgPool1D <ide> from keras.layers.pooling.average_pooling2d import AveragePooling2D <add>from keras.layers.pooling.average_pooling2d import AvgPool2D <ide> from keras.layers.pooling.average_pooling3d import AveragePooling3D <del>from keras.layers.pooling.max_pooling1d import MaxPooling1D <del>from keras.layers.pooling.max_pooling2d import MaxPooling2D <del>from keras.layers.pooling.max_pooling3d import MaxPooling3D <add>from keras.layers.pooling.average_pooling3d import AvgPool3D <ide> from keras.layers.pooling.global_average_pooling1d import GlobalAveragePooling1D <add>from keras.layers.pooling.global_average_pooling1d import GlobalAvgPool1D <ide> from keras.layers.pooling.global_average_pooling2d import GlobalAveragePooling2D <add>from keras.layers.pooling.global_average_pooling2d import GlobalAvgPool2D <ide> from keras.layers.pooling.global_average_pooling3d import GlobalAveragePooling3D <add>from keras.layers.pooling.global_average_pooling3d import GlobalAvgPool3D <add>from keras.layers.pooling.global_max_pooling1d import GlobalMaxPool1D <ide> from keras.layers.pooling.global_max_pooling1d import GlobalMaxPooling1D <add>from keras.layers.pooling.global_max_pooling2d import GlobalMaxPool2D <ide> from keras.layers.pooling.global_max_pooling2d import GlobalMaxPooling2D <add>from keras.layers.pooling.global_max_pooling3d import GlobalMaxPool3D <ide> from keras.layers.pooling.global_max_pooling3d import GlobalMaxPooling3D <del> <del># Pooling layer aliases. <del>from keras.layers.pooling.average_pooling1d import AvgPool1D <del>from keras.layers.pooling.average_pooling2d import AvgPool2D <del>from keras.layers.pooling.average_pooling3d import AvgPool3D <ide> from keras.layers.pooling.max_pooling1d import MaxPool1D <add>from keras.layers.pooling.max_pooling1d import MaxPooling1D <ide> from keras.layers.pooling.max_pooling2d import MaxPool2D <add>from keras.layers.pooling.max_pooling2d import MaxPooling2D <ide> from keras.layers.pooling.max_pooling3d import MaxPool3D <del>from keras.layers.pooling.global_average_pooling1d import GlobalAvgPool1D <del>from keras.layers.pooling.global_average_pooling2d import GlobalAvgPool2D <del>from keras.layers.pooling.global_average_pooling3d import GlobalAvgPool3D <del>from keras.layers.pooling.global_max_pooling1d import GlobalMaxPool1D <del>from keras.layers.pooling.global_max_pooling2d import GlobalMaxPool2D <del>from keras.layers.pooling.global_max_pooling3d import GlobalMaxPool3D <add>from keras.layers.pooling.max_pooling3d import MaxPooling3D <ide><path>keras/layers/pooling/average_pooling1d.py <ide> <ide> import functools <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.layers.pooling.base_pooling1d import Pooling1D <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.AveragePooling1D", "keras.layers.AvgPool1D") <ide> class AveragePooling1D(Pooling1D): <ide><path>keras/layers/pooling/average_pooling2d.py <ide> """Average pooling 2D layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <del>from keras.layers.pooling.base_pooling2d import Pooling2D <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.layers.pooling.base_pooling2d import Pooling2D <add> <ide> <ide> 
@keras_export("keras.layers.AveragePooling2D", "keras.layers.AvgPool2D") <ide> class AveragePooling2D(Pooling2D): <ide><path>keras/layers/pooling/average_pooling3d.py <ide> """Average pooling 3D layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <del>from keras.layers.pooling.base_pooling3d import Pooling3D <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.layers.pooling.base_pooling3d import Pooling3D <add> <ide> <ide> @keras_export("keras.layers.AveragePooling3D", "keras.layers.AvgPool3D") <ide> class AveragePooling3D(Pooling3D): <ide><path>keras/layers/pooling/average_pooling_test.py <ide> # ============================================================================== <ide> """Tests for average pooling layers.""" <ide> <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.generate(test_combinations.combine(mode=["graph", "eager"])) <ide><path>keras/layers/pooling/base_global_pooling1d.py <ide> """Private base class for global pooling 1D layers.""" <ide> # pylint: disable=g-classes-have-attributes <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras.engine.base_layer import Layer <ide> from keras.engine.input_spec import InputSpec <ide> from keras.utils import conv_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class GlobalPooling1D(Layer): <ide><path>keras/layers/pooling/base_global_pooling2d.py <ide> """Private base class for global pooling 2D layers.""" <ide> # pylint: disable=g-classes-have-attributes <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras.engine.base_layer import Layer <ide> from keras.engine.input_spec import InputSpec <ide> from keras.utils import conv_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class GlobalPooling2D(Layer): <ide><path>keras/layers/pooling/base_global_pooling3d.py <ide> """Private base class for global pooling 3D layers.""" <ide> # pylint: disable=g-classes-have-attributes <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras.engine.base_layer import Layer <ide> from keras.engine.input_spec import InputSpec <ide> from keras.utils import conv_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class GlobalPooling3D(Layer): <ide><path>keras/layers/pooling/base_pooling1d.py <ide> """Private base class for pooling 1D layers.""" <ide> # pylint: disable=g-classes-have-attributes <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import backend <ide> from keras.engine.base_layer import Layer <ide> from keras.engine.input_spec import InputSpec <ide> from keras.utils import conv_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class Pooling1D(Layer): <ide><path>keras/layers/pooling/base_pooling2d.py <ide> """Private base class for pooling 2D layers.""" <ide> # pylint: disable=g-classes-have-attributes <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import backend <ide> from keras.engine.base_layer import Layer <ide> from keras.engine.input_spec import InputSpec <ide> from keras.utils import conv_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class Pooling2D(Layer): <ide><path>keras/layers/pooling/base_pooling3d.py <ide> """Private base class for 
pooling 3D layers.""" <ide> # pylint: disable=g-classes-have-attributes <ide> <add>import tensorflow.compat.v2 as tf <add> <ide> from keras import backend <ide> from keras.engine.base_layer import Layer <ide> from keras.engine.input_spec import InputSpec <ide> from keras.utils import conv_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> class Pooling3D(Layer): <ide><path>keras/layers/pooling/global_average_pooling1d.py <ide> """Global average pooling 1D layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <del>from keras import backend <del>from keras.layers.pooling.base_global_pooling1d import GlobalPooling1D <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras import backend <add>from keras.layers.pooling.base_global_pooling1d import GlobalPooling1D <add> <ide> <ide> @keras_export( <ide> "keras.layers.GlobalAveragePooling1D", "keras.layers.GlobalAvgPool1D" <ide><path>keras/layers/pooling/global_average_pooling2d.py <ide> """Global average pooling 2D layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.layers.pooling.base_global_pooling2d import GlobalPooling2D <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export( <ide> "keras.layers.GlobalAveragePooling2D", "keras.layers.GlobalAvgPool2D" <ide><path>keras/layers/pooling/global_average_pooling3d.py <ide> """Global average pooling 3D layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.layers.pooling.base_global_pooling3d import GlobalPooling3D <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export( <ide> "keras.layers.GlobalAveragePooling3D", "keras.layers.GlobalAvgPool3D" <ide><path>keras/layers/pooling/global_average_pooling_test.py <ide> # ============================================================================== <ide> """Tests for global average pooling layers.""" <ide> <add>import numpy as np <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> import keras <ide> from keras.mixed_precision import policy <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import numpy as np <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.generate(test_combinations.combine(mode=["graph", "eager"])) <ide><path>keras/layers/pooling/global_max_pooling1d.py <ide> """Global max pooling 1D layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.layers.pooling.base_global_pooling1d import GlobalPooling1D <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.GlobalMaxPool1D", "keras.layers.GlobalMaxPooling1D") <ide> class GlobalMaxPooling1D(GlobalPooling1D): <ide><path>keras/layers/pooling/global_max_pooling2d.py <ide> """Global max pooling 2D layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import 
keras_export <add> <ide> from keras import backend <ide> from keras.layers.pooling.base_global_pooling2d import GlobalPooling2D <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.GlobalMaxPool2D", "keras.layers.GlobalMaxPooling2D") <ide> class GlobalMaxPooling2D(GlobalPooling2D): <ide><path>keras/layers/pooling/global_max_pooling3d.py <ide> """Global max pooling 3D layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.layers.pooling.base_global_pooling3d import GlobalPooling3D <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.GlobalMaxPool3D", "keras.layers.GlobalMaxPooling3D") <ide> class GlobalMaxPooling3D(GlobalPooling3D): <ide><path>keras/layers/pooling/global_max_pooling_test.py <ide> # ============================================================================== <ide> """Tests for global max pooling layers.""" <ide> <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.generate(test_combinations.combine(mode=["graph", "eager"])) <ide><path>keras/layers/pooling/max_pooling1d.py <ide> <ide> import functools <ide> <add>from tensorflow.python.util.tf_export import keras_export <add> <ide> from keras import backend <ide> from keras.layers.pooling.base_pooling1d import Pooling1D <ide> <del>from tensorflow.python.util.tf_export import keras_export <del> <ide> <ide> @keras_export("keras.layers.MaxPool1D", "keras.layers.MaxPooling1D") <ide> class MaxPooling1D(Pooling1D): <ide><path>keras/layers/pooling/max_pooling2d.py <ide> """Max pooling 2D layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <del>from keras.layers.pooling.base_pooling2d import Pooling2D <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.layers.pooling.base_pooling2d import Pooling2D <add> <ide> <ide> @keras_export("keras.layers.MaxPool2D", "keras.layers.MaxPooling2D") <ide> class MaxPooling2D(Pooling2D): <ide><path>keras/layers/pooling/max_pooling3d.py <ide> """Max pooling 3D layer.""" <ide> # pylint: disable=g-classes-have-attributes,g-direct-tensorflow-import <ide> <del>from keras.layers.pooling.base_pooling3d import Pooling3D <ide> import tensorflow.compat.v2 as tf <del> <ide> from tensorflow.python.util.tf_export import keras_export <ide> <add>from keras.layers.pooling.base_pooling3d import Pooling3D <add> <ide> <ide> @keras_export("keras.layers.MaxPool3D", "keras.layers.MaxPooling3D") <ide> class MaxPooling3D(Pooling3D): <ide><path>keras/layers/pooling/max_pooling_test.py <ide> # ============================================================================== <ide> """Tests for max pooling layers.""" <ide> <add>import tensorflow.compat.v2 as tf <ide> from absl.testing import parameterized <add> <ide> import keras <ide> from keras.testing_infra import test_combinations <ide> from keras.testing_infra import test_utils <del>import tensorflow.compat.v2 as tf <ide> <ide> <ide> @test_combinations.generate(test_combinations.combine(mode=["graph", "eager"])) 
<ide><path>keras/layers/preprocessing/benchmarks/bucketized_column_dense_benchmark.py <ide> # ============================================================================== <ide> """Benchmark for KPL implementation of bucketized columns with dense inputs.""" <ide> <del>import tensorflow.compat.v2 as tf <del> <ide> import numpy as np <del> <del>import keras <add>import tensorflow.compat.v2 as tf <ide> from tensorflow.python.eager.def_function import ( <ide> function as tf_function, <ide> ) <add> <add>import keras <ide> from keras.layers.preprocessing import discretization <ide> from keras.layers.preprocessing.benchmarks import ( <ide> feature_column_benchmark as fc_bm,
300
Javascript
Javascript
fix reference to invariant
3e4a8928b32301a746bc66fca862ff2ab04a5ea4
<ide><path>Libraries/Inspector/Inspector.js <ide> const UIManager = require('UIManager'); <ide> const View = require('View'); <ide> <ide> const emptyObject = require('fbjs/lib/emptyObject'); <del>const invariant = require('invariant'); <add>const invariant = require('fbjs/lib/invariant'); <ide> <ide> export type ReactRenderer = { <ide> getInspectorDataForViewTag: (viewTag: number) => Object,
1
Ruby
Ruby
fix audit of new formulae
5647fdb2f9317d3b808a08b6ac711634e463ed75
<ide><path>Library/Homebrew/dev-cmd/audit.rb <ide> def audit_revision_and_version_scheme <ide> current_version_scheme = formula.version_scheme <ide> [:stable, :devel].each do |spec| <ide> spec_version_scheme_map = attributes_map[:version_scheme][spec] <del> next if spec_version_scheme_map.nil? || spec_version_scheme_map.empty? <add> next if spec_version_scheme_map.empty? <ide> <ide> version_schemes = spec_version_scheme_map.values.flatten <ide> max_version_scheme = version_schemes.max <ide> def audit_revision_and_version_scheme <ide> end <ide> <ide> current_revision = formula.revision <del> if formula.stable <del> if revision_map = attributes_map[:revision][:stable] <del> if !revision_map.nil? && !revision_map.empty? <del> stable_revisions = revision_map[formula.stable.version] <del> stable_revisions ||= [] <del> current_revision = formula.revision <del> max_revision = stable_revisions.max || 0 <del> <del> if current_revision < max_revision <del> problem "revision should not decrease (from #{max_revision} to #{current_revision})" <del> end <add> revision_map = attributes_map[:revision][:stable] <add> if formula.stable && !revision_map.empty? <add> stable_revisions = revision_map[formula.stable.version] <add> stable_revisions ||= [] <add> max_revision = stable_revisions.max || 0 <add> <add> if current_revision < max_revision <add> problem "revision should not decrease (from #{max_revision} to #{current_revision})" <add> end <ide> <del> stable_revisions -= [formula.revision] <del> if !current_revision.zero? && stable_revisions.empty? && <del> revision_map.keys.length > 1 <del> problem "'revision #{formula.revision}' should be removed" <del> elsif current_revision > 1 && <del> current_revision != max_revision && <del> !stable_revisions.include?(current_revision - 1) <del> problem "revisions should only increment by 1" <del> end <del> end <add> stable_revisions -= [formula.revision] <add> if !current_revision.zero? && stable_revisions.empty? && <add> revision_map.keys.length > 1 <add> problem "'revision #{formula.revision}' should be removed" <add> elsif current_revision > 1 && <add> current_revision != max_revision && <add> !stable_revisions.include?(current_revision - 1) <add> problem "revisions should only increment by 1" <ide> end <ide> elsif !current_revision.zero? # head/devel-only formula <ide> problem "'revision #{current_revision}' should be removed" <ide><path>Library/Homebrew/formula_versions.rb <ide> def version_attributes_map(attributes, branch) <ide> attributes_map = {} <ide> return attributes_map if attributes.empty? <ide> <add> attributes.each do |attribute| <add> attributes_map[attribute] ||= { <add> stable: {}, <add> devel: {}, <add> } <add> end <add> <ide> stable_versions_seen = 0 <ide> rev_list(branch) do |rev| <ide> formula_at_revision(rev) do |f| <ide> attributes.each do |attribute| <del> attributes_map[attribute] ||= {} <ide> map = attributes_map[attribute] <ide> set_attribute_map(map, f, attribute) <ide> <ide> def version_attributes_map(attributes, branch) <ide> <ide> def set_attribute_map(map, f, attribute) <ide> if f.stable <del> map[:stable] ||= {} <ide> map[:stable][f.stable.version] ||= [] <ide> map[:stable][f.stable.version] << f.send(attribute) <ide> end <ide> return unless f.devel <del> map[:devel] ||= {} <ide> map[:devel][f.devel.version] ||= [] <ide> map[:devel][f.devel.version] << f.send(attribute) <ide> end
2
Java
Java
add test of backpressure to operatorall
3c2f25939d41be094af304f03e5fdd7b66bcf9eb
<ide><path>rxjava-core/src/test/java/rx/internal/operators/OperatorAllTest.java <ide> */ <ide> package rx.internal.operators; <ide> <add>import static org.junit.Assert.assertFalse; <ide> import static org.mockito.Mockito.mock; <ide> import static org.mockito.Mockito.verify; <ide> import static org.mockito.Mockito.verifyNoMoreInteractions; <ide> import rx.Observer; <ide> import rx.functions.Func1; <ide> <add>import java.util.Arrays; <add> <ide> public class OperatorAllTest { <ide> <ide> @Test <ide> public Boolean call(String s) { <ide> verify(observer).onError(error); <ide> verifyNoMoreInteractions(observer); <ide> } <add> <add> @Test <add> public void testFollowingFirst() { <add> Observable<Integer> o = Observable.from(Arrays.asList(1, 3, 5, 6)); <add> Observable<Boolean> allOdd = o.all(new Func1<Integer, Boolean>() { <add> @Override <add> public Boolean call(Integer i) { <add> return i % 2 == 1; <add> } <add> }); <add> assertFalse(allOdd.toBlocking().first()); <add> } <ide> }
1
Go
Go
check dockerinit only if lxc driver is used
96bc377a8d293cf786722ebb0ff89a81d63e43ed
<ide><path>daemon/daemon.go <ide> func NewDaemon(config *Config, registryService *registry.Service) (daemon *Daemo <ide> <ide> d.containerGraph = graph <ide> <del> sysInitPath, err := configureSysInit(config) <del> if err != nil { <del> return nil, err <add> var sysInitPath string <add> if config.ExecDriver == "lxc" { <add> initPath, err := configureSysInit(config) <add> if err != nil { <add> return nil, err <add> } <add> sysInitPath = initPath <ide> } <ide> <ide> sysInfo := sysinfo.New(false)
1
Ruby
Ruby
remove whitespaces from the default mailer file
61d995f7ed94f0da38ac4c24bc8c964047352d01
<ide><path>railties/lib/generators/rails/mailer/templates/mailer.rb <ide> class <%= class_name %> < ActionMailer::Base <del> <ide> <% for action in actions -%> <ide> <ide> def <%= action %>(sent_at = Time.now) <ide> def <%= action %>(sent_at = Time.now) <ide> <ide> body :greeting => 'Hi,' <ide> end <del><% end -%> <ide> <add><% end -%> <ide> end
1
PHP
PHP
apply fixes from styleci
d3df6d855d2d98ac1ffb05b0d90e2f153f34ae28
<ide><path>tests/Integration/Session/SessionPersistenceTest.php <ide> class FakeNullSessionHandler extends NullSessionHandler <ide> public function write($sessionId, $data) <ide> { <ide> $this->written = true; <add> <ide> return true; <ide> } <ide> }
1
Text
Text
add loadsmart in the list of companies using it
3a7a65c1b654346a08125e5334d2943fb0cd9c8f
<ide><path>INTHEWILD.md <ide> Currently, **officially** using Airflow: <ide> 1. [Liberty Global](https://www.libertyglobal.com/) [[@LibertyGlobal](https://github.com/LibertyGlobal/)] <ide> 1. [liligo](http://liligo.com/) [[@tromika](https://github.com/tromika)] <ide> 1. [LingoChamp](http://www.liulishuo.com/) [[@haitaoyao](https://github.com/haitaoyao)] <add>1. [Loadsmart](https://loadsmart.com/) [[@loadsmart](https://github.com/loadsmart)] <ide> 1. [Logitravel Group](https://www.logitravel.com/) <ide> 1. [LokSuvidha](http://loksuvidha.com/) [[@saurabhwahile](https://github.com/saurabhwahile)] <ide> 1. [Los Angeles Times](http://www.latimes.com/) [[@standyro](https://github.com/standyro)]
1
Ruby
Ruby
fix bundler deprecation
bfd22634252577fc851c6a925bf796dd1cd3c0a1
<ide><path>Library/Homebrew/test/support/helper/spec/shared_context/integration_test.rb <ide> def brew(*args) <ide> ruby_args << (HOMEBREW_LIBRARY_PATH/"brew.rb").resolved_path.to_s <ide> end <ide> <del> Bundler.with_clean_env do <add> Bundler.with_unbundled_env do <ide> stdout, stderr, status = Open3.capture3(env, *@ruby_args, *args) <ide> $stdout.print stdout <ide> $stderr.print stderr <ide> def brew(*args) <ide> end <ide> <ide> def brew_sh(*args) <del> Bundler.with_clean_env do <add> Bundler.with_unbundled_env do <ide> stdout, stderr, status = Open3.capture3("#{ENV.fetch("HOMEBREW_PREFIX")}/bin/brew", *args) <ide> $stdout.print stdout <ide> $stderr.print stderr
1
Javascript
Javascript
fix build cycle detection in compilation
8f2c26696654ed1db574fddb162f4779e2d77a7e
<ide><path>lib/Compilation.js <ide> BREAKING CHANGE: Asset processing hooks in Compilation has been merged into a si <ide> creatingModuleDuringBuildSet <ide> ); <ide> } <del> creatingModuleDuringBuildSet.add(originModule); <add> creatingModuleDuringBuildSet.add(module); <ide> <ide> // When building is blocked by another module <ide> // search for a cycle, cancel the cycle by throwing
1
Go
Go
use default no-op uuid.loggerf for client cli
3face3c521127cb86570c68d5f2a8f55e7633146
<ide><path>api/client/cli.go <ide> import ( <ide> "os" <ide> "strings" <ide> <del> "github.com/docker/distribution/uuid" <ide> "github.com/docker/docker/cli" <ide> "github.com/docker/docker/cliconfig" <ide> "github.com/docker/docker/opts" <ide> func NewDockerCli(in io.ReadCloser, out, err io.Writer, clientFlags *cli.ClientF <ide> <ide> cli.init = func() error { <ide> <del> // ignore errors from uuid package when running client commands <del> uuid.Loggerf = func(string, ...interface{}) {} <del> <ide> clientFlags.PostParse() <ide> <ide> hosts := clientFlags.Common.Hosts
1
Text
Text
change introduction/readme.md title to h1
7271a9c0a6ca7514650ef31d55cbf61f0901d8b6
<ide><path>docs/introduction/README.md <del>## Introduction <add># Introduction <ide> <ide> * [Motivation](Motivation.md) <ide> * [Core Concepts](CoreConcepts.md)
1
PHP
PHP
update signature formatting
968a36478814c740d25ec4d6f9c8bef859a2f834
<ide><path>src/Illuminate/Foundation/Console/StorageLinkCommand.php <ide> class StorageLinkCommand extends Command <ide> * <ide> * @var string <ide> */ <del> protected $signature = 'storage:link {--relative : Create the symbolic link using relative paths} <add> protected $signature = 'storage:link <add> {--relative : Create the symbolic link using relative paths} <ide> {--force : Recreate already existing symbolic links}'; <ide> <ide> /**
1
Javascript
Javascript
fix duplex._construct race
02c4869beec52d2664c747d520dfe078d2b3c714
<ide><path>lib/internal/streams/destroy.js <ide> function construct(stream, cb) { <ide> return; <ide> } <ide> <del> stream.once(kConstruct, cb); <del> <del> if (stream.listenerCount(kConstruct) > 1) { <del> // Duplex <del> return; <del> } <del> <ide> const r = stream._readableState; <ide> const w = stream._writableState; <ide> <ide> function construct(stream, cb) { <ide> w.constructed = false; <ide> } <ide> <add> stream.once(kConstruct, cb); <add> <add> if (stream.listenerCount(kConstruct) > 1) { <add> // Duplex <add> return; <add> } <add> <ide> process.nextTick(constructNT, stream); <ide> } <ide> <ide><path>test/parallel/test-stream-construct.js <ide> testDestroy((opts) => new Writable({ <ide> construct: common.mustCall() <ide> }); <ide> } <add> <add>{ <add> // https://github.com/nodejs/node/issues/34448 <add> <add> let constructed = false; <add> const d = new Duplex({ <add> readable: false, <add> construct: common.mustCall((callback) => { <add> setImmediate(common.mustCall(() => { <add> constructed = true; <add> callback(); <add> })); <add> }), <add> write(chunk, encoding, callback) { <add> callback(); <add> }, <add> read() { <add> this.push(null); <add> } <add> }); <add> d.resume(); <add> d.end('foo'); <add> d.on('close', common.mustCall(() => { <add> assert.strictEqual(constructed, true); <add> })); <add>}
2
Ruby
Ruby
remove warnings by calling remove_method
325fdfc92833cd2a0ccc97f4e168f11a406edc76
<ide><path>activemodel/test/cases/serializers/xml_serialization_test.rb <ide> class Contact <ide> <ide> attr_accessor :address, :friends <ide> <add> remove_method :attributes <ide> def attributes <ide> instance_values.except("address", "friends") <ide> end
1
Text
Text
remove david-dm from readme
5023184174d63aea1f50f4dfb0d66740aa2aa1ab
<ide><path>README.md <ide> [![npm][npm]][npm-url] <ide> <ide> [![node][node]][node-url] <del>[![deps][deps]][deps-url] <ide> [![builds2][builds2]][builds2-url] <ide> [![coverage][cover]][cover-url] <ide> [![licenses][licenses]][licenses-url] <ide> src="https://static.monei.net/monei-logo.svg" height="30" alt="MONEI"></a> <ide> [npm-url]: https://npmjs.com/package/webpack <ide> [node]: https://img.shields.io/node/v/webpack.svg <ide> [node-url]: https://nodejs.org <del>[deps]: https://img.shields.io/david/webpack/webpack.svg <del>[deps-url]: https://david-dm.org/webpack/webpack <ide> [prs]: https://img.shields.io/badge/PRs-welcome-brightgreen.svg <ide> [prs-url]: https://webpack.js.org/contribute/ <ide> [builds2]: https://dev.azure.com/webpack/webpack/_apis/build/status/webpack.webpack
1
Python
Python
update demo_camembert.py with new classes
3e20c2e871db82f81c3b2b814265a481be15273c
<ide><path>examples/contrib/demo_camembert.py <ide> import torch <ide> <ide> from transformers.tokenization_camembert import CamembertTokenizer <del>from transformers.modeling_roberta import RobertaForMaskedLM <add>from transformers.modeling_camembert import CamembertForMaskedLM <ide> <ide> <ide> def fill_mask(masked_input, model, tokenizer, topk=5): <ide> def fill_mask(masked_input, model, tokenizer, topk=5): <ide> return topk_filled_outputs <ide> <ide> <del>model_path = Path('camembert.v0.pytorch') <del>if not model_path.exists(): <del> compressed_path = model_path.with_suffix('.tar.gz') <del> url = 'http://dl.fbaipublicfiles.com/camembert/camembert.v0.pytorch.tar.gz' <del> print('Downloading model...') <del> urllib.request.urlretrieve(url, compressed_path) <del> print('Extracting model...') <del> with tarfile.open(compressed_path) as f: <del> f.extractall(model_path.parent) <del> assert model_path.exists() <del>tokenizer_path = model_path / 'sentencepiece.bpe.model' <del>tokenizer = CamembertTokenizer.from_pretrained(tokenizer_path) <del>model = RobertaForMaskedLM.from_pretrained(model_path) <add>tokenizer = CamembertTokenizer.from_pretrained('camembert-base') <add>model = CamembertForMaskedLM.from_pretrained('camembert-base') <ide> model.eval() <ide> <ide> masked_input = "Le camembert est <mask> :)"
1
Javascript
Javascript
fix minification bug
f960bf42a5b6c1accae46557dbdf4084debc032c
<ide><path>examples/js/loaders/FBXLoader.js <ide> <ide> }, <ide> <del> parseSubNode( name, node, subNode ) { <add> parseSubNode: function ( name, node, subNode ) { <ide> <ide> // special case: child node is single property <ide> if ( subNode.singleProperty === true ) {
1
Text
Text
add release key for rafaelgss
fbee0a896c4a42f7ffbf2dc11b2877641a8568f4
<ide><path>README.md <ide> Primary GPG keys for Node.js Releasers (some Releasers sign with subkeys): <ide> `8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600` <ide> * **Myles Borins** <<myles.borins@gmail.com>> <ide> `C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8` <add>* **RafaelGSS** <<rafael.nunu@hotmail.com>> <add> `890C08DB8579162FEE0DF9DB8BEAB4DFCF555EF4` <ide> * **Richard Lau** <<rlau@redhat.com>> <ide> `C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C` <ide> * **Rod Vagg** <<rod@vagg.org>> <ide> gpg --keyserver hkps://keys.openpgp.org --recv-keys 71DCFD284A79C3B38668286BC97E <ide> gpg --keyserver hkps://keys.openpgp.org --recv-keys 61FC681DFB92A079F1685E77973F295594EC4689 <ide> gpg --keyserver hkps://keys.openpgp.org --recv-keys 8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600 <ide> gpg --keyserver hkps://keys.openpgp.org --recv-keys C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8 <add>gpg --keyserver hkps://keys.openpgp.org --recv-keys 890C08DB8579162FEE0DF9DB8BEAB4DFCF555EF4 <ide> gpg --keyserver hkps://keys.openpgp.org --recv-keys C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C <ide> gpg --keyserver hkps://keys.openpgp.org --recv-keys DD8F2338BAE7501E3DD5AC78C273792F7D83545D <ide> gpg --keyserver hkps://keys.openpgp.org --recv-keys A48C2BEE680E841632CD4E44F07496B3EB3C1762
1
Python
Python
add new automodel classes in pipeline
c8bdf7f4ecd73680cb0751d9efc8fa3a992c2c2d
<ide><path>src/transformers/pipelines.py <ide> AutoModelForSequenceClassification, <ide> AutoModelForQuestionAnswering, <ide> AutoModelForTokenClassification, <del> AutoModelWithLMHead, <ide> AutoModelForSeq2SeqLM, <del> MODEL_WITH_LM_HEAD_MAPPING, <add> AutoModelForCausalLM, <add> AutoModelForMaskedLM, <ide> MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING, <ide> MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING, <ide> MODEL_FOR_QUESTION_ANSWERING_MAPPING, <ide> MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING, <add> MODEL_FOR_MASKED_LM_MAPPING, <ide> ) <ide> <ide> if TYPE_CHECKING: <ide> def __init__( <ide> task=task, <ide> ) <ide> <del> self.check_model_type(TF_MODEL_WITH_LM_HEAD_MAPPING if self.framework == "tf" else MODEL_WITH_LM_HEAD_MAPPING) <add> self.check_model_type(TF_MODEL_WITH_LM_HEAD_MAPPING if self.framework == "tf" else MODEL_FOR_MASKED_LM_MAPPING) <ide> <ide> self.topk = topk <ide> <ide> class TranslationPipeline(Pipeline): <ide> def __init__(self, *args, **kwargs): <ide> super().__init__(*args, **kwargs) <ide> <del> self.check_model_type(TF_MODEL_WITH_LM_HEAD_MAPPING if self.framework == "tf" else MODEL_WITH_LM_HEAD_MAPPING) <add> self.check_model_type( <add> TF_MODEL_WITH_LM_HEAD_MAPPING if self.framework == "tf" else MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING <add> ) <ide> <ide> def __call__( <ide> self, *args, return_tensors=False, return_text=True, clean_up_tokenization_spaces=False, **generate_kwargs <ide> def __call__( <ide> "fill-mask": { <ide> "impl": FillMaskPipeline, <ide> "tf": TFAutoModelWithLMHead if is_tf_available() else None, <del> "pt": AutoModelWithLMHead if is_torch_available() else None, <add> "pt": AutoModelForMaskedLM if is_torch_available() else None, <ide> "default": {"model": {"pt": "distilroberta-base", "tf": "distilroberta-base"}}, <ide> }, <ide> "summarization": { <ide> def __call__( <ide> "translation_en_to_fr": { <ide> "impl": TranslationPipeline, <ide> "tf": TFAutoModelWithLMHead if is_tf_available() else None, <del> "pt": AutoModelWithLMHead if is_torch_available() else None, <add> "pt": AutoModelForSeq2SeqLM if is_torch_available() else None, <ide> "default": {"model": {"pt": "t5-base", "tf": "t5-base"}}, <ide> }, <ide> "translation_en_to_de": { <ide> "impl": TranslationPipeline, <ide> "tf": TFAutoModelWithLMHead if is_tf_available() else None, <del> "pt": AutoModelWithLMHead if is_torch_available() else None, <add> "pt": AutoModelForSeq2SeqLM if is_torch_available() else None, <ide> "default": {"model": {"pt": "t5-base", "tf": "t5-base"}}, <ide> }, <ide> "translation_en_to_ro": { <ide> "impl": TranslationPipeline, <ide> "tf": TFAutoModelWithLMHead if is_tf_available() else None, <del> "pt": AutoModelWithLMHead if is_torch_available() else None, <add> "pt": AutoModelForSeq2SeqLM if is_torch_available() else None, <ide> "default": {"model": {"pt": "t5-base", "tf": "t5-base"}}, <ide> }, <ide> "text-generation": { <ide> "impl": TextGenerationPipeline, <ide> "tf": TFAutoModelWithLMHead if is_tf_available() else None, <del> "pt": AutoModelWithLMHead if is_torch_available() else None, <add> "pt": AutoModelForCausalLM if is_torch_available() else None, <ide> "default": {"model": {"pt": "gpt2", "tf": "gpt2"}}, <ide> }, <ide> "zero-shot-classification": {
1
Text
Text
add direct link to event chart image
86de72fef244b3fe209e5793deb40fb26f5b318e
<ide><path>docs/reference/api/docker_remote_api.md <ide> wget --no-check-certificate --certificate=$DOCKER_CERT_PATH/cert.pem \ <ide> <ide> The following diagram depicts the container states accessible through the API. <ide> <del>![States](images/event_state.png) <add>[![States](images/event_state.png)](../images/event_state.png) <ide> <ide> Some container-related events are not affected by container state, so they are not included in this diagram. These events are: <ide>
1
Text
Text
fix doc format in active storage setup [ci skip]
1450abcd19cbdb66cd6226d63fb8288b6dead312
<ide><path>guides/source/development_dependencies_install.md <ide> sudo apt-get update && sudo apt-get install yarn <ide> <ide> On Fedora or CentOS, just run: <ide> <del>``bash <add>```bash <ide> sudo wget https://dl.yarnpkg.com/rpm/yarn.repo -O /etc/yum.repos.d/yarn.repo <ide> <ide> sudo yum install yarn
1