content_type
stringclasses
8 values
main_lang
stringclasses
7 values
message
stringlengths
1
50
sha
stringlengths
40
40
patch
stringlengths
52
962k
file_count
int64
1
300
Javascript
Javascript
apply type errors from code review
ae7b2c20347e246068d93e76573225f92ecad2a9
<ide><path>client/src/client-only-routes/ShowCertification.js <ide> const propTypes = { <ide> name: PropTypes.string, <ide> certName: PropTypes.string, <ide> certTitle: PropTypes.string, <del> completionTime: PropTypes.string, <add> completionTime: PropTypes.number, <ide> date: PropTypes.string <ide> }), <ide> certDashedName: PropTypes.string, <ide><path>client/src/components/profile/Profile.js <ide> const propTypes = { <ide> showTimeLine: PropTypes.bool <ide> }), <ide> calendar: PropTypes.object, <del> streak: PropTypes.shape, <add> streak: PropTypes.shape({ <add> current: PropTypes.number, <add> longest: PropTypes.number <add> }), <ide> completedChallenges: PropTypes.array, <ide> portfolio: PropTypes.array, <ide> about: PropTypes.string,
2
Javascript
Javascript
add example for the provider
36ab132b8288a515d2e75cbb445dac5a93643f15
<ide><path>src/ng/interpolate.js <ide> var $interpolateMinErr = minErr('$interpolate'); <ide> * @description <ide> * <ide> * Used for configuring the interpolation markup. Defaults to `{{` and `}}`. <add> * <add> * @example <add> <doc:example> <add> <doc:source> <add> <script> <add> var myApp = angular.module('App', [], function($interpolateProvider) { <add> $interpolateProvider.startSymbol('//'); <add> $interpolateProvider.endSymbol('//'); <add> }); <add> function Controller($scope) { <add> $scope.label = "Interpolation Provider Sample"; <add> } <add> </script> <add> <div ng-app="App" ng-controller="Controller"> <add> //label// <add> </div> <add> </doc:source> <add> <doc:scenario> <add> describe('provider', function() { <add> beforeEach(module(function($interpolateProvider) { <add> $interpolateProvider.startSymbol('//'); <add> $interpolateProvider.endSymbol('//'); <add> })); <add> <add> it('should not get confused with same markers', inject(function($interpolate) { <add> expect($interpolate('///').parts).toEqual(['///']); <add> expect($interpolate('////')()).toEqual(''); <add> expect($interpolate('//1//')()).toEqual('1'); <add> })); <add> }); <add> </doc:scenario> <add> </doc:example> <ide> */ <ide> function $InterpolateProvider() { <ide> var startSymbol = '{{';
1
PHP
PHP
replace schema with search_path in pgsql config
195a7e03746b71e91838b73d8ed763c16985fd1c
<ide><path>config/database.php <ide> 'charset' => 'utf8', <ide> 'prefix' => '', <ide> 'prefix_indexes' => true, <del> 'schema' => 'public', <add> 'search_path' => 'public', <ide> 'sslmode' => 'prefer', <ide> ], <ide>
1
Ruby
Ruby
rewrite associationcollection#find using relations
5565bab994af1e54f34df5891c635590d22feea0
<ide><path>activerecord/lib/active_record/associations/association_collection.rb <ide> def find(*args) <ide> load_target.select { |r| ids.include?(r.id) } <ide> end <ide> else <del> conditions = "#{@finder_sql}" <del> if sanitized_conditions = sanitize_sql(options[:conditions]) <del> conditions << " AND (#{sanitized_conditions})" <del> end <del> <del> options[:conditions] = conditions <add> merge_options_from_reflection!(options) <add> construct_find_options!(options) <add> <add> find_scope = construct_scope[:find].slice(:conditions, :order) <add> <add> with_scope(:find => find_scope) do <add> relation = @reflection.klass.send(:construct_finder_arel_with_includes, options) <ide> <del> if options[:order] && @reflection.options[:order] <del> options[:order] = "#{options[:order]}, #{@reflection.options[:order]}" <del> elsif @reflection.options[:order] <del> options[:order] = @reflection.options[:order] <add> case args.first <add> when :first, :last, :all <add> relation.send(args.first) <add> else <add> relation.find(*args) <add> end <ide> end <del> <del> # Build options specific to association <del> construct_find_options!(options) <del> <del> merge_options_from_reflection!(options) <del> <del> # Pass through args exactly as we received them. <del> args << options <del> @reflection.klass.find(*args) <ide> end <ide> end <ide>
1
Javascript
Javascript
treat dates as atomic values instead of objects
6cbbd966479448591f819cbf904e0a3b757613dc
<ide><path>src/Angular.js <ide> function baseExtend(dst, objs, deep) { <ide> var src = obj[key]; <ide> <ide> if (deep && isObject(src)) { <del> if (!isObject(dst[key])) dst[key] = isArray(src) ? [] : {}; <del> baseExtend(dst[key], [src], true); <add> if (isDate(src)) { <add> dst[key] = new Date(src.valueOf()); <add> } else { <add> if (!isObject(dst[key])) dst[key] = isArray(src) ? [] : {}; <add> baseExtend(dst[key], [src], true); <add> } <ide> } else { <ide> dst[key] = src; <ide> } <ide><path>test/AngularSpec.js <ide> describe('angular', function() { <ide> // make sure we retain the old key <ide> expect(hashKey(dst)).toEqual(h); <ide> }); <add> <add> <add> it('should copy dates by reference', function() { <add> var src = { date: new Date() }; <add> var dst = {}; <add> <add> extend(dst, src); <add> <add> expect(dst.date).toBe(src.date); <add> }); <ide> }); <ide> <ide> <ide> describe('angular', function() { <ide> }); <ide> expect(dst.foo).not.toBe(src.foo); <ide> }); <add> <add> <add> it('should copy dates by value', function() { <add> var src = { date: new Date() }; <add> var dst = {}; <add> <add> merge(dst, src); <add> <add> expect(dst.date).not.toBe(src.date); <add> expect(isDate(dst.date)).toBeTruthy(); <add> expect(dst.date.valueOf()).toEqual(src.date.valueOf()); <add> }); <ide> }); <ide> <ide>
2
PHP
PHP
fix method order
6b13ac843d5046a41e62cc4ff318caebd01eeef5
<ide><path>src/Illuminate/Foundation/helpers.php <ide> function get($uri, $action) <ide> } <ide> } <ide> <del>if ( ! function_exists('resource')) <del>{ <del> /** <del> * Route a resource to a controller. <del> * <del> * @param string $name <del> * @param string $controller <del> * @param array $options <del> * @return void <del> */ <del> function resource($name, $controller, array $options = []) <del> { <del> return app('router')->resource($name, $controller, $options); <del> } <del>} <del> <ide> if ( ! function_exists('info')) <ide> { <ide> /** <ide> function redirect($to = null, $status = 302, $headers = array(), $secure = null) <ide> } <ide> } <ide> <add>if ( ! function_exists('resource')) <add>{ <add> /** <add> * Route a resource to a controller. <add> * <add> * @param string $name <add> * @param string $controller <add> * @param array $options <add> * @return void <add> */ <add> function resource($name, $controller, array $options = []) <add> { <add> return app('router')->resource($name, $controller, $options); <add> } <add>} <add> <ide> if ( ! function_exists('response')) <ide> { <ide> /**
1
Ruby
Ruby
add a global flag to silent warning when auditing
d45ff9c0fdfa834c55c01f9c95fe18064fabd76a
<ide><path>Library/Homebrew/dev-cmd/audit.rb <ide> module Homebrew <ide> <ide> def audit <ide> Homebrew.inject_dump_stats!(FormulaAuditor, /^audit_/) if ARGV.switch? "D" <add> Homebrew.auditing = true <ide> <ide> formula_count = 0 <ide> problem_count = 0 <ide><path>Library/Homebrew/global.rb <ide> def failed? <ide> @failed == true <ide> end <ide> <del> attr_writer :raise_deprecation_exceptions <add> attr_writer :raise_deprecation_exceptions, :auditing <ide> <ide> def raise_deprecation_exceptions? <ide> @raise_deprecation_exceptions == true <ide> end <add> <add> def auditing? <add> @auditing == true <add> end <ide> end <ide> end <ide> <ide><path>Library/Homebrew/utils.rb <ide> def odeprecated(method, replacement = nil, disable: false, disable_on: nil, call <ide> if ARGV.homebrew_developer? || disable || <ide> Homebrew.raise_deprecation_exceptions? <ide> raise MethodDeprecatedError, message <del> else <add> elsif !Homebrew.auditing? <ide> opoo "#{message}\n" <ide> end <ide> end
3
Ruby
Ruby
add documentation for after_touch [ci skip]
72a3dd85e415c9ec0e51d4b812b7264d4f54c75b
<ide><path>activerecord/lib/active_record/callbacks.rb <ide> module ActiveRecord <ide> # Check out <tt>ActiveRecord::Transactions</tt> for more details about <tt>after_commit</tt> and <ide> # <tt>after_rollback</tt>. <ide> # <add> # Additionally, an <tt>after_touch</tt> callback is triggered whenever an <add> # object is touched. <add> # <ide> # Lastly an <tt>after_find</tt> and <tt>after_initialize</tt> callback is triggered for each object that <ide> # is found and instantiated by a finder, with <tt>after_initialize</tt> being triggered after new objects <ide> # are instantiated as well. <ide> # <del> # That's a total of twelve callbacks, which gives you immense power to react and prepare for each state in the <add> # There are nineteen callbacks in total, which give you immense power to react and prepare for each state in the <ide> # Active Record life cycle. The sequence for calling <tt>Base#save</tt> for an existing record is similar, <ide> # except that each <tt>_create</tt> callback is replaced by the corresponding <tt>_update</tt> callback. <ide> # <ide><path>activerecord/lib/active_record/persistence.rb <ide> def reload(options = nil) <ide> end <ide> <ide> # Saves the record with the updated_at/on attributes set to the current time. <del> # Please note that no validation is performed and no callbacks are executed. <add> # Please note that no validation is performed and only the +after_touch+ <add> # callback is executed. <ide> # If an attribute name is passed, that attribute is updated along with <ide> # updated_at/on attributes. <ide> #
2
Text
Text
fix pluralization in doc [ci skip]
f5d6aaa31bc86e01209af767d6a3c7b5617e6911
<ide><path>guides/source/active_record_migrations.md <ide> column names can not be derived from the table names, you can use the <ide> `:column` and `:primary_key` options. <ide> <ide> Rails will generate a name for every foreign key starting with <del>`fk_rails_` followed by 10 character which is deterministically <add>`fk_rails_` followed by 10 characters which are deterministically <ide> generated from the `from_table` and `column`. <ide> There is a `:name` option to specify a different name if needed. <ide>
1
Java
Java
add subjects for single, maybe and completable
d173b6d4f8713907f7a38e00da65c30abfb4a7e2
<ide><path>src/main/java/io/reactivex/subjects/CompletableSubject.java <add>/** <add> * Copyright 2016 Netflix, Inc. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in <add> * compliance with the License. You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software distributed under the License is <add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See <add> * the License for the specific language governing permissions and limitations under the License. <add> */ <add> <add>package io.reactivex.subjects; <add> <add>import java.util.concurrent.atomic.*; <add> <add>import io.reactivex.*; <add>import io.reactivex.annotations.*; <add>import io.reactivex.disposables.Disposable; <add>import io.reactivex.plugins.RxJavaPlugins; <add> <add>/** <add> * Represents a hot Completable-like source and consumer of events similar to Subjects. <add> * <p> <add> * All methods are thread safe. Calling onComplete multiple <add> * times has no effect. Calling onError multiple times relays the Throwable to <add> * the RxJavaPlugins' error handler. <add> * <p> <add> * The CompletableSubject doesn't store the Disposables coming through onSubscribe but <add> * disposes them once the other onXXX methods were called (terminal state reached). <add> * @since 2.0.5 - experimental <add> */ <add>@Experimental <add>public final class CompletableSubject extends Completable implements CompletableObserver { <add> <add> final AtomicReference<CompletableDisposable[]> observers; <add> <add> static final CompletableDisposable[] EMPTY = new CompletableDisposable[0]; <add> <add> static final CompletableDisposable[] TERMINATED = new CompletableDisposable[0]; <add> <add> final AtomicBoolean once; <add> Throwable error; <add> <add> /** <add> * Creates a fresh CompletableSubject. <add> * @return the new CompletableSubject instance <add> */ <add> @CheckReturnValue <add> public static CompletableSubject create() { <add> return new CompletableSubject(); <add> } <add> <add> CompletableSubject() { <add> once = new AtomicBoolean(); <add> observers = new AtomicReference<CompletableDisposable[]>(EMPTY); <add> } <add> <add> @Override <add> public void onSubscribe(Disposable d) { <add> if (observers.get() == TERMINATED) { <add> d.dispose(); <add> } <add> } <add> <add> @Override <add> public void onError(Throwable e) { <add> if (e == null) { <add> e = new NullPointerException("Null errors are not allowed in 2.x"); <add> } <add> if (once.compareAndSet(false, true)) { <add> this.error = e; <add> for (CompletableDisposable md : observers.getAndSet(TERMINATED)) { <add> md.actual.onError(e); <add> } <add> } else { <add> RxJavaPlugins.onError(e); <add> } <add> } <add> <add> @Override <add> public void onComplete() { <add> if (once.compareAndSet(false, true)) { <add> for (CompletableDisposable md : observers.getAndSet(TERMINATED)) { <add> md.actual.onComplete(); <add> } <add> } <add> } <add> <add> @Override <add> protected void subscribeActual(CompletableObserver observer) { <add> CompletableDisposable md = new CompletableDisposable(observer, this); <add> observer.onSubscribe(md); <add> if (add(md)) { <add> if (md.isDisposed()) { <add> remove(md); <add> } <add> } else { <add> Throwable ex = error; <add> if (ex != null) { <add> observer.onError(ex); <add> } else { <add> observer.onComplete(); <add> } <add> } <add> } <add> <add> boolean add(CompletableDisposable inner) { <add> for (;;) { <add> CompletableDisposable[] a = observers.get(); <add> if (a == TERMINATED) { <add> return false; <add> } <add> <add> int n = a.length; <add> <add> CompletableDisposable[] b = new CompletableDisposable[n + 1]; <add> System.arraycopy(a, 0, b, 0, n); <add> b[n] = inner; <add> if (observers.compareAndSet(a, b)) { <add> return true; <add> } <add> } <add> } <add> <add> void remove(CompletableDisposable inner) { <add> for (;;) { <add> CompletableDisposable[] a = observers.get(); <add> int n = a.length; <add> if (n == 0) { <add> return; <add> } <add> <add> int j = -1; <add> <add> for (int i = 0; i < n; i++) { <add> if (a[i] == inner) { <add> j = i; <add> break; <add> } <add> } <add> <add> if (j < 0) { <add> return; <add> } <add> CompletableDisposable[] b; <add> if (n == 1) { <add> b = EMPTY; <add> } else { <add> b = new CompletableDisposable[n - 1]; <add> System.arraycopy(a, 0, b, 0, j); <add> System.arraycopy(a, j + 1, b, j, n - j - 1); <add> } <add> <add> if (observers.compareAndSet(a, b)) { <add> return; <add> } <add> } <add> } <add> <add> /** <add> * Returns the terminal error if this CompletableSubject has been terminated with an error, null otherwise. <add> * @return the terminal error or null if not terminated or not with an error <add> */ <add> public Throwable getThrowable() { <add> if (observers.get() == TERMINATED) { <add> return error; <add> } <add> return null; <add> } <add> <add> /** <add> * Returns true if this CompletableSubject has been terminated with an error. <add> * @return true if this CompletableSubject has been terminated with an error <add> */ <add> public boolean hasThrowable() { <add> return observers.get() == TERMINATED && error != null; <add> } <add> <add> /** <add> * Returns true if this CompletableSubject has been completed. <add> * @return true if this CompletableSubject has been completed <add> */ <add> public boolean hasComplete() { <add> return observers.get() == TERMINATED && error == null; <add> } <add> <add> /** <add> * Returns true if this CompletableSubject has observers. <add> * @return true if this CompletableSubject has observers <add> */ <add> public boolean hasObservers() { <add> return observers.get().length != 0; <add> } <add> <add> /** <add> * Returns the number of current observers. <add> * @return the number of current observers <add> */ <add> /* test */ int observerCount() { <add> return observers.get().length; <add> } <add> <add> static final class CompletableDisposable <add> extends AtomicReference<CompletableSubject> implements Disposable { <add> private static final long serialVersionUID = -7650903191002190468L; <add> <add> final CompletableObserver actual; <add> <add> CompletableDisposable(CompletableObserver actual, CompletableSubject parent) { <add> this.actual = actual; <add> lazySet(parent); <add> } <add> <add> @Override <add> public void dispose() { <add> CompletableSubject parent = getAndSet(null); <add> if (parent != null) { <add> parent.remove(this); <add> } <add> } <add> <add> @Override <add> public boolean isDisposed() { <add> return get() == null; <add> } <add> } <add>} <ide><path>src/main/java/io/reactivex/subjects/MaybeSubject.java <add>/** <add> * Copyright 2016 Netflix, Inc. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in <add> * compliance with the License. You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software distributed under the License is <add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See <add> * the License for the specific language governing permissions and limitations under the License. <add> */ <add> <add>package io.reactivex.subjects; <add> <add>import java.util.concurrent.atomic.*; <add> <add>import io.reactivex.*; <add>import io.reactivex.annotations.*; <add>import io.reactivex.disposables.Disposable; <add>import io.reactivex.plugins.RxJavaPlugins; <add> <add>/** <add> * Represents a hot Maybe-like source and consumer of events similar to Subjects. <add> * <p> <add> * All methods are thread safe. Calling onSuccess or onComplete multiple <add> * times has no effect. Calling onError multiple times relays the Throwable to <add> * the RxJavaPlugins' error handler. <add> * <p> <add> * The MaybeSubject doesn't store the Disposables coming through onSubscribe but <add> * disposes them once the other onXXX methods were called (terminal state reached). <add> * @param <T> the value type received and emitted <add> * @since 2.0.5 - experimental <add> */ <add>@Experimental <add>public final class MaybeSubject<T> extends Maybe<T> implements MaybeObserver<T> { <add> <add> final AtomicReference<MaybeDisposable<T>[]> observers; <add> <add> @SuppressWarnings("rawtypes") <add> static final MaybeDisposable[] EMPTY = new MaybeDisposable[0]; <add> <add> @SuppressWarnings("rawtypes") <add> static final MaybeDisposable[] TERMINATED = new MaybeDisposable[0]; <add> <add> final AtomicBoolean once; <add> T value; <add> Throwable error; <add> <add> /** <add> * Creates a fresh MaybeSubject. <add> * @param <T> the value type received and emitted <add> * @return the new MaybeSubject instance <add> */ <add> @CheckReturnValue <add> public static <T> MaybeSubject<T> create() { <add> return new MaybeSubject<T>(); <add> } <add> <add> @SuppressWarnings("unchecked") <add> MaybeSubject() { <add> once = new AtomicBoolean(); <add> observers = new AtomicReference<MaybeDisposable<T>[]>(EMPTY); <add> } <add> <add> @Override <add> public void onSubscribe(Disposable d) { <add> if (observers.get() == TERMINATED) { <add> d.dispose(); <add> } <add> } <add> <add> @SuppressWarnings("unchecked") <add> @Override <add> public void onSuccess(T value) { <add> if (value == null) { <add> onError(new NullPointerException("Null values are not allowed in 2.x")); <add> return; <add> } <add> if (once.compareAndSet(false, true)) { <add> this.value = value; <add> for (MaybeDisposable<T> md : observers.getAndSet(TERMINATED)) { <add> md.actual.onSuccess(value); <add> } <add> } <add> } <add> <add> @SuppressWarnings("unchecked") <add> @Override <add> public void onError(Throwable e) { <add> if (e == null) { <add> e = new NullPointerException("Null errors are not allowed in 2.x"); <add> } <add> if (once.compareAndSet(false, true)) { <add> this.error = e; <add> for (MaybeDisposable<T> md : observers.getAndSet(TERMINATED)) { <add> md.actual.onError(e); <add> } <add> } else { <add> RxJavaPlugins.onError(e); <add> } <add> } <add> <add> @SuppressWarnings("unchecked") <add> @Override <add> public void onComplete() { <add> if (once.compareAndSet(false, true)) { <add> for (MaybeDisposable<T> md : observers.getAndSet(TERMINATED)) { <add> md.actual.onComplete(); <add> } <add> } <add> } <add> <add> @Override <add> protected void subscribeActual(MaybeObserver<? super T> observer) { <add> MaybeDisposable<T> md = new MaybeDisposable<T>(observer, this); <add> observer.onSubscribe(md); <add> if (add(md)) { <add> if (md.isDisposed()) { <add> remove(md); <add> } <add> } else { <add> Throwable ex = error; <add> if (ex != null) { <add> observer.onError(ex); <add> } else { <add> T v = value; <add> if (v == null) { <add> observer.onComplete(); <add> } else { <add> observer.onSuccess(v); <add> } <add> } <add> } <add> } <add> <add> boolean add(MaybeDisposable<T> inner) { <add> for (;;) { <add> MaybeDisposable<T>[] a = observers.get(); <add> if (a == TERMINATED) { <add> return false; <add> } <add> <add> int n = a.length; <add> @SuppressWarnings("unchecked") <add> MaybeDisposable<T>[] b = new MaybeDisposable[n + 1]; <add> System.arraycopy(a, 0, b, 0, n); <add> b[n] = inner; <add> if (observers.compareAndSet(a, b)) { <add> return true; <add> } <add> } <add> } <add> <add> @SuppressWarnings("unchecked") <add> void remove(MaybeDisposable<T> inner) { <add> for (;;) { <add> MaybeDisposable<T>[] a = observers.get(); <add> int n = a.length; <add> if (n == 0) { <add> return; <add> } <add> <add> int j = -1; <add> <add> for (int i = 0; i < n; i++) { <add> if (a[i] == inner) { <add> j = i; <add> break; <add> } <add> } <add> <add> if (j < 0) { <add> return; <add> } <add> MaybeDisposable<T>[] b; <add> if (n == 1) { <add> b = EMPTY; <add> } else { <add> b = new MaybeDisposable[n - 1]; <add> System.arraycopy(a, 0, b, 0, j); <add> System.arraycopy(a, j + 1, b, j, n - j - 1); <add> } <add> <add> if (observers.compareAndSet(a, b)) { <add> return; <add> } <add> } <add> } <add> <add> /** <add> * Returns the success value if this MaybeSubject was terminated with a success value. <add> * @return the success value or null <add> */ <add> public T getValue() { <add> if (observers.get() == TERMINATED) { <add> return value; <add> } <add> return null; <add> } <add> <add> /** <add> * Returns true if this MaybeSubject was terminated with a success value. <add> * @return true if this MaybeSubject was terminated with a success value <add> */ <add> public boolean hasValue() { <add> return observers.get() == TERMINATED && value != null; <add> } <add> <add> /** <add> * Returns the terminal error if this MaybeSubject has been terminated with an error, null otherwise. <add> * @return the terminal error or null if not terminated or not with an error <add> */ <add> public Throwable getThrowable() { <add> if (observers.get() == TERMINATED) { <add> return error; <add> } <add> return null; <add> } <add> <add> /** <add> * Returns true if this MaybeSubject has been terminated with an error. <add> * @return true if this MaybeSubject has been terminated with an error <add> */ <add> public boolean hasThrowable() { <add> return observers.get() == TERMINATED && error != null; <add> } <add> <add> /** <add> * Returns true if this MaybeSubject has been completed. <add> * @return true if this MaybeSubject has been completed <add> */ <add> public boolean hasComplete() { <add> return observers.get() == TERMINATED && value == null && error == null; <add> } <add> <add> /** <add> * Returns true if this MaybeSubject has observers. <add> * @return true if this MaybeSubject has observers <add> */ <add> public boolean hasObservers() { <add> return observers.get().length != 0; <add> } <add> <add> /** <add> * Returns the number of current observers. <add> * @return the number of current observers <add> */ <add> /* test */ int observerCount() { <add> return observers.get().length; <add> } <add> <add> static final class MaybeDisposable<T> <add> extends AtomicReference<MaybeSubject<T>> implements Disposable { <add> private static final long serialVersionUID = -7650903191002190468L; <add> <add> final MaybeObserver<? super T> actual; <add> <add> MaybeDisposable(MaybeObserver<? super T> actual, MaybeSubject<T> parent) { <add> this.actual = actual; <add> lazySet(parent); <add> } <add> <add> @Override <add> public void dispose() { <add> MaybeSubject<T> parent = getAndSet(null); <add> if (parent != null) { <add> parent.remove(this); <add> } <add> } <add> <add> @Override <add> public boolean isDisposed() { <add> return get() == null; <add> } <add> } <add>} <ide><path>src/main/java/io/reactivex/subjects/SingleSubject.java <add>/** <add> * Copyright 2016 Netflix, Inc. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in <add> * compliance with the License. You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software distributed under the License is <add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See <add> * the License for the specific language governing permissions and limitations under the License. <add> */ <add> <add>package io.reactivex.subjects; <add> <add>import java.util.concurrent.atomic.*; <add> <add>import io.reactivex.*; <add>import io.reactivex.annotations.*; <add>import io.reactivex.disposables.Disposable; <add>import io.reactivex.plugins.RxJavaPlugins; <add> <add>/** <add> * Represents a hot Single-like source and consumer of events similar to Subjects. <add> * <p> <add> * All methods are thread safe. Calling onSuccess multiple <add> * times has no effect. Calling onError multiple times relays the Throwable to <add> * the RxJavaPlugins' error handler. <add> * <p> <add> * The SingleSubject doesn't store the Disposables coming through onSubscribe but <add> * disposes them once the other onXXX methods were called (terminal state reached). <add> * @param <T> the value type received and emitted <add> * @since 2.0.5 - experimental <add> */ <add>@Experimental <add>public final class SingleSubject<T> extends Single<T> implements SingleObserver<T> { <add> <add> final AtomicReference<SingleDisposable<T>[]> observers; <add> <add> @SuppressWarnings("rawtypes") <add> static final SingleDisposable[] EMPTY = new SingleDisposable[0]; <add> <add> @SuppressWarnings("rawtypes") <add> static final SingleDisposable[] TERMINATED = new SingleDisposable[0]; <add> <add> final AtomicBoolean once; <add> T value; <add> Throwable error; <add> <add> /** <add> * Creates a fresh SingleSubject. <add> * @param <T> the value type received and emitted <add> * @return the new SingleSubject instance <add> */ <add> @CheckReturnValue <add> public static <T> SingleSubject<T> create() { <add> return new SingleSubject<T>(); <add> } <add> <add> @SuppressWarnings("unchecked") <add> SingleSubject() { <add> once = new AtomicBoolean(); <add> observers = new AtomicReference<SingleDisposable<T>[]>(EMPTY); <add> } <add> <add> @Override <add> public void onSubscribe(Disposable d) { <add> if (observers.get() == TERMINATED) { <add> d.dispose(); <add> } <add> } <add> <add> @SuppressWarnings("unchecked") <add> @Override <add> public void onSuccess(T value) { <add> if (value == null) { <add> onError(new NullPointerException("Null values are not allowed in 2.x")); <add> return; <add> } <add> if (once.compareAndSet(false, true)) { <add> this.value = value; <add> for (SingleDisposable<T> md : observers.getAndSet(TERMINATED)) { <add> md.actual.onSuccess(value); <add> } <add> } <add> } <add> <add> @SuppressWarnings("unchecked") <add> @Override <add> public void onError(Throwable e) { <add> if (e == null) { <add> e = new NullPointerException("Null errors are not allowed in 2.x"); <add> } <add> if (once.compareAndSet(false, true)) { <add> this.error = e; <add> for (SingleDisposable<T> md : observers.getAndSet(TERMINATED)) { <add> md.actual.onError(e); <add> } <add> } else { <add> RxJavaPlugins.onError(e); <add> } <add> } <add> <add> @Override <add> protected void subscribeActual(SingleObserver<? super T> observer) { <add> SingleDisposable<T> md = new SingleDisposable<T>(observer, this); <add> observer.onSubscribe(md); <add> if (add(md)) { <add> if (md.isDisposed()) { <add> remove(md); <add> } <add> } else { <add> Throwable ex = error; <add> if (ex != null) { <add> observer.onError(ex); <add> } else { <add> observer.onSuccess(value); <add> } <add> } <add> } <add> <add> boolean add(SingleDisposable<T> inner) { <add> for (;;) { <add> SingleDisposable<T>[] a = observers.get(); <add> if (a == TERMINATED) { <add> return false; <add> } <add> <add> int n = a.length; <add> @SuppressWarnings("unchecked") <add> SingleDisposable<T>[] b = new SingleDisposable[n + 1]; <add> System.arraycopy(a, 0, b, 0, n); <add> b[n] = inner; <add> if (observers.compareAndSet(a, b)) { <add> return true; <add> } <add> } <add> } <add> <add> @SuppressWarnings("unchecked") <add> void remove(SingleDisposable<T> inner) { <add> for (;;) { <add> SingleDisposable<T>[] a = observers.get(); <add> int n = a.length; <add> if (n == 0) { <add> return; <add> } <add> <add> int j = -1; <add> <add> for (int i = 0; i < n; i++) { <add> if (a[i] == inner) { <add> j = i; <add> break; <add> } <add> } <add> <add> if (j < 0) { <add> return; <add> } <add> SingleDisposable<T>[] b; <add> if (n == 1) { <add> b = EMPTY; <add> } else { <add> b = new SingleDisposable[n - 1]; <add> System.arraycopy(a, 0, b, 0, j); <add> System.arraycopy(a, j + 1, b, j, n - j - 1); <add> } <add> <add> if (observers.compareAndSet(a, b)) { <add> return; <add> } <add> } <add> } <add> <add> /** <add> * Returns the success value if this SingleSubject was terminated with a success value. <add> * @return the success value or null <add> */ <add> public T getValue() { <add> if (observers.get() == TERMINATED) { <add> return value; <add> } <add> return null; <add> } <add> <add> /** <add> * Returns true if this SingleSubject was terminated with a success value. <add> * @return true if this SingleSubject was terminated with a success value <add> */ <add> public boolean hasValue() { <add> return observers.get() == TERMINATED && value != null; <add> } <add> <add> /** <add> * Returns the terminal error if this SingleSubject has been terminated with an error, null otherwise. <add> * @return the terminal error or null if not terminated or not with an error <add> */ <add> public Throwable getThrowable() { <add> if (observers.get() == TERMINATED) { <add> return error; <add> } <add> return null; <add> } <add> <add> /** <add> * Returns true if this SingleSubject has been terminated with an error. <add> * @return true if this SingleSubject has been terminated with an error <add> */ <add> public boolean hasThrowable() { <add> return observers.get() == TERMINATED && error != null; <add> } <add> <add> /** <add> * Returns true if this SingleSubject has observers. <add> * @return true if this SingleSubject has observers <add> */ <add> public boolean hasObservers() { <add> return observers.get().length != 0; <add> } <add> <add> /** <add> * Returns the number of current observers. <add> * @return the number of current observers <add> */ <add> /* test */ int observerCount() { <add> return observers.get().length; <add> } <add> <add> static final class SingleDisposable<T> <add> extends AtomicReference<SingleSubject<T>> implements Disposable { <add> private static final long serialVersionUID = -7650903191002190468L; <add> <add> final SingleObserver<? super T> actual; <add> <add> SingleDisposable(SingleObserver<? super T> actual, SingleSubject<T> parent) { <add> this.actual = actual; <add> lazySet(parent); <add> } <add> <add> @Override <add> public void dispose() { <add> SingleSubject<T> parent = getAndSet(null); <add> if (parent != null) { <add> parent.remove(this); <add> } <add> } <add> <add> @Override <add> public boolean isDisposed() { <add> return get() == null; <add> } <add> } <add>} <ide><path>src/test/java/io/reactivex/subjects/CompletableSubjectTest.java <add>/** <add> * Copyright 2016 Netflix, Inc. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in <add> * compliance with the License. You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software distributed under the License is <add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See <add> * the License for the specific language governing permissions and limitations under the License. <add> */ <add> <add>package io.reactivex.subjects; <add> <add>import static org.junit.Assert.*; <add> <add>import java.io.IOException; <add>import java.util.List; <add> <add>import org.junit.Test; <add> <add>import io.reactivex.*; <add>import io.reactivex.disposables.*; <add>import io.reactivex.observers.TestObserver; <add>import io.reactivex.plugins.RxJavaPlugins; <add>import io.reactivex.schedulers.Schedulers; <add> <add>public class CompletableSubjectTest { <add> <add> @Test <add> public void once() { <add> CompletableSubject cs = CompletableSubject.create(); <add> <add> TestObserver<Void> to = cs.test(); <add> <add> cs.onComplete(); <add> <add> List<Throwable> errors = TestHelper.trackPluginErrors(); <add> try { <add> cs.onError(new IOException()); <add> <add> TestHelper.assertError(errors, 0, IOException.class); <add> } finally { <add> RxJavaPlugins.reset(); <add> } <add> cs.onComplete(); <add> <add> to.assertResult(); <add> } <add> <add> @Test <add> public void error() { <add> CompletableSubject cs = CompletableSubject.create(); <add> <add> assertFalse(cs.hasComplete()); <add> assertFalse(cs.hasThrowable()); <add> assertNull(cs.getThrowable()); <add> assertFalse(cs.hasObservers()); <add> assertEquals(0, cs.observerCount()); <add> <add> TestObserver<Void> to = cs.test(); <add> <add> to.assertEmpty(); <add> <add> assertTrue(cs.hasObservers()); <add> assertEquals(1, cs.observerCount()); <add> <add> cs.onError(new IOException()); <add> <add> assertFalse(cs.hasComplete()); <add> assertTrue(cs.hasThrowable()); <add> assertTrue(cs.getThrowable().toString(), cs.getThrowable() instanceof IOException); <add> assertFalse(cs.hasObservers()); <add> assertEquals(0, cs.observerCount()); <add> <add> to.assertFailure(IOException.class); <add> <add> cs.test().assertFailure(IOException.class); <add> <add> assertFalse(cs.hasComplete()); <add> assertTrue(cs.hasThrowable()); <add> assertTrue(cs.getThrowable().toString(), cs.getThrowable() instanceof IOException); <add> assertFalse(cs.hasObservers()); <add> assertEquals(0, cs.observerCount()); <add> } <add> <add> @Test <add> public void complete() { <add> CompletableSubject cs = CompletableSubject.create(); <add> <add> assertFalse(cs.hasComplete()); <add> assertFalse(cs.hasThrowable()); <add> assertNull(cs.getThrowable()); <add> assertFalse(cs.hasObservers()); <add> assertEquals(0, cs.observerCount()); <add> <add> TestObserver<Void> to = cs.test(); <add> <add> to.assertEmpty(); <add> <add> assertTrue(cs.hasObservers()); <add> assertEquals(1, cs.observerCount()); <add> <add> cs.onComplete(); <add> <add> assertTrue(cs.hasComplete()); <add> assertFalse(cs.hasThrowable()); <add> assertNull(cs.getThrowable()); <add> assertFalse(cs.hasObservers()); <add> assertEquals(0, cs.observerCount()); <add> <add> to.assertResult(); <add> <add> cs.test().assertResult(); <add> <add> assertTrue(cs.hasComplete()); <add> assertFalse(cs.hasThrowable()); <add> assertNull(cs.getThrowable()); <add> assertFalse(cs.hasObservers()); <add> assertEquals(0, cs.observerCount()); <add> } <add> <add> @Test <add> public void nullThrowable() { <add> CompletableSubject cs = CompletableSubject.create(); <add> <add> TestObserver<Void> to = cs.test(); <add> <add> cs.onError(null); <add> <add> to.assertFailure(NullPointerException.class); <add> } <add> <add> @Test <add> public void cancelOnArrival() { <add> CompletableSubject.create() <add> .test(true) <add> .assertEmpty(); <add> } <add> <add> @Test <add> public void cancelOnArrival2() { <add> CompletableSubject cs = CompletableSubject.create(); <add> <add> cs.test(); <add> <add> cs <add> .test(true) <add> .assertEmpty(); <add> } <add> <add> @Test <add> public void dispose() { <add> TestHelper.checkDisposed(CompletableSubject.create()); <add> } <add> <add> @Test <add> public void disposeTwice() { <add> CompletableSubject.create() <add> .subscribe(new CompletableObserver() { <add> @Override <add> public void onSubscribe(Disposable d) { <add> assertFalse(d.isDisposed()); <add> <add> d.dispose(); <add> d.dispose(); <add> <add> assertTrue(d.isDisposed()); <add> } <add> <add> @Override <add> public void onError(Throwable e) { <add> <add> } <add> <add> @Override <add> public void onComplete() { <add> <add> } <add> }); <add> } <add> <add> @Test <add> public void onSubscribeDispose() { <add> CompletableSubject cs = CompletableSubject.create(); <add> <add> Disposable d = Disposables.empty(); <add> <add> cs.onSubscribe(d); <add> <add> assertFalse(d.isDisposed()); <add> <add> cs.onComplete(); <add> <add> d = Disposables.empty(); <add> <add> cs.onSubscribe(d); <add> <add> assertTrue(d.isDisposed()); <add> } <add> <add> @Test <add> public void addRemoveRace() { <add> for (int i = 0; i < 500; i++) { <add> final CompletableSubject cs = CompletableSubject.create(); <add> <add> final TestObserver<Void> to = cs.test(); <add> <add> Runnable r1 = new Runnable() { <add> @Override <add> public void run() { <add> cs.test(); <add> } <add> }; <add> <add> Runnable r2 = new Runnable() { <add> @Override <add> public void run() { <add> to.cancel(); <add> } <add> }; <add> TestHelper.race(r1, r2, Schedulers.single()); <add> } <add> } <add>} <ide><path>src/test/java/io/reactivex/subjects/MaybeSubjectTest.java <add>/** <add> * Copyright 2016 Netflix, Inc. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in <add> * compliance with the License. You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software distributed under the License is <add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See <add> * the License for the specific language governing permissions and limitations under the License. <add> */ <add> <add>package io.reactivex.subjects; <add> <add>import static org.junit.Assert.*; <add> <add>import java.io.IOException; <add>import java.util.List; <add> <add>import org.junit.Test; <add> <add>import io.reactivex.*; <add>import io.reactivex.disposables.*; <add>import io.reactivex.observers.TestObserver; <add>import io.reactivex.plugins.RxJavaPlugins; <add>import io.reactivex.schedulers.Schedulers; <add> <add>public class MaybeSubjectTest { <add> <add> @Test <add> public void success() { <add> MaybeSubject<Integer> ms = MaybeSubject.create(); <add> <add> assertFalse(ms.hasValue()); <add> assertNull(ms.getValue()); <add> assertFalse(ms.hasComplete()); <add> assertFalse(ms.hasThrowable()); <add> assertNull(ms.getThrowable()); <add> assertFalse(ms.hasObservers()); <add> assertEquals(0, ms.observerCount()); <add> <add> TestObserver<Integer> to = ms.test(); <add> <add> to.assertEmpty(); <add> <add> assertTrue(ms.hasObservers()); <add> assertEquals(1, ms.observerCount()); <add> <add> ms.onSuccess(1); <add> <add> assertTrue(ms.hasValue()); <add> assertEquals(1, ms.getValue().intValue()); <add> assertFalse(ms.hasComplete()); <add> assertFalse(ms.hasThrowable()); <add> assertNull(ms.getThrowable()); <add> assertFalse(ms.hasObservers()); <add> assertEquals(0, ms.observerCount()); <add> <add> to.assertResult(1); <add> <add> ms.test().assertResult(1); <add> <add> assertTrue(ms.hasValue()); <add> assertEquals(1, ms.getValue().intValue()); <add> assertFalse(ms.hasComplete()); <add> assertFalse(ms.hasThrowable()); <add> assertNull(ms.getThrowable()); <add> assertFalse(ms.hasObservers()); <add> assertEquals(0, ms.observerCount()); <add> } <add> <add> @Test <add> public void once() { <add> MaybeSubject<Integer> ms = MaybeSubject.create(); <add> <add> TestObserver<Integer> to = ms.test(); <add> <add> ms.onSuccess(1); <add> ms.onSuccess(2); <add> <add> List<Throwable> errors = TestHelper.trackPluginErrors(); <add> try { <add> ms.onError(new IOException()); <add> <add> TestHelper.assertError(errors, 0, IOException.class); <add> } finally { <add> RxJavaPlugins.reset(); <add> } <add> ms.onComplete(); <add> <add> to.assertResult(1); <add> } <add> <add> @Test <add> public void error() { <add> MaybeSubject<Integer> ms = MaybeSubject.create(); <add> <add> assertFalse(ms.hasValue()); <add> assertNull(ms.getValue()); <add> assertFalse(ms.hasComplete()); <add> assertFalse(ms.hasThrowable()); <add> assertNull(ms.getThrowable()); <add> assertFalse(ms.hasObservers()); <add> assertEquals(0, ms.observerCount()); <add> <add> TestObserver<Integer> to = ms.test(); <add> <add> to.assertEmpty(); <add> <add> assertTrue(ms.hasObservers()); <add> assertEquals(1, ms.observerCount()); <add> <add> ms.onError(new IOException()); <add> <add> assertFalse(ms.hasValue()); <add> assertNull(ms.getValue()); <add> assertFalse(ms.hasComplete()); <add> assertTrue(ms.hasThrowable()); <add> assertTrue(ms.getThrowable().toString(), ms.getThrowable() instanceof IOException); <add> assertFalse(ms.hasObservers()); <add> assertEquals(0, ms.observerCount()); <add> <add> to.assertFailure(IOException.class); <add> <add> ms.test().assertFailure(IOException.class); <add> <add> assertFalse(ms.hasValue()); <add> assertNull(ms.getValue()); <add> assertFalse(ms.hasComplete()); <add> assertTrue(ms.hasThrowable()); <add> assertTrue(ms.getThrowable().toString(), ms.getThrowable() instanceof IOException); <add> assertFalse(ms.hasObservers()); <add> assertEquals(0, ms.observerCount()); <add> } <add> <add> @Test <add> public void complete() { <add> MaybeSubject<Integer> ms = MaybeSubject.create(); <add> <add> assertFalse(ms.hasValue()); <add> assertNull(ms.getValue()); <add> assertFalse(ms.hasComplete()); <add> assertFalse(ms.hasThrowable()); <add> assertNull(ms.getThrowable()); <add> assertFalse(ms.hasObservers()); <add> assertEquals(0, ms.observerCount()); <add> <add> TestObserver<Integer> to = ms.test(); <add> <add> to.assertEmpty(); <add> <add> assertTrue(ms.hasObservers()); <add> assertEquals(1, ms.observerCount()); <add> <add> ms.onComplete(); <add> <add> assertFalse(ms.hasValue()); <add> assertNull(ms.getValue()); <add> assertTrue(ms.hasComplete()); <add> assertFalse(ms.hasThrowable()); <add> assertNull(ms.getThrowable()); <add> assertFalse(ms.hasObservers()); <add> assertEquals(0, ms.observerCount()); <add> <add> to.assertResult(); <add> <add> ms.test().assertResult(); <add> <add> assertFalse(ms.hasValue()); <add> assertNull(ms.getValue()); <add> assertTrue(ms.hasComplete()); <add> assertFalse(ms.hasThrowable()); <add> assertNull(ms.getThrowable()); <add> assertFalse(ms.hasObservers()); <add> assertEquals(0, ms.observerCount()); <add> } <add> <add> @Test <add> public void nullValue() { <add> MaybeSubject<Integer> ms = MaybeSubject.create(); <add> <add> TestObserver<Integer> to = ms.test(); <add> <add> ms.onSuccess(null); <add> <add> to.assertFailure(NullPointerException.class); <add> } <add> <add> @Test <add> public void nullThrowable() { <add> MaybeSubject<Integer> ms = MaybeSubject.create(); <add> <add> TestObserver<Integer> to = ms.test(); <add> <add> ms.onError(null); <add> <add> to.assertFailure(NullPointerException.class); <add> } <add> <add> @Test <add> public void cancelOnArrival() { <add> MaybeSubject.create() <add> .test(true) <add> .assertEmpty(); <add> } <add> <add> @Test <add> public void cancelOnArrival2() { <add> MaybeSubject<Integer> ms = MaybeSubject.create(); <add> <add> ms.test(); <add> <add> ms <add> .test(true) <add> .assertEmpty(); <add> } <add> <add> @Test <add> public void dispose() { <add> TestHelper.checkDisposed(MaybeSubject.create()); <add> } <add> <add> @Test <add> public void disposeTwice() { <add> MaybeSubject.create() <add> .subscribe(new MaybeObserver<Object>() { <add> @Override <add> public void onSubscribe(Disposable d) { <add> assertFalse(d.isDisposed()); <add> <add> d.dispose(); <add> d.dispose(); <add> <add> assertTrue(d.isDisposed()); <add> } <add> <add> @Override <add> public void onSuccess(Object value) { <add> <add> } <add> <add> @Override <add> public void onError(Throwable e) { <add> <add> } <add> <add> @Override <add> public void onComplete() { <add> <add> } <add> }); <add> } <add> <add> @Test <add> public void onSubscribeDispose() { <add> MaybeSubject<Integer> ms = MaybeSubject.create(); <add> <add> Disposable d = Disposables.empty(); <add> <add> ms.onSubscribe(d); <add> <add> assertFalse(d.isDisposed()); <add> <add> ms.onComplete(); <add> <add> d = Disposables.empty(); <add> <add> ms.onSubscribe(d); <add> <add> assertTrue(d.isDisposed()); <add> } <add> <add> @Test <add> public void addRemoveRace() { <add> for (int i = 0; i < 500; i++) { <add> final MaybeSubject<Integer> ms = MaybeSubject.create(); <add> <add> final TestObserver<Integer> to = ms.test(); <add> <add> Runnable r1 = new Runnable() { <add> @Override <add> public void run() { <add> ms.test(); <add> } <add> }; <add> <add> Runnable r2 = new Runnable() { <add> @Override <add> public void run() { <add> to.cancel(); <add> } <add> }; <add> TestHelper.race(r1, r2, Schedulers.single()); <add> } <add> } <add>} <ide><path>src/test/java/io/reactivex/subjects/SingleSubjectTest.java <add>/** <add> * Copyright 2016 Netflix, Inc. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in <add> * compliance with the License. You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software distributed under the License is <add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See <add> * the License for the specific language governing permissions and limitations under the License. <add> */ <add> <add>package io.reactivex.subjects; <add> <add>import static org.junit.Assert.*; <add> <add>import java.io.IOException; <add>import java.util.List; <add> <add>import org.junit.Test; <add> <add>import io.reactivex.*; <add>import io.reactivex.disposables.*; <add>import io.reactivex.observers.TestObserver; <add>import io.reactivex.plugins.RxJavaPlugins; <add>import io.reactivex.schedulers.Schedulers; <add> <add>public class SingleSubjectTest { <add> <add> @Test <add> public void success() { <add> SingleSubject<Integer> ss = SingleSubject.create(); <add> <add> assertFalse(ss.hasValue()); <add> assertNull(ss.getValue()); <add> assertFalse(ss.hasThrowable()); <add> assertNull(ss.getThrowable()); <add> assertFalse(ss.hasObservers()); <add> assertEquals(0, ss.observerCount()); <add> <add> TestObserver<Integer> to = ss.test(); <add> <add> to.assertEmpty(); <add> <add> assertTrue(ss.hasObservers()); <add> assertEquals(1, ss.observerCount()); <add> <add> ss.onSuccess(1); <add> <add> assertTrue(ss.hasValue()); <add> assertEquals(1, ss.getValue().intValue()); <add> assertFalse(ss.hasThrowable()); <add> assertNull(ss.getThrowable()); <add> assertFalse(ss.hasObservers()); <add> assertEquals(0, ss.observerCount()); <add> <add> to.assertResult(1); <add> <add> ss.test().assertResult(1); <add> <add> assertTrue(ss.hasValue()); <add> assertEquals(1, ss.getValue().intValue()); <add> assertFalse(ss.hasThrowable()); <add> assertNull(ss.getThrowable()); <add> assertFalse(ss.hasObservers()); <add> assertEquals(0, ss.observerCount()); <add> } <add> <add> @Test <add> public void once() { <add> SingleSubject<Integer> ss = SingleSubject.create(); <add> <add> TestObserver<Integer> to = ss.test(); <add> <add> ss.onSuccess(1); <add> ss.onSuccess(2); <add> <add> List<Throwable> errors = TestHelper.trackPluginErrors(); <add> try { <add> ss.onError(new IOException()); <add> <add> TestHelper.assertError(errors, 0, IOException.class); <add> } finally { <add> RxJavaPlugins.reset(); <add> } <add> <add> to.assertResult(1); <add> } <add> <add> @Test <add> public void error() { <add> SingleSubject<Integer> ss = SingleSubject.create(); <add> <add> assertFalse(ss.hasValue()); <add> assertNull(ss.getValue()); <add> assertFalse(ss.hasThrowable()); <add> assertNull(ss.getThrowable()); <add> assertFalse(ss.hasObservers()); <add> assertEquals(0, ss.observerCount()); <add> <add> TestObserver<Integer> to = ss.test(); <add> <add> to.assertEmpty(); <add> <add> assertTrue(ss.hasObservers()); <add> assertEquals(1, ss.observerCount()); <add> <add> ss.onError(new IOException()); <add> <add> assertFalse(ss.hasValue()); <add> assertNull(ss.getValue()); <add> assertTrue(ss.hasThrowable()); <add> assertTrue(ss.getThrowable().toString(), ss.getThrowable() instanceof IOException); <add> assertFalse(ss.hasObservers()); <add> assertEquals(0, ss.observerCount()); <add> <add> to.assertFailure(IOException.class); <add> <add> ss.test().assertFailure(IOException.class); <add> <add> assertFalse(ss.hasValue()); <add> assertNull(ss.getValue()); <add> assertTrue(ss.hasThrowable()); <add> assertTrue(ss.getThrowable().toString(), ss.getThrowable() instanceof IOException); <add> assertFalse(ss.hasObservers()); <add> assertEquals(0, ss.observerCount()); <add> } <add> <add> @Test <add> public void nullValue() { <add> SingleSubject<Integer> ss = SingleSubject.create(); <add> <add> TestObserver<Integer> to = ss.test(); <add> <add> ss.onSuccess(null); <add> <add> to.assertFailure(NullPointerException.class); <add> } <add> <add> @Test <add> public void nullThrowable() { <add> SingleSubject<Integer> ss = SingleSubject.create(); <add> <add> TestObserver<Integer> to = ss.test(); <add> <add> ss.onError(null); <add> <add> to.assertFailure(NullPointerException.class); <add> } <add> <add> @Test <add> public void cancelOnArrival() { <add> SingleSubject.create() <add> .test(true) <add> .assertEmpty(); <add> } <add> <add> @Test <add> public void cancelOnArrival2() { <add> SingleSubject<Integer> ss = SingleSubject.create(); <add> <add> ss.test(); <add> <add> ss <add> .test(true) <add> .assertEmpty(); <add> } <add> <add> @Test <add> public void dispose() { <add> TestHelper.checkDisposed(SingleSubject.create()); <add> } <add> <add> @Test <add> public void disposeTwice() { <add> SingleSubject.create() <add> .subscribe(new SingleObserver<Object>() { <add> @Override <add> public void onSubscribe(Disposable d) { <add> assertFalse(d.isDisposed()); <add> <add> d.dispose(); <add> d.dispose(); <add> <add> assertTrue(d.isDisposed()); <add> } <add> <add> @Override <add> public void onSuccess(Object value) { <add> <add> } <add> <add> @Override <add> public void onError(Throwable e) { <add> <add> } <add> }); <add> } <add> <add> @Test <add> public void onSubscribeDispose() { <add> SingleSubject<Integer> ss = SingleSubject.create(); <add> <add> Disposable d = Disposables.empty(); <add> <add> ss.onSubscribe(d); <add> <add> assertFalse(d.isDisposed()); <add> <add> ss.onSuccess(1); <add> <add> d = Disposables.empty(); <add> <add> ss.onSubscribe(d); <add> <add> assertTrue(d.isDisposed()); <add> } <add> <add> @Test <add> public void addRemoveRace() { <add> for (int i = 0; i < 500; i++) { <add> final SingleSubject<Integer> ss = SingleSubject.create(); <add> <add> final TestObserver<Integer> to = ss.test(); <add> <add> Runnable r1 = new Runnable() { <add> @Override <add> public void run() { <add> ss.test(); <add> } <add> }; <add> <add> Runnable r2 = new Runnable() { <add> @Override <add> public void run() { <add> to.cancel(); <add> } <add> }; <add> TestHelper.race(r1, r2, Schedulers.single()); <add> } <add> } <add>}
6
Javascript
Javascript
handle cases where the item may be destroyed
d646f70f12c68d47ffc319e47d90e45672419d5b
<ide><path>src/workspace.js <ide> module.exports = class Workspace extends Model { <ide> // It's important to call handleGrammarUsed after emitting the did-add event: <ide> // if we activate a package between adding the editor to the registry and emitting <ide> // the package may receive the editor twice from `observeTextEditors`. <del> subscriptions.add( <del> item.observeGrammar(this.handleGrammarUsed.bind(this)) <del> ) <add> // (Note that the item can be destroyed by an `observeTextEditors` handler.) <add> if (!item.isDestroyed()) { <add> subscriptions.add( <add> item.observeGrammar(this.handleGrammarUsed.bind(this)) <add> ) <add> } <ide> } <ide> }) <ide> }
1
Text
Text
fix changelog formatting
1a6c9640001f319fffbcf77fbd934738dfa4ff87
<ide><path>CHANGELOG.md <ide> <ide> - [#19472](https://github.com/emberjs/ember.js/pull/19472) [BUGFIX] Prevent transformation of block params called `attrs` <ide> <del>## v3.27.5 (June 10, 2021) <add>### v3.27.5 (June 10, 2021) <ide> <ide> - [#19597](https://github.com/emberjs/ember.js/pull/19597) [BIGFIX] Fix `<LinkTo>` with nested children <ide> <del>## v3.27.4 (June 9, 2021) <add>### v3.27.4 (June 9, 2021) <ide> <ide> - [#19594](https://github.com/emberjs/ember.js/pull/19594) [BUGFIX] Revert lazy hash changes <ide> - [#19596](https://github.com/emberjs/ember.js/pull/19596) [DOC] Fix "Dormant" addon warning typo <ide> <del>## v3.27.3 (June 3, 2021) <add>### v3.27.3 (June 3, 2021) <ide> <ide> - [#19565](https://github.com/emberjs/ember.js/pull/19565) [BUGFIX] Ensures that `computed` can depend on dynamic `(hash` keys <ide> - [#19571](https://github.com/emberjs/ember.js/pull/19571) [BUGFIX] Extend `Route.prototype.transitionTo` deprecation until 5.0.0
1
Javascript
Javascript
fix commandinstaller tests
0d6b5d9e733948e9ca2bc6ff5b12ccf3d7e805d7
<ide><path>spec/command-installer-spec.js <ide> const path = require('path') <ide> const fs = require('fs-plus') <ide> const temp = require('temp').track() <add>const {it, fit, ffit, fffit, beforeEach, afterEach} = require('./async-spec-helpers'); <ide> const CommandInstaller = require('../src/command-installer') <ide> <ide> describe('CommandInstaller on #darwin', () => { <ide> describe('CommandInstaller on #darwin', () => { <ide> const appDelegate = jasmine.createSpyObj('appDelegate', ['confirm']) <ide> installer = new CommandInstaller(appDelegate) <ide> installer.initialize('2.0.2') <del> spyOn(installer, 'installAtomCommand').andCallFake((__, callback) => callback()) <del> spyOn(installer, 'installApmCommand').andCallFake((__, callback) => callback()) <add> spyOn(installer, 'installAtomCommand').andCallFake((__, callback) => callback(undefined, 'atom')) <add> spyOn(installer, 'installApmCommand').andCallFake((__, callback) => callback(undefined, 'apm')) <ide> <ide> installer.installShellCommandsInteractively() <ide> <ide> describe('CommandInstaller on #darwin', () => { <ide> }) <ide> }) <ide> }) <add> <add> describe('when using a nightly version of atom', () => { <add> beforeEach(() => { <add> installer = new CommandInstaller() <add> installer.initialize('2.2.0-nightly0') <add> }) <add> <add> it("symlinks the atom command as 'atom-nightly'", () => { <add> const installedAtomPath = path.join(installationPath, 'atom-nightly') <add> expect(fs.isFileSync(installedAtomPath)).toBeFalsy() <add> <add> waitsFor(done => { <add> installer.installAtomCommand(false, error => { <add> expect(error).toBeNull() <add> expect(fs.realpathSync(installedAtomPath)).toBe(fs.realpathSync(atomBinPath)) <add> expect(fs.isExecutableSync(installedAtomPath)).toBe(true) <add> expect(fs.isFileSync(path.join(installationPath, 'atom'))).toBe(false) <add> done() <add> }) <add> }) <add> }) <add> <add> it("symlinks the apm command as 'apm-nightly'", () => { <add> const installedApmPath = path.join(installationPath, 'apm-nightly') <add> expect(fs.isFileSync(installedApmPath)).toBeFalsy() <add> <add> waitsFor(done => { <add> installer.installApmCommand(false, error => { <add> expect(error).toBeNull() <add> expect(fs.realpathSync(installedApmPath)).toBe(fs.realpathSync(apmBinPath)) <add> expect(fs.isExecutableSync(installedApmPath)).toBeTruthy() <add> expect(fs.isFileSync(path.join(installationPath, 'nightly'))).toBe(false) <add> done() <add> }) <add> }) <add> }) <add> }) <ide> }) <ide><path>src/command-installer.js <ide> class CommandInstaller { <ide> if (error) return showErrorDialog(error) <ide> this.applicationDelegate.confirm({ <ide> message: 'Commands installed.', <del> detail: `The shell commands '${atomCommandName}' and '${apmCommandName}' are installed.` <add> detail: `The shell commands \`${atomCommandName}\` and \`${apmCommandName}\` are installed.` <ide> }, () => {}) <ide> }) <ide> }) <ide> } <ide> <ide> getCommandNameForChannel (commandName) { <del> switch (atom.getReleaseChannel()) { <add> let channelMatch = this.appVersion.match(/beta|nightly/) <add> let channel = channelMatch ? channelMatch[0] : '' <add> <add> switch (channel) { <ide> case 'beta': <ide> return `${commandName}-beta` <ide> case 'nightly': <ide> return `${commandName}-nightly` <del> case 'dev': <del> return `${commandName}-dev` <ide> default: <ide> return commandName <ide> }
2
Javascript
Javascript
add speced event.defaultprevented
f3166ad082612d156fb36fbec173105939f6c609
<ide><path>src/js/events.js <ide> vjs.fixEvent = function(event) { <ide> } <ide> event.returnValue = false; <ide> event.isDefaultPrevented = returnTrue; <add> event.defaultPrevented = true; <ide> }; <ide> <ide> event.isDefaultPrevented = returnFalse; <add> event.defaultPrevented = false; <ide> <ide> // Stop the event from bubbling <ide> event.stopPropagation = function () { <ide> vjs.trigger = function(elem, event) { <ide> vjs.trigger(parent, event); <ide> <ide> // If at the top of the DOM, triggers the default action unless disabled. <del> } else if (!parent && !event.isDefaultPrevented()) { <add> } else if (!parent && !event.defaultPrevented) { <ide> var targetData = vjs.getData(event.target); <ide> <ide> // Checks if the target has a default action for this event. <ide> vjs.trigger = function(elem, event) { <ide> } <ide> <ide> // Inform the triggerer if the default was prevented by returning false <del> return !event.isDefaultPrevented(); <add> return !event.defaultPrevented; <ide> /* Original version of js ninja events wasn't complete. <ide> * We've since updated to the latest version, but keeping this around <ide> * for now just in case.
1
Javascript
Javascript
pass parsed request-url into the run-method
f6510c05b26934e544fad2c48c93caed7ac2de58
<ide><path>examples/custom-server-hapi/next-wrapper.js <ide> const pathWrapper = (app, pathName, opts) => ({ raw, query }, hapiReply) => <ide> app.renderToHTML(raw.req, raw.res, pathName, query, opts) <ide> .then(hapiReply) <ide> <del>const defaultHandlerWrapper = app => ({ raw }, hapiReply) => <del>app.run(raw.req, raw.res) <add>const defaultHandlerWrapper = app => ({ raw, url }, hapiReply) => <add>app.run(raw.req, raw.res, url) <ide> <ide> module.exports = { pathWrapper, defaultHandlerWrapper }
1
PHP
PHP
add polymorphic many to many tests
e04ed2899eba1c7b04d00d99504f0fb9a2c8e450
<ide><path>src/Illuminate/Database/Eloquent/Relations/MorphToMany.php <ide> protected function setWhere() <ide> { <ide> parent::setWhere(); <ide> <del> $this->query->where($this->morphType, get_class($this->parent)); <add> $this->query->where($this->table.'.'.$this->morphType, get_class($this->parent)); <ide> <ide> return $this; <ide> } <ide> public function addEagerConstraints(array $models) <ide> { <ide> parent::addEagerConstraints($models); <ide> <del> $this->query->where($this->morphType, get_class($this->parent)); <add> $this->query->where($this->table.'.'.$this->morphType, get_class($this->parent)); <ide> } <ide> <ide> /** <ide> protected function createAttachRecord($id, $timed) <ide> */ <ide> protected function newPivotQuery() <ide> { <del> $query = $this->newPivotStatement(); <add> $query = parent::newPivotQuery(); <ide> <del> return $query->where($this->foreignKey, $this->parent->getKey()); <del> } <del> <del> /** <del> * Get a new plain query builder for the pivot table. <del> * <del> * @return \Illuminate\Database\Query\Builder <del> */ <del> public function newPivotStatement() <del> { <del> return parent::newPivotStatement()->where($this->morphType, get_class($this->parent)); <add> return $query->where($this->morphType, get_class($this->parent)); <ide> } <ide> <ide> /** <ide><path>tests/Database/DatabaseEloquentMorphToManyTest.php <add><?php <add> <add>use Mockery as m; <add>use Illuminate\Database\Eloquent\Collection; <add>use Illuminate\Database\Eloquent\Relations\MorphToMany; <add> <add>class DatabaseEloquentMorphToManyTest extends PHPUnit_Framework_TestCase { <add> <add> public function tearDown() <add> { <add> m::close(); <add> } <add> <add> <add> public function testEagerConstraintsAreProperlyAdded() <add> { <add> $relation = $this->getRelation(); <add> $relation->getQuery()->shouldReceive('whereIn')->once()->with('taggables.taggable_id', array(1, 2)); <add> $relation->getQuery()->shouldReceive('where')->once()->with('taggables.taggable_type', get_class($relation->getParent())); <add> $model1 = new EloquentMorphToManyModelStub; <add> $model1->id = 1; <add> $model2 = new EloquentMorphToManyModelStub; <add> $model2->id = 2; <add> $relation->addEagerConstraints(array($model1, $model2)); <add> } <add> <add> <add> public function testAttachInsertsPivotTableRecord() <add> { <add> $relation = $this->getMock('Illuminate\Database\Eloquent\Relations\MorphToMany', array('touchIfTouching'), $this->getRelationArguments()); <add> $query = m::mock('stdClass'); <add> $query->shouldReceive('from')->once()->with('taggables')->andReturn($query); <add> $query->shouldReceive('insert')->once()->with(array(array('taggable_id' => 1, 'taggable_type' => get_class($relation->getParent()), 'tag_id' => 2, 'foo' => 'bar')))->andReturn(true); <add> $relation->getQuery()->shouldReceive('getQuery')->andReturn($mockQueryBuilder = m::mock('StdClass')); <add> $mockQueryBuilder->shouldReceive('newQuery')->once()->andReturn($query); <add> $relation->expects($this->once())->method('touchIfTouching'); <add> <add> $relation->attach(2, array('foo' => 'bar')); <add> } <add> <add> <add> public function testDetachRemovesPivotTableRecord() <add> { <add> $relation = $this->getMock('Illuminate\Database\Eloquent\Relations\MorphToMany', array('touchIfTouching'), $this->getRelationArguments()); <add> $query = m::mock('stdClass'); <add> $query->shouldReceive('from')->once()->with('taggables')->andReturn($query); <add> $query->shouldReceive('where')->once()->with('taggable_id', 1)->andReturn($query); <add> $query->shouldReceive('where')->once()->with('taggable_type', get_class($relation->getParent()))->andReturn($query); <add> $query->shouldReceive('whereIn')->once()->with('tag_id', array(1, 2, 3)); <add> $query->shouldReceive('delete')->once()->andReturn(true); <add> $relation->getQuery()->shouldReceive('getQuery')->andReturn($mockQueryBuilder = m::mock('StdClass')); <add> $mockQueryBuilder->shouldReceive('newQuery')->once()->andReturn($query); <add> $relation->expects($this->once())->method('touchIfTouching'); <add> <add> $this->assertTrue($relation->detach(array(1, 2, 3))); <add> } <add> <add> <add> public function testDetachMethodClearsAllPivotRecordsWhenNoIDsAreGiven() <add> { <add> $relation = $this->getMock('Illuminate\Database\Eloquent\Relations\MorphToMany', array('touchIfTouching'), $this->getRelationArguments()); <add> $query = m::mock('stdClass'); <add> $query->shouldReceive('from')->once()->with('taggables')->andReturn($query); <add> $query->shouldReceive('where')->once()->with('taggable_id', 1)->andReturn($query); <add> $query->shouldReceive('where')->once()->with('taggable_type', get_class($relation->getParent()))->andReturn($query); <add> $query->shouldReceive('whereIn')->never(); <add> $query->shouldReceive('delete')->once()->andReturn(true); <add> $relation->getQuery()->shouldReceive('getQuery')->andReturn($mockQueryBuilder = m::mock('StdClass')); <add> $mockQueryBuilder->shouldReceive('newQuery')->once()->andReturn($query); <add> $relation->expects($this->once())->method('touchIfTouching'); <add> <add> $this->assertTrue($relation->detach()); <add> } <add> <add> <add> public function getRelation() <add> { <add> list($builder, $parent) = $this->getRelationArguments(); <add> <add> return new MorphToMany($builder, $parent, 'taggable', 'taggables', 'taggable_id', 'tag_id'); <add> } <add> <add> <add> public function getRelationArguments() <add> { <add> $parent = m::mock('Illuminate\Database\Eloquent\Model'); <add> $parent->shouldReceive('getKey')->andReturn(1); <add> $parent->shouldReceive('getCreatedAtColumn')->andReturn('created_at'); <add> $parent->shouldReceive('getUpdatedAtColumn')->andReturn('updated_at'); <add> <add> $builder = m::mock('Illuminate\Database\Eloquent\Builder'); <add> $related = m::mock('Illuminate\Database\Eloquent\Model'); <add> $builder->shouldReceive('getModel')->andReturn($related); <add> <add> $related->shouldReceive('getTable')->andReturn('tags'); <add> $related->shouldReceive('getKeyName')->andReturn('id'); <add> <add> $builder->shouldReceive('join')->once()->with('taggables', 'tags.id', '=', 'taggables.tag_id'); <add> $builder->shouldReceive('where')->once()->with('taggables.taggable_id', '=', 1); <add> $builder->shouldReceive('where')->once()->with('taggables.taggable_type', get_class($parent)); <add> <add> return array($builder, $parent, 'taggable', 'taggables', 'taggable_id', 'tag_id', 'relation_name'); <add> } <add> <add>} <add> <add>class EloquentMorphToManyModelStub extends Illuminate\Database\Eloquent\Model { <add> protected $guarded = array(); <add>} <ide>\ No newline at end of file
2
Python
Python
fix wrong type in docstring
a68b3d013c3115f9374b0a24ed99960aabf25d32
<ide><path>celery/canvas.py <ide> class group(Signature): <ide> [4, 8] <ide> <ide> Arguments: <del> *tasks (Signature): A list of signatures that this group will call. <del> If there's only one argument, and that argument is an iterable, <del> then that'll define the list of signatures instead. <add> *tasks (List[Signature]): A list of signatures that this group will <add> call. If there's only one argument, and that argument is an <add> iterable, then that'll define the list of signatures instead. <ide> **options (Any): Execution options applied to all tasks <ide> in the group. <ide>
1
PHP
PHP
form validation working now
a558a1e92240be7a681494ab5a00c76595f44af4
<ide><path>libs/controller.php <ide> function validateErrors () <ide> <ide> function render($action=null, $layout=null, $file=null) <ide> { <del> <ide> $view =& View::getInstance(); <ide> $view->_viewVars =& $this->_viewVars; <ide> $view->action =& $this->action; <ide> function render($action=null, $layout=null, $file=null) <ide> $view->parent =& $this->parent; <ide> $view->viewPath =& $this->viewPath; <ide> $view->params =& $this->params; <del> $view->data =& $this->data; <ide> <add> foreach ($this->models as $key => $value) <add> { <add> if(!empty($this->models[$key]->validationErrors)) <add> { <add> $view->validationErrors[$key] =& $this->models[$key]->validationErrors; <add> } <add> } <add> <ide> return $view->render($action, $layout, $file); <ide> } <ide> <ide><path>libs/helpers/html.php <ide> function submitTag($caption='Submit', $html_options=null) <ide> */ <ide> function inputTag($tag_name, $size=20, $html_options=null) <ide> { <add> $elements = explode("/", $tag_name); <add> <ide> $html_options['size'] = $size; <del> $html_options['value'] = isset($html_options['value'])? $html_options['value']: $this->tagValue($tag_name); <del> $this->tagIsInvalid($tag_name)? $html_options['class'] = 'form_error': null; <del> return sprintf(TAG_INPUT, $tag_name, $this->parseHtmlOptions($html_options, null, '', ' ')); <add> $html_options['value'] = isset($html_options['value'])? $html_options['value']: $this->tagValue($elements[1]); <add> $this->tagIsInvalid($elements[0],$elements[1])? $html_options['class'] = 'form_error': null; <add> return sprintf(TAG_INPUT, $elements[1], $this->parseHtmlOptions($html_options, null, '', ' ')); <ide> } <ide> <ide> /** <ide> function tagValue ($tag_name) <ide> * @param unknown_type $field <ide> * @return unknown <ide> */ <del> function tagIsInvalid ($field) <add> function tagIsInvalid ($model, $field) <ide> { <del> return empty($this->validationErrors[$field])? 0: $this->validationErrors[$field]; <add> return empty($this->validationErrors[$model][$field])? 0: $this->validationErrors[$model][$field]; <ide> } <ide> <ide> /** <ide> function validateErrors () <ide> /** <ide> * Returns a formatted error message for given FORM field, NULL if no errors. <ide> * <add> * @param string $name <ide> * @param string $field <ide> * @param string $text <ide> * @return string If there are errors this method returns an error message, else NULL. <ide> */ <ide> function tagErrorMsg ($field, $text) <ide> { <del> $error = $this->tagIsInvalid($field); <del> <del> if (0 == $error) <add> $elements = explode("/", $field); <add> $error = 1; <add> if ($error == $this->tagIsInvalid($elements[0], $elements[1])) <ide> { <ide> return sprintf(SHORT_ERROR_MESSAGE, is_array($text)? (empty($text[$error-1])? 'Error in field': $text[$error-1]): $text); <ide> } <ide><path>libs/view.php <ide> function _render($___viewFn, $___data_for_view, $___play_safe = true) <ide> ${$helper}->params = $this->params; <ide> ${$helper}->action = $this->action; <ide> ${$helper}->data = $this->data; <add> ${$helper}->validationErrors = $this->validationErrors; <ide> } <ide> } <ide> }
3
Javascript
Javascript
fix memory leak in settimeout
f2f30286bf5e5332b1e788f3c745c5231e1b098b
<ide><path>lib/timers.js <ide> exports.setTimeout = function(callback, after) { <ide> timer = new Timer(); <ide> <ide> if (arguments.length <= 2) { <del> timer._onTimeout = callback; <add> timer._onTimeout = function() { <add> callback(); <add> timer.close(); <add> } <ide> } else { <ide> var args = Array.prototype.slice.call(arguments, 2); <ide> timer._onTimeout = function() { <ide><path>test/simple/test-timers-zero-timeout.js <ide> var assert = require('assert'); <ide> var ncalled = 0; <ide> <ide> setTimeout(f, 0, 'foo', 'bar', 'baz'); <add> var timer = setTimeout(function(){}, 0); <ide> <ide> function f(a, b, c) { <ide> assert.equal(a, 'foo'); <ide> var assert = require('assert'); <ide> <ide> process.on('exit', function() { <ide> assert.equal(ncalled, 1); <add> // timer should be already closed <add> assert.equal(timer.close(), -1); <ide> }); <ide> })(); <ide>
2
Java
Java
set heartbeat to 0,0 on connect to message broker
a109d6adc701e981eede518fd4b707b888ea4623
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/stomp/StompBrokerRelayMessageHandler.java <ide> public void handleMessage(Message<?> message) { <ide> <ide> try { <ide> if ((destination == null) || supportsDestination(destination)) { <add> <ide> if (logger.isTraceEnabled()) { <ide> logger.trace("Processing message: " + message); <ide> } <del> handleInternal(message, messageType, sessionId); <add> <add> if (SimpMessageType.CONNECT.equals(messageType)) { <add> headers.setHeartbeat(0, 0); // TODO: disable for now <add> message = MessageBuilder.withPayloadAndHeaders(message.getPayload(), headers).build(); <add> RelaySession session = new RelaySession(sessionId); <add> this.relaySessions.put(sessionId, session); <add> session.open(message); <add> } <add> else if (SimpMessageType.DISCONNECT.equals(messageType)) { <add> RelaySession session = this.relaySessions.remove(sessionId); <add> if (session == null) { <add> if (logger.isTraceEnabled()) { <add> logger.trace("Session already removed, sessionId=" + sessionId); <add> } <add> return; <add> } <add> session.forward(message); <add> } <add> else { <add> RelaySession session = this.relaySessions.get(sessionId); <add> if (session == null) { <add> logger.warn("Session id=" + sessionId + " not found. Ignoring message: " + message); <add> return; <add> } <add> session.forward(message); <add> } <ide> } <ide> } <ide> catch (Throwable t) { <ide> protected boolean supportsDestination(String destination) { <ide> return false; <ide> } <ide> <del> protected void handleInternal(Message<?> message, SimpMessageType messageType, String sessionId) { <del> if (SimpMessageType.CONNECT.equals(messageType)) { <del> RelaySession session = new RelaySession(sessionId); <del> this.relaySessions.put(sessionId, session); <del> session.open(message); <del> } <del> else if (SimpMessageType.DISCONNECT.equals(messageType)) { <del> RelaySession session = this.relaySessions.remove(sessionId); <del> if (session == null) { <del> if (logger.isTraceEnabled()) { <del> logger.trace("Session already removed, sessionId=" + sessionId); <del> } <del> return; <del> } <del> session.forward(message); <del> } <del> else { <del> RelaySession session = this.relaySessions.get(sessionId); <del> if (session == null) { <del> logger.warn("Session id=" + sessionId + " not found. Ignoring message: " + message); <del> return; <del> } <del> session.forward(message); <del> } <del> } <del> <ide> <ide> private class RelaySession { <ide>
1
Ruby
Ruby
simplify restore state condition
48e606f25af80cf1b774c659ed444322fa11106c
<ide><path>activerecord/lib/active_record/transactions.rb <ide> def force_clear_transaction_record_state <ide> <ide> # Restore the new record state and id of a record that was previously saved by a call to save_record_state. <ide> def restore_transaction_record_state(force_restore_state = false) <del> if @_start_transaction_state <del> transaction_level = (@_start_transaction_state[:level] || 0) - 1 <del> if transaction_level < 1 || force_restore_state <del> restore_state = @_start_transaction_state <add> if restore_state = @_start_transaction_state <add> if force_restore_state || restore_state[:level] <= 1 <ide> @new_record = restore_state[:new_record] <ide> @destroyed = restore_state[:destroyed] <ide> @attributes = restore_state[:attributes].map do |attr|
1
Javascript
Javascript
create `userefeffect` utility
cb25638a0ec5e639afbd4bf23386783c675003f0
<ide><path>Libraries/Utilities/__tests__/useRefEffect-test.js <add>/** <add> * Copyright (c) Facebook, Inc. and its affiliates. <add> * <add> * This source code is licensed under the MIT license found in the <add> * LICENSE file in the root directory of this source tree. <add> * <add> * @emails oncall+react_native <add> * @flow strict-local <add> * @format <add> */ <add> <add>import useRefEffect from '../useRefEffect'; <add>import * as React from 'react'; <add>import {View} from 'react-native'; <add>import {act, create} from 'react-test-renderer'; <add> <add>/** <add> * TestView provide a component execution environment to test hooks. <add> */ <add>function TestView({childKey = null, effect}) { <add> const ref = useRefEffect(effect); <add> return <View key={childKey} ref={ref} testID={childKey} />; <add>} <add> <add>/** <add> * TestEffect represents an effect invocation. <add> */ <add>class TestEffect { <add> name: string; <add> key: ?string; <add> constructor(name: string, key: ?string) { <add> this.name = name; <add> this.key = key; <add> } <add> static called(name: string, key: ?string) { <add> // $FlowIssue[prop-missing] - Flow does not support type augmentation. <add> return expect.effect(name, key); <add> } <add>} <add> <add>/** <add> * TestEffectCleanup represents an effect cleanup invocation. <add> */ <add>class TestEffectCleanup { <add> name: string; <add> key: ?string; <add> constructor(name: string, key: ?string) { <add> this.name = name; <add> this.key = key; <add> } <add> static called(name: string, key: ?string) { <add> // $FlowIssue[prop-missing] - Flow does not support type augmentation. <add> return expect.effectCleanup(name, key); <add> } <add>} <add> <add>/** <add> * extend.effect and expect.extendCleanup make it easier to assert expected <add> * values. But use TestEffect.called and TestEffectCleanup.called instead of <add> * extend.effect and expect.extendCleanup because of Flow. <add> */ <add>expect.extend({ <add> effect(received, name, key) { <add> const pass = <add> received instanceof TestEffect && <add> received.name === name && <add> received.key === key; <add> return {pass}; <add> }, <add> effectCleanup(received, name, key) { <add> const pass = <add> received instanceof TestEffectCleanup && <add> received.name === name && <add> received.key === key; <add> return {pass}; <add> }, <add>}); <add> <add>function mockEffectRegistry(): { <add> mockEffect: string => () => () => void, <add> mockEffectWithoutCleanup: string => () => void, <add> registry: $ReadOnlyArray<TestEffect | TestEffectCleanup>, <add>} { <add> const registry = []; <add> return { <add> mockEffect(name: string): () => () => void { <add> return instance => { <add> const key = instance?.props?.testID; <add> registry.push(new TestEffect(name, key)); <add> return () => { <add> registry.push(new TestEffectCleanup(name, key)); <add> }; <add> }; <add> }, <add> mockEffectWithoutCleanup(name: string): () => void { <add> return instance => { <add> const key = instance?.props?.testID; <add> registry.push(new TestEffect(name, key)); <add> }; <add> }, <add> registry, <add> }; <add>} <add> <add>test('calls effect without cleanup', () => { <add> let root; <add> <add> const {mockEffectWithoutCleanup, registry} = mockEffectRegistry(); <add> const effectA = mockEffectWithoutCleanup('A'); <add> <add> act(() => { <add> root = create(<TestView childKey="foo" effect={effectA} />); <add> }); <add> <add> expect(registry).toEqual([TestEffect.called('A', 'foo')]); <add> <add> act(() => { <add> root.unmount(); <add> }); <add> <add> expect(registry).toEqual([TestEffect.called('A', 'foo')]); <add>}); <add> <add>test('calls effect and cleanup', () => { <add> let root; <add> <add> const {mockEffect, registry} = mockEffectRegistry(); <add> const effectA = mockEffect('A'); <add> <add> act(() => { <add> root = create(<TestView childKey="foo" effect={effectA} />); <add> }); <add> <add> expect(registry).toEqual([TestEffect.called('A', 'foo')]); <add> <add> act(() => { <add> root.unmount(); <add> }); <add> <add> expect(registry).toEqual([ <add> TestEffect.called('A', 'foo'), <add> TestEffectCleanup.called('A', 'foo'), <add> ]); <add>}); <add> <add>test('cleans up old effect before calling new effect', () => { <add> let root; <add> <add> const {mockEffect, registry} = mockEffectRegistry(); <add> const effectA = mockEffect('A'); <add> const effectB = mockEffect('B'); <add> <add> act(() => { <add> root = create(<TestView childKey="foo" effect={effectA} />); <add> }); <add> <add> act(() => { <add> root.update(<TestView childKey="foo" effect={effectB} />); <add> }); <add> <add> expect(registry).toEqual([ <add> TestEffect.called('A', 'foo'), <add> TestEffectCleanup.called('A', 'foo'), <add> TestEffect.called('B', 'foo'), <add> ]); <add> <add> act(() => { <add> root.unmount(); <add> }); <add> <add> expect(registry).toEqual([ <add> TestEffect.called('A', 'foo'), <add> TestEffectCleanup.called('A', 'foo'), <add> TestEffect.called('B', 'foo'), <add> TestEffectCleanup.called('B', 'foo'), <add> ]); <add>}); <add> <add>test('calls cleanup and effect on new instance', () => { <add> let root; <add> <add> const {mockEffect, registry} = mockEffectRegistry(); <add> const effectA = mockEffect('A'); <add> <add> act(() => { <add> root = create(<TestView childKey="foo" effect={effectA} />); <add> }); <add> <add> act(() => { <add> root.update(<TestView childKey="bar" effect={effectA} />); <add> }); <add> <add> expect(registry).toEqual([ <add> TestEffect.called('A', 'foo'), <add> TestEffectCleanup.called('A', 'foo'), <add> TestEffect.called('A', 'bar'), <add> ]); <add> <add> act(() => { <add> root.unmount(); <add> }); <add> <add> expect(registry).toEqual([ <add> TestEffect.called('A', 'foo'), <add> TestEffectCleanup.called('A', 'foo'), <add> TestEffect.called('A', 'bar'), <add> TestEffectCleanup.called('A', 'bar'), <add> ]); <add>}); <add> <add>test('cleans up old effect before calling new effect with new instance', () => { <add> let root; <add> <add> const {mockEffect, registry} = mockEffectRegistry(); <add> const effectA = mockEffect('A'); <add> const effectB = mockEffect('B'); <add> <add> act(() => { <add> root = create(<TestView childKey="foo" effect={effectA} />); <add> }); <add> <add> act(() => { <add> root.update(<TestView childKey="bar" effect={effectB} />); <add> }); <add> <add> expect(registry).toEqual([ <add> TestEffect.called('A', 'foo'), <add> TestEffectCleanup.called('A', 'foo'), <add> TestEffect.called('B', 'bar'), <add> ]); <add> <add> act(() => { <add> root.unmount(); <add> }); <add> <add> expect(registry).toEqual([ <add> TestEffect.called('A', 'foo'), <add> TestEffectCleanup.called('A', 'foo'), <add> TestEffect.called('B', 'bar'), <add> TestEffectCleanup.called('B', 'bar'), <add> ]); <add>}); <ide><path>Libraries/Utilities/useRefEffect.js <add>/** <add> * Copyright (c) Facebook, Inc. and its affiliates. <add> * <add> * This source code is licensed under the MIT license found in the <add> * LICENSE file in the root directory of this source tree. <add> * <add> * @flow strict <add> * @format <add> */ <add> <add>import {useCallback, useRef} from 'react'; <add> <add>type CallbackRef<T> = T => mixed; <add> <add>/** <add> * Constructs a callback ref that provides similar semantics as `useEffect`. The <add> * supplied `effect` callback will be called with non-null component instances. <add> * The `effect` callback can also optionally return a cleanup function. <add> * <add> * When a component is updated or unmounted, the cleanup function is called. The <add> * `effect` callback will then be called again, if applicable. <add> * <add> * When a new `effect` callback is supplied, the previously returned cleanup <add> * function will be called before the new `effect` callback is called with the <add> * same instance. <add> * <add> * WARNING: The `effect` callback should be stable (e.g. using `useCallback`). <add> */ <add>export default function useRefEffect<TInstance>( <add> effect: TInstance => (() => void) | void, <add>): CallbackRef<TInstance | null> { <add> const cleanupRef = useRef<(() => void) | void>(undefined); <add> return useCallback( <add> instance => { <add> if (cleanupRef.current) { <add> cleanupRef.current(); <add> cleanupRef.current = undefined; <add> } <add> if (instance != null) { <add> cleanupRef.current = effect(instance); <add> } <add> }, <add> [effect], <add> ); <add>}
2
Javascript
Javascript
add another resampling test for points near poles
ad30a774f03123746372ae4c45ad7c1ee6cf8da2
<ide><path>test/geo/path-test.js <ide> suite.addBatch({ <ide> .rotate([0, 0]) <ide> .precision(1)); <ide> }, <del> "doesn't introduce artefacts in areas of high distortion": function(path) { <add> "resampling near poles": function(path) { <ide> path({type: "LineString", coordinates: [[0, 88], [180, 89]]}); <ide> assert.isTrue(testContext.buffer().filter(function(d) { return d.type === "lineTo"; }).length > 1); <add> path({type: "LineString", coordinates: [[180, 90], [1, 89.5]]}); <add> assert.isTrue(testContext.buffer().filter(function(d) { return d.type === "lineTo"; }).length > 1); <ide> } <ide> }, <ide> "rotate([0, 0, 0])": {
1
Ruby
Ruby
expose some keg methods
da0df8eabea0caf939af80ed7f0226a4b1c0fefd
<ide><path>Library/Homebrew/keg_fix_install_names.rb <ide> def each_unique_file_matching string <ide> end <ide> end <ide> <del> private <del> <ide> def install_name_tool(*args) <ide> system(MacOS.locate("install_name_tool"), *args) <ide> end
1
PHP
PHP
fix more links
4e04ef566f1f73f6b67e38f4cb8ac3899750d2a8
<ide><path>src/Utility/Text.php <ide> protected static function _wordWrap($text, $width = 72, $break = "\n", $cut = fa <ide> * @param string|array $phrase The phrase or phrases that will be searched. <ide> * @param array $options An array of HTML attributes and options. <ide> * @return string The highlighted text <del> * @link https://book.cakephp.org/3.0/en/core-libraries/string.html#highlighting-substrings <add> * @link https://book.cakephp.org/3.0/en/core-libraries/text.html#highlighting-substrings <ide> */ <ide> public static function highlight($text, $phrase, array $options = []) <ide> { <ide> public static function tail($text, $length = 100, array $options = []) <ide> * @param int $length Length of returned string, including ellipsis. <ide> * @param array $options An array of HTML attributes and options. <ide> * @return string Trimmed string. <del> * @link https://book.cakephp.org/3.0/en/core-libraries/string.html#truncating-text <add> * @link https://book.cakephp.org/3.0/en/core-libraries/text.html#truncating-text <ide> */ <ide> public static function truncate($text, $length = 100, array $options = []) <ide> { <ide> protected static function _removeLastWord($text) <ide> * @param int $radius The amount of characters that will be returned on each side of the founded phrase <ide> * @param string $ellipsis Ending that will be appended <ide> * @return string Modified string <del> * @link https://book.cakephp.org/3.0/en/core-libraries/string.html#extracting-an-excerpt <add> * @link https://book.cakephp.org/3.0/en/core-libraries/text.html#extracting-an-excerpt <ide> */ <ide> public static function excerpt($text, $phrase, $radius = 100, $ellipsis = '...') <ide> { <ide> public static function excerpt($text, $phrase, $radius = 100, $ellipsis = '...') <ide> * @param string|null $and The word used to join the last and second last items together with. Defaults to 'and'. <ide> * @param string $separator The separator used to join all the other items together. Defaults to ', '. <ide> * @return string The glued together string. <del> * @link https://book.cakephp.org/3.0/en/core-libraries/string.html#converting-an-array-to-sentence-form <add> * @link https://book.cakephp.org/3.0/en/core-libraries/text.html#converting-an-array-to-sentence-form <ide> */ <ide> public static function toList(array $list, $and = null, $separator = ', ') <ide> { <ide> public static function ascii(array $array) <ide> * @param mixed $default Value to be returned when invalid size was used, for example 'Unknown type' <ide> * @return mixed Number of bytes as integer on success, `$default` on failure if not false <ide> * @throws \InvalidArgumentException On invalid Unit type. <del> * @link https://book.cakephp.org/3.0/en/core-libraries/helpers/text.html <add> * @link https://book.cakephp.org/3.0/en/core-libraries/text.html#Cake\Utility\Text::parseFileSize <ide> */ <ide> public static function parseFileSize($size, $default = false) <ide> {
1
Text
Text
add note about not capitalizing php in tags
93c092d5e7a2b8528f3d6fd2668c471baee03197
<ide><path>guide/english/php/php-tags/index.md <ide> title: PHP tags <ide> <ide> When PHP parses a file, it looks for opening and closing tags, which are `<?php` and `?>` which tell PHP to start and stop interpreting the code between them. Parsing in this manner allows PHP to be embedded in all sorts of different documents, as everything outside of a pair of opening and closing tags is ignored by the PHP parser. <ide> <add>Note: using capital letters in tag declaration `<?PHP` goes against the PSR-1 basic coding standard and should not be used. <add> <ide> PHP also allows for short open tag `<?` (which is discouraged since it is only available if enabled using the `short_open_tag php.ini` configuration file directive, or if PHP was configured with the `--enable-short-tags` option). <ide> <ide> If a file is pure PHP code, it is preferable to omit the PHP closing tag at the end of the file. This prevents accidental whitespace or new lines being added after the PHP closing tag, which may cause unwanted effects because PHP will start output buffering when there is no intention from the programmer to send any output at that point in the script.
1
Ruby
Ruby
add check for insecure hackage urls
58cf0ec2b908d4bcd12d77036ef8eac60ee95697
<ide><path>Library/Homebrew/cmd/audit.rb <ide> def audit_urls <ide> %r{^http://www\.mirrorservice\.org/}, <ide> %r{^http://launchpad\.net/}, <ide> %r{^http://bitbucket\.org/}, <add> %r{^http://hackage\.haskell\.org/}, <ide> %r{^http://(?:[^/]*\.)?archive\.org} <ide> problem "Please use https:// for #{p}" <ide> when %r{^http://search\.mcpan\.org/CPAN/(.*)}i
1
PHP
PHP
update validation for new mastercard card numbers
319f66a00bd9d61120e5b62dc92dccedf8140160
<ide><path>src/Validation/Validation.php <ide> public static function cc($check, $type = 'fast', $deep = false, $regex = null) <ide> 'jcb' => '/^(3\\d{4}|2100|1800)\\d{11}$/', <ide> 'maestro' => '/^(?:5020|6\\d{3})\\d{12}$/', <ide> 'mc' => '/^5[1-5]\\d{14}$/', <add> 'mc' => '/^(5[1-5]\\d{14})|(2(?:22[1-9]|2[3-9][0-9]|[3-6][0-9]{2}|7[0-1][0-9]|720)\\d{12})$/', <ide> 'solo' => '/^(6334[5-9][0-9]|6767[0-9]{2})\\d{10}(\\d{2,3})?$/', <ide> 'switch' => '/^(?:49(03(0[2-9]|3[5-9])|11(0[1-2]|7[4-9]|8[1-2])|36[0-9]{2})\\d{10}(\\d{2,3})?)|(?:564182\\d{10}(\\d{2,3})?)|(6(3(33[0-4][0-9])|759[0-9]{2})\\d{10}(\\d{2,3})?)$/', <ide> 'visa' => '/^4\\d{12}(\\d{3})?$/', <ide><path>tests/TestCase/Validation/ValidationTest.php <ide> public function testCc() <ide> $this->assertTrue(Validation::cc('5467639122779531', ['mc'])); <ide> $this->assertTrue(Validation::cc('5297350261550024', ['mc'])); <ide> $this->assertTrue(Validation::cc('5162739131368058', ['mc'])); <add> //Mastercard (additional 2016 BIN) <add> $this->assertTrue(Validation::cc('2221000000000009', ['mc'])); <add> $this->assertTrue(Validation::cc('2720999999999996', ['mc'])); <add> $this->assertTrue(Validation::cc('2223000010005798', ['mc'])); <add> $this->assertTrue(Validation::cc('2623430710235708', ['mc'])); <add> $this->assertTrue(Validation::cc('2420452519835723', ['mc'])); <ide> //Solo 16 <ide> $this->assertTrue(Validation::cc('6767432107064987', ['solo'])); <ide> $this->assertTrue(Validation::cc('6334667758225411', ['solo']));
2
Text
Text
change allow_none to allow_null
080fa4f5f863609f5647ce1424f13b01e9f427ad
<ide><path>docs/topics/3.0-announcement.md <ide> We now use the following: <ide> * `Field` is the base class for all fields. It does not include any default implementation for either serializing or deserializing data. <ide> * `ReadOnlyField` is a concrete implementation for read-only fields that simply returns the attribute value without modification. <ide> <del>#### The `required`, `allow_none`, `allow_blank` and `default` arguments. <add>#### The `required`, `allow_null`, `allow_blank` and `default` arguments. <ide> <ide> REST framework now has more explicit and clear control over validating empty values for fields. <ide> <ide> Previously the meaning of the `required=False` keyword argument was underspecified. In practice its use meant that a field could either be not included in the input, or it could be included, but be `None` or the empty string. <ide> <del>We now have a better separation, with separate `required`, `allow_none` and `allow_blank` arguments. <add>We now have a better separation, with separate `required`, `allow_null` and `allow_blank` arguments. <ide> <ide> The following set of arguments are used to control validation of empty values: <ide> <ide> * `required=False`: The value does not need to be present in the input, and will not be passed to `.create()` or `.update()` if it is not seen. <ide> * `default=<value>`: The value does not need to be present in the input, and a default value will be passed to `.create()` or `.update()` if it is not seen. <del>* `allow_none=True`: `None` is a valid input. <add>* `allow_null=True`: `None` is a valid input. <ide> * `allow_blank=True`: `''` is valid input. For `CharField` and subclasses only. <ide> <del>Typically you'll want to use `required=False` if the corresponding model field has a default value, and additionally set either `allow_none=True` or `allow_blank=True` if required. <add>Typically you'll want to use `required=False` if the corresponding model field has a default value, and additionally set either `allow_null=True` or `allow_blank=True` if required. <ide> <ide> The `default` argument is also available and always implies that the field is not required to be in the input. It is unnecessary to use the `required` argument when a default is specified, and doing so will result in an error. <ide>
1
Go
Go
use errors.wrap() in daemon/config
a8d2b29e8d515296e559228a72895daba138c8be
<ide><path>daemon/config/config.go <ide> package config // import "github.com/docker/docker/daemon/config" <ide> import ( <ide> "bytes" <ide> "encoding/json" <del> "errors" <ide> "fmt" <ide> "io" <ide> "io/ioutil" <ide> import ( <ide> "github.com/docker/docker/pkg/discovery" <ide> "github.com/docker/docker/registry" <ide> "github.com/imdario/mergo" <add> "github.com/pkg/errors" <ide> "github.com/sirupsen/logrus" <ide> "github.com/spf13/pflag" <ide> ) <ide> func ParseClusterAdvertiseSettings(clusterStore, clusterAdvertise string) (strin <ide> <ide> advertise, err := discovery.ParseAdvertise(clusterAdvertise) <ide> if err != nil { <del> return "", fmt.Errorf("discovery advertise parsing failed (%v)", err) <add> return "", errors.Wrap(err, "discovery advertise parsing failed") <ide> } <ide> return advertise, nil <ide> } <ide> func Reload(configFile string, flags *pflag.FlagSet, reload func(*Config)) error <ide> newConfig, err := getConflictFreeConfiguration(configFile, flags) <ide> if err != nil { <ide> if flags.Changed("config-file") || !os.IsNotExist(err) { <del> return fmt.Errorf("unable to configure the Docker daemon with file %s: %v", configFile, err) <add> return errors.Wrapf(err, "unable to configure the Docker daemon with file %s", configFile) <ide> } <ide> newConfig = New() <ide> } <ide> <ide> if err := Validate(newConfig); err != nil { <del> return fmt.Errorf("file configuration validation failed (%v)", err) <add> return errors.Wrap(err, "file configuration validation failed") <ide> } <ide> <ide> // Check if duplicate label-keys with different values are found <ide> func MergeDaemonConfigurations(flagsConfig *Config, flags *pflag.FlagSet, config <ide> } <ide> <ide> if err := Validate(fileConfig); err != nil { <del> return nil, fmt.Errorf("configuration validation from file failed (%v)", err) <add> return nil, errors.Wrap(err, "configuration validation from file failed") <ide> } <ide> <ide> // merge flags configuration on top of the file configuration <ide> func MergeDaemonConfigurations(flagsConfig *Config, flags *pflag.FlagSet, config <ide> // We need to validate again once both fileConfig and flagsConfig <ide> // have been merged <ide> if err := Validate(fileConfig); err != nil { <del> return nil, fmt.Errorf("merged configuration validation from file and command line flags failed (%v)", err) <add> return nil, errors.Wrap(err, "merged configuration validation from file and command line flags failed") <ide> } <ide> <ide> return fileConfig, nil <ide> func getConflictFreeConfiguration(configFile string, flags *pflag.FlagSet) (*Con <ide> logrus.Warn(`The "graph" config file option is deprecated. Please use "data-root" instead.`) <ide> <ide> if config.Root != "" { <del> return nil, fmt.Errorf(`cannot specify both "graph" and "data-root" config file options`) <add> return nil, errors.New(`cannot specify both "graph" and "data-root" config file options`) <ide> } <ide> <ide> config.Root = config.RootDeprecated <ide><path>integration-cli/docker_cli_daemon_test.go <ide> func (s *DockerDaemonSuite) TestRunWithRuntimeFromConfigFile(c *check.C) { <ide> <ide> content, err := s.d.ReadLogFile() <ide> c.Assert(err, checker.IsNil) <del> c.Assert(string(content), checker.Contains, `file configuration validation failed (runtime name 'runc' is reserved)`) <add> c.Assert(string(content), checker.Contains, `file configuration validation failed: runtime name 'runc' is reserved`) <ide> <ide> // Check that we can select a default runtime <ide> config = `
2
Python
Python
change disable_verify_ssl behaviour
2071519e7462cfc7613c50dc42acb4672dbca4a7
<ide><path>airflow/kubernetes/kube_client.py <ide> has_kubernetes = True <ide> <ide> def _disable_verify_ssl() -> None: <del> configuration = Configuration() <add> if hasattr(Configuration, 'get_default_copy'): <add> configuration = Configuration.get_default_copy() <add> else: <add> configuration = Configuration() <ide> configuration.verify_ssl = False <ide> Configuration.set_default(configuration) <ide> <ide> def get_kube_client( <ide> if conf.getboolean('kubernetes', 'enable_tcp_keepalive'): <ide> _enable_tcp_keepalive() <ide> <del> if not conf.getboolean('kubernetes', 'verify_ssl'): <del> _disable_verify_ssl() <del> <ide> if in_cluster: <ide> config.load_incluster_config() <ide> else: <ide> def get_kube_client( <ide> config_file = conf.get('kubernetes', 'config_file', fallback=None) <ide> config.load_kube_config(config_file=config_file, context=cluster_context) <ide> <add> if not conf.getboolean('kubernetes', 'verify_ssl'): <add> _disable_verify_ssl() <add> <ide> return client.CoreV1Api() <ide><path>tests/kubernetes/test_client.py <ide> def test_load_file_config(self, config): <ide> assert config.load_incluster_config.not_called <ide> assert config.load_kube_config.called <ide> <add> @mock.patch('airflow.kubernetes.kube_client.config') <add> @mock.patch('airflow.kubernetes.kube_client.conf') <add> def test_load_config_disable_ssl(self, conf, config): <add> conf.getboolean.return_value = False <add> get_kube_client(in_cluster=False) <add> conf.getboolean.assert_called_with('kubernetes', 'verify_ssl') <add> # Support wide range of kube client libraries <add> if hasattr(Configuration, 'get_default_copy'): <add> configuration = Configuration.get_default_copy() <add> else: <add> configuration = Configuration() <add> self.assertFalse(configuration.verify_ssl) <add> <ide> def test_enable_tcp_keepalive(self): <ide> socket_options = [ <ide> (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
2
Javascript
Javascript
add htmlbars version of makeboundhelper
6ebca89f578125c617e9b0770d694730649c2d5e
<ide><path>packages/ember-htmlbars/lib/main.js <ide> import { DOMHelper } from "morph"; <ide> import template from "ember-htmlbars/system/template"; <ide> import compile from "ember-htmlbars/system/compile"; <ide> import makeViewHelper from "ember-htmlbars/system/make-view-helper"; <add>import makeBoundHelper from "ember-htmlbars/system/make_bound_helper"; <ide> <ide> import { <ide> registerHelper, <ide> if (Ember.FEATURES.isEnabled('ember-htmlbars')) { <ide> registerHelper: registerHelper, <ide> template: template, <ide> compile: compile, <del> makeViewHelper: makeViewHelper <add> makeViewHelper: makeViewHelper, <add> makeBoundHelper: makeBoundHelper <ide> }; <ide> <ide> } <ide><path>packages/ember-htmlbars/lib/system/make_bound_helper.js <add>/** <add>@module ember <add>@submodule ember-htmlbars <add>*/ <add> <add>import Ember from "ember-metal/core"; // Ember.FEATURES, Ember.assert, Ember.Handlebars, Ember.lookup <add>import { IS_BINDING } from "ember-metal/mixin"; <add>import { appendSimpleBoundView } from "ember-views/views/simple_bound_view"; <add>import Helper from "ember-htmlbars/system/helper"; <add> <add>import Stream from "ember-metal/streams/stream"; <add>import { <add> readArray, <add> readHash, <add> subscribe <add>} from "ember-metal/streams/utils"; <add> <add>/** <add> Create a bound helper. Accepts a function that receives the ordered and hash parameters <add> from the template. If a bound property was provided in the template it will be resolved to its <add> value and any changes to the bound property cause the helper function to be re-ran with the updated <add> values. <add> <add> * `params` - An array of resolved ordered parameters. <add> * `hash` - An object containing the hash parameters. <add> <add> For example: <add> <add> * With an unqouted ordered parameter: <add> <add> ```javascript <add> {{x-capitalize foo}} <add> ``` <add> <add> Assuming `foo` was set to `"bar"`, the bound helper would receive `["bar"]` as its first argument, and <add> an empty hash as its second. <add> <add> * With a quoted ordered parameter: <add> <add> ```javascript <add> {{x-capitalize "foo"}} <add> ``` <add> <add> The bound helper would receive `["foo"]` as its first argument, and an empty hash as its second. <add> <add> * With an unquoted hash parameter: <add> <add> ```javascript <add> {{x-repeat "foo" count=repeatCount}} <add> ``` <add> <add> Assuming that `repeatCount` resolved to 2, the bound helper would receive `["foo"]` as its first argument, <add> and { count: 2 } as its second. <add> <add> @method makeBoundHelper <add> @for Ember.HTMLBars <add> @param {Function} function <add> @since 1.10.0 <add>*/ <add>export default function makeBoundHelper(fn) { <add> function helperFunc(params, hash, options, env) { <add> var view = this; <add> <add> Ember.assert("makeBoundHelper generated helpers do not support use with blocks", !options.render); <add> <add> for (var prop in hash) { <add> if (IS_BINDING.test(prop)) { <add> hash[prop.slice(0, -7)] = view._getBindingForStream(hash[prop]); <add> <add> delete hash[prop]; <add> } <add> } <add> <add> function valueFn() { <add> return fn.call(view, readArray(params), readHash(hash), options, env); <add> } <add> <add> if (env.data.isUnbound) { <add> return valueFn(); <add> } else { <add> var lazyValue = new Stream(valueFn); <add> <add> appendSimpleBoundView(this, options.morph, lazyValue); <add> <add> var param; <add> for (var i = 0, l = params.length; i < l; i++) { <add> param = params[i]; <add> subscribe(param, lazyValue.notify, lazyValue); <add> } <add> <add> for (prop in hash) { <add> param = hash[prop]; <add> subscribe(param, lazyValue.notify, lazyValue); <add> } <add> } <add> } <add> <add> return new Helper(helperFunc); <add>} <ide><path>packages/ember-htmlbars/tests/system/make_bound_helper_test.js <add>import EmberView from "ember-views/views/view"; <add>import run from "ember-metal/run_loop"; <add>import Container from "container"; <add>import makeBoundHelper from "ember-htmlbars/system/make_bound_helper"; <add>import compile from "ember-htmlbars/system/compile"; <add> <add>var view, container; <add> <add>function appendView(view) { <add> run(view, 'appendTo', '#qunit-fixture'); <add>} <add> <add>function registerRepeatHelper() { <add> container.register('helper:x-repeat', makeBoundHelper(function(params, hash, options, env) { <add> return new Array(hash.times + 1).join( params[0] ); <add> })); <add>} <add> <add>if (Ember.FEATURES.isEnabled('ember-htmlbars')) { <add> <add>QUnit.module("ember-htmlbars: makeBoundHelper", { <add> setup: function() { <add> container = new Container(); <add> container.optionsForType('helper', { instantiate: false }); <add> }, <add> <add> teardown: function() { <add> if (view) { <add> run(view, 'destroy'); <add> } <add> <add> container.destroy(); <add> } <add>}); <add> <add>test("should update bound helpers when properties change", function() { <add> container.register('helper:x-capitalize', makeBoundHelper(function(params, hash, options, env) { <add> return params[0].toUpperCase(); <add> })); <add> <add> view = EmberView.create({ <add> container: container, <add> controller: {name: "Brogrammer"}, <add> template: compile("{{x-capitalize name}}") <add> }); <add> <add> appendView(view); <add> <add> equal(view.$().text(), 'BROGRAMMER', "helper output is correct"); <add> <add> run(view, 'set', 'controller.name', 'wes'); <add> <add> equal(view.$().text(), 'WES', "helper output updated"); <add>}); <add> <add>test("should update bound helpers when hash properties change", function() { <add> registerRepeatHelper(); <add> <add> view = EmberView.create({ <add> container: container, <add> controller: { <add> phrase: "Yo", <add> repeatCount: 1 <add> }, <add> template: compile("{{x-repeat phrase times=repeatCount}}") <add> }); <add> <add> appendView(view); <add> <add> equal(view.$().text(), 'Yo', "initial helper output is correct"); <add> <add> run(view, 'set', 'controller.repeatCount', 5); <add> <add> equal(view.$().text(), 'YoYoYoYoYo', "helper output updated"); <add>}); <add> <add>test("bound helpers should support keywords", function() { <add> container.register('helper:x-capitalize', makeBoundHelper(function(params, hash, options, env) { <add> return params[0].toUpperCase(); <add> })); <add> <add> view = EmberView.create({ <add> container: container, <add> text: 'ab', <add> template: compile("{{x-capitalize view.text}}") <add> }); <add> <add> appendView(view); <add> <add> equal(view.$().text(), 'AB', "helper output is correct"); <add>}); <add> <add>test("bound helpers should support bound options", function() { <add> registerRepeatHelper(); <add> <add> view = EmberView.create({ <add> container: container, <add> controller: { <add> text: 'ab', <add> numRepeats: 3 <add> }, <add> template: compile('{{x-repeat text timesBinding="numRepeats"}}') <add> }); <add> <add> appendView(view); <add> <add> equal(view.$().text(), 'ababab', "helper output is correct"); <add> <add> run(view, 'set', 'controller.numRepeats', 4); <add> <add> equal(view.$().text(), 'abababab', "helper correctly re-rendered after bound option was changed"); <add> <add> run(function() { <add> view.set('controller.numRepeats', 2); <add> view.set('controller.text', "YES"); <add> }); <add> <add> equal(view.$().text(), 'YESYES', "helper correctly re-rendered after both bound option and property changed"); <add>}); <add> <add>test("bound helpers should support multiple bound properties", function() { <add> <add> container.register('helper:x-combine', makeBoundHelper(function(params, hash, options, env) { <add> return params.join(''); <add> })); <add> <add> view = EmberView.create({ <add> container: container, <add> controller: { <add> thing1: 'ZOID', <add> thing2: 'BERG' <add> }, <add> template: compile('{{x-combine thing1 thing2}}') <add> }); <add> <add> appendView(view); <add> <add> equal(view.$().text(), 'ZOIDBERG', "helper output is correct"); <add> <add> run(view, 'set', 'controller.thing2', "NERD"); <add> <add> equal(view.$().text(), 'ZOIDNERD', "helper correctly re-rendered after second bound helper property changed"); <add> <add> run(function() { <add> view.set('controller.thing1', 'WOOT'); <add> view.set('controller.thing2', 'YEAH'); <add> }); <add> <add> equal(view.$().text(), 'WOOTYEAH', "helper correctly re-rendered after both bound helper properties changed"); <add>}); <add> <add>test("bound helpers can be invoked with zero args", function() { <add> container.register('helper:x-troll', makeBoundHelper(function(params, hash) { <add> return hash.text || "TROLOLOL"; <add> })); <add> <add> view = EmberView.create({ <add> container: container, <add> controller: { <add> trollText: "yumad" <add> }, <add> template: compile('{{x-troll}} and {{x-troll text="bork"}}') <add> }); <add> <add> appendView(view); <add> <add> equal(view.$().text(), 'TROLOLOL and bork', "helper output is correct"); <add>}); <add> <add>test("bound helpers should not be invoked with blocks", function() { <add> registerRepeatHelper(); <add> view = EmberView.create({ <add> container: container, <add> controller: {}, <add> template: compile("{{#x-repeat}}Sorry, Charlie{{/x-repeat}}") <add> }); <add> <add> expectAssertion(function() { <add> appendView(view); <add> }, /makeBoundHelper generated helpers do not support use with blocks/i); <add>}); <add> <add>test("shouldn't treat raw numbers as bound paths", function() { <add> container.register('helper:x-sum', makeBoundHelper(function(params) { <add> return params[0] + params[1]; <add> })); <add> <add> view = EmberView.create({ <add> container: container, <add> controller: {aNumber: 1}, <add> template: compile("{{x-sum aNumber 1}} {{x-sum 0 aNumber}} {{x-sum 5 6}}") <add> }); <add> <add> appendView(view); <add> <add> equal(view.$().text(), '2 1 11', "helper output is correct"); <add> <add> run(view, 'set', 'controller.aNumber', 5); <add> <add> equal(view.$().text(), '6 5 11', "helper still updates as expected"); <add>}); <add> <add>test("should have correct argument types", function() { <add> container.register('helper:get-type', makeBoundHelper(function(params) { <add> return typeof params[0]; <add> })); <add> <add> view = EmberView.create({ <add> container: container, <add> controller: {}, <add> template: compile('{{get-type null}}, {{get-type undefProp}}, {{get-type "string"}}, {{get-type 1}}, {{get-type this}}') <add> }); <add> <add> appendView(view); <add> <add> equal(view.$().text(), 'undefined, undefined, string, number, object', "helper output is correct"); <add>}); <add> <add>}
3
Javascript
Javascript
remove rollbar from loading in client
a2762fe4c3401bd9104051b59860ee57c8ee5b5e
<ide><path>client/gatsby-ssr.js <del>/* global ROLLBAR_CLIENT_ID ENVIRONMENT */ <ide> import React from 'react'; <ide> import PropTypes from 'prop-types'; <ide> import { Provider } from 'react-redux'; <ide> export const onRenderBody = ({ setHeadComponents, setPostBodyComponents }) => { <ide> setHeadComponents([...headComponents]); <ide> setPostBodyComponents( <ide> [ <del> /* eslint-disable max-len */ <del> ENVIRONMENT === 'production' ? ( <del> <script <del> dangerouslySetInnerHTML={{ <del> __html: ` <del> var _rollbarConfig = { <del> accessToken: "${ROLLBAR_CLIENT_ID}", <del> captureUncaught: true, <del> captureUnhandledRejections: true, <del> payload: { <del> environment: "${ENVIRONMENT}" <del> } <del> }; <del> // Rollbar Snippet <del> !function(r){function e(n){if(o[n])return o[n].exports;var t=o[n]={exports:{},id:n,loaded:!1};return r[n].call(t.exports,t,t.exports,e),t.loaded=!0,t.exports}var o={};return e.m=r,e.c=o,e.p="",e(0)}([function(r,e,o){"use strict";var n=o(1),t=o(4);_rollbarConfig=_rollbarConfig||{},_rollbarConfig.rollbarJsUrl=_rollbarConfig.rollbarJsUrl||"https://cdnjs.cloudflare.com/ajax/libs/rollbar.js/2.4.6/rollbar.min.js",_rollbarConfig.async=void 0===_rollbarConfig.async||_rollbarConfig.async;var a=n.setupShim(window,_rollbarConfig),l=t(_rollbarConfig);window.rollbar=n.Rollbar,a.loadFull(window,document,!_rollbarConfig.async,_rollbarConfig,l)},function(r,e,o){"use strict";function n(r){return function(){try{return r.apply(this,arguments)}catch(r){try{console.error("[Rollbar]: Internal error",r)}catch(r){}}}}function t(r,e){this.options=r,this._rollbarOldOnError=null;var o=s++;this.shimId=function(){return o},"undefined"!=typeof window&&window._rollbarShims&&(window._rollbarShims[o]={handler:e,messages:[]})}function a(r,e){if(r){var o=e.globalAlias||"Rollbar";if("object"==typeof r[o])return r[o];r._rollbarShims={},r._rollbarWrappedError=null;var t=new p(e);return n(function(){e.captureUncaught&&(t._rollbarOldOnError=r.onerror,i.captureUncaughtExceptions(r,t,!0),i.wrapGlobals(r,t,!0)),e.captureUnhandledRejections&&i.captureUnhandledRejections(r,t,!0);var n=e.autoInstrument;return e.enabled!==!1&&(void 0===n||n===!0||"object"==typeof n&&n.network)&&r.addEventListener&&(r.addEventListener("load",t.captureLoad.bind(t)),r.addEventListener("DOMContentLoaded",t.captureDomContentLoaded.bind(t))),r[o]=t,t})()}}function l(r){return n(function(){var e=this,o=Array.prototype.slice.call(arguments,0),n={shim:e,method:r,args:o,ts:new Date};window._rollbarShims[this.shimId()].messages.push(n)})}var i=o(2),s=0,d=o(3),c=function(r,e){return new t(r,e)},p=function(r){return new d(c,r)};t.prototype.loadFull=function(r,e,o,t,a){var l=function(){var e;if(void 0===r._rollbarDidLoad){e=new Error("rollbar.js did not load");for(var o,n,t,l,i=0;o=r._rollbarShims[i++];)for(o=o.messages||[];n=o.shift();)for(t=n.args||[],i=0;i<t.length;++i)if(l=t[i],"function"==typeof l){l(e);break}}"function"==typeof a&&a(e)},i=!1,s=e.createElement("script"),d=e.getElementsByTagName("script")[0],c=d.parentNode;s.crossOrigin="",s.src=t.rollbarJsUrl,o||(s.async=!0),s.onload=s.onreadystatechange=n(function(){if(!(i||this.readyState&&"loaded"!==this.readyState&&"complete"!==this.readyState)){s.onload=s.onreadystatechange=null;try{c.removeChild(s)}catch(r){}i=!0,l()}}),c.insertBefore(s,d)},t.prototype.wrap=function(r,e,o){try{var n;if(n="function"==typeof e?e:function(){return e||{}},"function"!=typeof r)return r;if(r._isWrap)return r;if(!r._rollbar_wrapped&&(r._rollbar_wrapped=function(){o&&"function"==typeof o&&o.apply(this,arguments);try{return r.apply(this,arguments)}catch(o){var e=o;throw e&&("string"==typeof e&&(e=new String(e)),e._rollbarContext=n()||{},e._rollbarContext._wrappedSource=r.toString(),window._rollbarWrappedError=e),e}},r._rollbar_wrapped._isWrap=!0,r.hasOwnProperty))for(var t in r)r.hasOwnProperty(t)&&(r._rollbar_wrapped[t]=r[t]);return r._rollbar_wrapped}catch(e){return r}};for(var u="log,debug,info,warn,warning,error,critical,global,configure,handleUncaughtException,handleUnhandledRejection,captureEvent,captureDomContentLoaded,captureLoad".split(","),f=0;f<u.length;++f)t.prototype[u[f]]=l(u[f]);r.exports={setupShim:a,Rollbar:p}},function(r,e){"use strict";function o(r,e,o){if(r){var t;if("function"==typeof e._rollbarOldOnError)t=e._rollbarOldOnError;else if(r.onerror){for(t=r.onerror;t._rollbarOldOnError;)t=t._rollbarOldOnError;e._rollbarOldOnError=t}var a=function(){var o=Array.prototype.slice.call(arguments,0);n(r,e,t,o)};o&&(a._rollbarOldOnError=t),r.onerror=a}}function n(r,e,o,n){r._rollbarWrappedError&&(n[4]||(n[4]=r._rollbarWrappedError),n[5]||(n[5]=r._rollbarWrappedError._rollbarContext),r._rollbarWrappedError=null),e.handleUncaughtException.apply(e,n),o&&o.apply(r,n)}function t(r,e,o){if(r){"function"==typeof r._rollbarURH&&r._rollbarURH.belongsToShim&&r.removeEventListener("unhandledrejection",r._rollbarURH);var n=function(r){var o,n,t;try{o=r.reason}catch(r){o=void 0}try{n=r.promise}catch(r){n="[unhandledrejection] error getting \`promise\` from event"}try{t=r.detail,!o&&t&&(o=t.reason,n=t.promise)}catch(r){t="[unhandledrejection] error getting \`detail\` from event"}o||(o="[unhandledrejection] error getting \`reason\` from event"),e&&e.handleUnhandledRejection&&e.handleUnhandledRejection(o,n)};n.belongsToShim=o,r._rollbarURH=n,r.addEventListener("unhandledrejection",n)}}function a(r,e,o){if(r){var n,t,a="EventTarget,Window,Node,ApplicationCache,AudioTrackList,ChannelMergerNode,CryptoOperation,EventSource,FileReader,HTMLUnknownElement,IDBDatabase,IDBRequest,IDBTransaction,KeyOperation,MediaController,MessagePort,ModalWindow,Notification,SVGElementInstance,Screen,TextTrack,TextTrackCue,TextTrackList,WebSocket,WebSocketWorker,Worker,XMLHttpRequest,XMLHttpRequestEventTarget,XMLHttpRequestUpload".split(",");for(n=0;n<a.length;++n)t=a[n],r[t]&&r[t].prototype&&l(e,r[t].prototype,o)}}function l(r,e,o){if(e.hasOwnProperty&&e.hasOwnProperty("addEventListener")){for(var n=e.addEventListener;n._rollbarOldAdd&&n.belongsToShim;)n=n._rollbarOldAdd;var t=function(e,o,t){n.call(this,e,r.wrap(o),t)};t._rollbarOldAdd=n,t.belongsToShim=o,e.addEventListener=t;for(var a=e.removeEventListener;a._rollbarOldRemove&&a.belongsToShim;)a=a._rollbarOldRemove;var l=function(r,e,o){a.call(this,r,e&&e._rollbar_wrapped||e,o)};l._rollbarOldRemove=a,l.belongsToShim=o,e.removeEventListener=l}}r.exports={captureUncaughtExceptions:o,captureUnhandledRejections:t,wrapGlobals:a}},function(r,e){"use strict";function o(r,e){this.impl=r(e,this),this.options=e,n(o.prototype)}function n(r){for(var e=function(r){return function(){var e=Array.prototype.slice.call(arguments,0);if(this.impl[r])return this.impl[r].apply(this.impl,e)}},o="log,debug,info,warn,warning,error,critical,global,configure,handleUncaughtException,handleUnhandledRejection,_createItem,wrap,loadFull,shimId,captureEvent,captureDomContentLoaded,captureLoad".split(","),n=0;n<o.length;n++)r[o[n]]=e(o[n])}o.prototype._swapAndProcessMessages=function(r,e){this.impl=r(this.options);for(var o,n,t;o=e.shift();)n=o.method,t=o.args,this[n]&&"function"==typeof this[n]&&("captureDomContentLoaded"===n||"captureLoad"===n?this[n].apply(this,[t[0],o.ts]):this[n].apply(this,t));return this},r.exports=o},function(r,e){"use strict";r.exports=function(r){return function(e){if(!e&&!window._rollbarInitialized){r=r||{};for(var o,n,t=r.globalAlias||"Rollbar",a=window.rollbar,l=function(r){return new a(r)},i=0;o=window._rollbarShims[i++];)n||(n=o.handler),o.handler._swapAndProcessMessages(l,o.messages);window[t]=n,window._rollbarInitialized=!0}}}}]); <del> // End Rollbar Snippet <del> ` <del> }} <del> key='rollbar-config' <del> /> <del> ) : null, <del> /* eslint-enable max-len */ <ide> <script <ide> async={true} <ide> key='gtag-script' <ide> export const onRenderBody = ({ setHeadComponents, setPostBodyComponents }) => { <ide> <script <ide> async={true} <ide> id='stripe-js' <del> key='strip-js' <add> key='stripe-js' <ide> src='https://js.stripe.com/v3/' <ide> /> <ide> ].filter(Boolean) <ide><path>client/src/head/scripts.js <del>import React from 'react'; <del> <del>const scripts = [ <del> <script <del> async='' <del> key='https://www.googletagmanager.com/gtag/js?id=AW-795617839' <del> src='https://www.googletagmanager.com/gtag/js?id=AW-795617839' <del> /> <del>]; <add>const scripts = []; <ide> <ide> export default scripts;
2
Javascript
Javascript
increase coverage for diagnostics_channel
51dfb869f345ec5eb658fab4d8fd5500f8bbfb1e
<ide><path>test/parallel/test-diagnostics-channel-object-channel-pub-sub.js <ide> channel.publish(input); <ide> // Should not publish after subscriber is unsubscribed <ide> channel.unsubscribe(subscriber); <ide> assert.ok(!channel.hasSubscribers); <add> <add>assert.throws(() => { <add> channel.subscribe(null); <add>}, { code: 'ERR_INVALID_ARG_TYPE' }); <ide><path>test/parallel/test-diagnostics-channel-symbol-named.js <ide> channel.subscribe(common.mustCall((message, name) => { <ide> })); <ide> <ide> channel.publish(input); <add> <add>{ <add> assert.throws(() => { <add> dc.channel(null); <add> }, /ERR_INVALID_ARG_TYPE/); <add>}
2
Javascript
Javascript
generalize upgrade support
31c16f0fffdb1caebd8ae86c5dd05db5ef1e58b2
<ide><path>packages/ember-metal/lib/meta.js <ide> export function meta(obj, writable) { <ide> let newRet = Object.create(ret); <ide> newRet.parentMeta = ret; <ide> ret = newRet; <del> ret._cache = undefined; <add> for (let i = 0; i < memberNames.length; i++) { <add> let name = memberNames[i]; <add> ret['_' + name] = undefined; <add> } <ide> // end temporary dance <ide> <ide> ret.watching = Object.create(ret.watching);
1
Javascript
Javascript
fix modal import in graph.js
b8868fa4744d42f5c3dd972376c605505157182f
<ide><path>airflow/www/static/js/graph.js <ide> */ <ide> <ide> /* <del> global d3, document, call_modal, nodes, taskInstances, tasks, edges, dagreD3, localStorage, $ <add> global d3, document, nodes, taskInstances, tasks, edges, dagreD3, localStorage, $ <ide> */ <ide> <ide> import getMetaValue from './meta_value'; <ide> import { escapeHtml } from './main'; <ide> import tiTooltip, { taskNoInstanceTooltip } from './task_instances'; <add>import { callModal } from './dag'; <ide> <ide> // dagId comes from dag.html <ide> const dagId = getMetaValue('dag_id'); <ide> function draw() { <ide> if (nodeId in taskInstances) tryNumber = taskInstances[nodeId].tryNumber; <ide> else tryNumber = 0; <ide> <del> if (task.task_type === 'SubDagOperator') call_modal(nodeId, executionDate, task.extra_links, tryNumber, true); <del> else call_modal(nodeId, executionDate, task.extra_links, tryNumber, undefined); <add> if (task.task_type === 'SubDagOperator') callModal(nodeId, executionDate, task.extra_links, tryNumber, true); <add> else callModal(nodeId, executionDate, task.extra_links, tryNumber, undefined); <ide> } else { <ide> // join node between TaskGroup. Ignore. <ide> }
1
Go
Go
fix race between dispatchers.run and todisk
f17410da5e4b5fa0e0dd2614f9b6e00dbcf66da6
<ide><path>builder/internals.go <ide> func (b *Builder) commit(id string, autoCmd []string, comment string) error { <ide> return nil <ide> } <ide> <del> container, warnings, err := b.Daemon.Create(b.Config, "") <add> container, err := b.create() <ide> if err != nil { <ide> return err <ide> } <del> for _, warning := range warnings { <del> fmt.Fprintf(b.OutStream, " ---> [Warning] %s\n", warning) <del> } <del> b.TmpContainers[container.ID] = struct{}{} <del> fmt.Fprintf(b.OutStream, " ---> Running in %s\n", utils.TruncateID(container.ID)) <ide> id = container.ID <ide> <ide> if err := container.Mount(); err != nil { <ide> func (b *Builder) create() (*daemon.Container, error) { <ide> } <ide> b.Config.Image = b.image <ide> <add> config := *b.Config <add> <ide> // Create the container <del> c, _, err := b.Daemon.Create(b.Config, "") <add> c, warnings, err := b.Daemon.Create(b.Config, "") <ide> if err != nil { <ide> return nil, err <ide> } <add> for _, warning := range warnings { <add> fmt.Fprintf(b.OutStream, " ---> [Warning] %s\n", warning) <add> } <ide> <ide> b.TmpContainers[c.ID] = struct{}{} <ide> fmt.Fprintf(b.OutStream, " ---> Running in %s\n", utils.TruncateID(c.ID)) <ide> <ide> // override the entry point that may have been picked up from the base image <del> c.Path = b.Config.Cmd[0] <del> c.Args = b.Config.Cmd[1:] <add> c.Path = config.Cmd[0] <add> c.Args = config.Cmd[1:] <ide> <ide> return c, nil <ide> }
1
Javascript
Javascript
move legacy hidden api to new internal fiber type
914b57be27a8697d8ed83266466b30378af379f0
<ide><path>packages/react-reconciler/src/ReactFiber.new.js <ide> import { <ide> ScopeComponent, <ide> Block, <ide> OffscreenComponent, <add> LegacyHiddenComponent, <ide> } from './ReactWorkTags'; <ide> import getComponentName from 'shared/getComponentName'; <ide> <ide> import { <ide> REACT_SCOPE_TYPE, <ide> REACT_BLOCK_TYPE, <ide> REACT_OFFSCREEN_TYPE, <add> REACT_LEGACY_HIDDEN_TYPE, <ide> } from 'shared/ReactSymbols'; <ide> <ide> export type {Fiber}; <ide> export function createFiberFromTypeAndProps( <ide> expirationTime, <ide> key, <ide> ); <add> case REACT_LEGACY_HIDDEN_TYPE: <add> return createFiberFromLegacyHidden( <add> pendingProps, <add> mode, <add> expirationTime, <add> key, <add> ); <ide> default: { <ide> if (typeof type === 'object' && type !== null) { <ide> switch (type.$$typeof) { <ide> export function createFiberFromOffscreen( <ide> return fiber; <ide> } <ide> <add>export function createFiberFromLegacyHidden( <add> pendingProps: OffscreenProps, <add> mode: TypeOfMode, <add> expirationTime: ExpirationTimeOpaque, <add> key: null | string, <add>) { <add> const fiber = createFiber(LegacyHiddenComponent, pendingProps, key, mode); <add> // TODO: The LegacyHidden fiber shouldn't have a type. It has a tag. <add> // This needs to be fixed in getComponentName so that it relies on the tag <add> // instead. <add> if (__DEV__) { <add> fiber.type = REACT_LEGACY_HIDDEN_TYPE; <add> } <add> fiber.elementType = REACT_LEGACY_HIDDEN_TYPE; <add> fiber.expirationTime_opaque = expirationTime; <add> return fiber; <add>} <add> <ide> export function createFiberFromText( <ide> content: string, <ide> mode: TypeOfMode, <ide><path>packages/react-reconciler/src/ReactFiberBeginWork.new.js <ide> import { <ide> ScopeComponent, <ide> Block, <ide> OffscreenComponent, <add> LegacyHiddenComponent, <ide> } from './ReactWorkTags'; <ide> import { <ide> NoEffect, <ide> import invariant from 'shared/invariant'; <ide> import shallowEqual from 'shared/shallowEqual'; <ide> import getComponentName from 'shared/getComponentName'; <ide> import ReactStrictModeWarnings from './ReactStrictModeWarnings.new'; <del>import {REACT_LAZY_TYPE, getIteratorFn} from 'shared/ReactSymbols'; <add>import { <add> REACT_ELEMENT_TYPE, <add> REACT_LAZY_TYPE, <add> REACT_LEGACY_HIDDEN_TYPE, <add> getIteratorFn, <add>} from 'shared/ReactSymbols'; <ide> import { <ide> getCurrentFiberOwnerNameInDevOrNull, <ide> setIsRendering, <ide> import { <ide> renderDidSuspendDelayIfPossible, <ide> markUnprocessedUpdateTime, <ide> getWorkInProgressRoot, <add> pushRenderExpirationTime, <ide> } from './ReactFiberWorkLoop.new'; <ide> <ide> import {disableLogs, reenableLogs} from 'shared/ConsolePatchingDev'; <ide> function updateOffscreenComponent( <ide> const nextProps: OffscreenProps = workInProgress.pendingProps; <ide> const nextChildren = nextProps.children; <ide> <del> if (current !== null) { <del> if (nextProps.mode === 'hidden') { <del> // TODO: Should currently be unreachable because Offscreen is only used as <del> // an implementation detail of Suspense. Once this is a public API, it <del> // will need to create an OffscreenState. <add> const prevState: OffscreenState | null = <add> current !== null ? current.memoizedState : null; <add> <add> if (nextProps.mode === 'hidden') { <add> if ( <add> !isSameExpirationTime(renderExpirationTime, (Never: ExpirationTimeOpaque)) <add> ) { <add> let nextBaseTime; <add> if (prevState !== null) { <add> const prevBaseTime = prevState.baseTime; <add> nextBaseTime = !isSameOrHigherPriority( <add> prevBaseTime, <add> renderExpirationTime, <add> ) <add> ? prevBaseTime <add> : renderExpirationTime; <add> } else { <add> nextBaseTime = renderExpirationTime; <add> } <add> <add> // Schedule this fiber to re-render at offscreen priority. Then bailout. <add> if (enableSchedulerTracing) { <add> markSpawnedWork((Never: ExpirationTimeOpaque)); <add> } <add> workInProgress.expirationTime_opaque = workInProgress.childExpirationTime_opaque = Never; <add> const nextState: OffscreenState = { <add> baseTime: nextBaseTime, <add> }; <add> workInProgress.memoizedState = nextState; <add> // We're about to bail out, but we need to push this to the stack anyway <add> // to avoid a push/pop misalignment. <add> pushRenderExpirationTime(workInProgress, nextBaseTime); <add> return null; <ide> } else { <del> // Clear the offscreen state. <add> // Rendering at offscreen, so we can clear the base time. <add> const nextState: OffscreenState = { <add> baseTime: NoWork, <add> }; <add> workInProgress.memoizedState = nextState; <add> pushRenderExpirationTime(workInProgress, renderExpirationTime); <add> } <add> } else { <add> let subtreeRenderTime; <add> if (prevState !== null) { <add> const baseTime = prevState.baseTime; <add> subtreeRenderTime = !isSameOrHigherPriority( <add> baseTime, <add> renderExpirationTime, <add> ) <add> ? baseTime <add> : renderExpirationTime; <add> <add> // Since we're not hidden anymore, reset the state <ide> workInProgress.memoizedState = null; <add> } else { <add> // We weren't previously hidden, and we still aren't, so there's nothing <add> // special to do. Need to push to the stack regardless, though, to avoid <add> // a push/pop misalignment. <add> subtreeRenderTime = renderExpirationTime; <ide> } <add> pushRenderExpirationTime(workInProgress, subtreeRenderTime); <ide> } <ide> <ide> reconcileChildren( <ide> function updateOffscreenComponent( <ide> return workInProgress.child; <ide> } <ide> <add>// Note: These happen to have identical begin phases, for now. We shouldn't hold <add>// ourselves to this constraint, though. If the behavior diverges, we should <add>// fork the function. <add>const updateLegacyHiddenComponent = updateOffscreenComponent; <add> <ide> function updateFragment( <ide> current: Fiber | null, <ide> workInProgress: Fiber, <ide> function updateHostComponent( <ide> <ide> markRef(current, workInProgress); <ide> <del> // Check the host config to see if the children are offscreen/hidden. <ide> if ( <del> workInProgress.mode & ConcurrentMode && <del> !isSameExpirationTime( <del> renderExpirationTime, <del> (Never: ExpirationTimeOpaque), <del> ) && <del> shouldDeprioritizeSubtree(type, nextProps) <add> (workInProgress.mode & ConcurrentMode) !== NoMode && <add> nextProps.hasOwnProperty('hidden') <ide> ) { <del> if (enableSchedulerTracing) { <del> markSpawnedWork((Never: ExpirationTimeOpaque)); <del> } <del> // Schedule this fiber to re-render at offscreen priority. Then bailout. <del> workInProgress.expirationTime_opaque = workInProgress.childExpirationTime_opaque = Never; <del> return null; <add> const wrappedChildren = { <add> $$typeof: REACT_ELEMENT_TYPE, <add> type: REACT_LEGACY_HIDDEN_TYPE, <add> key: null, <add> ref: null, <add> props: { <add> children: nextChildren, <add> // Check the host config to see if the children are offscreen/hidden. <add> mode: shouldDeprioritizeSubtree(type, nextProps) ? 'hidden' : 'visible', <add> }, <add> _owner: __DEV__ ? {} : null, <add> }; <add> nextChildren = wrappedChildren; <ide> } <ide> <ide> reconcileChildren( <ide> function validateFunctionComponentInDev(workInProgress: Fiber, Component: any) { <ide> } <ide> } <ide> <del>function mountSuspenseState( <add>const SUSPENDED_MARKER: SuspenseState = { <add> dehydrated: null, <add> retryTime: NoWork, <add>}; <add> <add>function mountSuspenseOffscreenState( <ide> renderExpirationTime: ExpirationTimeOpaque, <del>): SuspenseState { <add>): OffscreenState { <ide> return { <del> dehydrated: null, <ide> baseTime: renderExpirationTime, <del> retryTime: NoWork, <ide> }; <ide> } <ide> <del>function updateSuspenseState( <del> prevSuspenseState: SuspenseState, <add>function updateSuspenseOffscreenState( <add> prevOffscreenState: OffscreenState, <ide> renderExpirationTime: ExpirationTimeOpaque, <del>): SuspenseState { <del> const prevSuspendedTime = prevSuspenseState.baseTime; <add>): OffscreenState { <add> const prevBaseTime = prevOffscreenState.baseTime; <ide> return { <del> dehydrated: null, <add> // Choose whichever time is inclusive of the other one. This represents <add> // the union of all the levels that suspended. <ide> baseTime: <del> // Choose whichever time is inclusive of the other one. This represents <del> // the union of all the levels that suspended. <del> !isSameExpirationTime( <del> prevSuspendedTime, <del> (NoWork: ExpirationTimeOpaque), <del> ) && !isSameOrHigherPriority(prevSuspendedTime, renderExpirationTime) <del> ? prevSuspendedTime <add> !isSameExpirationTime(prevBaseTime, (NoWork: ExpirationTimeOpaque)) && <add> !isSameOrHigherPriority(prevBaseTime, renderExpirationTime) <add> ? prevBaseTime <ide> : renderExpirationTime, <del> retryTime: NoWork, <ide> }; <ide> } <ide> <ide> function shouldRemainOnFallback( <ide> // For example, SuspenseList coordinates when nested content appears. <ide> if (current !== null) { <ide> const suspenseState: SuspenseState = current.memoizedState; <del> if (suspenseState !== null) { <del> // Currently showing a fallback. If the current render includes <del> // the level that triggered the fallback, we must continue showing it, <del> // regardless of what the Suspense context says. <del> const baseTime = suspenseState.baseTime; <del> if ( <del> !isSameExpirationTime(baseTime, (NoWork: ExpirationTimeOpaque)) && <del> !isSameOrHigherPriority(baseTime, renderExpirationTime) <del> ) { <del> return true; <del> } <del> // Otherwise, fall through to check the Suspense context. <del> } else { <add> if (suspenseState === null) { <ide> // Currently showing content. Don't hide it, even if ForceSuspenseFallack <ide> // is true. More precise name might be "ForceRemainSuspenseFallback". <ide> // Note: This is a factoring smell. Can't remain on a fallback if there's <ide> // no fallback to remain on. <ide> return false; <ide> } <ide> } <add> <ide> // Not currently showing content. Consult the Suspense context. <ide> return hasSuspenseContext( <ide> suspenseContext, <ide> function getRemainingWorkInPrimaryTree( <ide> renderExpirationTime, <ide> ) { <ide> const currentChildExpirationTime = current.childExpirationTime_opaque; <del> const currentSuspenseState: SuspenseState = current.memoizedState; <del> if (currentSuspenseState !== null) { <del> // This boundary already timed out. Check if this render includes the level <del> // that previously suspended. <del> const baseTime = currentSuspenseState.baseTime; <del> if ( <del> !isSameExpirationTime(baseTime, (NoWork: ExpirationTimeOpaque)) && <del> !isSameOrHigherPriority(baseTime, renderExpirationTime) <del> ) { <del> // There's pending work at a lower level that might now be unblocked. <del> return baseTime; <del> } <del> } <del> <ide> if ( <ide> !isSameOrHigherPriority(currentChildExpirationTime, renderExpirationTime) <ide> ) { <ide> function updateSuspenseComponent( <ide> renderExpirationTime, <ide> ); <ide> const primaryChildFragment: Fiber = (workInProgress.child: any); <del> primaryChildFragment.memoizedState = ({baseTime: NoWork}: OffscreenState); <del> workInProgress.memoizedState = mountSuspenseState(renderExpirationTime); <add> primaryChildFragment.memoizedState = mountSuspenseOffscreenState( <add> renderExpirationTime, <add> ); <add> workInProgress.memoizedState = SUSPENDED_MARKER; <ide> return fallbackFragment; <ide> } else { <ide> const nextPrimaryChildren = nextProps.children; <ide> function updateSuspenseComponent( <ide> renderExpirationTime, <ide> ); <ide> const primaryChildFragment: Fiber = (workInProgress.child: any); <del> primaryChildFragment.memoizedState = ({ <del> baseTime: NoWork, <del> }: OffscreenState); <del> workInProgress.memoizedState = updateSuspenseState( <del> current.memoizedState, <add> primaryChildFragment.memoizedState = mountSuspenseOffscreenState( <ide> renderExpirationTime, <ide> ); <del> <add> workInProgress.memoizedState = SUSPENDED_MARKER; <ide> return fallbackChildFragment; <ide> } <ide> } <ide> function updateSuspenseComponent( <ide> renderExpirationTime, <ide> ); <ide> const primaryChildFragment: Fiber = (workInProgress.child: any); <del> primaryChildFragment.memoizedState = ({ <del> baseTime: NoWork, <del> }: OffscreenState); <add> const prevOffscreenState: OffscreenState | null = (current.child: any) <add> .memoizedState; <add> primaryChildFragment.memoizedState = <add> prevOffscreenState === null <add> ? mountSuspenseOffscreenState(renderExpirationTime) <add> : updateSuspenseOffscreenState( <add> prevOffscreenState, <add> renderExpirationTime, <add> ); <ide> primaryChildFragment.childExpirationTime_opaque = getRemainingWorkInPrimaryTree( <ide> current, <ide> workInProgress, <ide> renderExpirationTime, <ide> ); <del> workInProgress.memoizedState = updateSuspenseState( <del> current.memoizedState, <del> renderExpirationTime, <del> ); <add> workInProgress.memoizedState = SUSPENDED_MARKER; <ide> return fallbackChildFragment; <ide> } else { <ide> const nextPrimaryChildren = nextProps.children; <ide> function updateSuspenseComponent( <ide> renderExpirationTime, <ide> ); <ide> const primaryChildFragment: Fiber = (workInProgress.child: any); <del> primaryChildFragment.memoizedState = ({ <del> baseTime: NoWork, <del> }: OffscreenState); <add> const prevOffscreenState: OffscreenState | null = (current.child: any) <add> .memoizedState; <add> primaryChildFragment.memoizedState = <add> prevOffscreenState === null <add> ? mountSuspenseOffscreenState(renderExpirationTime) <add> : updateSuspenseOffscreenState( <add> prevOffscreenState, <add> renderExpirationTime, <add> ); <ide> primaryChildFragment.childExpirationTime_opaque = getRemainingWorkInPrimaryTree( <ide> current, <ide> workInProgress, <ide> renderExpirationTime, <ide> ); <ide> // Skip the primary children, and continue working on the <ide> // fallback children. <del> workInProgress.memoizedState = mountSuspenseState(renderExpirationTime); <add> workInProgress.memoizedState = SUSPENDED_MARKER; <ide> return fallbackChildFragment; <ide> } else { <ide> // Still haven't timed out. Continue rendering the children, like we <ide> function beginWork( <ide> break; <ide> case HostComponent: <ide> pushHostContext(workInProgress); <del> if ( <del> workInProgress.mode & ConcurrentMode && <del> !isSameExpirationTime( <del> renderExpirationTime, <del> (Never: ExpirationTimeOpaque), <del> ) && <del> shouldDeprioritizeSubtree(workInProgress.type, newProps) <del> ) { <del> if (enableSchedulerTracing) { <del> markSpawnedWork((Never: ExpirationTimeOpaque)); <del> } <del> // Schedule this fiber to re-render at offscreen priority. Then bailout. <del> workInProgress.expirationTime_opaque = workInProgress.childExpirationTime_opaque = Never; <del> return null; <del> } <ide> break; <ide> case ClassComponent: { <ide> const Component = workInProgress.type; <ide> function beginWork( <ide> return null; <ide> } <ide> } <add> case OffscreenComponent: <add> case LegacyHiddenComponent: { <add> // Need to check if the tree still needs to be deferred. This is <add> // almost identical to the logic used in the normal update path, <add> // so we'll just enter that. The only difference is we'll bail out <add> // at the next level instead of this one, because the child props <add> // have not changed. Which is fine. <add> // TODO: Probably should refactor `beginWork` to split the bailout <add> // path from the normal path. I'm tempted to do a labeled break here <add> // but I won't :) <add> workInProgress.expirationTime_opaque = NoWork; <add> return updateOffscreenComponent( <add> current, <add> workInProgress, <add> renderExpirationTime, <add> ); <add> } <ide> } <ide> return bailoutOnAlreadyFinishedWork( <ide> current, <ide> function beginWork( <ide> renderExpirationTime, <ide> ); <ide> } <del> case OffscreenComponent: { <del> return updateOffscreenComponent( <del> current, <del> workInProgress, <del> renderExpirationTime, <del> ); <del> } <ide> case SimpleMemoComponent: { <ide> return updateSimpleMemoComponent( <ide> current, <ide> function beginWork( <ide> } <ide> break; <ide> } <add> case OffscreenComponent: { <add> return updateOffscreenComponent( <add> current, <add> workInProgress, <add> renderExpirationTime, <add> ); <add> } <add> case LegacyHiddenComponent: { <add> return updateLegacyHiddenComponent( <add> current, <add> workInProgress, <add> renderExpirationTime, <add> ); <add> } <ide> } <ide> invariant( <ide> false, <ide><path>packages/react-reconciler/src/ReactFiberCommitWork.new.js <ide> import { <ide> ScopeComponent, <ide> Block, <ide> OffscreenComponent, <add> LegacyHiddenComponent, <ide> } from './ReactWorkTags'; <ide> import { <ide> invokeGuardedCallback, <ide> function commitLifeCycles( <ide> case FundamentalComponent: <ide> case ScopeComponent: <ide> case OffscreenComponent: <add> case LegacyHiddenComponent: <ide> return; <ide> } <ide> invariant( <ide> function hideOrUnhideAllChildren(finishedWork, isHidden) { <ide> unhideTextInstance(instance, node.memoizedProps); <ide> } <ide> } else if ( <del> node.tag === OffscreenComponent && <add> (node.tag === OffscreenComponent || <add> node.tag === LegacyHiddenComponent) && <ide> (node.memoizedState: OffscreenState) !== null && <ide> node !== finishedWork <ide> ) { <ide> function commitWork(current: Fiber | null, finishedWork: Fiber): void { <ide> } <ide> break; <ide> } <del> case OffscreenComponent: { <add> case OffscreenComponent: <add> case LegacyHiddenComponent: { <ide> return; <ide> } <ide> } <ide> function commitWork(current: Fiber | null, finishedWork: Fiber): void { <ide> } <ide> break; <ide> } <del> case OffscreenComponent: { <add> case OffscreenComponent: <add> case LegacyHiddenComponent: { <ide> const newState: OffscreenState | null = finishedWork.memoizedState; <ide> const isHidden = newState !== null; <ide> hideOrUnhideAllChildren(finishedWork, isHidden); <ide><path>packages/react-reconciler/src/ReactFiberCompleteWork.new.js <ide> import { <ide> ScopeComponent, <ide> Block, <ide> OffscreenComponent, <add> LegacyHiddenComponent, <ide> } from './ReactWorkTags'; <ide> import {NoMode, BlockingMode} from './ReactTypeOfMode'; <ide> import { <ide> import { <ide> renderDidSuspend, <ide> renderDidSuspendDelayIfPossible, <ide> renderHasNotSuspendedYet, <add> popRenderExpirationTime, <ide> } from './ReactFiberWorkLoop.new'; <ide> import {createFundamentalStateInstance} from './ReactFiberFundamental.new'; <ide> import {Never, isSameOrHigherPriority} from './ReactFiberExpirationTime.new'; <ide> function completeWork( <ide> return null; <ide> } <ide> break; <del> case OffscreenComponent: { <add> case OffscreenComponent: <add> case LegacyHiddenComponent: { <add> popRenderExpirationTime(workInProgress); <ide> if (current !== null) { <ide> const nextState: OffscreenState | null = workInProgress.memoizedState; <ide> const prevState: OffscreenState | null = current.memoizedState; <ide><path>packages/react-reconciler/src/ReactFiberHydrationContext.new.js <ide> import { <ide> didNotFindHydratableSuspenseInstance, <ide> } from './ReactFiberHostConfig'; <ide> import {enableSuspenseServerRenderer} from 'shared/ReactFeatureFlags'; <del>import {Never, NoWork} from './ReactFiberExpirationTime.new'; <add>import {Never} from './ReactFiberExpirationTime.new'; <ide> <ide> // The deepest Fiber on the stack involved in a hydration context. <ide> // This may have been an insertion or a hydration. <ide> function tryHydrate(fiber, nextInstance) { <ide> if (suspenseInstance !== null) { <ide> const suspenseState: SuspenseState = { <ide> dehydrated: suspenseInstance, <del> baseTime: NoWork, <ide> retryTime: Never, <ide> }; <ide> fiber.memoizedState = suspenseState; <ide><path>packages/react-reconciler/src/ReactFiberSuspenseComponent.new.js <ide> export type SuspenseState = {| <ide> // here to indicate that it is dehydrated (flag) and for quick access <ide> // to check things like isSuspenseInstancePending. <ide> dehydrated: null | SuspenseInstance, <del> // Represents the work that was deprioritized when we committed the fallback. <del> // The work outside the boundary already committed at this level, so we cannot <del> // unhide the content without including it. <del> baseTime: ExpirationTimeOpaque, <ide> // Represents the earliest expiration time we should attempt to hydrate <ide> // a dehydrated boundary at. <ide> // Never is the default for dehydrated boundaries. <ide><path>packages/react-reconciler/src/ReactFiberUnwindWork.new.js <ide> import { <ide> ContextProvider, <ide> SuspenseComponent, <ide> SuspenseListComponent, <add> OffscreenComponent, <add> LegacyHiddenComponent, <ide> } from './ReactWorkTags'; <ide> import {DidCapture, NoEffect, ShouldCapture} from './ReactSideEffectTags'; <ide> import {enableSuspenseServerRenderer} from 'shared/ReactFeatureFlags'; <ide> import { <ide> popTopLevelContextObject as popTopLevelLegacyContextObject, <ide> } from './ReactFiberContext.new'; <ide> import {popProvider} from './ReactFiberNewContext.new'; <add>import {popRenderExpirationTime} from './ReactFiberWorkLoop.new'; <ide> <ide> import invariant from 'shared/invariant'; <ide> <ide> function unwindWork( <ide> case ContextProvider: <ide> popProvider(workInProgress); <ide> return null; <add> case OffscreenComponent: <add> case LegacyHiddenComponent: <add> popRenderExpirationTime(workInProgress); <add> return null; <ide> default: <ide> return null; <ide> } <ide> function unwindInterruptedWork(interruptedWork: Fiber) { <ide> case ContextProvider: <ide> popProvider(interruptedWork); <ide> break; <add> case OffscreenComponent: <add> case LegacyHiddenComponent: <add> popRenderExpirationTime(interruptedWork); <add> break; <ide> default: <ide> break; <ide> } <ide><path>packages/react-reconciler/src/ReactFiberWorkLoop.new.js <ide> import type {Interaction} from 'scheduler/src/Tracing'; <ide> import type {SuspenseConfig} from './ReactFiberSuspenseConfig'; <ide> import type {SuspenseState} from './ReactFiberSuspenseComponent.new'; <ide> import type {Effect as HookEffect} from './ReactFiberHooks.new'; <add>import type {StackCursor} from './ReactFiberStack.new'; <ide> <ide> import { <ide> warnAboutDeprecatedLifecycles, <ide> import { <ide> getIsUpdatingOpaqueValueInRenderPhaseInDEV, <ide> } from './ReactFiberHooks.new'; <ide> import {createCapturedValue} from './ReactCapturedValue'; <add>import { <add> push as pushToStack, <add> pop as popFromStack, <add> createCursor, <add>} from './ReactFiberStack.new'; <ide> <ide> import { <ide> recordCommitTime, <ide> let workInProgressRoot: FiberRoot | null = null; <ide> let workInProgress: Fiber | null = null; <ide> // The expiration time we're rendering <ide> let renderExpirationTime: ExpirationTimeOpaque = NoWork; <add> <add>// Stack that allows components to channge renderExpirationTime for its subtree <add>const renderExpirationTimeCursor: StackCursor<ExpirationTimeOpaque> = createCursor( <add> NoWork, <add>); <add> <ide> // Whether to root completed, errored, suspended, etc. <ide> let workInProgressRootExitStatus: RootExitStatus = RootIncomplete; <ide> // A fatal error, if one is thrown <ide> export function flushControlled(fn: () => mixed): void { <ide> } <ide> } <ide> <add>export function pushRenderExpirationTime( <add> fiber: Fiber, <add> subtreeRenderTime: ExpirationTimeOpaque, <add>) { <add> pushToStack(renderExpirationTimeCursor, renderExpirationTime, fiber); <add> renderExpirationTime = subtreeRenderTime; <add>} <add> <add>export function popRenderExpirationTime(fiber: Fiber) { <add> renderExpirationTime = renderExpirationTimeCursor.current; <add> popFromStack(renderExpirationTimeCursor, fiber); <add>} <add> <ide> function prepareFreshStack(root, expirationTime) { <ide> root.finishedWork = null; <ide> root.finishedExpirationTime_opaque = NoWork; <ide><path>packages/react-reconciler/src/ReactWorkTags.js <ide> export type WorkTag = <ide> | 20 <ide> | 21 <ide> | 22 <del> | 23; <add> | 23 <add> | 24; <ide> <ide> export const FunctionComponent = 0; <ide> export const ClassComponent = 1; <ide> export const FundamentalComponent = 20; <ide> export const ScopeComponent = 21; <ide> export const Block = 22; <ide> export const OffscreenComponent = 23; <add>export const LegacyHiddenComponent = 24; <ide><path>packages/react-reconciler/src/__tests__/ReactSuspenseWithNoopRenderer-test.js <ide> describe('ReactSuspenseWithNoopRenderer', () => { <ide> }); <ide> setFallbackText('Still loading...'); <ide> <del> expect(Scheduler).toFlushAndYield([ <del> // First try to render the high pri update. We won't try to re-render <del> // the suspended tree during this pass, because it still has unfinished <del> // updates at a lower priority. <del> 'Loading...', <del> <del> // Now try the suspended update again. It's still suspended. <del> 'Suspend! [C]', <del> <del> // Then complete the update to the fallback. <del> 'Still loading...', <del> ]); <add> expect(Scheduler).toFlushAndYield( <add> gate(flags => <add> flags.new <add> ? [ <add> // First try to render the high pri update. Still suspended. <add> 'Suspend! [C]', <add> 'Loading...', <add> <add> // In the expiration times model, once the high pri update <add> // suspends, we can't be sure if there's additional work at a <add> // lower priority that might unblock the tree. We do know that <add> // there's a lower priority update *somehwere* in the entire <add> // root, though (the update to the fallback). So we try <add> // rendering one more time, just in case. <add> // TODO: We shouldn't need to do this with lanes, because we <add> // always know exactly which lanes have pending work in <add> // each tree. <add> 'Suspend! [C]', <add> <add> // Then complete the update to the fallback. <add> 'Still loading...', <add> ] <add> : [ <add> // In the old reconciler, we don't attempt to unhdie the <add> // Suspense boundary at high priority. Instead, we bailout, <add> // then try again at the original priority that the component <add> // suspended. This is mostly an implementation compromise, <add> // though there are some advantages to this behavior, because <add> // attempt to unhide could slow down the rest of the update. <add> // <add> // Render that only includes the fallback, since we bailed <add> // out on the primary tree. <add> 'Loading...', <add> <add> // Now try the suspended update again at the original <add> // priority. It's still suspended. <add> 'Suspend! [C]', <add> <add> // Then complete the update to the fallback. <add> 'Still loading...', <add> ], <add> ), <add> ); <ide> expect(root).toMatchRenderedOutput( <ide> <> <ide> <span hidden={true} prop="A" /> <ide> describe('ReactSuspenseWithNoopRenderer', () => { <ide> root.render(<Parent step={1} />); <ide> }); <ide> }); <add> <ide> // Only the outer part can update. The inner part should still show a <ide> // fallback because we haven't finished loading B yet. Otherwise, the <ide> // inner text would be inconsistent with the outer text. <del> expect(Scheduler).toHaveYielded([ <del> 'Outer text: B', <del> 'Outer step: 1', <del> 'Loading...', <del> <del> 'Suspend! [Inner text: B]', <del> 'Inner step: 1', <del> ]); <add> expect(Scheduler).toHaveYielded( <add> gate(flags => <add> flags.new <add> ? [ <add> 'Outer text: B', <add> 'Outer step: 1', <add> 'Suspend! [Inner text: B]', <add> 'Inner step: 1', <add> 'Loading...', <add> ] <add> : [ <add> // In the old reconciler, we first complete the outside of the <add> // Suspense boundary, then attempt to unhide it in a separate <add> // render at the original priority at which it suspended. <add> // First render: <add> 'Outer text: B', <add> 'Outer step: 1', <add> 'Loading...', <add> // Second render: <add> 'Suspend! [Inner text: B]', <add> 'Inner step: 1', <add> ], <add> ), <add> ); <ide> expect(root).toMatchRenderedOutput( <ide> <> <ide> <span prop="Outer text: B" /> <ide> describe('ReactSuspenseWithNoopRenderer', () => { <ide> }); <ide> }); <ide> <del> expect(Scheduler).toHaveYielded([ <del> // First the outer part of the tree updates, at high pri. <del> 'Outer: B1', <del> 'Loading...', <del> <del> // Then we retry the boundary. <del> 'Inner: B1', <del> 'Commit Child', <del> ]); <add> expect(Scheduler).toHaveYielded( <add> gate(flags => <add> flags.new <add> ? ['Outer: B1', 'Inner: B1', 'Commit Child'] <add> : [ <add> // In the old reconciler, we first complete the outside of the <add> // Suspense boundary, then attempt to unhide it in a separate <add> // render at the original priority at which it suspended. <add> // First render: <add> 'Outer: B1', <add> 'Loading...', <add> // Second render: <add> 'Inner: B1', <add> 'Commit Child', <add> ], <add> ), <add> ); <ide> expect(root).toMatchRenderedOutput( <ide> <> <ide> <span prop="Outer: B1" /> <ide><path>packages/shared/ReactSymbols.js <ide> export let REACT_SCOPE_TYPE = 0xead7; <ide> export let REACT_OPAQUE_ID_TYPE = 0xeae0; <ide> export let REACT_DEBUG_TRACING_MODE_TYPE = 0xeae1; <ide> export let REACT_OFFSCREEN_TYPE = 0xeae2; <add>export let REACT_LEGACY_HIDDEN_TYPE = 0xeae3; <ide> <ide> if (typeof Symbol === 'function' && Symbol.for) { <ide> const symbolFor = Symbol.for; <ide> if (typeof Symbol === 'function' && Symbol.for) { <ide> REACT_OPAQUE_ID_TYPE = symbolFor('react.opaque.id'); <ide> REACT_DEBUG_TRACING_MODE_TYPE = symbolFor('react.debug_trace_mode'); <ide> REACT_OFFSCREEN_TYPE = symbolFor('react.offscreen'); <add> REACT_LEGACY_HIDDEN_TYPE = symbolFor('react.legacy_hidden'); <ide> } <ide> <ide> const MAYBE_ITERATOR_SYMBOL = typeof Symbol === 'function' && Symbol.iterator;
11
Text
Text
add major version note to release guide
58cfb329627fabdfa988c3cf7bdb13407cb12809
<ide><path>doc/contributing/releases.md <ide> Revert all changes that were made to `src/node_version.h`: <ide> $ git checkout --ours HEAD -- src/node_version.h <ide> ``` <ide> <add><details> <add><summary>Major version release</summary> <add> <add>On the main branch, instead of reverting changes made to `src/node_version.h` <add>edit it instead and: <add> <add>* Increment `NODE_MAJOR_VERSION` by one <add>* Reset `NODE_PATCH_VERSION` to `0` <add>* Change `NODE_VERSION_IS_RELEASE` back to `0` <add> <add>Amend the current commit to apply the changes: <add> <add>```console <add>$ git commit --amend <add>``` <add> <add></details> <add> <ide> Even if there are no conflicts, ensure that you revert all the changes that were <ide> made to `src/node_version.h`. <ide>
1
Ruby
Ruby
upgrade virtualenv to 16.6.0
0bb29e82533ceba1fa3bdc4b4c38d4201928f53d
<ide><path>Library/Homebrew/language/python_virtualenv_constants.rb <ide> # frozen_string_literal: true <ide> <ide> PYTHON_VIRTUALENV_URL = <del> "https://files.pythonhosted.org/packages/6a/56" \ <del> "/74dce1fdeeabbcc0a3fcf299115f6814bd9c39fc4161658b513240a75ea7" \ <del> "/virtualenv-16.5.0.tar.gz" <add> "https://files.pythonhosted.org/packages/53/c0" \ <add> "/c7819f0bb2cf83e1b4b0d96c901b85191f598a7b534d297c2ef6dc80e2d3" \ <add> "/virtualenv-16.6.0.tar.gz" <ide> PYTHON_VIRTUALENV_SHA256 = <del> "15ee248d13e4001a691d9583948ad3947bcb8a289775102e4c4aa98a8b7a6d73" <add> "99acaf1e35c7ccf9763db9ba2accbca2f4254d61d1912c5ee364f9cc4a8942a0"
1
Text
Text
add digitalinfinity to collaborators
734fa44000cfe3fceabf19c801d4972fb280ca5d
<ide><path>README.md <ide> For more information about the governance of the Node.js project, see <ide> **Jamie Davis** &lt;davisjam@vt.edu&gt; (he/him) <ide> * [devsnek](https://github.com/devsnek) - <ide> **Gus Caplan** &lt;me@gus.host&gt; (he/him) <add>* [digitalinfinity](https://github.com/digitalinfinity) - <add>**Hitesh Kanwathirtha** &lt;digitalinfinity@gmail.com&gt; (he/him) <ide> * [edsadr](https://github.com/edsadr) - <ide> **Adrian Estrada** &lt;edsadr@gmail.com&gt; (he/him) <ide> * [eljefedelrodeodeljefe](https://github.com/eljefedelrodeodeljefe) -
1
Text
Text
add process for pr acceptance, review, rejection
e4a09cd79d0ee3fbb6a33552a21c53491d9f3953
<ide><path>hack/MAINTAINERS.md <ide> All decisions affecting docker, big and small, follow the same 3 steps: <ide> <ide> * Step 3: Accept (`LGTM`) or refuse a pull request. The relevant maintainers do <ide> this (see below "Who decides what?") <del> <add> + Accepting pull requests <add> - If the pull request appears to be ready to merge, give it a `LGTM`, which <add> stands for "Looks Good To Me". <add> - If the pull request has some small problems that need to be changed, make <add> a comment adressing the issues. <add> - If the changes needed to a PR are small, you can add a "LGTM once the <add> following comments are adressed..." this will reduce needless back and <add> forth. <add> - If the PR only needs a few changes before being merged, any MAINTAINER can <add> make a replacement PR that incorporates the existing commits and fixes the <add> problems before a fast track merge. <add> + Closing pull requests <add> - If a PR appears to be abandoned, after having attempted to contact the <add> original contributor, then a replacement PR may be made. Once the <add> replacement PR is made, any contributor may close the original one. <add> - If you are not sure if the pull request implements a good feature or you <add> do not understand the purpose of the PR, ask the contributor to provide <add> more documentation. If the contributor is not able to adequately explain <add> the purpose of the PR, the PR may be closed by any MAINTAINER. <add> - If a MAINTAINER feels that the pull request is sufficiently architecturally <add> flawed, or if the pull request needs significantly more design discussion <add> before being considered, the MAINTAINER should close the pull request with <add> a short explanation of what discussion still needs to be had. It is <add> important not to leave such pull requests open, as this will waste both the <add> MAINTAINER's time and the contributor's time. It is not good to string a <add> contributor on for weeks or months, having them make many changes to a PR <add> that will eventually be rejected. <ide> <ide> ## Who decides what? <ide>
1
Go
Go
remove ansiescape package
3f829aa6e436dc741785abd795fdcd28afd2da40
<ide><path>pkg/ansiescape/split.go <del>package ansiescape <del> <del>import "bytes" <del> <del>// dropCR drops a leading or terminal \r from the data. <del>func dropCR(data []byte) []byte { <del> if len(data) > 0 && data[len(data)-1] == '\r' { <del> data = data[0 : len(data)-1] <del> } <del> if len(data) > 0 && data[0] == '\r' { <del> data = data[1:] <del> } <del> return data <del>} <del> <del>// escapeSequenceLength calculates the length of an ANSI escape sequence <del>// If there is not enough characters to match a sequence, -1 is returned, <del>// if there is no valid sequence 0 is returned, otherwise the number <del>// of bytes in the sequence is returned. Only returns length for <del>// line moving sequences. <del>func escapeSequenceLength(data []byte) int { <del> next := 0 <del> if len(data) <= next { <del> return -1 <del> } <del> if data[next] != '[' { <del> return 0 <del> } <del> for { <del> next = next + 1 <del> if len(data) <= next { <del> return -1 <del> } <del> if (data[next] > '9' || data[next] < '0') && data[next] != ';' { <del> break <del> } <del> } <del> if len(data) <= next { <del> return -1 <del> } <del> // Only match line moving codes <del> switch data[next] { <del> case 'A', 'B', 'E', 'F', 'H', 'h': <del> return next + 1 <del> } <del> <del> return 0 <del>} <del> <del>// ScanANSILines is a scanner function which splits the <del>// input based on ANSI escape codes and new lines. <del>func ScanANSILines(data []byte, atEOF bool) (advance int, token []byte, err error) { <del> if atEOF && len(data) == 0 { <del> return 0, nil, nil <del> } <del> <del> // Look for line moving escape sequence <del> if i := bytes.IndexByte(data, '\x1b'); i >= 0 { <del> last := 0 <del> for i >= 0 { <del> last = last + i <del> <del> // get length of ANSI escape sequence <del> sl := escapeSequenceLength(data[last+1:]) <del> if sl == -1 { <del> return 0, nil, nil <del> } <del> if sl == 0 { <del> // If no relevant sequence was found, skip <del> last = last + 1 <del> i = bytes.IndexByte(data[last:], '\x1b') <del> continue <del> } <del> <del> return last + 1 + sl, dropCR(data[0:(last)]), nil <del> } <del> } <del> if i := bytes.IndexByte(data, '\n'); i >= 0 { <del> // No escape sequence, check for new line <del> return i + 1, dropCR(data[0:i]), nil <del> } <del> <del> // If we're at EOF, we have a final, non-terminated line. Return it. <del> if atEOF { <del> return len(data), dropCR(data), nil <del> } <del> // Request more data. <del> return 0, nil, nil <del>} <ide><path>pkg/ansiescape/split_test.go <del>package ansiescape <del> <del>import ( <del> "bufio" <del> "strings" <del> "testing" <del>) <del> <del>func TestSplit(t *testing.T) { <del> lines := []string{ <del> "test line 1", <del> "another test line", <del> "some test line", <del> "line with non-cursor moving sequence \x1b[1T", // Scroll Down <del> "line with \x1b[31;1mcolor\x1b[0m then reset", // "color" in Bold Red <del> "cursor forward \x1b[1C and backward \x1b[1D", <del> "invalid sequence \x1babcd", <del> "", <del> "after empty", <del> } <del> splitSequences := []string{ <del> "\x1b[1A", // Cursor up <del> "\x1b[1B", // Cursor down <del> "\x1b[1E", // Cursor next line <del> "\x1b[1F", // Cursor previous line <del> "\x1b[1;1H", // Move cursor to position <del> "\x1b[1;1h", // Move cursor to position <del> "\n", <del> "\r\n", <del> "\n\r", <del> "\x1b[1A\r", <del> "\r\x1b[1A", <del> } <del> <del> for _, sequence := range splitSequences { <del> scanner := bufio.NewScanner(strings.NewReader(strings.Join(lines, sequence))) <del> scanner.Split(ScanANSILines) <del> i := 0 <del> for scanner.Scan() { <del> if i >= len(lines) { <del> t.Fatalf("Too many scanned lines") <del> } <del> scanned := scanner.Text() <del> if scanned != lines[i] { <del> t.Fatalf("Wrong line scanned with sequence %q\n\tExpected: %q\n\tActual: %q", sequence, lines[i], scanned) <del> } <del> i++ <del> } <del> if i < len(lines) { <del> t.Errorf("Wrong number of lines for sequence %q: %d, expected %d", sequence, i, len(lines)) <del> } <del> } <del>}
2
Ruby
Ruby
convert dsl test to spec
d8e515004c68b82532401a26ae0d6a4f27844d91
<ide><path>Library/Homebrew/cask/spec/cask/dsl_spec.rb <add>require "spec_helper" <add> <add>describe Hbc::DSL do <add> let(:cask) { Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/#{token}.rb") } <add> let(:token) { "basic-cask" } <add> <add> context "stanzas" do <add> it "lets you set url, homepage, and version" do <add> expect(cask.url.to_s).to eq("http://example.com/TestCask.dmg") <add> expect(cask.homepage).to eq("http://example.com/") <add> expect(cask.version.to_s).to eq("1.2.3") <add> end <add> end <add> <add> describe "when a Cask includes an unknown method" do <add> let(:attempt_unknown_method) { <add> lambda do <add> Hbc::Cask.new("unexpected-method-cask") do <add> future_feature :not_yet_on_your_machine <add> end <add> end <add> } <add> <add> it "prints a warning that it has encountered an unexpected method" do <add> expected = Regexp.compile(<<-EOS.undent.lines.map(&:chomp).join("")) <add> (?m) <add> Warning: <add> .* <add> Unexpected method 'future_feature' called on Cask unexpected-method-cask\\. <add> .* <add> https://github.com/caskroom/homebrew-cask/blob/master/doc/reporting_bugs/pre_bug_report.md <add> .* <add> https://github.com/caskroom/homebrew-cask#reporting-bugs <add> EOS <add> <add> expect { <add> expect(attempt_unknown_method).not_to output.to_stdout <add> }.to output(expected).to_stderr <add> end <add> <add> it "will simply warn, not throw an exception" do <add> expect { <add> shutup do <add> attempt_unknown_method.call <add> end <add> }.not_to raise_error <add> end <add> end <add> <add> describe "header line" do <add> context "when invalid" do <add> let(:token) { "invalid/invalid-header-format" } <add> it "raises an error" do <add> expect { cask }.to raise_error(SyntaxError) <add> end <add> end <add> <add> context "when token does not match the file name" do <add> let(:token) { "invalid/invalid-header-token-mismatch" } <add> <add> it "raises an error" do <add> expect { <add> cask <add> }.to raise_error(Hbc::CaskTokenDoesNotMatchError, /Bad header line:.*does not match file name/) <add> end <add> end <add> <add> context "when it contains no DSL version" do <add> let(:token) { "no-dsl-version" } <add> <add> it "does not require a DSL version in the header" do <add> expect(cask.token).to eq("no-dsl-version") <add> expect(cask.url.to_s).to eq("http://example.com/TestCask.dmg") <add> expect(cask.homepage).to eq("http://example.com/") <add> expect(cask.version.to_s).to eq("1.2.3") <add> end <add> end <add> <add> context "when it contains a deprecated DSL version" do <add> let(:token) { "with-dsl-version" } <add> <add> it "may use deprecated DSL version hash syntax" do <add> allow(ENV).to receive(:[]).with("HOMEBREW_DEVELOPER").and_return(nil) <add> <add> shutup do <add> expect(cask.token).to eq("with-dsl-version") <add> expect(cask.url.to_s).to eq("http://example.com/TestCask.dmg") <add> expect(cask.homepage).to eq("http://example.com/") <add> expect(cask.version.to_s).to eq("1.2.3") <add> end <add> end <add> end <add> end <add> <add> describe "name stanza" do <add> it "lets you set the full name via a name stanza" do <add> cask = Hbc::Cask.new("name-cask") do <add> name "Proper Name" <add> end <add> <add> expect(cask.name).to eq([ <add> "Proper Name", <add> ]) <add> end <add> <add> it "Accepts an array value to the name stanza" do <add> cask = Hbc::Cask.new("array-name-cask") do <add> name ["Proper Name", "Alternate Name"] <add> end <add> <add> expect(cask.name).to eq([ <add> "Proper Name", <add> "Alternate Name", <add> ]) <add> end <add> <add> it "Accepts multiple name stanzas" do <add> cask = Hbc::Cask.new("multi-name-cask") do <add> name "Proper Name" <add> name "Alternate Name" <add> end <add> <add> expect(cask.name).to eq([ <add> "Proper Name", <add> "Alternate Name", <add> ]) <add> end <add> end <add> <add> describe "sha256 stanza" do <add> it "lets you set checksum via sha256" do <add> cask = Hbc::Cask.new("checksum-cask") do <add> sha256 "imasha2" <add> end <add> <add> expect(cask.sha256).to eq("imasha2") <add> end <add> end <add> <add> describe "language stanza" do <add> it "allows multilingual casks" do <add> cask = lambda do <add> Hbc::Cask.new("cask-with-apps") do <add> language "zh" do <add> sha256 "abc123" <add> "zh-CN" <add> end <add> <add> language "en-US", default: true do <add> sha256 "xyz789" <add> "en-US" <add> end <add> <add> url "https://example.org/#{language}.zip" <add> end <add> end <add> <add> allow(MacOS).to receive(:languages).and_return(["zh"]) <add> expect(cask.call.language).to eq("zh-CN") <add> expect(cask.call.sha256).to eq("abc123") <add> expect(cask.call.url.to_s).to eq("https://example.org/zh-CN.zip") <add> <add> allow(MacOS).to receive(:languages).and_return(["zh-XX"]) <add> expect(cask.call.language).to eq("zh-CN") <add> expect(cask.call.sha256).to eq("abc123") <add> expect(cask.call.url.to_s).to eq("https://example.org/zh-CN.zip") <add> <add> allow(MacOS).to receive(:languages).and_return(["en"]) <add> expect(cask.call.language).to eq("en-US") <add> expect(cask.call.sha256).to eq("xyz789") <add> expect(cask.call.url.to_s).to eq("https://example.org/en-US.zip") <add> <add> allow(MacOS).to receive(:languages).and_return(["xx-XX"]) <add> expect(cask.call.language).to eq("en-US") <add> expect(cask.call.sha256).to eq("xyz789") <add> expect(cask.call.url.to_s).to eq("https://example.org/en-US.zip") <add> <add> allow(MacOS).to receive(:languages).and_return(["xx-XX", "zh", "en"]) <add> expect(cask.call.language).to eq("zh-CN") <add> expect(cask.call.sha256).to eq("abc123") <add> expect(cask.call.url.to_s).to eq("https://example.org/zh-CN.zip") <add> <add> allow(MacOS).to receive(:languages).and_return(["xx-XX", "en-US", "zh"]) <add> expect(cask.call.language).to eq("en-US") <add> expect(cask.call.sha256).to eq("xyz789") <add> expect(cask.call.url.to_s).to eq("https://example.org/en-US.zip") <add> end <add> end <add> <add> describe "app stanza" do <add> it "allows you to specify app stanzas" do <add> cask = Hbc::Cask.new("cask-with-apps") do <add> app "Foo.app" <add> app "Bar.app" <add> end <add> <add> expect(Array(cask.artifacts[:app])).to eq([["Foo.app"], ["Bar.app"]]) <add> end <add> <add> it "allow app stanzas to be empty" do <add> cask = Hbc::Cask.new("cask-with-no-apps") <add> expect(Array(cask.artifacts[:app])).to eq([]) <add> end <add> end <add> <add> describe "caveats stanza" do <add> it "allows caveats to be specified via a method define" do <add> cask = Hbc::Cask.new("plain-cask") <add> <add> expect(cask.caveats).to be_empty <add> <add> cask = Hbc::Cask.new("cask-with-caveats") do <add> def caveats; <<-EOS.undent <add> When you install this Cask, you probably want to know this. <add> EOS <add> end <add> end <add> <add> expect(cask.caveats).to eq("When you install this Cask, you probably want to know this.\n") <add> end <add> end <add> <add> describe "pkg stanza" do <add> it "allows installable pkgs to be specified" do <add> cask = Hbc::Cask.new("cask-with-pkgs") do <add> pkg "Foo.pkg" <add> pkg "Bar.pkg" <add> end <add> <add> expect(Array(cask.artifacts[:pkg])).to eq([["Foo.pkg"], ["Bar.pkg"]]) <add> end <add> end <add> <add> describe "url stanza" do <add> let(:token) { "invalid/invalid-two-url" } <add> <add> it "prevents defining multiple urls" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError, /'url' stanza may only appear once/) <add> end <add> end <add> <add> describe "homepage stanza" do <add> let(:token) { "invalid/invalid-two-homepage" } <add> <add> it "prevents defining multiple homepages" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError, /'homepage' stanza may only appear once/) <add> end <add> end <add> <add> describe "version stanza" do <add> let(:token) { "invalid/invalid-two-version" } <add> it "prevents defining multiple versions" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError, /'version' stanza may only appear once/) <add> end <add> end <add> <add> describe "appcast stanza" do <add> let(:token) { "with-appcast" } <add> <add> it "allows appcasts to be specified" do <add> expect(cask.appcast.to_s).to match(/^http/) <add> end <add> <add> context "when multiple appcasts are defined" do <add> let(:token) { "invalid/invalid-appcast-multiple" } <add> <add> it "raises an error" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError, /'appcast' stanza may only appear once/) <add> end <add> end <add> <add> context "when appcast URL is invalid" do <add> let(:token) { "invalid/invalid-appcast-url" } <add> <add> it "refuses to load" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError) <add> end <add> end <add> end <add> <add> describe "GPG stanza" do <add> context "valid" do <add> let(:token) { "with-gpg" } <add> <add> it "is allowed to be specified" do <add> expect(cask.gpg.to_s).to match(/\S/) <add> end <add> end <add> <add> context "with :key_url" do <add> let(:token) { "with-gpg-key-url" } <add> it "is allowed to be specified" do <add> expect(cask.gpg.to_s).to match(/\S/) <add> end <add> end <add> <add> context "specifying mmultiple times" do <add> let(:token) { "invalid/invalid-gpg-multiple-stanzas" } <add> <add> it "is not allowed" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError, /'gpg' stanza may only appear once/) <add> end <add> end <add> <add> context "missing GPG key parameters" do <add> let(:token) { "invalid/invalid-gpg-missing-key" } <add> <add> it "refuses to load" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError, /'gpg' stanza must include exactly one/) <add> end <add> end <add> <add> context "conflicting GPG key parameters" do <add> let(:token) { "invalid/invalid-gpg-conflicting-keys" } <add> <add> it "refuses to load" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError, /'gpg' stanza must include exactly one/) <add> end <add> end <add> <add> context "invalid GPG signature URLs" do <add> let(:token) { "invalid/invalid-gpg-signature-url" } <add> <add> it "refuses to load" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError) <add> end <add> end <add> <add> context "invalid GPG key URLs" do <add> let(:token) { "invalid/invalid-gpg-key-url" } <add> <add> it "refuses to load" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError) <add> end <add> end <add> <add> context "invalid GPG key IDs" do <add> let(:token) { "invalid/invalid-gpg-key-id" } <add> <add> it "refuses to load" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError) <add> end <add> end <add> <add> context "GPG parameter is unknown" do <add> let(:token) { "invalid/invalid-gpg-parameter" } <add> <add> it "refuses to load" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError) <add> end <add> end <add> end <add> <add> describe "depends_on stanza" do <add> let(:token) { "invalid/invalid-depends-on-key" } <add> <add> it "refuses to load with an invalid depends_on key" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError) <add> end <add> end <add> <add> describe "depends_on formula" do <add> context "with one Formula" do <add> let(:token) { "with-depends-on-formula" } <add> <add> it "allows depends_on formula to be specified" do <add> expect(cask.depends_on.formula).not_to be nil <add> end <add> end <add> <add> context "with multiple Formulae" do <add> let(:token) { "with-depends-on-formula-multiple" } <add> <add> it "allows multiple depends_on formula to be specified" do <add> expect(cask.depends_on.formula).not_to be nil <add> end <add> end <add> end <add> <add> describe "depends_on cask" do <add> context "specifying one" do <add> let(:token) { "with-depends-on-cask" } <add> it "is allowed" do <add> expect(cask.depends_on.cask).not_to be nil <add> end <add> end <add> <add> context "specifying multiple" do <add> let(:token) { "with-depends-on-cask-multiple" } <add> <add> it "is allowed" do <add> expect(cask.depends_on.cask).not_to be nil <add> end <add> end <add> end <add> <add> describe "depends_on macos" do <add> context "valid" do <add> let(:token) { "with-depends-on-macos-string" } <add> <add> it "allows depends_on macos to be specified" do <add> expect(cask.depends_on.macos).not_to be nil <add> end <add> end <add> <add> context "invalid depends_on macos value" do <add> let(:token) { "invalid/invalid-depends-on-macos-bad-release" } <add> <add> it "refuses to load" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError) <add> end <add> end <add> <add> context "conflicting depends_on macos forms" do <add> let(:token) { "invalid/invalid-depends-on-macos-conflicting-forms" } <add> <add> it "refuses to load" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError) <add> end <add> end <add> end <add> <add> describe "depends_on arch" do <add> context "valid" do <add> let(:token) { "with-depends-on-arch" } <add> <add> it "is allowed to be specified" do <add> expect(cask.depends_on.arch).not_to be nil <add> end <add> end <add> <add> context "invalid depends_on arch value" do <add> let(:token) { "invalid/invalid-depends-on-arch-value" } <add> <add> it "refuses to load" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError) <add> end <add> end <add> end <add> <add> describe "depends_on x11" do <add> context "valid" do <add> let(:token) { "with-depends-on-x11" } <add> <add> it "is allowed to be specified" do <add> expect(cask.depends_on.x11).not_to be nil <add> end <add> end <add> <add> context "invalid depends_on x11 value" do <add> let(:token) { "invalid/invalid-depends-on-x11-value" } <add> <add> it "refuses to load" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError) <add> end <add> end <add> end <add> <add> describe "conflicts_with stanza" do <add> context "valid" do <add> let(:token) { "with-conflicts-with" } <add> <add> it "allows conflicts_with stanza to be specified" do <add> expect(cask.conflicts_with.formula).not_to be nil <add> end <add> end <add> <add> context "invalid conflicts_with key" do <add> let(:token) { "invalid/invalid-conflicts-with-key" } <add> <add> it "refuses to load invalid conflicts_with key" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError) <add> end <add> end <add> end <add> <add> describe "installer stanza" do <add> context "script" do <add> let(:token) { "with-installer-script" } <add> <add> it "allows installer script to be specified" do <add> expect(cask.artifacts[:installer].first.script[:executable]).to eq("/usr/bin/true") <add> expect(cask.artifacts[:installer].first.script[:args]).to eq(["--flag"]) <add> expect(cask.artifacts[:installer].to_a[1].script[:executable]).to eq("/usr/bin/false") <add> expect(cask.artifacts[:installer].to_a[1].script[:args]).to eq(["--flag"]) <add> end <add> end <add> <add> context "manual" do <add> let(:token) { "with-installer-manual" } <add> <add> it "allows installer manual to be specified" do <add> expect(cask.artifacts[:installer].first.manual).to eq("Caffeine.app") <add> end <add> end <add> end <add> <add> describe "stage_only stanza" do <add> context "when there is no other activatable artifact" do <add> let(:token) { "stage-only" } <add> <add> it "allows stage_only stanza to be specified" do <add> expect(cask.artifacts[:stage_only].first).to eq([true]) <add> end <add> end <add> <add> context "when there is are activatable artifacts" do <add> let(:token) { "invalid/invalid-stage-only-conflict" } <add> <add> it "prevents specifying stage_only" do <add> expect { cask }.to raise_error(Hbc::CaskInvalidError, /'stage_only' must be the only activatable artifact/) <add> end <add> end <add> end <add> <add> describe "auto_updates stanza" do <add> let(:token) { "auto-updates" } <add> <add> it "allows auto_updates stanza to be specified" do <add> expect(cask.auto_updates).to be true <add> end <add> end <add> <add> describe "appdir" do <add> context "interpolation of the appdir in stanzas" do <add> let(:token) { "appdir-interpolation" } <add> <add> it "is allowed" do <add> expect(cask.artifacts[:binary].first).to eq(["#{Hbc.appdir}/some/path"]) <add> end <add> end <add> <add> it "does not include a trailing slash" do <add> begin <add> original_appdir = Hbc.appdir <add> Hbc.appdir = "#{original_appdir}/" <add> <add> cask = Hbc::Cask.new("appdir-trailing-slash") do <add> binary "#{appdir}/some/path" <add> end <add> <add> expect(cask.artifacts[:binary].first).to eq(["#{original_appdir}/some/path"]) <add> ensure <add> Hbc.appdir = original_appdir <add> end <add> end <add> end <add>end <ide><path>Library/Homebrew/cask/test/cask/dsl_test.rb <del>require "test_helper" <del> <del>describe Hbc::DSL do <del> it "lets you set url, homepage, and version" do <del> test_cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/basic-cask.rb") <del> test_cask.url.to_s.must_equal "http://example.com/TestCask.dmg" <del> test_cask.homepage.must_equal "http://example.com/" <del> test_cask.version.to_s.must_equal "1.2.3" <del> end <del> <del> describe "when a Cask includes an unknown method" do <del> attempt_unknown_method = nil <del> <del> before do <del> attempt_unknown_method = lambda do <del> Hbc::Cask.new("unexpected-method-cask") do <del> future_feature :not_yet_on_your_machine <del> end <del> end <del> end <del> <del> it "prints a warning that it has encountered an unexpected method" do <del> expected = Regexp.compile(<<-EOS.undent.lines.map(&:chomp).join("")) <del> (?m) <del> Warning: <del> .* <del> Unexpected method 'future_feature' called on Cask unexpected-method-cask\\. <del> .* <del> https://github.com/caskroom/homebrew-cask/blob/master/doc/reporting_bugs/pre_bug_report.md <del> .* <del> https://github.com/caskroom/homebrew-cask#reporting-bugs <del> EOS <del> <del> attempt_unknown_method.must_output nil, expected <del> end <del> <del> it "will simply warn, not throw an exception" do <del> begin <del> shutup do <del> attempt_unknown_method.call <del> end <del> rescue StandardError => e <del> flunk("Wanted unexpected method to simply warn, but got exception #{e}") <del> end <del> end <del> end <del> <del> describe "header line" do <del> it "requires a valid header format" do <del> lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-header-format.rb") <del> }.must_raise(SyntaxError) <del> end <del> <del> it "requires the header token to match the file name" do <del> err = lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-header-token-mismatch.rb") <del> }.must_raise(Hbc::CaskTokenDoesNotMatchError) <del> err.message.must_include "Bad header line:" <del> err.message.must_include "does not match file name" <del> end <del> <del> it "does not require a DSL version in the header" do <del> test_cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/no-dsl-version.rb") <del> test_cask.token.must_equal "no-dsl-version" <del> test_cask.url.to_s.must_equal "http://example.com/TestCask.dmg" <del> test_cask.homepage.must_equal "http://example.com/" <del> test_cask.version.to_s.must_equal "1.2.3" <del> end <del> <del> it "may use deprecated DSL version hash syntax" do <del> stub = proc do |arg| <del> arg == "HOMEBREW_DEVELOPER" ? nil : ENV[arg] <del> end <del> <del> ENV.stub :[], stub do <del> shutup do <del> test_cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-dsl-version.rb") <del> test_cask.token.must_equal "with-dsl-version" <del> test_cask.url.to_s.must_equal "http://example.com/TestCask.dmg" <del> test_cask.homepage.must_equal "http://example.com/" <del> test_cask.version.to_s.must_equal "1.2.3" <del> end <del> end <del> end <del> end <del> <del> describe "name stanza" do <del> it "lets you set the full name via a name stanza" do <del> cask = Hbc::Cask.new("name-cask") do <del> name "Proper Name" <del> end <del> <del> cask.name.must_equal [ <del> "Proper Name", <del> ] <del> end <del> <del> it "Accepts an array value to the name stanza" do <del> cask = Hbc::Cask.new("array-name-cask") do <del> name ["Proper Name", "Alternate Name"] <del> end <del> <del> cask.name.must_equal [ <del> "Proper Name", <del> "Alternate Name", <del> ] <del> end <del> <del> it "Accepts multiple name stanzas" do <del> cask = Hbc::Cask.new("multi-name-cask") do <del> name "Proper Name" <del> name "Alternate Name" <del> end <del> <del> cask.name.must_equal [ <del> "Proper Name", <del> "Alternate Name", <del> ] <del> end <del> end <del> <del> describe "sha256 stanza" do <del> it "lets you set checksum via sha256" do <del> cask = Hbc::Cask.new("checksum-cask") do <del> sha256 "imasha2" <del> end <del> <del> cask.sha256.must_equal "imasha2" <del> end <del> end <del> <del> describe "language stanza" do <del> it "allows multilingual casks" do <del> cask = lambda do <del> Hbc::Cask.new("cask-with-apps") do <del> language "zh" do <del> sha256 "abc123" <del> "zh-CN" <del> end <del> <del> language "en-US", default: true do <del> sha256 "xyz789" <del> "en-US" <del> end <del> <del> url "https://example.org/#{language}.zip" <del> end <del> end <del> <del> MacOS.stub :languages, ["zh"] do <del> cask.call.language.must_equal "zh-CN" <del> cask.call.sha256.must_equal "abc123" <del> cask.call.url.to_s.must_equal "https://example.org/zh-CN.zip" <del> end <del> <del> MacOS.stub :languages, ["zh-XX"] do <del> cask.call.language.must_equal "zh-CN" <del> cask.call.sha256.must_equal "abc123" <del> cask.call.url.to_s.must_equal "https://example.org/zh-CN.zip" <del> end <del> <del> MacOS.stub :languages, ["en"] do <del> cask.call.language.must_equal "en-US" <del> cask.call.sha256.must_equal "xyz789" <del> cask.call.url.to_s.must_equal "https://example.org/en-US.zip" <del> end <del> <del> MacOS.stub :languages, ["xx-XX"] do <del> cask.call.language.must_equal "en-US" <del> cask.call.sha256.must_equal "xyz789" <del> cask.call.url.to_s.must_equal "https://example.org/en-US.zip" <del> end <del> <del> MacOS.stub :languages, ["xx-XX", "zh", "en"] do <del> cask.call.language.must_equal "zh-CN" <del> cask.call.sha256.must_equal "abc123" <del> cask.call.url.to_s.must_equal "https://example.org/zh-CN.zip" <del> end <del> <del> MacOS.stub :languages, ["xx-XX", "en-US", "zh"] do <del> cask.call.language.must_equal "en-US" <del> cask.call.sha256.must_equal "xyz789" <del> cask.call.url.to_s.must_equal "https://example.org/en-US.zip" <del> end <del> end <del> end <del> <del> describe "app stanza" do <del> it "allows you to specify app stanzas" do <del> cask = Hbc::Cask.new("cask-with-apps") do <del> app "Foo.app" <del> app "Bar.app" <del> end <del> <del> Array(cask.artifacts[:app]).must_equal [["Foo.app"], ["Bar.app"]] <del> end <del> <del> it "allow app stanzas to be empty" do <del> cask = Hbc::Cask.new("cask-with-no-apps") <del> Array(cask.artifacts[:app]).must_equal %w[] <del> end <del> end <del> <del> describe "caveats stanza" do <del> it "allows caveats to be specified via a method define" do <del> cask = Hbc::Cask.new("plain-cask") <del> <del> cask.caveats.must_be :empty? <del> <del> cask = Hbc::Cask.new("cask-with-caveats") do <del> def caveats; <<-EOS.undent <del> When you install this Cask, you probably want to know this. <del> EOS <del> end <del> end <del> <del> cask.caveats.must_equal "When you install this Cask, you probably want to know this.\n" <del> end <del> end <del> <del> describe "pkg stanza" do <del> it "allows installable pkgs to be specified" do <del> cask = Hbc::Cask.new("cask-with-pkgs") do <del> pkg "Foo.pkg" <del> pkg "Bar.pkg" <del> end <del> <del> Array(cask.artifacts[:pkg]).must_equal [["Foo.pkg"], ["Bar.pkg"]] <del> end <del> end <del> <del> describe "url stanza" do <del> it "prevents defining multiple urls" do <del> err = lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-two-url.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> err.message.must_include "'url' stanza may only appear once" <del> end <del> end <del> <del> describe "homepage stanza" do <del> it "prevents defining multiple homepages" do <del> err = lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-two-homepage.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> err.message.must_include "'homepage' stanza may only appear once" <del> end <del> end <del> <del> describe "version stanza" do <del> it "prevents defining multiple versions" do <del> err = lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-two-version.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> err.message.must_include "'version' stanza may only appear once" <del> end <del> end <del> <del> describe "appcast stanza" do <del> it "allows appcasts to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-appcast.rb") <del> cask.appcast.to_s.must_match(/^http/) <del> end <del> <del> it "prevents defining multiple appcasts" do <del> err = lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-appcast-multiple.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> err.message.must_include "'appcast' stanza may only appear once" <del> end <del> <del> it "refuses to load invalid appcast URLs" do <del> lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-appcast-url.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> end <del> end <del> <del> describe "gpg stanza" do <del> it "allows gpg stanza to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-gpg.rb") <del> cask.gpg.to_s.must_match(/\S/) <del> end <del> <del> it "allows gpg stanza to be specified with :key_url" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-gpg-key-url.rb") <del> cask.gpg.to_s.must_match(/\S/) <del> end <del> <del> it "prevents specifying gpg stanza multiple times" do <del> err = lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-gpg-multiple-stanzas.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> err.message.must_include "'gpg' stanza may only appear once" <del> end <del> <del> it "prevents missing gpg key parameters" do <del> err = lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-gpg-missing-key.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> err.message.must_include "'gpg' stanza must include exactly one" <del> end <del> <del> it "prevents conflicting gpg key parameters" do <del> err = lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-gpg-conflicting-keys.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> err.message.must_include "'gpg' stanza must include exactly one" <del> end <del> <del> it "refuses to load invalid gpg signature URLs" do <del> lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-gpg-signature-url.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> end <del> <del> it "refuses to load invalid gpg key URLs" do <del> lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-gpg-key-url.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> end <del> <del> it "refuses to load invalid gpg key IDs" do <del> lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-gpg-key-id.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> end <del> <del> it "refuses to load if gpg parameter is unknown" do <del> lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-gpg-parameter.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> end <del> end <del> <del> describe "depends_on stanza" do <del> it "refuses to load with an invalid depends_on key" do <del> lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-depends-on-key.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> end <del> end <del> <del> describe "depends_on formula" do <del> it "allows depends_on formula to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-depends-on-formula.rb") <del> cask.depends_on.formula.wont_be_nil <del> end <del> <del> it "allows multiple depends_on formula to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-depends-on-formula-multiple.rb") <del> cask.depends_on.formula.wont_be_nil <del> end <del> end <del> <del> describe "depends_on cask" do <del> it "allows depends_on cask to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-depends-on-cask.rb") <del> cask.depends_on.cask.wont_be_nil <del> end <del> <del> it "allows multiple depends_on cask to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-depends-on-cask-multiple.rb") <del> cask.depends_on.cask.wont_be_nil <del> end <del> end <del> <del> describe "depends_on macos" do <del> it "allows depends_on macos to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-depends-on-macos-string.rb") <del> cask.depends_on.macos.wont_be_nil <del> end <del> it "refuses to load with an invalid depends_on macos value" do <del> lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-depends-on-macos-bad-release.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> end <del> it "refuses to load with conflicting depends_on macos forms" do <del> lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-depends-on-macos-conflicting-forms.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> end <del> end <del> <del> describe "depends_on arch" do <del> it "allows depends_on arch to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-depends-on-arch.rb") <del> cask.depends_on.arch.wont_be_nil <del> end <del> it "refuses to load with an invalid depends_on arch value" do <del> lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-depends-on-arch-value.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> end <del> end <del> <del> describe "depends_on x11" do <del> it "allows depends_on x11 to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-depends-on-x11.rb") <del> cask.depends_on.x11.wont_be_nil <del> end <del> it "refuses to load with an invalid depends_on x11 value" do <del> lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-depends-on-x11-value.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> end <del> end <del> <del> describe "conflicts_with stanza" do <del> it "allows conflicts_with stanza to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-conflicts-with.rb") <del> cask.conflicts_with.formula.wont_be_nil <del> end <del> <del> it "refuses to load invalid conflicts_with key" do <del> lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-conflicts-with-key.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> end <del> end <del> <del> describe "installer stanza" do <del> it "allows installer script to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-installer-script.rb") <del> cask.artifacts[:installer].first.script[:executable].must_equal "/usr/bin/true" <del> cask.artifacts[:installer].first.script[:args].must_equal ["--flag"] <del> cask.artifacts[:installer].to_a[1].script[:executable].must_equal "/usr/bin/false" <del> cask.artifacts[:installer].to_a[1].script[:args].must_equal ["--flag"] <del> end <del> it "allows installer manual to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/with-installer-manual.rb") <del> cask.artifacts[:installer].first.manual.must_equal "Caffeine.app" <del> end <del> end <del> <del> describe "stage_only stanza" do <del> it "allows stage_only stanza to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/stage-only.rb") <del> cask.artifacts[:stage_only].first.must_equal [true] <del> end <del> <del> it "prevents specifying stage_only with other activatables" do <del> err = lambda { <del> Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/invalid/invalid-stage-only-conflict.rb") <del> }.must_raise(Hbc::CaskInvalidError) <del> err.message.must_include "'stage_only' must be the only activatable artifact" <del> end <del> end <del> <del> describe "auto_updates stanza" do <del> it "allows auto_updates stanza to be specified" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/auto-updates.rb") <del> cask.auto_updates.must_equal true <del> end <del> end <del> <del> describe "appdir" do <del> it "allows interpolation of the appdir value in stanzas" do <del> cask = Hbc::CaskLoader.load_from_file(TEST_FIXTURE_DIR/"cask/Casks/appdir-interpolation.rb") <del> cask.artifacts[:binary].first.must_equal ["#{Hbc.appdir}/some/path"] <del> end <del> <del> it "does not include a trailing slash" do <del> original_appdir = Hbc.appdir <del> Hbc.appdir = "#{original_appdir}/" <del> <del> begin <del> cask = Hbc::Cask.new("appdir-trailing-slash") do <del> binary "#{appdir}/some/path" <del> end <del> <del> cask.artifacts[:binary].first.must_equal ["#{original_appdir}/some/path"] <del> ensure <del> Hbc.appdir = original_appdir <del> end <del> end <del> end <del>end
2
Javascript
Javascript
replace string replace with string templates
ac46837d356738984b70e1591678f6b325fd15a1
<ide><path>packages/react-native-codegen/src/generators/modules/GenerateModuleJavaSpec.js <ide> const {unwrapNullable} = require('../../parsers/flow/modules/utils'); <ide> <ide> type FilesOutput = Map<string, string>; <ide> <del>const moduleTemplate = ` <add>const FileTemplate = ({ <add> packageName, <add> className, <add> methods, <add> imports, <add>}: $ReadOnly<{| <add> packageName: string, <add> className: string, <add> methods: string, <add> imports: string, <add>|}>) => { <add> return ` <ide> /** <ide> * ${'C'}opyright (c) Facebook, Inc. and its affiliates. <ide> * <ide> const moduleTemplate = ` <ide> * @nolint <ide> */ <ide> <del>package ::_PACKAGENAME_::; <add>package ${packageName}; <ide> <del>::_IMPORTS_:: <add>${imports} <ide> <del>public abstract class ::_CLASSNAME_:: extends ReactContextBaseJavaModule implements ReactModuleWithSpec, TurboModule { <del> public ::_CLASSNAME_::(ReactApplicationContext reactContext) { <add>public abstract class ${className} extends ReactContextBaseJavaModule implements ReactModuleWithSpec, TurboModule { <add> public ${className}(ReactApplicationContext reactContext) { <ide> super(reactContext); <ide> } <ide> <del>::_METHODS_:: <add>${methods} <ide> } <ide> `; <add>}; <ide> <ide> function translateFunctionParamToJavaType( <ide> param: NativeModuleMethodParamSchema, <ide> module.exports = { <ide> <ide> files.set( <ide> `${className}.java`, <del> moduleTemplate <del> .replace( <del> /::_IMPORTS_::/g, <del> Array.from(imports) <del> .sort() <del> .map(p => `import ${p};`) <del> .join('\n'), <del> ) <del> .replace(/::_PACKAGENAME_::/g, packageName) <del> .replace(/::_CLASSNAME_::/g, className) <del> .replace(/::_METHODS_::/g, methods.filter(m => !!m).join('\n\n')), <add> FileTemplate({ <add> packageName, <add> className, <add> methods: methods.filter(Boolean).join('\n\n'), <add> imports: Array.from(imports) <add> .sort() <add> .map(p => `import ${p};`) <add> .join('\n'), <add> }), <ide> ); <ide> }); <ide>
1
Text
Text
add v3.6.0-beta.4 to changelog
2765c4806f805100cf2ab8693eb7bb9a31922df1
<ide><path>CHANGELOG.md <ide> # Ember Changelog <ide> <add>### v3.6.0-beta.4 (November 12, 2018) <add> <add>- [#17184](https://github.com/emberjs/ember.js/pull/17184) [BUGFIX] Ensures removeAllListeners does not break subsequent adds <add>- [#17186](https://github.com/emberjs/ember.js/pull/17186) [BUGFIX] Fix RouteInfo QP mutability <add>- [#17192](https://github.com/emberjs/ember.js/pull/17192) [BUGFIX] currentRoute should respect substates <add> <ide> ### v3.6.0-beta.3 (November 5, 2018) <ide> <ide> - [#17169](https://github.com/emberjs/ember.js/pull/17169) [BUGFIX] Add default implementations of Component lifecycle hooks
1
Javascript
Javascript
add extra anti-dos tech to net.server
38dde9684f43dff636d1b798e7537b93d2b6b7fd
<ide><path>benchmark/idle_clients.js <ide> var errors = 0, connections = 0; <ide> <ide> var lastClose = 0; <ide> <del>function maybeConnect (s) { <del> var now = new Date(); <del> if (now - lastClose > 5000) { <del> // Just connect immediately <del> connect(); <del> } else { <del> // Otherwise wait a little - see if this one is connected still. Just to <del> // avoid spinning at 100% cpu when the server totally rejects our <del> // connections. <del> setTimeout(function () { <del> if (s.writable && s.readable) connect(); <del> }, 100); <del> } <del>} <del> <ide> function connect () { <ide> process.nextTick(function () { <ide> var s = net.Stream(); <ide> function connect () { <ide> s.on('connect', function () { <ide> gotConnected = true; <ide> connections++; <del> maybeConnect(s); <add> connect(); <ide> }); <ide> <ide> s.on('close', function () { <ide><path>lib/net.js <ide> function Server (/* [ options, ] listener */) { <ide> if (typeof arguments[0] == "object") { <ide> options = arguments[0]; <ide> } <del> <add> <ide> // listener: find the last argument that is a function <ide> for (var l = arguments.length - 1; l >= 0; l--) { <ide> if (typeof arguments[l] == "function") { <ide> function Server (/* [ options, ] listener */) { <ide> // Just in case we don't have a dummy fd. <ide> if (!self._dummyFD) self._getDummyFD(); <ide> <add> if (self._acceptTimer) { <add> // Somehow the watcher got started again. Need to wait until <add> // the timer finishes. <add> self.watcher.stop(); <add> } <add> <ide> while (self.fd) { <ide> try { <ide> var peerInfo = accept(self.fd); <ide> } catch (e) { <del> if (e.errno == EMFILE) { <del> // Output a warning, but only at most every 5 seconds. <del> var now = new Date(); <del> if (now - self._lastEMFILEWarning > 5000) { <del> console.error("(node) Hit max file limit. Increase 'ulimit -n'."); <del> } <add> if (e.errno != EMFILE) throw e; <add> <add> // Output a warning, but only at most every 5 seconds. <add> var now = new Date(); <add> if (now - self._lastEMFILEWarning > 5000) { <add> console.error("(node) Hit max file limit. Increase 'ulimit -n'."); <ide> self._lastEMFILEWarning = now; <add> } <ide> <del> // Gracefully reject pending clients by freeing up a file <del> // descriptor. <del> if (self._dummyFD) { <del> close(self._dummyFD); <del> self._dummyFD = null; <del> while (true) { <del> peerInfo = accept(self.fd); <del> if (!peerInfo) break; <del> close(peerInfo.fd); <add> var acceptCount = 0; <add> <add> // Gracefully reject pending clients by freeing up a file <add> // descriptor. <add> if (self._dummyFD) { <add> close(self._dummyFD); // Free up an fd <add> self._dummyFD = null; <add> // Accept and close the waiting clients one at a time. <add> // Single threaded programming ftw. <add> while (true) { <add> peerInfo = accept(self.fd); <add> if (!peerInfo) break; <add> close(peerInfo.fd); <add> <add> // Don't become DoS'd by incoming requests <add> if (++acceptCount > 50) { <add> assert(!self._acceptTimer); <add> self.watcher.stop(); <add> // Wait a second before accepting more. <add> self._acceptTimer = setTimeout(function () { <add> assert(parseInt(self.fd) >= 0); <add> self._acceptTimer = null; <add> self.watcher.start(); <add> }, 1000); <add> break; <ide> } <del> self._getDummyFD(); <ide> } <del> return; <add> // Reacquire the dummy fd <add> self._getDummyFD(); <ide> } <del> throw e; <add> return; <ide> } <ide> if (!peerInfo) return; <ide>
2
Go
Go
get mtu from default route
ff4e58ff560d864432261a68502a8a5543145fdf
<ide><path>daemon/daemon.go <ide> import ( <ide> "github.com/docker/docker/registry" <ide> "github.com/docker/docker/runconfig" <ide> "github.com/docker/docker/trust" <add> "github.com/docker/libcontainer/netlink" <ide> "github.com/docker/libnetwork" <ide> ) <ide> <ide> func (daemon *Daemon) RegisterLinks(container *Container, hostConfig *runconfig. <ide> } <ide> <ide> func NewDaemon(config *Config, registryService *registry.Service) (daemon *Daemon, err error) { <add> setDefaultMtu(config) <add> <ide> // Ensure we have compatible configuration options <ide> if err := checkConfigOptions(config); err != nil { <ide> return nil, err <ide> func (daemon *Daemon) newBaseContainer(id string) CommonContainer { <ide> root: daemon.containerRoot(id), <ide> } <ide> } <add> <add>func setDefaultMtu(config *Config) { <add> // do nothing if the config does not have the default 0 value. <add> if config.Mtu != 0 { <add> return <add> } <add> config.Mtu = defaultNetworkMtu <add> if routeMtu, err := getDefaultRouteMtu(); err == nil { <add> config.Mtu = routeMtu <add> } <add>} <add> <add>var errNoDefaultRoute = errors.New("no default route was found") <add> <add>// getDefaultRouteMtu returns the MTU for the default route's interface. <add>func getDefaultRouteMtu() (int, error) { <add> routes, err := netlink.NetworkGetRoutes() <add> if err != nil { <add> return 0, err <add> } <add> for _, r := range routes { <add> if r.Default { <add> return r.Iface.MTU, nil <add> } <add> } <add> return 0, errNoDefaultRoute <add>}
1
Python
Python
add solution method for project_euler/problem_37
6541236fdf807a185a7eaf5c159c45fa885d85fb
<ide><path>project_euler/problem_37/sol1.py <ide> def compute_truncated_primes(count: int = 11) -> list[int]: <ide> return list_truncated_primes <ide> <ide> <add>def solution() -> int: <add> """ <add> Returns the sum of truncated primes <add> """ <add> return sum(compute_truncated_primes(11)) <add> <add> <ide> if __name__ == "__main__": <ide> print(f"{sum(compute_truncated_primes(11)) = }")
1
PHP
PHP
remove uneeded implementedevents()
01e44dc6f63046eddc8f46a7f3186dd64ad05f15
<ide><path>src/Controller/Component/AuthComponent.php <ide> public function startup(Event $event) <ide> return $this->_unauthorized($controller); <ide> } <ide> <del> /** <del> * Events supported by this component. <del> * <del> * @return array <del> */ <del> public function implementedEvents() <del> { <del> return [ <del> 'Controller.startup' => 'startup', <del> ]; <del> } <del> <ide> /** <ide> * Checks whether current action is accessible without authentication. <ide> *
1
Ruby
Ruby
initialize instance variables
5ddb60f2b35e67b500efbc2da0c752a64e36385b
<ide><path>activerecord/lib/active_record/base.rb <ide> def initialize(attributes = nil, options = {}) <ide> @marked_for_destruction = false <ide> @previously_changed = {} <ide> @changed_attributes = {} <add> @relation = nil <ide> <ide> ensure_proper_type <ide> set_serialized_attributes <ide> def encode_with(coder) <ide> # post.title # => 'hello world' <ide> def init_with(coder) <ide> @attributes = coder['attributes'] <add> @relation = nil <ide> <ide> set_serialized_attributes <ide>
1
Javascript
Javascript
remove event config
b6df4417c79c11cfb44f965fab55b573882b1d54
<ide><path>packages/react-dom/src/client/ReactDOMComponent.js <ide> import { <ide> registrationNameDependencies, <ide> possibleRegistrationNames, <del>} from '../events/EventPluginRegistry'; <add>} from '../events/EventRegistry'; <ide> import {canUseDOM} from 'shared/ExecutionEnvironment'; <ide> import invariant from 'shared/invariant'; <ide> import { <ide><path>packages/react-dom/src/events/DOMEventProperties.js <ide> import type { <ide> TopLevelType, <ide> DOMTopLevelEventType, <ide> } from '../events/TopLevelEventTypes'; <del>import type {EventTypes} from '../events/PluginModuleType'; <ide> <add>import {registerTwoPhaseEvent} from './EventRegistry'; <ide> import * as DOMTopLevelEventTypes from './DOMTopLevelEventTypes'; <ide> import { <ide> DiscreteEvent, <ide> import { <ide> <ide> import {enableCreateEventHandleAPI} from 'shared/ReactFeatureFlags'; <ide> <del>// Needed for SimpleEventPlugin, rather than <del>// do it in two places, which duplicates logic <del>// and increases the bundle size, we do it all <del>// here once. If we remove or refactor the <del>// SimpleEventPlugin, we should also remove or <del>// update the below line. <del>export const simpleEventPluginEventTypes: EventTypes = {}; <del> <ide> export const topLevelEventsToReactNames: Map< <ide> TopLevelType, <ide> string | null, <ide> const continuousPairsForSimpleEventPlugin = [ <ide> /** <ide> * Turns <ide> * ['abort', ...] <add> * <ide> * into <del> * eventTypes = { <del> * 'abort': { <del> * phasedRegistrationNames: { <del> * bubbled: 'onAbort', <del> * captured: 'onAbortCapture', <del> * }, <del> * dependencies: [TOP_ABORT], <del> * }, <del> * ... <del> * }; <add> * <ide> * topLevelEventsToReactNames = new Map([ <ide> * [TOP_ABORT, 'onAbort'], <ide> * ]); <add> * <add> * and registers them. <ide> */ <del> <del>function processSimpleEventPluginPairsByPriority( <add>function registerSimplePluginEventsAndSetTheirPriorities( <ide> eventTypes: Array<DOMTopLevelEventType | string>, <ide> priority: EventPriority, <ide> ): void { <ide> function processSimpleEventPluginPairsByPriority( <ide> const topEvent = ((eventTypes[i]: any): DOMTopLevelEventType); <ide> const event = ((eventTypes[i + 1]: any): string); <ide> const capitalizedEvent = event[0].toUpperCase() + event.slice(1); <del> const onEvent = 'on' + capitalizedEvent; <del> <del> const config = { <del> phasedRegistrationNames: { <del> bubbled: onEvent, <del> captured: onEvent + 'Capture', <del> }, <del> dependencies: [topEvent], <del> eventPriority: priority, <del> }; <add> const reactName = 'on' + capitalizedEvent; <ide> eventPriorities.set(topEvent, priority); <del> topLevelEventsToReactNames.set(topEvent, onEvent); <del> simpleEventPluginEventTypes[event] = config; <add> topLevelEventsToReactNames.set(topEvent, reactName); <add> registerTwoPhaseEvent(reactName, [topEvent]); <ide> } <ide> } <ide> <del>function processTopEventPairsByPriority( <add>function setEventPriorities( <ide> eventTypes: Array<DOMTopLevelEventType | string>, <ide> priority: EventPriority, <ide> ): void { <ide> function processTopEventPairsByPriority( <ide> } <ide> } <ide> <del>// SimpleEventPlugin <del>processSimpleEventPluginPairsByPriority( <del> discreteEventPairsForSimpleEventPlugin, <del> DiscreteEvent, <del>); <del>processSimpleEventPluginPairsByPriority( <del> userBlockingPairsForSimpleEventPlugin, <del> UserBlockingEvent, <del>); <del>processSimpleEventPluginPairsByPriority( <del> continuousPairsForSimpleEventPlugin, <del> ContinuousEvent, <del>); <del>// Not used by SimpleEventPlugin <del>processTopEventPairsByPriority(otherDiscreteEvents, DiscreteEvent); <del> <ide> export function getEventPriorityForPluginSystem( <ide> topLevelType: TopLevelType, <ide> ): EventPriority { <ide> export function getEventPriorityForListenerSystem( <ide> } <ide> return ContinuousEvent; <ide> } <add> <add>export function registerSimpleEvents() { <add> registerSimplePluginEventsAndSetTheirPriorities( <add> discreteEventPairsForSimpleEventPlugin, <add> DiscreteEvent, <add> ); <add> registerSimplePluginEventsAndSetTheirPriorities( <add> userBlockingPairsForSimpleEventPlugin, <add> UserBlockingEvent, <add> ); <add> registerSimplePluginEventsAndSetTheirPriorities( <add> continuousPairsForSimpleEventPlugin, <add> ContinuousEvent, <add> ); <add> setEventPriorities(otherDiscreteEvents, DiscreteEvent); <add>} <ide><path>packages/react-dom/src/events/DOMModernPluginEventSystem.js <ide> import type { <ide> import type {EventPriority, ReactScopeInstance} from 'shared/ReactTypes'; <ide> import type {Fiber} from 'react-reconciler/src/ReactInternalTypes'; <ide> <del>import { <del> injectEventPlugin, <del> registrationNameDependencies, <del>} from './EventPluginRegistry'; <add>import {registrationNameDependencies} from './EventRegistry'; <ide> import { <ide> PLUGIN_EVENT_SYSTEM, <ide> LEGACY_FB_SUPPORT, <ide> import * as ModernSelectEventPlugin from './plugins/ModernSelectEventPlugin'; <ide> import * as ModernSimpleEventPlugin from './plugins/ModernSimpleEventPlugin'; <ide> <ide> // TODO: remove top-level side effect. <del>injectEventPlugin(ModernSimpleEventPlugin.eventTypes); <del>injectEventPlugin(ModernEnterLeaveEventPlugin.eventTypes); <del>injectEventPlugin(ModernChangeEventPlugin.eventTypes); <del>injectEventPlugin(ModernSelectEventPlugin.eventTypes); <del>injectEventPlugin(ModernBeforeInputEventPlugin.eventTypes); <add>ModernSimpleEventPlugin.registerEvents(); <add>ModernEnterLeaveEventPlugin.registerEvents(); <add>ModernChangeEventPlugin.registerEvents(); <add>ModernSelectEventPlugin.registerEvents(); <add>ModernBeforeInputEventPlugin.registerEvents(); <ide> <ide> function extractEvents( <ide> dispatchQueue: DispatchQueue, <add><path>packages/react-dom/src/events/EventRegistry.js <del><path>packages/react-dom/src/events/EventPluginRegistry.js <ide> */ <ide> <ide> import type {TopLevelType} from './TopLevelEventTypes'; <del>import type {EventTypes} from './PluginModuleType'; <ide> <ide> /** <ide> * Mapping from registration name to event name <ide> export const registrationNameDependencies = {}; <ide> export const possibleRegistrationNames = __DEV__ ? {} : (null: any); <ide> // Trust the developer to only use possibleRegistrationNames in __DEV__ <ide> <del>function publishEventForPlugin( <del> eventTypes: EventTypes, <del> eventName: string, <del>): boolean { <del> const dispatchConfig = eventTypes[eventName]; <del> const phasedRegistrationNames = dispatchConfig.phasedRegistrationNames; <del> if (phasedRegistrationNames) { <del> for (const phaseName in phasedRegistrationNames) { <del> if (phasedRegistrationNames.hasOwnProperty(phaseName)) { <del> const phasedRegistrationName = phasedRegistrationNames[phaseName]; <del> publishRegistrationName( <del> phasedRegistrationName, <del> eventTypes[eventName].dependencies, <del> ); <del> } <del> } <del> return true; <del> } else if (dispatchConfig.registrationName) { <del> publishRegistrationName( <del> dispatchConfig.registrationName, <del> eventTypes[eventName].dependencies, <del> ); <del> return true; <del> } <del> return false; <add>export function registerTwoPhaseEvent( <add> registrationName: string, <add> dependencies: ?Array<TopLevelType>, <add>): void { <add> registerDirectEvent(registrationName, dependencies); <add> registerDirectEvent(registrationName + 'Capture', dependencies); <ide> } <ide> <del>function publishRegistrationName( <add>export function registerDirectEvent( <ide> registrationName: string, <ide> dependencies: ?Array<TopLevelType>, <del>): void { <add>) { <ide> if (__DEV__) { <ide> if (registrationNameDependencies[registrationName]) { <ide> console.error( <del> 'EventPluginRegistry: More than one plugin attempted to publish the same ' + <add> 'EventRegistry: More than one plugin attempted to publish the same ' + <ide> 'registration name, `%s`.', <ide> registrationName, <ide> ); <ide> function publishRegistrationName( <ide> } <ide> } <ide> } <del> <del>export function injectEventPlugin(eventTypes: EventTypes): void { <del> for (const eventName in eventTypes) { <del> publishEventForPlugin(eventTypes, eventName); <del> } <del>} <ide><path>packages/react-dom/src/events/PluginModuleType.js <ide> */ <ide> <ide> import type {Fiber} from 'react-reconciler/src/ReactInternalTypes'; <del>import type { <del> DispatchConfig, <del> ReactSyntheticEvent, <del>} from './ReactSyntheticEventType'; <del> <del>export type EventTypes = {[key: string]: DispatchConfig, ...}; <add>import type {ReactSyntheticEvent} from './ReactSyntheticEventType'; <ide> <ide> export type AnyNativeEvent = Event | KeyboardEvent | MouseEvent | TouchEvent; <ide> <ide><path>packages/react-dom/src/events/plugins/ModernBeforeInputEventPlugin.js <ide> import type {TopLevelType} from '../../events/TopLevelEventTypes'; <ide> <ide> import {canUseDOM} from 'shared/ExecutionEnvironment'; <ide> <add>import {registerTwoPhaseEvent} from '../EventRegistry'; <ide> import { <ide> TOP_BLUR, <ide> TOP_COMPOSITION_START, <ide> const useFallbackCompositionData = <ide> const SPACEBAR_CODE = 32; <ide> const SPACEBAR_CHAR = String.fromCharCode(SPACEBAR_CODE); <ide> <del>// Events and their corresponding property names. <del>const eventTypes: EventTypes = { <del> beforeInput: { <del> phasedRegistrationNames: { <del> bubbled: 'onBeforeInput', <del> captured: 'onBeforeInputCapture', <del> }, <del> dependencies: [ <del> TOP_COMPOSITION_END, <del> TOP_KEY_PRESS, <del> TOP_TEXT_INPUT, <del> TOP_PASTE, <del> ], <del> }, <del> compositionEnd: { <del> phasedRegistrationNames: { <del> bubbled: 'onCompositionEnd', <del> captured: 'onCompositionEndCapture', <del> }, <del> dependencies: [ <del> TOP_BLUR, <del> TOP_COMPOSITION_END, <del> TOP_KEY_DOWN, <del> TOP_KEY_PRESS, <del> TOP_KEY_UP, <del> TOP_MOUSE_DOWN, <del> ], <del> }, <del> compositionStart: { <del> phasedRegistrationNames: { <del> bubbled: 'onCompositionStart', <del> captured: 'onCompositionStartCapture', <del> }, <del> dependencies: [ <del> TOP_BLUR, <del> TOP_COMPOSITION_START, <del> TOP_KEY_DOWN, <del> TOP_KEY_PRESS, <del> TOP_KEY_UP, <del> TOP_MOUSE_DOWN, <del> ], <del> }, <del> compositionUpdate: { <del> phasedRegistrationNames: { <del> bubbled: 'onCompositionUpdate', <del> captured: 'onCompositionUpdateCapture', <del> }, <del> dependencies: [ <del> TOP_BLUR, <del> TOP_COMPOSITION_UPDATE, <del> TOP_KEY_DOWN, <del> TOP_KEY_PRESS, <del> TOP_KEY_UP, <del> TOP_MOUSE_DOWN, <del> ], <del> }, <del>}; <add>function registerEvents() { <add> registerTwoPhaseEvent('onBeforeInput', [ <add> TOP_COMPOSITION_END, <add> TOP_KEY_PRESS, <add> TOP_TEXT_INPUT, <add> TOP_PASTE, <add> ]); <add> registerTwoPhaseEvent('onCompositionEnd', [ <add> TOP_BLUR, <add> TOP_COMPOSITION_END, <add> TOP_KEY_DOWN, <add> TOP_KEY_PRESS, <add> TOP_KEY_UP, <add> TOP_MOUSE_DOWN, <add> ]); <add> registerTwoPhaseEvent('onCompositionStart', [ <add> TOP_BLUR, <add> TOP_COMPOSITION_START, <add> TOP_KEY_DOWN, <add> TOP_KEY_PRESS, <add> TOP_KEY_UP, <add> TOP_MOUSE_DOWN, <add> ]); <add> registerTwoPhaseEvent('onCompositionUpdate', [ <add> TOP_BLUR, <add> TOP_COMPOSITION_UPDATE, <add> TOP_KEY_DOWN, <add> TOP_KEY_PRESS, <add> TOP_KEY_UP, <add> TOP_MOUSE_DOWN, <add> ]); <add>} <ide> <ide> // Track whether we've ever handled a keypress on the space key. <ide> let hasSpaceKeypress = false; <ide> function extractEvents( <ide> ); <ide> } <ide> <del>export {eventTypes, extractEvents}; <add>export {registerEvents, extractEvents}; <ide><path>packages/react-dom/src/events/plugins/ModernChangeEventPlugin.js <ide> * <ide> * @flow <ide> */ <del>import type {AnyNativeEvent, EventTypes} from '../PluginModuleType'; <add>import type {AnyNativeEvent} from '../PluginModuleType'; <ide> import type {TopLevelType} from '../TopLevelEventTypes'; <ide> import type {DispatchQueue} from '../PluginModuleType'; <ide> import type {EventSystemFlags} from '../EventSystemFlags'; <ide> <add>import {registerTwoPhaseEvent} from '../EventRegistry'; <ide> import SyntheticEvent from '../SyntheticEvent'; <ide> import isTextInputElement from '../isTextInputElement'; <ide> import {canUseDOM} from 'shared/ExecutionEnvironment'; <ide> import { <ide> accumulateTwoPhaseListeners, <ide> } from '../DOMModernPluginEventSystem'; <ide> <del>const eventTypes: EventTypes = { <del> change: { <del> phasedRegistrationNames: { <del> bubbled: 'onChange', <del> captured: 'onChangeCapture', <del> }, <del> dependencies: [ <del> TOP_BLUR, <del> TOP_CHANGE, <del> TOP_CLICK, <del> TOP_FOCUS, <del> TOP_INPUT, <del> TOP_KEY_DOWN, <del> TOP_KEY_UP, <del> TOP_SELECTION_CHANGE, <del> ], <del> }, <del>}; <add>function registerEvents() { <add> registerTwoPhaseEvent('onChange', [ <add> TOP_BLUR, <add> TOP_CHANGE, <add> TOP_CLICK, <add> TOP_FOCUS, <add> TOP_INPUT, <add> TOP_KEY_DOWN, <add> TOP_KEY_UP, <add> TOP_SELECTION_CHANGE, <add> ]); <add>} <ide> <ide> function createAndAccumulateChangeEvent( <ide> dispatchQueue, <ide> function extractEvents( <ide> } <ide> } <ide> <del>export {eventTypes, extractEvents}; <add>export {registerEvents, extractEvents}; <ide><path>packages/react-dom/src/events/plugins/ModernEnterLeaveEventPlugin.js <ide> * LICENSE file in the root directory of this source tree. <ide> */ <ide> <add>import {registerDirectEvent} from '../EventRegistry'; <ide> import { <ide> TOP_MOUSE_OUT, <ide> TOP_MOUSE_OVER, <ide> import {accumulateEnterLeaveListeners} from '../DOMModernPluginEventSystem'; <ide> import {HostComponent, HostText} from 'react-reconciler/src/ReactWorkTags'; <ide> import {getNearestMountedFiber} from 'react-reconciler/src/ReactFiberTreeReflection'; <ide> <del>const eventTypes: EventTypes = { <del> mouseEnter: { <del> registrationName: 'onMouseEnter', <del> dependencies: [TOP_MOUSE_OUT, TOP_MOUSE_OVER], <del> }, <del> mouseLeave: { <del> registrationName: 'onMouseLeave', <del> dependencies: [TOP_MOUSE_OUT, TOP_MOUSE_OVER], <del> }, <del> pointerEnter: { <del> registrationName: 'onPointerEnter', <del> dependencies: [TOP_POINTER_OUT, TOP_POINTER_OVER], <del> }, <del> pointerLeave: { <del> registrationName: 'onPointerLeave', <del> dependencies: [TOP_POINTER_OUT, TOP_POINTER_OVER], <del> }, <del>}; <add>function registerEvents() { <add> registerDirectEvent('onMouseEnter', [TOP_MOUSE_OUT, TOP_MOUSE_OVER]); <add> registerDirectEvent('onMouseLeave', [TOP_MOUSE_OUT, TOP_MOUSE_OVER]); <add> registerDirectEvent('onPointerEnter', [TOP_POINTER_OUT, TOP_POINTER_OVER]); <add> registerDirectEvent('onPointerLeave', [TOP_POINTER_OUT, TOP_POINTER_OVER]); <add>} <ide> <ide> /** <ide> * For almost every interaction we care about, there will be both a top-level <ide> function extractEvents( <ide> accumulateEnterLeaveListeners(dispatchQueue, leave, enter, from, to); <ide> } <ide> <del>export {eventTypes, extractEvents}; <add>export {registerEvents, extractEvents}; <ide><path>packages/react-dom/src/events/plugins/ModernSelectEventPlugin.js <ide> import SyntheticEvent from '../../events/SyntheticEvent'; <ide> import isTextInputElement from '../isTextInputElement'; <ide> import shallowEqual from 'shared/shallowEqual'; <ide> <add>import {registerTwoPhaseEvent} from '../EventRegistry'; <ide> import { <ide> TOP_BLUR, <ide> TOP_CONTEXT_MENU, <ide> const rootTargetDependencies = [ <ide> TOP_MOUSE_UP, <ide> ]; <ide> <del>const eventTypes: EventTypes = { <del> select: { <del> phasedRegistrationNames: { <del> bubbled: 'onSelect', <del> captured: 'onSelectCapture', <del> }, <del> dependencies: [...rootTargetDependencies, TOP_SELECTION_CHANGE], <del> }, <del>}; <add>function registerEvents() { <add> registerTwoPhaseEvent('onSelect', [ <add> ...rootTargetDependencies, <add> TOP_SELECTION_CHANGE, <add> ]); <add>} <ide> <ide> let activeElement = null; <ide> let activeElementInst = null; <ide> function isListeningToEvent( <ide> return listenerMap.has(listenerMapKey); <ide> } <ide> <add>/** <add> * This plugin creates an `onSelect` event that normalizes select events <add> * across form elements. <add> * <add> * Supported elements are: <add> * - input (see `isTextInputElement`) <add> * - textarea <add> * - contentEditable <add> * <add> * This differs from native browser implementations in the following ways: <add> * - Fires on contentEditable fields as well as inputs. <add> * - Fires for collapsed selection. <add> * - Fires after user input. <add> */ <ide> function extractEvents( <ide> dispatchQueue, <ide> topLevelType, <ide> function extractEvents( <ide> return; <ide> } <ide> <del>/** <del> * This plugin creates an `onSelect` event that normalizes select events <del> * across form elements. <del> * <del> * Supported elements are: <del> * - input (see `isTextInputElement`) <del> * - textarea <del> * - contentEditable <del> * <del> * This differs from native browser implementations in the following ways: <del> * - Fires on contentEditable fields as well as inputs. <del> * - Fires for collapsed selection. <del> * - Fires after user input. <del> */ <del>export {eventTypes, extractEvents}; <add>export {registerEvents, extractEvents}; <ide><path>packages/react-dom/src/events/plugins/ModernSimpleEventPlugin.js <ide> import SyntheticEvent from '../../events/SyntheticEvent'; <ide> import * as DOMTopLevelEventTypes from '../DOMTopLevelEventTypes'; <ide> import { <ide> topLevelEventsToReactNames, <del> simpleEventPluginEventTypes, <add> registerSimpleEvents, <ide> } from '../DOMEventProperties'; <ide> import { <ide> accumulateTwoPhaseListeners, <ide> function extractEvents( <ide> return event; <ide> } <ide> <del>export { <del> // simpleEventPluginEventTypes gets populated from <del> // the DOMEventProperties module. <del> simpleEventPluginEventTypes as eventTypes, <del> extractEvents, <del>}; <add>export {registerSimpleEvents as registerEvents, extractEvents};
10
PHP
PHP
add test for entity clean()
6b8245dfebf0ce519263b326e8a0d84d9049da1e
<ide><path>tests/TestCase/ORM/TableTest.php <ide> public function testSavePrimaryKeyEntityExists() <ide> $this->assertSame($entity, $table->save($entity)); <ide> } <ide> <del> <ide> /** <ide> * Test that save works with replace saveStrategy and are not deleted once they are not null <ide> * <ide> public function testAtomicSaveRollbackOnFailure() <ide> $table->save($data); <ide> } <ide> <add> <ide> /** <ide> * Tests that only the properties marked as dirty are actually saved <ide> * to the database <ide> public function testSaveCorrectPrimaryKeyType() <ide> $this->assertSame(self::$nextUserId, $entity->id); <ide> } <ide> <add> /** <add> * Tests entity clean() <add> * <add> * @return void <add> */ <add> public function testEntityClean() <add> { <add> $table = TableRegistry::get('Articles'); <add> $validator = $table->validator()->requirePresence('body'); <add> $entity = $table->newEntity(['title' => 'mark']); <add> <add> $entity->dirty('title', true); <add> $entity->invalid('title', 'albert'); <add> <add> $this->assertNotEmpty($entity->errors()); <add> $this->assertTrue($entity->dirty()); <add> $this->assertEquals(['title' => 'albert'], $entity->invalid()); <add> <add> $entity->title = 'alex'; <add> $this->assertSame($entity->getOriginal('title'), 'mark'); <add> <add> $entity->clean(); <add> <add> $this->assertEmpty($entity->errors()); <add> $this->assertFalse($entity->dirty()); <add> $this->assertEquals([], $entity->invalid()); <add> $this->assertSame($entity->getOriginal('title'), 'alex'); <add> } <add> <ide> /** <ide> * Tests the loadInto() method <ide> *
1
Go
Go
replace flaky testfollowlogshandledecodeerr
c91e09bee27fe4e4e106c4959400eb12d8adbedb
<ide><path>daemon/logger/loggerutils/follow_test.go <add>package loggerutils // import "github.com/docker/docker/daemon/logger/loggerutils" <add> <add>import ( <add> "io" <add> "os" <add> "testing" <add> <add> "gotest.tools/v3/assert" <add>) <add> <add>func TestHandleDecoderErr(t *testing.T) { <add> f, err := os.CreateTemp("", t.Name()) <add> assert.NilError(t, err) <add> defer os.Remove(f.Name()) <add> <add> _, err = f.Write([]byte("hello")) <add> assert.NilError(t, err) <add> <add> pos, err := f.Seek(0, io.SeekCurrent) <add> assert.NilError(t, err) <add> assert.Assert(t, pos != 0) <add> <add> dec := &testDecoder{} <add> <add> // Simulate "turncate" case, where the file was bigger before. <add> fl := &follow{file: f, dec: dec, oldSize: 100} <add> err = fl.handleDecodeErr(io.EOF) <add> assert.NilError(t, err) <add> <add> // handleDecodeErr seeks to zero. <add> pos, err = f.Seek(0, io.SeekCurrent) <add> assert.NilError(t, err) <add> assert.Equal(t, int64(0), pos) <add> <add> // Reset is called. <add> assert.Equal(t, 1, dec.resetCount) <add>} <ide><path>daemon/logger/loggerutils/logfile_test.go <ide> import ( <ide> "bufio" <ide> "bytes" <ide> "context" <del> "errors" <ide> "fmt" <ide> "io" <ide> "os" <ide> "path/filepath" <ide> "strings" <del> "sync" <ide> "sync/atomic" <ide> "testing" <ide> "text/tabwriter" <ide> import ( <ide> ) <ide> <ide> type testDecoder struct { <del> rdr io.Reader <del> scanner *bufio.Scanner <add> rdr io.Reader <add> scanner *bufio.Scanner <add> resetCount int <ide> } <ide> <ide> func (d *testDecoder) Decode() (*logger.Message, error) { <ide> func (d *testDecoder) Decode() (*logger.Message, error) { <ide> func (d *testDecoder) Reset(rdr io.Reader) { <ide> d.rdr = rdr <ide> d.scanner = bufio.NewScanner(rdr) <add> d.resetCount++ <ide> } <ide> <ide> func (d *testDecoder) Close() { <ide> func TestFollowLogsProducerGone(t *testing.T) { <ide> } <ide> } <ide> <del>type lineDecoder struct { <del> r *bufio.Reader <del> resetCount int <del>} <del> <del>func (d *lineDecoder) Decode() (*logger.Message, error) { <del> line, err := d.r.ReadString('\n') <del> if err != nil { <del> return nil, err <del> } <del> m := logger.NewMessage() <del> m.Line = []byte(line) <del> return m, nil <del>} <del> <del>func (d *lineDecoder) Reset(r io.Reader) { <del> d.r = bufio.NewReader(r) <del> d.resetCount++ <del>} <del> <del>func (d *lineDecoder) Close() { <del>} <del> <del>func TestFollowLogsHandleDecodeErr(t *testing.T) { <del> lw := logger.NewLogWatcher() <del> defer lw.ConsumerGone() <del> <del> fw, err := os.CreateTemp("", t.Name()) <del> assert.NilError(t, err) <del> defer os.Remove(fw.Name()) <del> <del> fr, err := os.Open(fw.Name()) <del> assert.NilError(t, err) <del> <del> dec := &lineDecoder{} <del> dec.Reset(fr) <del> <del> var since, until time.Time <del> rotate := make(chan interface{}) <del> evict := make(chan interface{}) <del> <del> var wg sync.WaitGroup <del> wg.Add(1) <del> go func() { <del> defer wg.Done() <del> followLogs(fr, lw, rotate, evict, dec, since, until) <del> }() <del> <del> sendReceive := func(f io.Writer, message string) { <del> _, err = f.Write([]byte(message)) <del> assert.NilError(t, err) <del> m := <-lw.Msg <del> assert.Equal(t, message, string(m.Line)) <del> } <del> <del> sendReceive(fw, "log1\n") <del> sendReceive(fw, "log2\n") <del> <del> ft, err := os.OpenFile(fw.Name(), os.O_WRONLY|os.O_TRUNC, 0600) <del> assert.NilError(t, err) <del> <del> sendReceive(ft, "log3\n") <del> <del> evict <- errors.New("stop followLogs") <del> wg.Wait() <del> <del> // followLogs calls Reset() in the beginning, <del> // each 3 writes result Reset(), then handleDecodeErr() calles Reset(). <del> assert.Equal(t, 5, dec.resetCount) <del>} <del> <ide> func TestCheckCapacityAndRotate(t *testing.T) { <ide> dir, err := os.MkdirTemp("", t.Name()) <ide> assert.NilError(t, err)
2
Javascript
Javascript
reduce require calls in http2/core
557f5702eccd48da757df9adc5385d664717a173
<ide><path>lib/internal/http2/core.js <ide> const { onServerStream, <ide> Http2ServerResponse, <ide> } = require('internal/http2/compat'); <ide> const { utcDate } = require('internal/http'); <del>const { promisify } = require('internal/util'); <add>const { <add> promisify, <add> customInspectSymbol: kInspect <add>} = require('internal/util'); <ide> const { isArrayBufferView } = require('internal/util/types'); <ide> const { defaultTriggerAsyncIdScope } = require('internal/async_hooks'); <del>const { _connectionListener: httpConnectionListener } = require('http'); <add>const { _connectionListener: httpConnectionListener } = http; <ide> const { createPromise, promiseResolve } = process.binding('util'); <ide> const debug = util.debuglog('http2'); <ide> <ide> const { constants, nameForErrorCode } = binding; <ide> const NETServer = net.Server; <ide> const TLSServer = tls.Server; <ide> <del>const kInspect = require('internal/util').customInspectSymbol; <ide> const { kIncomingMessage } = require('_http_common'); <ide> const { kServerResponse } = require('_http_server'); <ide>
1
Go
Go
make volumes-from a slice instead of string split
b4f2821e6d4ba6f6073365a244681df21f5d4472
<ide><path>runconfig/hostconfig.go <ide> type HostConfig struct { <ide> PublishAllPorts bool <ide> Dns []string <ide> DnsSearch []string <del> VolumesFrom string <add> VolumesFrom []string <ide> } <ide> <ide> func ContainerHostConfigFromJob(job *engine.Job) *HostConfig { <ide> hostConfig := &HostConfig{ <ide> ContainerIDFile: job.Getenv("ContainerIDFile"), <ide> Privileged: job.GetenvBool("Privileged"), <ide> PublishAllPorts: job.GetenvBool("PublishAllPorts"), <del> VolumesFrom: job.Getenv("VolumesFrom"), <ide> } <ide> job.GetenvJson("LxcConf", &hostConfig.LxcConf) <ide> job.GetenvJson("PortBindings", &hostConfig.PortBindings) <ide> func ContainerHostConfigFromJob(job *engine.Job) *HostConfig { <ide> if DnsSearch := job.GetenvList("DnsSearch"); DnsSearch != nil { <ide> hostConfig.DnsSearch = DnsSearch <ide> } <add> if VolumesFrom := job.GetenvList("VolumesFrom"); VolumesFrom != nil { <add> hostConfig.VolumesFrom = VolumesFrom <add> } <ide> return hostConfig <ide> } <ide><path>runconfig/parse.go <ide> func parseRun(cmd *flag.FlagSet, args []string, sysInfo *sysinfo.SysInfo) (*Conf <ide> PublishAllPorts: *flPublishAll, <ide> Dns: flDns.GetAll(), <ide> DnsSearch: flDnsSearch.GetAll(), <del> VolumesFrom: strings.Join(flVolumesFrom.GetAll(), ","), <add> VolumesFrom: flVolumesFrom.GetAll(), <ide> } <ide> <ide> if sysInfo != nil && flMemory > 0 && !sysInfo.SwapLimit { <ide><path>runtime/volumes.go <ide> func setupMountsForContainer(container *Container, envPath string) error { <ide> <ide> func applyVolumesFrom(container *Container) error { <ide> volumesFrom := container.hostConfig.VolumesFrom <del> if volumesFrom != "" { <del> for _, containerSpec := range strings.Split(volumesFrom, ",") { <add> if len(volumesFrom) > 0 { <add> for _, containerSpec := range volumesFrom { <ide> var ( <ide> mountRW = true <ide> specParts = strings.SplitN(containerSpec, ":", 2) <ide> ) <ide> <ide> switch len(specParts) { <ide> case 0: <del> return fmt.Errorf("Malformed volumes-from specification: %s", volumesFrom) <add> return fmt.Errorf("Malformed volumes-from specification: %s", containerSpec) <ide> case 2: <ide> switch specParts[1] { <ide> case "ro":
3
Javascript
Javascript
support concurrent rendering
c774f9e97e37c5dec41044256a8fa33ed49a3501
<ide><path>Libraries/Animated/createAnimatedComponent_EXPERIMENTAL.js <ide> * @format <ide> */ <ide> <add>import useAnimatedProps from './useAnimatedProps'; <add>import useMergeRefs from '../Utilities/useMergeRefs'; <ide> import * as React from 'react'; <ide> <ide> /** <ide> import * as React from 'react'; <ide> export default function createAnimatedComponent<TProps: {...}, TInstance>( <ide> Component: React.AbstractComponent<TProps, TInstance>, <ide> ): React.AbstractComponent<TProps, TInstance> { <del> return React.forwardRef((props, ref) => { <del> throw new Error('createAnimatedComponent: Not yet implemented.'); <add> return React.forwardRef((props, forwardedRef) => { <add> const [reducedProps, callbackRef] = useAnimatedProps<TProps, TInstance>( <add> props, <add> ); <add> const ref = useMergeRefs<TInstance | null>(callbackRef, forwardedRef); <add> <add> return <Component {...reducedProps} ref={ref} />; <ide> }); <ide> } <ide><path>Libraries/Animated/useAnimatedProps.js <add>/** <add> * Copyright (c) Facebook, Inc. and its affiliates. <add> * <add> * This source code is licensed under the MIT license found in the <add> * LICENSE file in the root directory of this source tree. <add> * <add> * @flow strict-local <add> * @format <add> */ <add> <add>'use strict'; <add> <add>import AnimatedProps from './nodes/AnimatedProps'; <add>import {AnimatedEvent} from './AnimatedEvent'; <add>import useRefEffect from '../Utilities/useRefEffect'; <add>import {useCallback, useLayoutEffect, useMemo, useReducer, useRef} from 'react'; <add> <add>type ReducedProps<TProps> = { <add> ...TProps, <add> collapsable: boolean, <add> ... <add>}; <add>type CallbackRef<T> = T => mixed; <add> <add>export default function useAnimatedProps<TProps: {...}, TInstance>( <add> props: TProps, <add>): [ReducedProps<TProps>, CallbackRef<TInstance | null>] { <add> const [, scheduleUpdate] = useReducer(count => count + 1, 0); <add> const onUpdateRef = useRef<?() => void>(null); <add> <add> // TODO: Only invalidate `node` if animated props or `style` change. In the <add> // previous implementation, we permitted `style` to override props with the <add> // same name property name as styles, so we can probably continue doing that. <add> // The ordering of other props *should* not matter. <add> const node = useMemo( <add> () => new AnimatedProps(props, () => onUpdateRef.current?.()), <add> [props], <add> ); <add> useAnimatedPropsLifecycle(node); <add> <add> // TODO: This "effect" does three things: <add> // <add> // 1) Call `setNativeView`. <add> // 2) Update `onUpdateRef`. <add> // 3) Update listeners for `AnimatedEvent` props. <add> // <add> // Ideally, each of these would be separat "effects" so that they are not <add> // unnecessarily re-run when irrelevant dependencies change. For example, we <add> // should be able to hoist all `AnimatedEvent` props and only do #3 if either <add> // the `AnimatedEvent` props change or `instance` changes. <add> // <add> // But there is no way to transparently compose three separate callback refs, <add> // so we just combine them all into one for now. <add> const refEffect = useCallback( <add> instance => { <add> // NOTE: This may be called more often than necessary (e.g. when `props` <add> // changes), but `setNativeView` already optimizes for that. <add> node.setNativeView(instance); <add> <add> // NOTE: This callback is only used by the JavaScript animation driver. <add> onUpdateRef.current = () => { <add> if ( <add> process.env.NODE_ENV === 'test' || <add> typeof instance !== 'object' || <add> typeof instance?.setNativeProps !== 'function' || <add> isFabricInstance(instance) <add> ) { <add> // Schedule an update for this component to update `reducedProps`, <add> // but do not compute it immediately. If a parent also updated, we <add> // need to merge those new props in before updating. <add> scheduleUpdate(); <add> } else if (!node.__isNative) { <add> // $FlowIgnore[not-a-function] - Assume it's still a function. <add> instance.setNativeProps(node.__getAnimatedValue()); <add> } else { <add> throw new Error( <add> 'Attempting to run JS driven animation on animated node ' + <add> 'that has been moved to "native" earlier by starting an ' + <add> 'animation with `useNativeDriver: true`', <add> ); <add> } <add> }; <add> <add> const target = getEventTarget(instance); <add> const events = []; <add> <add> for (const propName in props) { <add> const propValue = props[propName]; <add> if (propValue instanceof AnimatedEvent && propValue.__isNative) { <add> propValue.__attach(target, propName); <add> events.push([propName, propValue]); <add> } <add> } <add> <add> return () => { <add> onUpdateRef.current = null; <add> <add> for (const [propName, propValue] of events) { <add> propValue.__detach(target, propName); <add> } <add> }; <add> }, <add> [props, node], <add> ); <add> const callbackRef = useRefEffect<TInstance>(refEffect); <add> <add> return [reduceAnimatedProps<TProps>(node), callbackRef]; <add>} <add> <add>function reduceAnimatedProps<TProps>( <add> node: AnimatedProps, <add>): ReducedProps<TProps> { <add> // Force `collapsable` to be false so that the native view is not flattened. <add> // Flattened views cannot be accurately referenced by the native driver. <add> return { <add> ...node.__getValue(), <add> collapsable: false, <add> }; <add>} <add> <add>/** <add> * Manages the lifecycle of the supplied `AnimatedProps` by invoking `__attach` <add> * and `__detach`. However, this is more complicated because `AnimatedProps` <add> * uses reference counting to determine when to recursively detach its children <add> * nodes. So in order to optimize this, we avoid detaching until the next attach <add> * unless we are unmounting. <add> */ <add>function useAnimatedPropsLifecycle(node: AnimatedProps): void { <add> const prevNodeRef = useRef<?AnimatedProps>(null); <add> const isUnmountingRef = useRef<boolean>(false); <add> <add> useLayoutEffect(() => { <add> isUnmountingRef.current = false; <add> return () => { <add> isUnmountingRef.current = true; <add> }; <add> }, []); <add> <add> useLayoutEffect(() => { <add> node.__attach(); <add> if (prevNodeRef.current != null) { <add> const prevNode = prevNodeRef.current; <add> // TODO: Stop restoring default values (unless `reset` is called). <add> prevNode.__restoreDefaultValues(); <add> prevNode.__detach(); <add> prevNodeRef.current = null; <add> } <add> return () => { <add> if (isUnmountingRef.current) { <add> // NOTE: Do not restore default values on unmount, see D18197735. <add> node.__detach(); <add> } else { <add> prevNodeRef.current = node; <add> } <add> }; <add> }, [node]); <add>} <add> <add>function getEventTarget<TInstance>(instance: TInstance): TInstance { <add> return typeof instance === 'object' && <add> typeof instance?.getScrollableNode === 'function' <add> ? // $FlowFixMe[incompatible-use] - Legacy instance assumptions. <add> instance.getScrollableNode() <add> : instance; <add>} <add> <add>// $FlowFixMe[unclear-type] - Legacy instance assumptions. <add>function isFabricInstance(instance: any): boolean { <add> return ( <add> hasFabricHandle(instance) || <add> // Some components have a setNativeProps function but aren't a host component <add> // such as lists like FlatList and SectionList. These should also use <add> // forceUpdate in Fabric since setNativeProps doesn't exist on the underlying <add> // host component. This crazy hack is essentially special casing those lists and <add> // ScrollView itself to use forceUpdate in Fabric. <add> // If these components end up using forwardRef then these hacks can go away <add> // as instance would actually be the underlying host component and the above check <add> // would be sufficient. <add> hasFabricHandle(instance?.getNativeScrollRef?.()) || <add> hasFabricHandle(instance?.getScrollResponder?.()?.getNativeScrollRef?.()) <add> ); <add>} <add> <add>// $FlowFixMe[unclear-type] - Legacy instance assumptions. <add>function hasFabricHandle(instance: any): boolean { <add> // eslint-disable-next-line dot-notation <add> return instance?.['_internalInstanceHandle']?.stateNode?.canonical != null; <add>}
2
Python
Python
move import at the top of the file
ea865e4955e94326bbe9a12d2da653bd0f2c0993
<ide><path>kubernetes_tests/test_kubernetes_pod_operator_backcompat.py <ide> from airflow.models import DAG, TaskInstance <ide> from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator <ide> from airflow.utils import timezone <add>from airflow.utils.state import State <ide> from airflow.version import version as airflow_version <ide> <ide> <ide> def create_context(self, task): <ide> @mock.patch("airflow.kubernetes.pod_launcher.PodLauncher.monitor_pod") <ide> @mock.patch("airflow.kubernetes.kube_client.get_kube_client") <ide> def test_image_pull_secrets_correctly_set(self, mock_client, monitor_mock, start_mock): <del> from airflow.utils.state import State <del> <ide> fake_pull_secrets = "fakeSecret" <ide> k = KubernetesPodOperator( <ide> namespace='default', <ide> def test_xcom_push(self): <ide> @mock.patch("airflow.kubernetes.kube_client.get_kube_client") <ide> def test_envs_from_configmaps(self, mock_client, mock_monitor, mock_start): <ide> # GIVEN <del> from airflow.utils.state import State <del> <ide> configmap = 'test-configmap' <ide> # WHEN <ide> k = KubernetesPodOperator( <ide> def test_envs_from_configmaps(self, mock_client, mock_monitor, mock_start): <ide> @mock.patch("airflow.kubernetes.kube_client.get_kube_client") <ide> def test_envs_from_secrets(self, mock_client, monitor_mock, start_mock): <ide> # GIVEN <del> from airflow.utils.state import State <del> <ide> secret_ref = 'secret_name' <ide> secrets = [Secret('env', None, secret_ref)] <ide> # WHEN <ide> def test_pod_priority_class_name( <ide> self, mock_client, monitor_mock, start_mock <ide> ): # pylint: disable=unused-argument <ide> """Test ability to assign priorityClassName to pod""" <del> from airflow.utils.state import State <del> <ide> priority_class_name = "medium-test" <ide> k = KubernetesPodOperator( <ide> namespace='default',
1
Python
Python
remove some weird syntax for kwargs
cf377c766d6ac8d72116bc71ad5c9d2f29996e03
<ide><path>numpy/lib/function_base.py <ide> def insert(arr, obj, values, axis=None): <ide> <ide> if isinstance(obj, slice): <ide> # turn it into a range object <del> indices = arange(*obj.indices(N), **{'dtype': intp}) <add> indices = arange(*obj.indices(N), dtype=intp) <ide> else: <ide> # need to copy obj, because indices will be changed in-place <ide> indices = np.array(obj)
1
Python
Python
add dtypes to the _array_api namespace
9934cf3abcd6ba9438c340042e94f8343e3f3d13
<ide><path>numpy/_array_api/__init__.py <ide> <ide> __all__ += ['arange', 'empty', 'empty_like', 'eye', 'full', 'full_like', 'linspace', 'ones', 'ones_like', 'zeros', 'zeros_like'] <ide> <add>from .dtypes import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64, bool <add> <add>__all__ += ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool'] <add> <ide> from .elementwise_functions import abs, acos, acosh, add, asin, asinh, atan, atan2, atanh, bitwise_and, bitwise_left_shift, bitwise_invert, bitwise_or, bitwise_right_shift, bitwise_xor, ceil, cos, cosh, divide, equal, exp, expm1, floor, floor_divide, greater, greater_equal, isfinite, isinf, isnan, less, less_equal, log, log1p, log2, log10, logical_and, logical_not, logical_or, logical_xor, multiply, negative, not_equal, positive, pow, remainder, round, sign, sin, sinh, square, sqrt, subtract, tan, tanh, trunc <ide> <ide> __all__ += ['abs', 'acos', 'acosh', 'add', 'asin', 'asinh', 'atan', 'atan2', 'atanh', 'bitwise_and', 'bitwise_left_shift', 'bitwise_invert', 'bitwise_or', 'bitwise_right_shift', 'bitwise_xor', 'ceil', 'cos', 'cosh', 'divide', 'equal', 'exp', 'expm1', 'floor', 'floor_divide', 'greater', 'greater_equal', 'isfinite', 'isinf', 'isnan', 'less', 'less_equal', 'log', 'log1p', 'log2', 'log10', 'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'multiply', 'negative', 'not_equal', 'positive', 'pow', 'remainder', 'round', 'sign', 'sin', 'sinh', 'square', 'sqrt', 'subtract', 'tan', 'tanh', 'trunc'] <ide><path>numpy/_array_api/dtypes.py <add>from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64, bool <add> <add>__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
2
Go
Go
add a builder.commit method
7bccdc0d33d0589792b9b867b13c9e50dcf48358
<ide><path>builder.go <ide> func (builder *Builder) Run(image *Image, cmd ...string) (*Container, error) { <ide> return container, nil <ide> } <ide> <add>func (builder *Builder) Commit(container *Container, repository, tag, comment, author string) (*Image, error) { <add> return builder.runtime.Commit(container.Id, repository, tag, comment, author) <add>} <add> <ide> func (builder *Builder) clearTmp(containers, images map[string]struct{}) { <ide> for c := range containers { <ide> tmp := builder.runtime.Get(c) <ide> func (builder *Builder) Build(dockerfile io.Reader, stdout io.Writer) error { <ide> } <ide> <ide> // Commit the container <del> base, err := builder.runtime.Commit(c.Id, "", "", "", "") <add> base, err := builder.Commit(c, "", "", "", "") <ide> if err != nil { <ide> return err <ide> }
1
Python
Python
fix example tests
c7c1408875909ddb58111a82fd0e8250137d0490
<ide><path>examples/blogpost/tests.py <ide> from django.core.urlresolvers import reverse <ide> from django.utils import simplejson as json <ide> <add>from djangorestframework.compat import RequestFactory <add> <ide> from blogpost import models <ide> import blogpost <ide>
1
Python
Python
add math.isclose to see also
74c111f0c40978ad064daabfe17658a308721a2d
<ide><path>numpy/core/numeric.py <ide> def isclose(a, b, rtol=1.e-5, atol=1.e-8, equal_nan=False): <ide> See Also <ide> -------- <ide> allclose <add> math.isclose <ide> <ide> Notes <ide> -----
1
Javascript
Javascript
move chunk graph building into separate file
a430d5564c7d68baa07865b058621ab7c54d2cbd
<ide><path>lib/Compilation.js <ide> const { <ide> } = require("tapable"); <ide> const util = require("util"); <ide> const { CachedSource } = require("webpack-sources"); <del>const AsyncDependencyToInitialChunkError = require("./AsyncDependencyToInitialChunkError"); <ide> const Chunk = require("./Chunk"); <ide> const ChunkGraph = require("./ChunkGraph"); <ide> const ChunkGroup = require("./ChunkGroup"); <ide> const DependencyTemplates = require("./DependencyTemplates"); <ide> const Entrypoint = require("./Entrypoint"); <ide> const ErrorHelpers = require("./ErrorHelpers"); <ide> const FileSystemInfo = require("./FileSystemInfo"); <del>const { <del> connectChunkGroupAndChunk, <del> connectChunkGroupParentAndChild <del>} = require("./GraphHelpers"); <add>const { connectChunkGroupAndChunk } = require("./GraphHelpers"); <ide> const { makeWebpackError } = require("./HookWebpackError"); <ide> const MainTemplate = require("./MainTemplate"); <ide> const ModuleDependencyError = require("./ModuleDependencyError"); <ide> const RuntimeGlobals = require("./RuntimeGlobals"); <ide> const RuntimeTemplate = require("./RuntimeTemplate"); <ide> const Stats = require("./Stats"); <ide> const WebpackError = require("./WebpackError"); <add>const buildChunkGraph = require("./buildChunkGraph"); <ide> const { Logger, LogType } = require("./logging/Logger"); <ide> const StatsFactory = require("./stats/StatsFactory"); <ide> const StatsPrinter = require("./stats/StatsPrinter"); <ide> const AsyncQueue = require("./util/AsyncQueue"); <del>const Queue = require("./util/Queue"); <ide> const { <ide> compareLocations, <ide> concatComparators, <ide> const byNameOrHash = concatComparators( <ide> ) <ide> ); <ide> <del>/** <del> * @template T <del> * @param {Set<T>} a first set <del> * @param {Set<T>} b second set <del> * @returns {number} cmp <del> */ <del>const bySetSize = (a, b) => { <del> return a.size - b.size; <del>}; <del> <ide> /** <ide> * @template T <ide> * @param {Set<T>} set set to add items to <ide> class Compilation { <ide> } <ide> } <ide> } <del> this.processDependenciesBlocksForChunkGroups( <add> buildChunkGraph( <add> this, <ide> /** @type {Entrypoint[]} */ (this.chunkGroups.slice()) <ide> ); <ide> this.hooks.afterChunks.call(this.chunks); <ide> class Compilation { <ide> return this.hooks.dependencyReference.call(ref, dependency); <ide> } <ide> <del> /** <del> * This method creates the Chunk graph from the Module graph <del> * @private <del> * @param {Entrypoint[]} inputChunkGroups chunk groups which are processed <del> * @returns {void} <del> */ <del> processDependenciesBlocksForChunkGroups(inputChunkGroups) { <del> // Process is splitting into two parts: <del> // Part one traverse the module graph and builds a very basic chunks graph <del> // in chunkDependencies. <del> // Part two traverse every possible way through the basic chunk graph and <del> // tracks the available modules. While traversing it connects chunks with <del> // eachother and Blocks with Chunks. It stops traversing when all modules <del> // for a chunk are already available. So it doesn't connect unneeded chunks. <del> <del> const moduleGraph = this.moduleGraph; <del> <del> /** @typedef {{block: AsyncDependenciesBlock, chunkGroup: ChunkGroup, couldBeFiltered: boolean}} ChunkGroupDep */ <del> <del> /** @type {Map<ChunkGroup, ChunkGroupDep[]>} */ <del> const chunkDependencies = new Map(); <del> /** @type {Set<ChunkGroup>} */ <del> const allCreatedChunkGroups = new Set(); <del> <del> // PREPARE <del> /** @type {Map<DependenciesBlock, { modules: Module[], blocks: AsyncDependenciesBlock[]}>} */ <del> const blockInfoMap = new Map(); <del> <del> /** <del> * @param {Dependency} d dependency to iterate over <del> * @returns {void} <del> */ <del> const iteratorDependency = d => { <del> // We skip Dependencies without Reference <del> const ref = this.getDependencyReference(d); <del> if (!ref) { <del> return; <del> } <del> // We skip Dependencies without Module pointer <del> const refModule = ref.module; <del> if (!refModule) { <del> return; <del> } <del> // We skip weak Dependencies <del> if (ref.weak) { <del> return; <del> } <del> <del> blockInfoModules.add(refModule); <del> }; <del> <del> /** <del> * @param {AsyncDependenciesBlock} b blocks to prepare <del> * @returns {void} <del> */ <del> const iteratorBlockPrepare = b => { <del> blockInfoBlocks.push(b); <del> blockQueue.push(b); <del> }; <del> <del> /** @type {DependenciesBlock} */ <del> let block; <del> /** @type {DependenciesBlock[]} */ <del> let blockQueue; <del> /** @type {Set<Module>} */ <del> let blockInfoModules; <del> /** @type {AsyncDependenciesBlock[]} */ <del> let blockInfoBlocks; <del> <del> for (const module of this.modules) { <del> blockQueue = [module]; <del> while (blockQueue.length > 0) { <del> block = blockQueue.pop(); <del> blockInfoModules = new Set(); <del> blockInfoBlocks = []; <del> <del> if (block.dependencies) { <del> for (const dep of block.dependencies) iteratorDependency(dep); <del> } <del> <del> if (block.blocks) { <del> for (const b of block.blocks) iteratorBlockPrepare(b); <del> } <del> <del> const blockInfo = { <del> modules: Array.from(blockInfoModules), <del> blocks: blockInfoBlocks <del> }; <del> blockInfoMap.set(block, blockInfo); <del> } <del> } <del> <del> const chunkGraph = this.chunkGraph; <del> <del> // PART ONE <del> <del> /** @type {Map<ChunkGroup, { preOrderIndex: number, postOrderIndex: number }>} */ <del> const chunkGroupCounters = new Map(); <del> for (const chunkGroup of inputChunkGroups) { <del> chunkGroupCounters.set(chunkGroup, { <del> preOrderIndex: 0, <del> postOrderIndex: 0 <del> }); <del> } <del> <del> let nextFreeModulePreOrderIndex = 0; <del> let nextFreeModulePostOrderIndex = 0; <del> <del> /** @type {Map<DependenciesBlock, ChunkGroup>} */ <del> const blockChunkGroups = new Map(); <del> <del> /** @type {Set<DependenciesBlock>} */ <del> const blocksWithNestedBlocks = new Set(); <del> <del> const ADD_AND_ENTER_MODULE = 0; <del> const ENTER_MODULE = 1; <del> const PROCESS_BLOCK = 2; <del> const LEAVE_MODULE = 3; <del> <del> /** <del> * @typedef {Object} QueueItem <del> * @property {number} action <del> * @property {DependenciesBlock} block <del> * @property {Module} module <del> * @property {Chunk} chunk <del> * @property {ChunkGroup} chunkGroup <del> */ <del> <del> /** <del> * @param {QueueItem[]} queue the queue array (will be mutated) <del> * @param {ChunkGroup} chunkGroup chunk group <del> * @returns {QueueItem[]} the queue array again <del> */ <del> const reduceChunkGroupToQueueItem = (queue, chunkGroup) => { <del> for (const chunk of chunkGroup.chunks) { <del> for (const module of chunkGraph.getChunkEntryModulesIterable(chunk)) { <del> queue.push({ <del> action: ENTER_MODULE, <del> block: module, <del> module, <del> chunk, <del> chunkGroup <del> }); <del> } <del> } <del> return queue; <del> }; <del> <del> // Start with the provided modules/chunks <del> /** @type {QueueItem[]} */ <del> let queue = inputChunkGroups <del> .reduce(reduceChunkGroupToQueueItem, []) <del> .reverse(); <del> /** @type {QueueItem[]} */ <del> let queueDelayed = []; <del> <del> /** @type {Module} */ <del> let module; <del> /** @type {Chunk} */ <del> let chunk; <del> /** @type {ChunkGroup} */ <del> let chunkGroup; <del> <del> // For each async Block in graph <del> /** <del> * @param {AsyncDependenciesBlock} b iterating over each Async DepBlock <del> * @returns {void} <del> */ <del> const iteratorBlock = b => { <del> // 1. We create a chunk for this Block <del> // but only once (blockChunkGroups map) <del> let c = blockChunkGroups.get(b); <del> if (c === undefined) { <del> c = this.namedChunkGroups.get(b.chunkName); <del> if (c && c.isInitial()) { <del> this.errors.push( <del> new AsyncDependencyToInitialChunkError(b.chunkName, module, b.loc) <del> ); <del> c = chunkGroup; <del> } else { <del> c = this.addChunkInGroup( <del> b.groupOptions || b.chunkName, <del> module, <del> b.loc, <del> b.request <del> ); <del> chunkGroupCounters.set(c, { preOrderIndex: 0, postOrderIndex: 0 }); <del> blockChunkGroups.set(b, c); <del> allCreatedChunkGroups.add(c); <del> } <del> } else { <del> c.addOptions(b.groupOptions); <del> c.addOrigin(module, b.loc, b.request); <del> } <del> <del> // 2. We store the Block+Chunk mapping as dependency for the chunk <del> let deps = chunkDependencies.get(chunkGroup); <del> if (!deps) chunkDependencies.set(chunkGroup, (deps = [])); <del> deps.push({ <del> block: b, <del> chunkGroup: c, <del> couldBeFiltered: true <del> }); <del> <del> // 3. We enqueue the DependenciesBlock for traversal <del> queueDelayed.push({ <del> action: PROCESS_BLOCK, <del> block: b, <del> module: module, <del> chunk: c.chunks[0], <del> chunkGroup: c <del> }); <del> }; <del> <del> // Iterative traversal of the Module graph <del> // Recursive would be simpler to write but could result in Stack Overflows <del> while (queue.length) { <del> while (queue.length) { <del> const queueItem = queue.pop(); <del> module = queueItem.module; <del> block = queueItem.block; <del> chunk = queueItem.chunk; <del> chunkGroup = queueItem.chunkGroup; <del> <del> switch (queueItem.action) { <del> case ADD_AND_ENTER_MODULE: { <del> // We connect Module and Chunk when not already done <del> if (!chunkGraph.connectChunkAndModule(chunk, module)) { <del> // already connected, skip it <del> break; <del> } <del> } <del> // fallthrough <del> case ENTER_MODULE: { <del> if (chunkGroup !== undefined) { <del> const index = chunkGroup.getModulePreOrderIndex(module); <del> if (index === undefined) { <del> chunkGroup.setModulePreOrderIndex( <del> module, <del> chunkGroupCounters.get(chunkGroup).preOrderIndex++ <del> ); <del> } <del> } <del> <del> if ( <del> moduleGraph.setPreOrderIndexIfUnset( <del> module, <del> nextFreeModulePreOrderIndex <del> ) <del> ) { <del> nextFreeModulePreOrderIndex++; <del> } <del> <del> queue.push({ <del> action: LEAVE_MODULE, <del> block, <del> module, <del> chunk, <del> chunkGroup <del> }); <del> } <del> // fallthrough <del> case PROCESS_BLOCK: { <del> // get prepared block info <del> const blockInfo = blockInfoMap.get(block); <del> <del> // Traverse all referenced modules <del> for (let i = blockInfo.modules.length - 1; i >= 0; i--) { <del> const refModule = blockInfo.modules[i]; <del> if (chunkGraph.isModuleInChunk(refModule, chunk)) { <del> // skip early if already connected <del> continue; <del> } <del> // enqueue the add and enter to enter in the correct order <del> // this is relevant with circular dependencies <del> queue.push({ <del> action: ADD_AND_ENTER_MODULE, <del> block: refModule, <del> module: refModule, <del> chunk, <del> chunkGroup <del> }); <del> } <del> <del> // Traverse all Blocks <del> for (const block of blockInfo.blocks) iteratorBlock(block); <del> <del> if (blockInfo.blocks.length > 0 && module !== block) { <del> blocksWithNestedBlocks.add(block); <del> } <del> break; <del> } <del> case LEAVE_MODULE: { <del> if (chunkGroup !== undefined) { <del> const index = chunkGroup.getModulePostOrderIndex(module); <del> if (index === undefined) { <del> chunkGroup.setModulePostOrderIndex( <del> module, <del> chunkGroupCounters.get(chunkGroup).postOrderIndex++ <del> ); <del> } <del> } <del> <del> if ( <del> moduleGraph.setPostOrderIndexIfUnset( <del> module, <del> nextFreeModulePostOrderIndex <del> ) <del> ) { <del> nextFreeModulePostOrderIndex++; <del> } <del> break; <del> } <del> } <del> } <del> const tempQueue = queue; <del> queue = queueDelayed.reverse(); <del> queueDelayed = tempQueue; <del> } <del> <del> // PART TWO <del> /** @type {Set<Module>} */ <del> let newAvailableModules; <del> <del> /** <del> * @typedef {Object} ChunkGroupInfo <del> * @property {Set<Module>} minAvailableModules current minimal set of modules available at this point <del> * @property {Set<Module>[]} availableModulesToBeMerged enqueued updates to the minimal set of available modules <del> */ <del> <del> /** @type {Map<ChunkGroup, ChunkGroupInfo>} */ <del> const chunkGroupInfoMap = new Map(); <del> <del> /** @type {Queue<ChunkGroup>} */ <del> const queue2 = new Queue(inputChunkGroups); <del> <del> for (const chunkGroup of inputChunkGroups) { <del> chunkGroupInfoMap.set(chunkGroup, { <del> minAvailableModules: undefined, <del> availableModulesToBeMerged: [new Set()] <del> }); <del> } <del> <del> /** <del> * Helper function to check if all modules of a chunk are available <del> * <del> * @param {ChunkGroup} chunkGroup the chunkGroup to scan <del> * @param {Set<Module>} availableModules the comparitor set <del> * @returns {boolean} return true if all modules of a chunk are available <del> */ <del> const areModulesAvailable = (chunkGroup, availableModules) => { <del> for (const chunk of chunkGroup.chunks) { <del> for (const module of chunkGraph.getChunkModulesIterable(chunk)) { <del> if (!availableModules.has(module)) return false; <del> } <del> } <del> return true; <del> }; <del> <del> // For each edge in the basic chunk graph <del> /** <del> * @param {ChunkGroupDep} dep the dependency used for filtering <del> * @returns {boolean} used to filter "edges" (aka Dependencies) that were pointing <del> * to modules that are already available. Also filters circular dependencies in the chunks graph <del> */ <del> const filterFn = dep => { <del> const depChunkGroup = dep.chunkGroup; <del> if (!dep.couldBeFiltered) return true; <del> if (blocksWithNestedBlocks.has(dep.block)) return true; <del> if (areModulesAvailable(depChunkGroup, newAvailableModules)) { <del> return false; // break all modules are already available <del> } <del> dep.couldBeFiltered = false; <del> return true; <del> }; <del> <del> // Iterative traversing of the basic chunk graph <del> while (queue2.length) { <del> chunkGroup = queue2.dequeue(); <del> const info = chunkGroupInfoMap.get(chunkGroup); <del> const availableModulesToBeMerged = info.availableModulesToBeMerged; <del> let minAvailableModules = info.minAvailableModules; <del> <del> // 1. Get minimal available modules <del> // It doesn't make sense to traverse a chunk again with more available modules. <del> // This step calculates the minimal available modules and skips traversal when <del> // the list didn't shrink. <del> availableModulesToBeMerged.sort(bySetSize); <del> let changed = false; <del> for (const availableModules of availableModulesToBeMerged) { <del> if (minAvailableModules === undefined) { <del> minAvailableModules = new Set(availableModules); <del> info.minAvailableModules = minAvailableModules; <del> changed = true; <del> } else { <del> for (const m of minAvailableModules) { <del> if (!availableModules.has(m)) { <del> minAvailableModules.delete(m); <del> changed = true; <del> } <del> } <del> } <del> } <del> availableModulesToBeMerged.length = 0; <del> if (!changed) continue; <del> <del> // 2. Get the edges at this point of the graph <del> const deps = chunkDependencies.get(chunkGroup); <del> if (!deps) continue; <del> if (deps.length === 0) continue; <del> <del> // 3. Create a new Set of available modules at this points <del> newAvailableModules = new Set(minAvailableModules); <del> for (const chunk of chunkGroup.chunks) { <del> for (const m of chunkGraph.getChunkModulesIterable(chunk)) { <del> newAvailableModules.add(m); <del> } <del> } <del> <del> // 4. Foreach remaining edge <del> const nextChunkGroups = new Set(); <del> for (let i = 0; i < deps.length; i++) { <del> const dep = deps[i]; <del> <del> // Filter inline, rather than creating a new array from `.filter()` <del> if (!filterFn(dep)) { <del> continue; <del> } <del> const depChunkGroup = dep.chunkGroup; <del> const depBlock = dep.block; <del> <del> // 5. Connect block with chunk <del> chunkGraph.connectBlockAndChunkGroup(depBlock, depChunkGroup); <del> <del> // 6. Connect chunk with parent <del> connectChunkGroupParentAndChild(chunkGroup, depChunkGroup); <del> <del> nextChunkGroups.add(depChunkGroup); <del> } <del> <del> // 7. Enqueue further traversal <del> for (const nextChunkGroup of nextChunkGroups) { <del> let nextInfo = chunkGroupInfoMap.get(nextChunkGroup); <del> if (nextInfo === undefined) { <del> nextInfo = { <del> minAvailableModules: undefined, <del> availableModulesToBeMerged: [] <del> }; <del> chunkGroupInfoMap.set(nextChunkGroup, nextInfo); <del> } <del> nextInfo.availableModulesToBeMerged.push(newAvailableModules); <del> <del> // As queue deduplicates enqueued items this makes sure that a ChunkGroup <del> // is not enqueued twice <del> queue2.enqueue(nextChunkGroup); <del> } <del> } <del> <del> // Remove all unconnected chunk groups <del> for (const chunkGroup of allCreatedChunkGroups) { <del> if (chunkGroup.getNumberOfParents() === 0) { <del> for (const chunk of chunkGroup.chunks) { <del> this.chunks.delete(chunk); <del> chunkGraph.disconnectChunk(chunk); <del> } <del> chunkGraph.disconnectChunkGroup(chunkGroup); <del> chunkGroup.remove(); <del> } <del> } <del> } <del> <ide> /** <ide> * <ide> * @param {Module} module module relationship for removal <ide><path>lib/buildChunkGraph.js <add>/* <add> MIT License http://www.opensource.org/licenses/mit-license.php <add> Author Tobias Koppers @sokra <add>*/ <add> <add>"use strict"; <add> <add>const AsyncDependencyToInitialChunkError = require("./AsyncDependencyToInitialChunkError"); <add>const { connectChunkGroupParentAndChild } = require("./GraphHelpers"); <add>const Queue = require("./util/Queue"); <add> <add>/** @typedef {import("./AsyncDependenciesBlock")} AsyncDependenciesBlock */ <add>/** @typedef {import("./Chunk")} Chunk */ <add>/** @typedef {import("./ChunkGroup")} ChunkGroup */ <add>/** @typedef {import("./Compilation")} Compilation */ <add>/** @typedef {import("./DependenciesBlock")} DependenciesBlock */ <add>/** @typedef {import("./Dependency")} Dependency */ <add>/** @typedef {import("./Entrypoint")} Entrypoint */ <add>/** @typedef {import("./Module")} Module */ <add>/** @typedef {import("./ModuleGraph")} ModuleGraph */ <add> <add>/** <add> * @template T <add> * @param {Set<T>} a first set <add> * @param {Set<T>} b second set <add> * @returns {number} cmp <add> */ <add>const bySetSize = (a, b) => { <add> return a.size - b.size; <add>}; <add> <add>/** <add> * Extracts simplified info from the modules and their dependencies <add> * @param {Compilation} compilation the compilation <add> * @returns {Map<DependenciesBlock, { modules: Module[], blocks: AsyncDependenciesBlock[]}>} the mapping block to modules and inner blocks <add> */ <add>const extraceBlockInfoMap = compilation => { <add> /** @type {Map<DependenciesBlock, { modules: Module[], blocks: AsyncDependenciesBlock[]}>} */ <add> const blockInfoMap = new Map(); <add> <add> /** <add> * @param {Dependency} d dependency to iterate over <add> * @returns {void} <add> */ <add> const iteratorDependency = d => { <add> // We skip Dependencies without Reference <add> const ref = compilation.getDependencyReference(d); <add> if (!ref) { <add> return; <add> } <add> // We skip Dependencies without Module pointer <add> const refModule = ref.module; <add> if (!refModule) { <add> return; <add> } <add> // We skip weak Dependencies <add> if (ref.weak) { <add> return; <add> } <add> <add> blockInfoModules.add(refModule); <add> }; <add> <add> /** <add> * @param {AsyncDependenciesBlock} b blocks to prepare <add> * @returns {void} <add> */ <add> const iteratorBlockPrepare = b => { <add> blockInfoBlocks.push(b); <add> blockQueue.push(b); <add> }; <add> <add> /** @type {DependenciesBlock} */ <add> let block; <add> /** @type {DependenciesBlock[]} */ <add> let blockQueue; <add> /** @type {Set<Module>} */ <add> let blockInfoModules; <add> /** @type {AsyncDependenciesBlock[]} */ <add> let blockInfoBlocks; <add> <add> for (const module of compilation.modules) { <add> blockQueue = [module]; <add> while (blockQueue.length > 0) { <add> block = blockQueue.pop(); <add> blockInfoModules = new Set(); <add> blockInfoBlocks = []; <add> <add> if (block.dependencies) { <add> for (const dep of block.dependencies) iteratorDependency(dep); <add> } <add> <add> if (block.blocks) { <add> for (const b of block.blocks) iteratorBlockPrepare(b); <add> } <add> <add> const blockInfo = { <add> modules: Array.from(blockInfoModules), <add> blocks: blockInfoBlocks <add> }; <add> blockInfoMap.set(block, blockInfo); <add> } <add> } <add> <add> return blockInfoMap; <add>}; <add> <add>/** <add> * This method creates the Chunk graph from the Module graph <add> * @param {Compilation} compilation the compilation <add> * @param {Entrypoint[]} inputChunkGroups chunk groups which are processed <add> * @returns {void} <add> */ <add>const buildChunkGraph = (compilation, inputChunkGroups) => { <add> // Process is splitting into two parts: <add> // Part one traverse the module graph and builds a very basic chunks graph <add> // in chunkDependencies. <add> // Part two traverse every possible way through the basic chunk graph and <add> // tracks the available modules. While traversing it connects chunks with <add> // eachother and Blocks with Chunks. It stops traversing when all modules <add> // for a chunk are already available. So it doesn't connect unneeded chunks. <add> <add> /** @typedef {{block: AsyncDependenciesBlock, chunkGroup: ChunkGroup, couldBeFiltered: boolean}} ChunkGroupDep */ <add> <add> const { moduleGraph } = compilation; <add> <add> /** @type {Map<ChunkGroup, ChunkGroupDep[]>} */ <add> const chunkDependencies = new Map(); <add> /** @type {Set<ChunkGroup>} */ <add> const allCreatedChunkGroups = new Set(); <add> <add> // PREPARE <add> const blockInfoMap = extraceBlockInfoMap(compilation); <add> <add> const { chunkGraph, namedChunkGroups } = compilation; <add> <add> // PART ONE <add> <add> /** @type {Map<ChunkGroup, { preOrderIndex: number, postOrderIndex: number }>} */ <add> const chunkGroupCounters = new Map(); <add> for (const chunkGroup of inputChunkGroups) { <add> chunkGroupCounters.set(chunkGroup, { <add> preOrderIndex: 0, <add> postOrderIndex: 0 <add> }); <add> } <add> <add> let nextFreeModulePreOrderIndex = 0; <add> let nextFreeModulePostOrderIndex = 0; <add> <add> /** @type {Map<DependenciesBlock, ChunkGroup>} */ <add> const blockChunkGroups = new Map(); <add> <add> /** @type {Set<DependenciesBlock>} */ <add> const blocksWithNestedBlocks = new Set(); <add> <add> const ADD_AND_ENTER_MODULE = 0; <add> const ENTER_MODULE = 1; <add> const PROCESS_BLOCK = 2; <add> const LEAVE_MODULE = 3; <add> <add> /** <add> * @typedef {Object} QueueItem <add> * @property {number} action <add> * @property {DependenciesBlock} block <add> * @property {Module} module <add> * @property {Chunk} chunk <add> * @property {ChunkGroup} chunkGroup <add> */ <add> <add> /** <add> * @param {QueueItem[]} queue the queue array (will be mutated) <add> * @param {ChunkGroup} chunkGroup chunk group <add> * @returns {QueueItem[]} the queue array again <add> */ <add> const reduceChunkGroupToQueueItem = (queue, chunkGroup) => { <add> for (const chunk of chunkGroup.chunks) { <add> for (const module of chunkGraph.getChunkEntryModulesIterable(chunk)) { <add> queue.push({ <add> action: ENTER_MODULE, <add> block: module, <add> module, <add> chunk, <add> chunkGroup <add> }); <add> } <add> } <add> return queue; <add> }; <add> <add> // Start with the provided modules/chunks <add> /** @type {QueueItem[]} */ <add> let queue = inputChunkGroups <add> .reduce(reduceChunkGroupToQueueItem, []) <add> .reverse(); <add> /** @type {QueueItem[]} */ <add> let queueDelayed = []; <add> <add> /** @type {Module} */ <add> let module; <add> /** @type {Chunk} */ <add> let chunk; <add> /** @type {ChunkGroup} */ <add> let chunkGroup; <add> /** @type {DependenciesBlock} */ <add> let block; <add> <add> // For each async Block in graph <add> /** <add> * @param {AsyncDependenciesBlock} b iterating over each Async DepBlock <add> * @returns {void} <add> */ <add> const iteratorBlock = b => { <add> // 1. We create a chunk for this Block <add> // but only once (blockChunkGroups map) <add> let c = blockChunkGroups.get(b); <add> if (c === undefined) { <add> c = namedChunkGroups.get(b.chunkName); <add> if (c && c.isInitial()) { <add> compilation.errors.push( <add> new AsyncDependencyToInitialChunkError(b.chunkName, module, b.loc) <add> ); <add> c = chunkGroup; <add> } else { <add> c = compilation.addChunkInGroup( <add> b.groupOptions || b.chunkName, <add> module, <add> b.loc, <add> b.request <add> ); <add> chunkGroupCounters.set(c, { preOrderIndex: 0, postOrderIndex: 0 }); <add> blockChunkGroups.set(b, c); <add> allCreatedChunkGroups.add(c); <add> } <add> } else { <add> c.addOptions(b.groupOptions); <add> c.addOrigin(module, b.loc, b.request); <add> } <add> <add> // 2. We store the Block+Chunk mapping as dependency for the chunk <add> let deps = chunkDependencies.get(chunkGroup); <add> if (!deps) chunkDependencies.set(chunkGroup, (deps = [])); <add> deps.push({ <add> block: b, <add> chunkGroup: c, <add> couldBeFiltered: true <add> }); <add> <add> // 3. We enqueue the DependenciesBlock for traversal <add> queueDelayed.push({ <add> action: PROCESS_BLOCK, <add> block: b, <add> module: module, <add> chunk: c.chunks[0], <add> chunkGroup: c <add> }); <add> }; <add> <add> // Iterative traversal of the Module graph <add> // Recursive would be simpler to write but could result in Stack Overflows <add> while (queue.length) { <add> while (queue.length) { <add> const queueItem = queue.pop(); <add> module = queueItem.module; <add> block = queueItem.block; <add> chunk = queueItem.chunk; <add> chunkGroup = queueItem.chunkGroup; <add> <add> switch (queueItem.action) { <add> case ADD_AND_ENTER_MODULE: { <add> // We connect Module and Chunk when not already done <add> if (!chunkGraph.connectChunkAndModule(chunk, module)) { <add> // already connected, skip it <add> break; <add> } <add> } <add> // fallthrough <add> case ENTER_MODULE: { <add> if (chunkGroup !== undefined) { <add> const index = chunkGroup.getModulePreOrderIndex(module); <add> if (index === undefined) { <add> chunkGroup.setModulePreOrderIndex( <add> module, <add> chunkGroupCounters.get(chunkGroup).preOrderIndex++ <add> ); <add> } <add> } <add> <add> if ( <add> moduleGraph.setPreOrderIndexIfUnset( <add> module, <add> nextFreeModulePreOrderIndex <add> ) <add> ) { <add> nextFreeModulePreOrderIndex++; <add> } <add> <add> queue.push({ <add> action: LEAVE_MODULE, <add> block, <add> module, <add> chunk, <add> chunkGroup <add> }); <add> } <add> // fallthrough <add> case PROCESS_BLOCK: { <add> // get prepared block info <add> const blockInfo = blockInfoMap.get(block); <add> <add> // Traverse all referenced modules <add> for (let i = blockInfo.modules.length - 1; i >= 0; i--) { <add> const refModule = blockInfo.modules[i]; <add> if (chunkGraph.isModuleInChunk(refModule, chunk)) { <add> // skip early if already connected <add> continue; <add> } <add> // enqueue the add and enter to enter in the correct order <add> // this is relevant with circular dependencies <add> queue.push({ <add> action: ADD_AND_ENTER_MODULE, <add> block: refModule, <add> module: refModule, <add> chunk, <add> chunkGroup <add> }); <add> } <add> <add> // Traverse all Blocks <add> for (const block of blockInfo.blocks) iteratorBlock(block); <add> <add> if (blockInfo.blocks.length > 0 && module !== block) { <add> blocksWithNestedBlocks.add(block); <add> } <add> break; <add> } <add> case LEAVE_MODULE: { <add> if (chunkGroup !== undefined) { <add> const index = chunkGroup.getModulePostOrderIndex(module); <add> if (index === undefined) { <add> chunkGroup.setModulePostOrderIndex( <add> module, <add> chunkGroupCounters.get(chunkGroup).postOrderIndex++ <add> ); <add> } <add> } <add> <add> if ( <add> moduleGraph.setPostOrderIndexIfUnset( <add> module, <add> nextFreeModulePostOrderIndex <add> ) <add> ) { <add> nextFreeModulePostOrderIndex++; <add> } <add> break; <add> } <add> } <add> } <add> const tempQueue = queue; <add> queue = queueDelayed.reverse(); <add> queueDelayed = tempQueue; <add> } <add> <add> // PART TWO <add> /** @type {Set<Module>} */ <add> let newAvailableModules; <add> <add> /** <add> * @typedef {Object} ChunkGroupInfo <add> * @property {Set<Module>} minAvailableModules current minimal set of modules available at this point <add> * @property {Set<Module>[]} availableModulesToBeMerged enqueued updates to the minimal set of available modules <add> */ <add> <add> /** @type {Map<ChunkGroup, ChunkGroupInfo>} */ <add> const chunkGroupInfoMap = new Map(); <add> <add> /** @type {Queue<ChunkGroup>} */ <add> const queue2 = new Queue(inputChunkGroups); <add> <add> for (const chunkGroup of inputChunkGroups) { <add> chunkGroupInfoMap.set(chunkGroup, { <add> minAvailableModules: undefined, <add> availableModulesToBeMerged: [new Set()] <add> }); <add> } <add> <add> /** <add> * Helper function to check if all modules of a chunk are available <add> * <add> * @param {ChunkGroup} chunkGroup the chunkGroup to scan <add> * @param {Set<Module>} availableModules the comparitor set <add> * @returns {boolean} return true if all modules of a chunk are available <add> */ <add> const areModulesAvailable = (chunkGroup, availableModules) => { <add> for (const chunk of chunkGroup.chunks) { <add> for (const module of chunkGraph.getChunkModulesIterable(chunk)) { <add> if (!availableModules.has(module)) return false; <add> } <add> } <add> return true; <add> }; <add> <add> // For each edge in the basic chunk graph <add> /** <add> * @param {ChunkGroupDep} dep the dependency used for filtering <add> * @returns {boolean} used to filter "edges" (aka Dependencies) that were pointing <add> * to modules that are already available. Also filters circular dependencies in the chunks graph <add> */ <add> const filterFn = dep => { <add> const depChunkGroup = dep.chunkGroup; <add> if (!dep.couldBeFiltered) return true; <add> if (blocksWithNestedBlocks.has(dep.block)) return true; <add> if (areModulesAvailable(depChunkGroup, newAvailableModules)) { <add> return false; // break all modules are already available <add> } <add> dep.couldBeFiltered = false; <add> return true; <add> }; <add> <add> // Iterative traversing of the basic chunk graph <add> while (queue2.length) { <add> chunkGroup = queue2.dequeue(); <add> const info = chunkGroupInfoMap.get(chunkGroup); <add> const availableModulesToBeMerged = info.availableModulesToBeMerged; <add> let minAvailableModules = info.minAvailableModules; <add> <add> // 1. Get minimal available modules <add> // It doesn't make sense to traverse a chunk again with more available modules. <add> // This step calculates the minimal available modules and skips traversal when <add> // the list didn't shrink. <add> availableModulesToBeMerged.sort(bySetSize); <add> let changed = false; <add> for (const availableModules of availableModulesToBeMerged) { <add> if (minAvailableModules === undefined) { <add> minAvailableModules = new Set(availableModules); <add> info.minAvailableModules = minAvailableModules; <add> changed = true; <add> } else { <add> for (const m of minAvailableModules) { <add> if (!availableModules.has(m)) { <add> minAvailableModules.delete(m); <add> changed = true; <add> } <add> } <add> } <add> } <add> availableModulesToBeMerged.length = 0; <add> if (!changed) continue; <add> <add> // 2. Get the edges at this point of the graph <add> const deps = chunkDependencies.get(chunkGroup); <add> if (!deps) continue; <add> if (deps.length === 0) continue; <add> <add> // 3. Create a new Set of available modules at this points <add> newAvailableModules = new Set(minAvailableModules); <add> for (const chunk of chunkGroup.chunks) { <add> for (const m of chunkGraph.getChunkModulesIterable(chunk)) { <add> newAvailableModules.add(m); <add> } <add> } <add> <add> // 4. Foreach remaining edge <add> const nextChunkGroups = new Set(); <add> for (let i = 0; i < deps.length; i++) { <add> const dep = deps[i]; <add> <add> // Filter inline, rather than creating a new array from `.filter()` <add> if (!filterFn(dep)) { <add> continue; <add> } <add> const depChunkGroup = dep.chunkGroup; <add> const depBlock = dep.block; <add> <add> // 5. Connect block with chunk <add> chunkGraph.connectBlockAndChunkGroup(depBlock, depChunkGroup); <add> <add> // 6. Connect chunk with parent <add> connectChunkGroupParentAndChild(chunkGroup, depChunkGroup); <add> <add> nextChunkGroups.add(depChunkGroup); <add> } <add> <add> // 7. Enqueue further traversal <add> for (const nextChunkGroup of nextChunkGroups) { <add> let nextInfo = chunkGroupInfoMap.get(nextChunkGroup); <add> if (nextInfo === undefined) { <add> nextInfo = { <add> minAvailableModules: undefined, <add> availableModulesToBeMerged: [] <add> }; <add> chunkGroupInfoMap.set(nextChunkGroup, nextInfo); <add> } <add> nextInfo.availableModulesToBeMerged.push(newAvailableModules); <add> <add> // As queue deduplicates enqueued items this makes sure that a ChunkGroup <add> // is not enqueued twice <add> queue2.enqueue(nextChunkGroup); <add> } <add> } <add> <add> // Remove all unconnected chunk groups <add> for (const chunkGroup of allCreatedChunkGroups) { <add> if (chunkGroup.getNumberOfParents() === 0) { <add> for (const chunk of chunkGroup.chunks) { <add> compilation.chunks.delete(chunk); <add> chunkGraph.disconnectChunk(chunk); <add> } <add> chunkGraph.disconnectChunkGroup(chunkGroup); <add> chunkGroup.remove(); <add> } <add> } <add>}; <add> <add>module.exports = buildChunkGraph;
2
PHP
PHP
fix path in testsuite
ed22c17eba39388c30de4ea79fe38b8533ff4a77
<ide><path>lib/Cake/Test/Case/AllDatabaseTest.php <ide> public static function suite() { <ide> <ide> $path = CORE_TEST_CASES . DS . 'Model' . DS; <ide> $tasks = array( <del> 'DbAcl', <add> 'AclNode', <ide> 'CakeSchema', <ide> 'ConnectionManager', <ide> 'Datasource' . DS . 'DboSource',
1
Python
Python
add indirection file to ncf async process.
c6bef65adab528927a821a1bfff25104dcdb25d1
<ide><path>official/recommendation/data_preprocessing.py <ide> from official.datasets import movielens <ide> from official.recommendation import constants as rconst <ide> from official.recommendation import stat_utils <del> <del> <del>_ASYNC_GEN_PATH = os.path.join(os.path.dirname(__file__), <del> "data_async_generation.py") <add>from official.recommendation import popen_helper <ide> <ide> <ide> class NCFDataset(object): <ide> def instantiate_pipeline(dataset, data_dir, batch_size, eval_batch_size, <ide> # contention with the main training process. <ide> subproc_env["CUDA_VISIBLE_DEVICES"] = "" <ide> <del> python = "python3" if six.PY3 else "python2" <del> <ide> # By limiting the number of workers we guarantee that the worker <ide> # pool underlying the training generation doesn't starve other processes. <ide> num_workers = int(multiprocessing.cpu_count() * 0.75) <ide> <del> subproc_args = [ <del> python, _ASYNC_GEN_PATH, <add> subproc_args = popen_helper.INVOCATION + [ <ide> "--data_dir", data_dir, <ide> "--cache_id", str(ncf_dataset.cache_paths.cache_id), <ide> "--num_neg", str(num_neg), <ide><path>official/recommendation/neumf_model.py <ide> <ide> from six.moves import xrange # pylint: disable=redefined-builtin <ide> import tensorflow as tf <del>from tensorflow.python.keras.utils import tf_utils <ide> <ide> from official.datasets import movielens # pylint: disable=g-bad-import-order <del>from official.utils.accelerator import tpu as tpu_utils <ide> <ide> <ide> def neumf_model_fn(features, labels, mode, params): <ide><path>official/recommendation/popen_helper.py <add># Copyright 2018 The TensorFlow Authors. All Rights Reserved. <add># <add># Licensed under the Apache License, Version 2.0 (the "License"); <add># you may not use this file except in compliance with the License. <add># You may obtain a copy of the License at <add># <add># http://www.apache.org/licenses/LICENSE-2.0 <add># <add># Unless required by applicable law or agreed to in writing, software <add># distributed under the License is distributed on an "AS IS" BASIS, <add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add># See the License for the specific language governing permissions and <add># limitations under the License. <add># ============================================================================== <add>"""Helper file for running the async data generation process in OSS.""" <add> <add>import os <add>import six <add> <add> <add>_PYTHON = "python3" if six.PY3 else "python2" <add> <add>_ASYNC_GEN_PATH = os.path.join(os.path.dirname(__file__), <add> "data_async_generation.py") <add> <add>INVOCATION = [_PYTHON, _ASYNC_GEN_PATH]
3
Ruby
Ruby
add a test case for range type casting
3ba9d32c6cb7e288ae6473298673bf1f2797f846
<ide><path>activerecord/test/cases/base_test.rb <ide> def test_find_by_slug_with_array <ide> assert_equal Topic.find(['1-meowmeow', '2-hello']), Topic.find([1, 2]) <ide> end <ide> <add> def test_find_by_slug_with_range <add> assert_equal Topic.where(id: '1-meowmeow'..'2-hello'), Topic.where(id: 1..2) <add> end <add> <ide> def test_equality_of_new_records <ide> assert_not_equal Topic.new, Topic.new <ide> assert_equal false, Topic.new == Topic.new
1
Javascript
Javascript
use electron version only in fingerprint
c3bf0893b4280eaadd1d20fdd175279989da775c
<ide><path>script/utils/fingerprint.js <ide> var fingerprintPath = path.resolve(__dirname, '..', '..', 'node_modules', '.atom <ide> module.exports = { <ide> fingerprint: function () { <ide> var packageJson = fs.readFileSync(path.resolve(__dirname, '..', '..', 'package.json')) <del> var body = packageJson.toString() + process.platform + process.version <add> <add> //Include the electron minor version in the fingerprint since that changing requires a re-install <add> var electronVersion = JSON.parse(packageJson).electronVersion.replace(/\.\d+$/, '') <add> <add> var body = electronVersion + process.platform + process.version <ide> return crypto.createHash('sha1').update(body).digest('hex') <ide> }, <ide>
1
Mixed
Ruby
accept json with no backslashes/escaping
45635098ac827ee5b243ac01b6267843cd89200a
<ide><path>actioncable/CHANGELOG.md <add>* Allow channel identifiers with no backslahes/escaping to be accepted <add> by the subscription storer. <add> <add> *Jon Moss* <add> <ide> * Safely support autoloading and class unloading, by preventing concurrent <ide> loads, and disconnecting all cables during reload. <ide> <ide><path>actioncable/lib/action_cable/connection/subscriptions.rb <ide> def execute_command(data) <ide> end <ide> <ide> def add(data) <del> id_key = data['identifier'] <del> id_options = ActiveSupport::JSON.decode(id_key).with_indifferent_access <add> id_options = decode_hash(data['identifier']) <add> identifier = normalize_identifier(id_options) <ide> <ide> subscription_klass = connection.server.channel_classes[id_options[:channel]] <ide> <ide> if subscription_klass <del> subscriptions[id_key] ||= subscription_klass.new(connection, id_key, id_options) <add> subscriptions[identifier] ||= subscription_klass.new(connection, identifier, id_options) <ide> else <ide> logger.error "Subscription class not found (#{data.inspect})" <ide> end <ide> end <ide> <ide> def remove(data) <ide> logger.info "Unsubscribing from channel: #{data['identifier']}" <del> remove_subscription subscriptions[data['identifier']] <add> remove_subscription subscriptions[normalize_identifier(data['identifier'])] <ide> end <ide> <ide> def remove_subscription(subscription) <ide> def remove_subscription(subscription) <ide> end <ide> <ide> def perform_action(data) <del> find(data).perform_action ActiveSupport::JSON.decode(data['data']) <add> find(data).perform_action(decode_hash(data['data'])) <ide> end <ide> <ide> def identifiers <ide> def unsubscribe_from_all <ide> private <ide> delegate :logger, to: :connection <ide> <add> def normalize_identifier(identifier) <add> identifier = ActiveSupport::JSON.encode(identifier) if identifier.is_a?(Hash) <add> identifier <add> end <add> <add> # If `data` is a Hash, this means that the original JSON <add> # sent by the client had no backslashes in it, and does <add> # not need to be decoded again. <add> def decode_hash(data) <add> data = ActiveSupport::JSON.decode(data) unless data.is_a?(Hash) <add> data.with_indifferent_access <add> end <add> <ide> def find(data) <del> if subscription = subscriptions[data['identifier']] <add> if subscription = subscriptions[normalize_identifier(data['identifier'])] <ide> subscription <ide> else <ide> raise "Unable to find subscription with identifier: #{data['identifier']}" <ide><path>actioncable/test/connection/subscriptions_test.rb <ide> def speak(data) <ide> end <ide> end <ide> <del> test "unsubscrib from all" do <add> test "unsubscribe from all" do <ide> run_in_eventmachine do <ide> setup_connection <ide> <ide> channel1 = subscribe_to_chat_channel <ide> <del> channel2_id = ActiveSupport::JSON.encode(id: 2, channel: 'ActionCable::Connection::SubscriptionsTest::ChatChannel') <add> channel2_id = ActiveSupport::JSON.encode({ id: 2, channel: 'ActionCable::Connection::SubscriptionsTest::ChatChannel' }) <ide> channel2 = subscribe_to_chat_channel(channel2_id) <ide> <ide> channel1.expects(:unsubscribe_from_channel) <ide><path>actioncable/test/test_helper.rb <ide> require 'mocha/setup' <ide> <ide> require 'rack/mock' <add>require 'active_support/core_ext/hash/indifferent_access' <ide> <ide> # Require all the stubs and models <ide> Dir[File.dirname(__FILE__) + '/stubs/*.rb'].each {|file| require file }
4
Ruby
Ruby
test basic auth with symbols in login and password
2ae757d76bdc4c02e47a43ebc5ecbfc8bb8dee41
<ide><path>actionpack/test/controller/http_basic_authentication_test.rb <ide> class DummyController < ActionController::Base <ide> before_action :authenticate, only: :index <ide> before_action :authenticate_with_request, only: :display <ide> before_action :authenticate_long_credentials, only: :show <add> before_action :auth_with_special_chars, only: :special_creds <ide> <ide> http_basic_authenticate_with :name => "David", :password => "Goliath", :only => :search <ide> <ide> def show <ide> render plain: 'Only for loooooong credentials' <ide> end <ide> <add> def special_creds <add> render plain: 'Only for special credentials' <add> end <add> <ide> def search <ide> render plain: 'All inline' <ide> end <ide> def authenticate_with_request <ide> end <ide> end <ide> <add> def auth_with_special_chars <add> authenticate_or_request_with_http_basic do |username, password| <add> username == 'login!@#$%^&*()_+{}[];"\',./<>?`~ \n\r\t' && password == 'pwd:!@#$%^&*()_+{}[];"\',./<>?`~ \n\r\t' <add> end <add> end <add> <ide> def authenticate_long_credentials <ide> authenticate_or_request_with_http_basic do |username, password| <ide> username == '1234567890123456789012345678901234567890' && password == '1234567890123456789012345678901234567890' <ide> def test_encode_credentials_has_no_newline <ide> assert_equal 'Definitely Maybe', @response.body <ide> end <ide> <add> test "authentication request with valid credential special chars" do <add> @request.env['HTTP_AUTHORIZATION'] = encode_credentials('login!@#$%^&*()_+{}[];"\',./<>?`~ \n\r\t', 'pwd:!@#$%^&*()_+{}[];"\',./<>?`~ \n\r\t') <add> get :special_creds <add> <add> assert_response :success <add> assert_equal 'Only for special credentials', @response.body <add> end <add> <ide> test "authenticate with class method" do <ide> @request.env['HTTP_AUTHORIZATION'] = encode_credentials('David', 'Goliath') <ide> get :search
1
Python
Python
update backend functionality
69a8acc05a40a523aa4e51b2e281b94a2e8bdc43
<ide><path>keras/backend/__init__.py <ide> from __future__ import absolute_import <add>from __future__ import print_function <add>import os <add>import json <ide> from .common import epsilon, floatx, set_epsilon, set_floatx <ide> <ide> _BACKEND = 'theano' <add>_config_path = os.path.expanduser(os.path.join('~', '.keras', 'keras.json')) <add>if os.path.exists(_config_path): <add> _config = json.load(open(_config_path)) <add> _floatx = _config.get('floatx', floatx()) <add> assert _floatx in {'float32', 'float64'} <add> _epsilon = _config.get('epsilon', epsilon()) <add> assert type(_epsilon) == float <add> _backend = _config.get('backend', _BACKEND) <add> assert _backend in {'theano', 'tensorflow'} <add> <add> set_floatx(_floatx) <add> set_epsilon(_epsilon) <add> _BACKEND = _backend <add>else: <add> # save config file, for easy edition <add> _config = {'floatx': floatx(), <add> 'epsilon': epsilon(), <add> 'backend': _BACKEND} <add> json.dump(_config, open(_config_path, 'w')) <ide> <ide> if _BACKEND == 'theano': <add> print('Using Theano backend.') <ide> from .theano_backend import * <ide> elif _BACKEND == 'tensorflow': <add> print('Using TensorFlow backend.') <ide> from .tensorflow_backend import * <ide> else: <ide> raise Exception('Unknown backend: ' + str(backend)) <ide><path>keras/backend/common.py <ide> import numpy as np <ide> <ide> # the type of float to use throughout the session. <del>_FLOATX = 'float64' <add>_FLOATX = 'float32' <ide> _EPSILON = 10e-8 <ide> <ide> <ide><path>keras/backend/tensorflow_backend.py <ide> def temporal_padding(x, padding=1): <ide> return tf.pad(x, pattern) <ide> <ide> <del>def spatial_2d_padding(x, padding=(1, 1)): <add>def spatial_2d_padding(x, padding=(1, 1), dim_ordering='th'): <ide> '''Pad the 2nd and 3rd dimensions of a 4D tensor <ide> with "padding[0]" and "padding[1]" (resp.) zeros left and right. <ide> ''' <del> pattern = [[0, 0], [0, 0], <del> [padding[0], padding[0]], [padding[1], padding[1]]] <add> if dim_ordering == 'th': <add> pattern = [[0, 0], [0, 0], <add> [padding[0], padding[0]], [padding[1], padding[1]]] <add> else: <add> pattern = [[0, 0], <add> [padding[0], padding[0]], [padding[1], padding[1]], <add> [0, 0]] <ide> return tf.pad(x, pattern) <ide> <ide> <ide> def rnn(step_function, inputs, initial_states, <ide> for input in input_list: <ide> output, new_states = step_function(input, states) <ide> if masking: <add> # for now we raise an exception because tf.reduce_any will not work <add> raise Exception("Masking is Theano-only for the time being.") <add> <ide> # if all-zero input timestep, return <ide> # all-zero output and unchanged states <ide> switch = tf.reduce_any(input) <ide><path>keras/backend/theano_backend.py <ide> def temporal_padding(x, padding=1): <ide> return T.set_subtensor(output[:, padding:x.shape[1] + padding, :], x) <ide> <ide> <del>def spatial_2d_padding(x, padding=(1, 1)): <add>def spatial_2d_padding(x, padding=(1, 1), dim_ordering='th'): <ide> '''Pad the 2nd and 3rd dimensions of a 4D tensor <ide> with "padding[0]" and "padding[1]" (resp.) zeros left and right. <ide> ''' <ide> input_shape = x.shape <del> output_shape = (input_shape[0], <del> input_shape[1], <del> input_shape[2] + 2 * padding[0], <del> input_shape[3] + 2 * padding[1]) <del> output = T.zeros(output_shape) <del> indices = (slice(None), <del> slice(None), <del> slice(padding[0], input_shape[2] + padding[0]), <del> slice(padding[1], input_shape[3] + padding[1])) <add> if dim_ordering == 'th': <add> output_shape = (input_shape[0], <add> input_shape[1], <add> input_shape[2] + 2 * padding[0], <add> input_shape[3] + 2 * padding[1]) <add> output = T.zeros(output_shape) <add> indices = (slice(None), <add> slice(None), <add> slice(padding[0], input_shape[2] + padding[0]), <add> slice(padding[1], input_shape[3] + padding[1])) <add> <add> elif dim_ordering == 'tf': <add> output_shape = (input_shape[0], <add> input_shape[1] + 2 * padding[0], <add> input_shape[2] + 2 * padding[1], <add> input_shape[3]) <add> output = T.zeros(output_shape) <add> indices = (slice(None), <add> slice(padding[0], input_shape[1] + padding[0]), <add> slice(padding[1], input_shape[2] + padding[1]), <add> slice(None)) <add> else: <add> raise Exception('Invalid dim_ordering: ' + dim_ordering) <ide> return T.set_subtensor(output[indices], x) <ide> <ide> # VALUE MANIPULATION
4
PHP
PHP
add test to cover uncovered cases in redirectroute
1b10d28dfe16252fe0c29a914d5cf12d5ac5927c
<ide><path>tests/TestCase/Routing/Route/RedirectRouteTest.php <ide> public function setUp() <ide> Router::connect('/:controller/:action/*'); <ide> } <ide> <add> /** <add> * test match <add> * <add> * @return void <add> */ <add> public function testMatch() <add> { <add> $route = new RedirectRoute('/home', ['controller' => 'posts']); <add> $this->assertFalse($route->match(['controller' => 'posts', 'action' => 'index'])); <add> } <add> <add> /** <add> * test parse failure <add> * <add> * @return void <add> */ <add> public function testParseMiss() <add> { <add> $route = new RedirectRoute('/home', ['controller' => 'posts']); <add> $this->assertFalse($route->parse('/nope')); <add> $this->assertFalse($route->parse('/homes')); <add> } <add> <ide> /** <ide> * test the parsing of routes. <ide> * <ide> public function testParseSimple() <ide> $route->parse('/home'); <ide> } <ide> <add> /** <add> * test the parsing of routes. <add> * <add> * @expectedException Cake\Routing\Exception\RedirectException <add> * @expectedExceptionMessage http://localhost/posts <add> * @expectedExceptionCode 301 <add> * @return void <add> */ <add> public function testParseRedirectOption() <add> { <add> $route = new RedirectRoute('/home', ['redirect' => ['controller' => 'posts']]); <add> $route->parse('/home'); <add> } <add> <ide> /** <ide> * test the parsing of routes. <ide> *
1
Text
Text
improve the changelog entry
c4f2f5b9d7902c280fb341560cf23c52c99e4e55
<ide><path>actionpack/CHANGELOG.md <ide> ## Rails 4.0.0 (unreleased) ## <ide> <add>* Fixed `ActionView::Helpers::CaptureHelper#content_for` regression when trying to use it in <add> a boolean statement. <add> Fixes #9360. <add> <add> *Nikolay Shebanov* <add> <ide> * `format: true` does not override existing format constraints. <ide> Fixes #9466. <ide> <ide> * `ActionView::Helpers::TextHelper#highlight` now defaults to the <ide> HTML5 `mark` element. *Brian Cardarella* <ide> <del>* Fixed `ActionView::Helpers::CaptureHelper#content_for` regression (described in #9360). *Nikolay Shebanov* <del> <ide> Please check [3-2-stable](https://github.com/rails/rails/blob/3-2-stable/actionpack/CHANGELOG.md) for previous changes.
1
Ruby
Ruby
remove deprecations from active support
6a6fc4e1db2469bd309c074f607abb60764ba20d
<ide><path>activesupport/lib/active_support.rb <ide> module ActiveSupport <ide> autoload :Gzip <ide> autoload :Inflector <ide> autoload :JSON <del> autoload :Memoizable <ide> autoload :MessageEncryptor <ide> autoload :MessageVerifier <ide> autoload :Multibyte <ide><path>activesupport/lib/active_support/concern.rb <ide> def append_features(base) <ide> @_dependencies.each { |dep| base.send(:include, dep) } <ide> super <ide> base.extend const_get("ClassMethods") if const_defined?("ClassMethods") <del> if const_defined?("InstanceMethods") <del> base.send :include, const_get("InstanceMethods") <del> ActiveSupport::Deprecation.warn "The InstanceMethods module inside ActiveSupport::Concern will be " \ <del> "no longer included automatically. Please define instance methods directly in #{base} instead.", caller <del> end <ide> base.class_eval(&@_included_block) if instance_variable_defined?("@_included_block") <ide> end <ide> end <ide><path>activesupport/lib/active_support/memoizable.rb <del>require 'active_support/core_ext/kernel/singleton_class' <del>require 'active_support/core_ext/module/aliasing' <del>require 'active_support/deprecation' <del> <del>module ActiveSupport <del> module Memoizable <del> def self.extended(base) <del> ActiveSupport::Deprecation.warn "ActiveSupport::Memoizable is deprecated and will be removed in future releases," \ <del> "simply use Ruby memoization pattern instead.", caller <del> super <del> end <del> <del> def self.memoized_ivar_for(symbol) <del> "@_memoized_#{symbol.to_s.sub(/\?\Z/, '_query').sub(/!\Z/, '_bang')}".to_sym <del> end <del> <del> module InstanceMethods <del> def self.included(base) <del> base.class_eval do <del> unless base.method_defined?(:freeze_without_memoizable) <del> alias_method_chain :freeze, :memoizable <del> end <del> end <del> end <del> <del> def freeze_with_memoizable <del> memoize_all unless frozen? <del> freeze_without_memoizable <del> end <del> <del> def memoize_all <del> prime_cache ".*" <del> end <del> <del> def unmemoize_all <del> flush_cache ".*" <del> end <del> <del> def prime_cache(*syms) <del> syms.each do |sym| <del> methods.each do |m| <del> if m.to_s =~ /^_unmemoized_(#{sym})/ <del> if method(m).arity == 0 <del> __send__($1) <del> else <del> ivar = ActiveSupport::Memoizable.memoized_ivar_for($1) <del> instance_variable_set(ivar, {}) <del> end <del> end <del> end <del> end <del> end <del> <del> def flush_cache(*syms) <del> syms.each do |sym| <del> (methods + private_methods + protected_methods).each do |m| <del> if m.to_s =~ /^_unmemoized_(#{sym.to_s.gsub(/\?\Z/, '\?')})/ <del> ivar = ActiveSupport::Memoizable.memoized_ivar_for($1) <del> instance_variable_get(ivar).clear if instance_variable_defined?(ivar) <del> end <del> end <del> end <del> end <del> end <del> <del> def memoize(*symbols) <del> symbols.each do |symbol| <del> original_method = :"_unmemoized_#{symbol}" <del> memoized_ivar = ActiveSupport::Memoizable.memoized_ivar_for(symbol) <del> <del> class_eval <<-EOS, __FILE__, __LINE__ + 1 <del> include InstanceMethods # include InstanceMethods <del> # <del> if method_defined?(:#{original_method}) # if method_defined?(:_unmemoized_mime_type) <del> raise "Already memoized #{symbol}" # raise "Already memoized mime_type" <del> end # end <del> alias #{original_method} #{symbol} # alias _unmemoized_mime_type mime_type <del> # <del> if instance_method(:#{symbol}).arity == 0 # if instance_method(:mime_type).arity == 0 <del> def #{symbol}(reload = false) # def mime_type(reload = false) <del> if reload || !defined?(#{memoized_ivar}) || #{memoized_ivar}.empty? # if reload || !defined?(@_memoized_mime_type) || @_memoized_mime_type.empty? <del> #{memoized_ivar} = [#{original_method}] # @_memoized_mime_type = [_unmemoized_mime_type] <del> end # end <del> #{memoized_ivar}[0] # @_memoized_mime_type[0] <del> end # end <del> else # else <del> def #{symbol}(*args) # def mime_type(*args) <del> #{memoized_ivar} ||= {} unless frozen? # @_memoized_mime_type ||= {} unless frozen? <del> args_length = method(:#{original_method}).arity # args_length = method(:_unmemoized_mime_type).arity <del> if args.length == args_length + 1 && # if args.length == args_length + 1 && <del> (args.last == true || args.last == :reload) # (args.last == true || args.last == :reload) <del> reload = args.pop # reload = args.pop <del> end # end <del> # <del> if defined?(#{memoized_ivar}) && #{memoized_ivar} # if defined?(@_memoized_mime_type) && @_memoized_mime_type <del> if !reload && #{memoized_ivar}.has_key?(args) # if !reload && @_memoized_mime_type.has_key?(args) <del> #{memoized_ivar}[args] # @_memoized_mime_type[args] <del> elsif #{memoized_ivar} # elsif @_memoized_mime_type <del> #{memoized_ivar}[args] = #{original_method}(*args) # @_memoized_mime_type[args] = _unmemoized_mime_type(*args) <del> end # end <del> else # else <del> #{original_method}(*args) # _unmemoized_mime_type(*args) <del> end # end <del> end # end <del> end # end <del> # <del> if private_method_defined?(#{original_method.inspect}) # if private_method_defined?(:_unmemoized_mime_type) <del> private #{symbol.inspect} # private :mime_type <del> elsif protected_method_defined?(#{original_method.inspect}) # elsif protected_method_defined?(:_unmemoized_mime_type) <del> protected #{symbol.inspect} # protected :mime_type <del> end # end <del> EOS <del> end <del> end <del> end <del>end <ide><path>activesupport/lib/active_support/message_encryptor.rb <ide> class InvalidMessage < StandardError; end <ide> OpenSSLCipherError = OpenSSL::Cipher.const_defined?(:CipherError) ? OpenSSL::Cipher::CipherError : OpenSSL::CipherError <ide> <ide> def initialize(secret, options = {}) <del> unless options.is_a?(Hash) <del> ActiveSupport::Deprecation.warn "The second parameter should be an options hash. Use :cipher => 'algorithm' to specify the cipher algorithm." <del> options = { :cipher => options } <del> end <del> <ide> @secret = secret <ide> @cipher = options[:cipher] || 'aes-256-cbc' <ide> @verifier = MessageVerifier.new(@secret, :serializer => NullSerializer) <ide> @serializer = options[:serializer] || Marshal <ide> end <ide> <del> def encrypt(value) <del> ActiveSupport::Deprecation.warn "MessageEncryptor#encrypt is deprecated as it is not safe without a signature. " \ <del> "Please use MessageEncryptor#encrypt_and_sign instead." <del> _encrypt(value) <del> end <del> <del> def decrypt(value) <del> ActiveSupport::Deprecation.warn "MessageEncryptor#decrypt is deprecated as it is not safe without a signature. " \ <del> "Please use MessageEncryptor#decrypt_and_verify instead." <del> _decrypt(value) <del> end <del> <ide> # Encrypt and sign a message. We need to sign the message in order to avoid padding attacks. <ide> # Reference: http://www.limited-entropy.com/padding-oracle-attacks <ide> def encrypt_and_sign(value) <ide><path>activesupport/lib/active_support/message_verifier.rb <ide> class MessageVerifier <ide> class InvalidSignature < StandardError; end <ide> <ide> def initialize(secret, options = {}) <del> unless options.is_a?(Hash) <del> ActiveSupport::Deprecation.warn "The second parameter should be an options hash. Use :digest => 'algorithm' to specify the digest algorithm." <del> options = { :digest => options } <del> end <del> <ide> @secret = secret <ide> @digest = options[:digest] || 'SHA1' <ide> @serializer = options[:serializer] || Marshal <ide><path>activesupport/test/flush_cache_on_private_memoization_test.rb <del>require 'abstract_unit' <del>require 'test/unit' <del> <del>class FlushCacheOnPrivateMemoizationTest < Test::Unit::TestCase <del> ActiveSupport::Deprecation.silence do <del> extend ActiveSupport::Memoizable <del> end <del> <del> def test_public <del> assert_method_unmemoizable :pub <del> end <del> <del> def test_protected <del> assert_method_unmemoizable :prot <del> end <del> <del> def test_private <del> assert_method_unmemoizable :priv <del> end <del> <del> def pub; rand end <del> memoize :pub <del> <del> protected <del> <del> def prot; rand end <del> memoize :prot <del> <del> private <del> <del> def priv; rand end <del> memoize :priv <del> <del> def assert_method_unmemoizable(meth, message=nil) <del> full_message = build_message(message, "<?> not unmemoizable.\n", meth) <del> assert_block(full_message) do <del> a = send meth <del> b = send meth <del> unmemoize_all <del> c = send meth <del> a == b && a != c <del> end <del> end <del> <del>end <ide><path>activesupport/test/memoizable_test.rb <del>require 'abstract_unit' <del> <del>class MemoizableTest < ActiveSupport::TestCase <del> class Person <del> ActiveSupport::Deprecation.silence do <del> extend ActiveSupport::Memoizable <del> end <del> <del> attr_reader :name_calls, :age_calls, :is_developer_calls, :name_query_calls <del> <del> def initialize <del> @name_calls = 0 <del> @age_calls = 0 <del> @is_developer_calls = 0 <del> @name_query_calls = 0 <del> end <del> <del> def name <del> @name_calls += 1 <del> "Josh" <del> end <del> <del> def name? <del> @name_query_calls += 1 <del> true <del> end <del> memoize :name? <del> <del> def update(name) <del> "Joshua" <del> end <del> memoize :update <del> <del> def age <del> @age_calls += 1 <del> nil <del> end <del> <del> memoize :name, :age <del> <del> protected <del> <del> def memoize_protected_test <del> 'protected' <del> end <del> memoize :memoize_protected_test <del> <del> private <del> <del> def is_developer? <del> @is_developer_calls += 1 <del> "Yes" <del> end <del> memoize :is_developer? <del> end <del> <del> class Company <del> attr_reader :name_calls <del> def initialize <del> @name_calls = 0 <del> end <del> <del> def name <del> @name_calls += 1 <del> "37signals" <del> end <del> end <del> <del> module Rates <del> ActiveSupport::Deprecation.silence do <del> extend ActiveSupport::Memoizable <del> end <del> <del> attr_reader :sales_tax_calls <del> def sales_tax(price) <del> @sales_tax_calls ||= 0 <del> @sales_tax_calls += 1 <del> price * 0.1025 <del> end <del> memoize :sales_tax <del> end <del> <del> class Calculator <del> ActiveSupport::Deprecation.silence do <del> extend ActiveSupport::Memoizable <del> end <del> include Rates <del> <del> attr_reader :fib_calls <del> def initialize <del> @fib_calls = 0 <del> end <del> <del> def fib(n) <del> @fib_calls += 1 <del> <del> if n == 0 || n == 1 <del> n <del> else <del> fib(n - 1) + fib(n - 2) <del> end <del> end <del> memoize :fib <del> <del> def add_or_subtract(i, j, add) <del> if add <del> i + j <del> else <del> i - j <del> end <del> end <del> memoize :add_or_subtract <del> <del> def counter <del> @count ||= 0 <del> @count += 1 <del> end <del> memoize :counter <del> end <del> <del> def setup <del> @person = Person.new <del> @calculator = Calculator.new <del> end <del> <del> def test_memoization <del> assert_equal "Josh", @person.name <del> assert_equal 1, @person.name_calls <del> <del> 3.times { assert_equal "Josh", @person.name } <del> assert_equal 1, @person.name_calls <del> end <del> <del> def test_memoization_with_punctuation <del> assert_equal true, @person.name? <del> <del> assert_nothing_raised(NameError) do <del> @person.memoize_all <del> @person.unmemoize_all <del> end <del> end <del> <del> def test_memoization_flush_with_punctuation <del> assert_equal true, @person.name? <del> @person.flush_cache(:name?) <del> 3.times { assert_equal true, @person.name? } <del> assert_equal 2, @person.name_query_calls <del> end <del> <del> def test_memoization_with_nil_value <del> assert_equal nil, @person.age <del> assert_equal 1, @person.age_calls <del> <del> 3.times { assert_equal nil, @person.age } <del> assert_equal 1, @person.age_calls <del> end <del> <del> def test_reloadable <del> assert_equal 1, @calculator.counter <del> assert_equal 2, @calculator.counter(:reload) <del> assert_equal 2, @calculator.counter <del> assert_equal 3, @calculator.counter(true) <del> assert_equal 3, @calculator.counter <del> end <del> <del> def test_flush_cache <del> assert_equal 1, @calculator.counter <del> <del> assert @calculator.instance_variable_get(:@_memoized_counter).any? <del> @calculator.flush_cache(:counter) <del> assert @calculator.instance_variable_get(:@_memoized_counter).empty? <del> <del> assert_equal 2, @calculator.counter <del> end <del> <del> def test_unmemoize_all <del> assert_equal 1, @calculator.counter <del> <del> assert @calculator.instance_variable_get(:@_memoized_counter).any? <del> @calculator.unmemoize_all <del> assert @calculator.instance_variable_get(:@_memoized_counter).empty? <del> <del> assert_equal 2, @calculator.counter <del> end <del> <del> def test_memoize_all <del> @calculator.memoize_all <del> assert @calculator.instance_variable_defined?(:@_memoized_counter) <del> end <del> <del> def test_memoization_cache_is_different_for_each_instance <del> assert_equal 1, @calculator.counter <del> assert_equal 2, @calculator.counter(:reload) <del> assert_equal 1, Calculator.new.counter <del> end <del> <del> def test_memoized_is_not_affected_by_freeze <del> @person.freeze <del> assert_equal "Josh", @person.name <del> assert_equal "Joshua", @person.update("Joshua") <del> end <del> <del> def test_memoization_with_args <del> assert_equal 55, @calculator.fib(10) <del> assert_equal 11, @calculator.fib_calls <del> end <del> <del> def test_reloadable_with_args <del> assert_equal 55, @calculator.fib(10) <del> assert_equal 11, @calculator.fib_calls <del> assert_equal 55, @calculator.fib(10, :reload) <del> assert_equal 12, @calculator.fib_calls <del> assert_equal 55, @calculator.fib(10, true) <del> assert_equal 13, @calculator.fib_calls <del> end <del> <del> def test_memoization_with_boolean_arg <del> assert_equal 4, @calculator.add_or_subtract(2, 2, true) <del> assert_equal 2, @calculator.add_or_subtract(4, 2, false) <del> end <del> <del> def test_object_memoization <del> [Company.new, Company.new, Company.new].each do |company| <del> ActiveSupport::Deprecation.silence do <del> company.extend ActiveSupport::Memoizable <del> end <del> company.memoize :name <del> <del> assert_equal "37signals", company.name <del> assert_equal 1, company.name_calls <del> assert_equal "37signals", company.name <del> assert_equal 1, company.name_calls <del> end <del> end <del> <del> def test_memoized_module_methods <del> assert_equal 1.025, @calculator.sales_tax(10) <del> assert_equal 1, @calculator.sales_tax_calls <del> assert_equal 1.025, @calculator.sales_tax(10) <del> assert_equal 1, @calculator.sales_tax_calls <del> assert_equal 2.5625, @calculator.sales_tax(25) <del> assert_equal 2, @calculator.sales_tax_calls <del> end <del> <del> def test_object_memoized_module_methods <del> company = Company.new <del> company.extend(Rates) <del> <del> assert_equal 1.025, company.sales_tax(10) <del> assert_equal 1, company.sales_tax_calls <del> assert_equal 1.025, company.sales_tax(10) <del> assert_equal 1, company.sales_tax_calls <del> assert_equal 2.5625, company.sales_tax(25) <del> assert_equal 2, company.sales_tax_calls <del> end <del> <del> def test_double_memoization <del> assert_raise(RuntimeError) { Person.memoize :name } <del> person = Person.new <del> ActiveSupport::Deprecation.silence do <del> person.extend ActiveSupport::Memoizable <del> end <del> assert_raise(RuntimeError) { person.memoize :name } <del> <del> company = Company.new <del> ActiveSupport::Deprecation.silence do <del> company.extend ActiveSupport::Memoizable <del> end <del> company.memoize :name <del> assert_raise(RuntimeError) { company.memoize :name } <del> end <del> <del> def test_protected_method_memoization <del> person = Person.new <del> <del> assert_raise(NoMethodError) { person.memoize_protected_test } <del> assert_equal "protected", person.send(:memoize_protected_test) <del> end <del> <del> def test_private_method_memoization <del> person = Person.new <del> <del> assert_raise(NoMethodError) { person.is_developer? } <del> assert_equal "Yes", person.send(:is_developer?) <del> assert_equal 1, person.is_developer_calls <del> assert_equal "Yes", person.send(:is_developer?) <del> assert_equal 1, person.is_developer_calls <del> end <del> <del>end <ide><path>activesupport/test/message_encryptor_test.rb <ide> def test_alternative_serialization_method <ide> assert_equal encryptor.decrypt_and_verify(message), { "foo" => 123, "bar" => "2010-01-01T00:00:00Z" } <ide> end <ide> <del> def test_digest_algorithm_as_second_parameter_deprecation <del> assert_deprecated(/options hash/) do <del> ActiveSupport::MessageEncryptor.new(SecureRandom.hex(64), 'aes-256-cbc') <del> end <del> end <del> <ide> private <ide> <ide> def assert_not_decrypted(value) <ide><path>activesupport/test/message_verifier_test.rb <ide> def test_alternative_serialization_method <ide> assert_equal verifier.verify(message), { "foo" => 123, "bar" => "2010-01-01T00:00:00Z" } <ide> end <ide> <del> def test_digest_algorithm_as_second_parameter_deprecation <del> assert_deprecated(/options hash/) do <del> ActiveSupport::MessageVerifier.new("secret", "SHA1") <del> end <del> end <del> <ide> def assert_not_verified(message) <ide> assert_raise(ActiveSupport::MessageVerifier::InvalidSignature) do <ide> @verifier.verify(message)
9
PHP
PHP
fix query strings handling
1fdc127aeda134b175e115142cdccd4d9d6611a4
<ide><path>src/Illuminate/Routing/UrlGenerator.php <ide> public function to($path, $extra = [], $secure = null) <ide> // for passing the array of parameters to this URL as a list of segments. <ide> $root = $this->getRootUrl($scheme); <ide> <del> return $this->trimUrl($root, $path, $tail); <add> if (($queryStart = strpos($path, '?')) !== false) { <add> $query = mb_substr($path, $queryStart); <add> $path = mb_substr($path, 0, $queryStart); <add> } else { <add> $query = null; <add> } <add> <add> return $this->trimUrl($root, $path, $tail).$query; <ide> } <ide> <ide> /** <ide><path>tests/Routing/RoutingUrlGeneratorTest.php <ide> public function testBasicGeneration() <ide> $this->assertEquals('http://www.foo.com/foo/bar', $url->to('foo/bar')); <ide> $this->assertEquals('https://www.foo.com/foo/bar', $url->to('foo/bar', [], true)); <ide> $this->assertEquals('https://www.foo.com/foo/bar/baz/boom', $url->to('foo/bar', ['baz', 'boom'], true)); <add> $this->assertEquals('https://www.foo.com/foo/bar/baz?foo=bar', $url->to('foo/bar?foo=bar', ['baz'], true)); <ide> <ide> /* <ide> * Test HTTPS request URL generation...
2
Text
Text
add missing article in session ticket section
b98386c977d0082f3218afe5a746202d40b24d9d
<ide><path>doc/api/tls.md <ide> handlers. <ide> <ide> The servers encrypt the entire session state and send it <ide> to the client as a "ticket". When reconnecting, the state is sent to the server <del>in the initial connection. This mechanism avoids the need for server-side <add>in the initial connection. This mechanism avoids the need for a server-side <ide> session cache. If the server doesn't use the ticket, for any reason (failure <ide> to decrypt it, it's too old, etc.), it will create a new session and send a new <ide> ticket. See [RFC 5077][] for more information.
1
PHP
PHP
add integration test for issue in
dd063dfcbd3e5df8224b91febd1d213909f2f37e
<ide><path>tests/TestCase/Routing/DispatcherFactoryTest.php <ide> */ <ide> namespace Cake\Test\TestCase\Routing; <ide> <add>use Cake\Core\Configure; <add>use Cake\Network\Request; <ide> use Cake\Routing\DispatcherFactory; <ide> use Cake\TestSuite\TestCase; <ide> <ide> class DispatcherFactoryTest extends TestCase <ide> public function setUp() <ide> { <ide> parent::setUp(); <add> Configure::write('App.namespace', 'TestApp'); <ide> DispatcherFactory::clear(); <ide> } <ide> <ide> public function testCreate() <ide> $this->assertInstanceOf('Cake\Routing\Dispatcher', $result); <ide> $this->assertCount(1, $result->filters()); <ide> } <add> <add> /** <add> * test create() -> dispatch() -> response flow. <add> * <add> * @return void <add> */ <add> public function testCreateDispatchWithFilters() <add> { <add> $url = new Request([ <add> 'url' => 'posts', <add> 'params' => [ <add> 'controller' => 'Posts', <add> 'action' => 'index', <add> 'pass' => [], <add> 'bare' => true, <add> ] <add> ]); <add> $response = $this->getMockBuilder('Cake\Network\Response') <add> ->setMethods(['send']) <add> ->getMock(); <add> DispatcherFactory::add('ControllerFactory'); <add> DispatcherFactory::add('Append'); <add> <add> $dispatcher = DispatcherFactory::create(); <add> $result = $dispatcher->dispatch($url, $response); <add> $this->assertNull($result); <add> $this->assertEquals('posts index appended content', $response->body()); <add> } <ide> }
1
Ruby
Ruby
require string extension before exceptions
8818e008e360a6b6e94a1b470218efa951874e15
<ide><path>Library/Homebrew/test/testing_env.rb <ide> <ide> $:.push(File.expand_path(__FILE__+'/../..')) <ide> require 'extend/pathname' <add>require 'extend/string' <ide> require 'exceptions' <ide> require 'utils' <del>require 'extend/string' <ide> <ide> # these are defined in global.rb, but we don't want to break our actual <ide> # homebrew tree, and we do want to test everything :)
1
Text
Text
add whitespace check to else-if test
5418307936c3bc23c86adb8149769faba5b2ec51
<ide><path>curriculum/challenges/english/02-javascript-algorithms-and-data-structures/basic-javascript/introducing-else-if-statements.english.md <ide> tests: <ide> - text: You should have at least two <code>if</code> statements <ide> testString: assert(code.match(/if/g).length > 1); <ide> - text: You should have closing and opening curly braces for each <code>if else</code> code block. <del> testString: assert(code.match(/if\s*\((.+)\)\s*\{[\s\S]+\}\s*else if\s*\((.+)\)\s*\{[\s\S]+\}\s*else\s*\{[\s\S]+\s*\}/)); <add> testString: assert(code.match(/if\s*\((.+)\)\s*\{[\s\S]+\}\s*else\s+if\s*\((.+)\)\s*\{[\s\S]+\}\s*else\s*\{[\s\S]+\s*\}/)); <ide> - text: <code>testElseIf(0)</code> should return "Smaller than 5" <ide> testString: assert(testElseIf(0) === "Smaller than 5"); <ide> - text: <code>testElseIf(5)</code> should return "Between 5 and 10"
1
PHP
PHP
correct doc blocks
7132f47fafc7f4c03e2227256c70930d2ea789ff
<ide><path>src/Database/Expression/QueryExpression.php <ide> class QueryExpression implements ExpressionInterface, Countable <ide> * expression objects. Optionally, you can set the conjunction keyword to be used <ide> * for joining each part of this level of the expression tree. <ide> * <del> * @param array $conditions tree-like array structure containing all the conditions <add> * @param string|array|QueryExpression $conditions tree-like array structure containing all the conditions <ide> * to be added or nested inside this expression object. <ide> * @param array|\Cake\Database\TypeMap $types associative array of types to be associated with the values <ide> * passed in $conditions. <ide> public function add($conditions, $types = []) <ide> /** <ide> * Adds a new condition to the expression object in the form "field = value". <ide> * <del> * @param string $field database field to be compared against value <add> * @param string $field Database field to be compared against value <ide> * @param mixed $value The value to be bound to $field for comparison <ide> * @param string $type the type name for $value as configured using the Type map. <ide> * If it is suffixed with "[]" and the value is an array then multiple placeholders <ide> public function eq($field, $value, $type = null) <ide> /** <ide> * Adds a new condition to the expression object in the form "field != value". <ide> * <del> * @param string $field database field to be compared against value <add> * @param string $field Database field to be compared against value <ide> * @param mixed $value The value to be bound to $field for comparison <ide> * @param string $type the type name for $value as configured using the Type map. <ide> * If it is suffixed with "[]" and the value is an array then multiple placeholders <ide> public function notEq($field, $value, $type = null) <ide> /** <ide> * Adds a new condition to the expression object in the form "field > value". <ide> * <del> * @param string $field database field to be compared against value <add> * @param string $field Database field to be compared against value <ide> * @param mixed $value The value to be bound to $field for comparison <ide> * @param string $type the type name for $value as configured using the Type map. <ide> * @return $this <ide> public function gt($field, $value, $type = null) <ide> /** <ide> * Adds a new condition to the expression object in the form "field < value". <ide> * <del> * @param string $field database field to be compared against value <add> * @param string $field Database field to be compared against value <ide> * @param mixed $value The value to be bound to $field for comparison <ide> * @param string $type the type name for $value as configured using the Type map. <ide> * @return $this <ide> public function lt($field, $value, $type = null) <ide> /** <ide> * Adds a new condition to the expression object in the form "field >= value". <ide> * <del> * @param string $field database field to be compared against value <add> * @param string $field Database field to be compared against value <ide> * @param mixed $value The value to be bound to $field for comparison <ide> * @param string $type the type name for $value as configured using the Type map. <ide> * @return $this <ide> public function gte($field, $value, $type = null) <ide> /** <ide> * Adds a new condition to the expression object in the form "field <= value". <ide> * <del> * @param string $field database field to be compared against value <add> * @param string $field Database field to be compared against value <ide> * @param mixed $value The value to be bound to $field for comparison <ide> * @param string $type the type name for $value as configured using the Type map. <ide> * @return $this <ide> public function isNotNull($field) <ide> /** <ide> * Adds a new condition to the expression object in the form "field LIKE value". <ide> * <del> * @param string $field database field to be compared against value <add> * @param string $field Database field to be compared against value <ide> * @param mixed $value The value to be bound to $field for comparison <ide> * @param string $type the type name for $value as configured using the Type map. <ide> * @return $this <ide> public function like($field, $value, $type = null) <ide> /** <ide> * Adds a new condition to the expression object in the form "field NOT LIKE value". <ide> * <del> * @param string $field database field to be compared against value <add> * @param string $field Database field to be compared against value <ide> * @param mixed $value The value to be bound to $field for comparison <ide> * @param string $type the type name for $value as configured using the Type map. <ide> * @return $this <ide> public function notLike($field, $value, $type = null) <ide> * Adds a new condition to the expression object in the form <ide> * "field IN (value1, value2)". <ide> * <del> * @param string $field database field to be compared against value <del> * @param array $values the value to be bound to $field for comparison <add> * @param string $field Database field to be compared against value <add> * @param string|array $values the value to be bound to $field for comparison <ide> * @param string $type the type name for $value as configured using the Type map. <ide> * @return $this <ide> */ <ide> public function addCase($conditions, $values = [], $types = []) <ide> * Adds a new condition to the expression object in the form <ide> * "field NOT IN (value1, value2)". <ide> * <del> * @param string $field database field to be compared against value <add> * @param string $field Database field to be compared against value <ide> * @param array $values the value to be bound to $field for comparison <ide> * @param string $type the type name for $value as configured using the Type map. <ide> * @return $this <ide> protected function _parseCondition($field, $value) <ide> * Returns an array of placeholders that will have a bound value corresponding <ide> * to each value in the first argument. <ide> * <del> * @param string $field database field to be used to bind values <add> * @param string $field Database field to be used to bind values <ide> * @param array $values The values to bind <ide> * @param string $type the type to be used to bind the values <ide> * @return array
1
Python
Python
restore use of batch norm in model
a4633fff6fd7f5c84b41c24178c5d8b22aa09724
<ide><path>spacy/_ml.py <ide> def Tok2Vec(width, embed_size, preprocess=None): <ide> >> uniqued(embed, column=5) <ide> >> drop_layer( <ide> Residual( <del> (ExtractWindow(nW=1) >> ReLu(width, width*3)) <add> (ExtractWindow(nW=1) >> BN(Maxout(width, width*3))) <ide> ) <ide> ) ** 4, pad=4 <ide> )
1
Text
Text
fix reference to workerdata in worker_threads
31d5bdea70e44802918d6f4aa7c378bc1992be54
<ide><path>doc/api/worker_threads.md <ide> if (isMainThread) { <ide> * `options` {Object} <ide> * `eval` {boolean} If true, interpret the first argument to the constructor <ide> as a script that is executed once the worker is online. <del> * `data` {any} Any JavaScript value that will be cloned and made <add> * `workerData` {any} Any JavaScript value that will be cloned and made <ide> available as [`require('worker_threads').workerData`][]. The cloning will <ide> occur as described in the [HTML structured clone algorithm][], and an error <ide> will be thrown if the object cannot be cloned (e.g. because it contains
1
Go
Go
fix missing container runtime on upgrade
d7ceda4e375250e6868854cca96c49a369503745
<ide><path>daemon/start_linux.go <ide> import ( <ide> <ide> "github.com/docker/docker/container" <ide> "github.com/docker/docker/libcontainerd" <add> "github.com/docker/engine-api/types" <ide> ) <ide> <ide> func (daemon *Daemon) getLibcontainerdCreateOptions(container *container.Container) (*[]libcontainerd.CreateOption, error) { <ide> createOptions := []libcontainerd.CreateOption{} <ide> <add> // Ensure a runtime has been assigned to this container <add> if container.HostConfig.Runtime == "" { <add> container.HostConfig.Runtime = types.DefaultRuntimeName <add> container.ToDisk() <add> } <add> <ide> rt := daemon.configStore.GetRuntime(container.HostConfig.Runtime) <ide> if rt == nil { <del> return nil, fmt.Errorf("No such runtime '%s'", container.HostConfig.Runtime) <add> return nil, fmt.Errorf("no such runtime '%s'", container.HostConfig.Runtime) <ide> } <ide> createOptions = append(createOptions, libcontainerd.WithRuntime(rt.Path, rt.Args)) <ide>
1
Python
Python
remove context copying from run_async function
cb13128cf03b2f1b3c5b48eab518e57e7b5f6516
<ide><path>src/flask/helpers.py <ide> def run_async(func: t.Callable[..., t.Coroutine]) -> t.Callable[..., t.Any]: <ide> ) <ide> <ide> @wraps(func) <del> def outer(*args: t.Any, **kwargs: t.Any) -> t.Any: <del> """This function grabs the current context for the inner function. <del> <del> This is similar to the copy_current_xxx_context functions in the <del> ctx module, except it has an async inner. <del> """ <del> ctx = None <del> <del> if _request_ctx_stack.top is not None: <del> ctx = _request_ctx_stack.top.copy() <del> <del> @wraps(func) <del> async def inner(*a: t.Any, **k: t.Any) -> t.Any: <del> """This restores the context before awaiting the func. <del> <del> This is required as the function must be awaited within the <del> context. Only calling ``func`` (as per the <del> ``copy_current_xxx_context`` functions) doesn't work as the <del> with block will close before the coroutine is awaited. <del> """ <del> if ctx is not None: <del> with ctx: <del> return await func(*a, **k) <del> else: <del> return await func(*a, **k) <del> <del> return async_to_sync(inner)(*args, **kwargs) <add> def wrapper(*args: t.Any, **kwargs: t.Any) -> t.Any: <add> return async_to_sync(func)(*args, **kwargs) <ide> <del> outer._flask_sync_wrapper = True # type: ignore <del> return outer <add> wrapper._flask_sync_wrapper = True # type: ignore <add> return wrapper
1
Java
Java
hide buffer/blocking subscribeon behavior
1cac6fc588b61416db4e59930019fe3411653391
<ide><path>rxjava-core/src/main/java/rx/Observable.java <ide> public final Subscription subscribe(Subscriber<? super T> observer, Scheduler sc <ide> * @see #subscribeOn(rx.Scheduler, int) <ide> */ <ide> public final Observable<T> subscribeOn(Scheduler scheduler) { <del> return nest().lift(new OperatorSubscribeOn<T>(scheduler, false)); <add> return nest().lift(new OperatorSubscribeOn<T>(scheduler)); <ide> } <ide> <del> /** <del> * Asynchronously subscribes and unsubscribes Observers to this Observable on the specified {@link Scheduler} <del> * and allows buffering some events emitted from the source in the time gap between the original and <del> * actual subscription, and any excess events will block the source until the actual subscription happens. <del> * <p> <del> * This overload should help mitigate issues when subscribing to a PublishSubject (and derivatives <del> * such as GroupedObservable in operator groupBy) and events fired between the original and actual subscriptions <del> * are lost. <del> * <p> <del> * <img width="640" src="https://raw.github.com/wiki/Netflix/RxJava/images/rx-operators/subscribeOn.png"> <del> * <del> * @param scheduler <del> * the {@link Scheduler} to perform subscription and unsubscription actions on <del> * @param bufferSize the number of events to buffer before blocking the source while in the time gap, <del> * negative value indicates an unlimited buffer <del> * @return the source Observable modified so that its subscriptions and unsubscriptions happen <del> * on the specified {@link Scheduler} <del> * @see <a href="https://github.com/Netflix/RxJava/wiki/Observable-Utility-Operators#wiki-subscribeon">RxJava Wiki: subscribeOn()</a> <del> */ <del> public final Observable<T> subscribeOn(Scheduler scheduler, int bufferSize) { <del> return nest().lift(new OperatorSubscribeOn<T>(scheduler, true, bufferSize)); <del> } <del> <ide> /** <ide> * Returns an Observable that extracts a Double from each of the items emitted by the source <ide> * Observable via a function you specify, and then emits the sum of these Doubles. <ide><path>rxjava-core/src/main/java/rx/operators/OperatorSubscribeOn.java <ide> import rx.Scheduler; <ide> import rx.Scheduler.Inner; <ide> import rx.Subscriber; <del>import rx.observables.GroupedObservable; <del>import rx.subjects.PublishSubject; <ide> import rx.util.functions.Action1; <ide> <ide> /** <ide> /** The buffer size to avoid flooding. Negative value indicates an unbounded buffer. */ <ide> private final int bufferSize; <ide> <del> public OperatorSubscribeOn(Scheduler scheduler, boolean dontLoseEvents) { <del> this(scheduler, dontLoseEvents, -1); <add> public OperatorSubscribeOn(Scheduler scheduler) { <add> this.scheduler = scheduler; <add> this.dontLoseEvents = false; <add> this.bufferSize = -1; <ide> } <ide> <ide> /** <ide> * Construct a SubscribeOn operator. <ide> * <ide> * @param scheduler <ide> * the target scheduler <del> * @param dontLoseEvents <del> * indicate that events should be buffered until the actual subscription happens <ide> * @param bufferSize <ide> * if dontLoseEvents == true, this indicates the buffer size. Filling the buffer will <ide> * block the source. -1 indicates an unbounded buffer <ide> */ <del> public OperatorSubscribeOn(Scheduler scheduler, boolean dontLoseEvents, int bufferSize) { <add> public OperatorSubscribeOn(Scheduler scheduler, int bufferSize) { <ide> this.scheduler = scheduler; <del> this.dontLoseEvents = dontLoseEvents; <add> this.dontLoseEvents = true; <ide> this.bufferSize = bufferSize; <ide> } <ide> <ide> public void onError(Throwable e) { <ide> } <ide> <ide> boolean checkNeedBuffer(Observable<?> o) { <del> /* <del> * Included are some Observable types known to be "hot" and thus needing <del> * buffering when subscribing across thread boundaries otherwise <del> * we can lose data. <del> * <del> * See https://github.com/Netflix/RxJava/issues/844 for more information. <del> */ <del> return dontLoseEvents <del> || ((o instanceof GroupedObservable<?, ?>) <del> || (o instanceof PublishSubject<?>) <del> // || (o instanceof BehaviorSubject<?, ?>) <del> ); <add> return dontLoseEvents; <ide> } <ide> <ide> @Override <ide><path>rxjava-core/src/test/java/rx/operators/OperatorGroupByTest.java <ide> public void call() { <ide> <ide> }); <ide> } else { <del> return group.subscribeOn(Schedulers.newThread(), 1).delay(400, TimeUnit.MILLISECONDS).map(new Func1<Integer, String>() { <add> return group.nest().lift(new OperatorSubscribeOn<Integer>(Schedulers.newThread(), 1)).delay(400, TimeUnit.MILLISECONDS).map(new Func1<Integer, String>() { <ide> <ide> @Override <ide> public String call(Integer t1) { <ide> public Integer call(Integer t) { <ide> <ide> @Override <ide> public Observable<String> call(final GroupedObservable<Integer, Integer> group) { <del> return group.subscribeOn(Schedulers.newThread(), 0).map(new Func1<Integer, String>() { <add> return group.nest().lift(new OperatorSubscribeOn<Integer>(Schedulers.newThread(), 0)).map(new Func1<Integer, String>() { <ide> <ide> @Override <ide> public String call(Integer t1) { <ide><path>rxjava-core/src/test/java/rx/operators/OperatorSubscribeOnTest.java <ide> public Subscription schedule(final Action1<Scheduler.Inner> action, final long d <ide> public void testSubscribeOnPublishSubjectWithSlowScheduler() { <ide> PublishSubject<Integer> ps = PublishSubject.create(); <ide> TestSubscriber<Integer> ts = new TestSubscriber<Integer>(); <del> ps.subscribeOn(new SlowScheduler()).subscribe(ts); <add> ps.nest().lift(new OperatorSubscribeOn<Integer>(new SlowScheduler(), 0)).subscribe(ts); <ide> ps.onNext(1); <ide> ps.onNext(2); <ide> ps.onCompleted(); <ide> public Integer call(Integer t) { <ide> <ide> @Override <ide> public Observable<String> call(final GroupedObservable<Integer, Integer> group) { <del> return group.subscribeOn(Schedulers.newThread()).map(new Func1<Integer, String>() { <add> return group.nest().lift(new OperatorSubscribeOn<Integer>(Schedulers.newThread(), 0)).map(new Func1<Integer, String>() { <ide> <ide> @Override <ide> public String call(Integer t1) { <ide> void testBoundedBufferingWithSize(int size) throws Exception { <ide> <ide> final List<Long> deltas = Collections.synchronizedList(new ArrayList<Long>()); <ide> <del> Subscription s = timer.timestamp().subscribeOn( <del> new SlowScheduler(Schedulers.computation(), 1, TimeUnit.SECONDS), size).map(new Func1<Timestamped<Long>, Long>() { <add> Subscription s = timer.timestamp().nest().lift(new OperatorSubscribeOn<Timestamped<Long>>( <add> new SlowScheduler(Schedulers.computation(), 1, TimeUnit.SECONDS), size)).map(new Func1<Timestamped<Long>, Long>() { <ide> @Override <ide> public Long call(Timestamped<Long> t1) { <ide> long v = System.currentTimeMillis() - t1.getTimestampMillis();
4
Python
Python
add clip value as in neural turing machines
69afdd7ec40f23b056bafa73a82535a41e29e764
<ide><path>keras/optimizers.py <ide> def clip_norm(g, c, n): <ide> return g <ide> <ide> <add>def clip_value(g, c): <add> if c > 0: <add> g = T.switch(T.ge(g, c), c, g) <add> return g <add> <add> <ide> def kl_divergence(p, p_hat): <ide> return p_hat - p + p * T.log(p / p_hat) <ide> <ide> def get_gradients(self, loss, params): <ide> norm = T.sqrt(sum([T.sum(g ** 2) for g in grads])) <ide> grads = [clip_norm(g, self.clipnorm, norm) for g in grads] <ide> <add> if hasattr(self, 'clipvalue') and self.clipvalue > 0: <add> grads = [clip_value(g, self.clipvalue) for g in grads] <add> <ide> return grads <ide> <ide> def get_config(self):
1
Go
Go
add _llseek syscall
923609179b18fb5fc9d4ad7820646af7e09786a2
<ide><path>daemon/execdriver/native/seccomp_default.go <ide> var defaultSeccompProfile = &configs.Seccomp{ <ide> Action: configs.Allow, <ide> Args: []*configs.Arg{}, <ide> }, <add> { <add> Name: "_llseek", <add> Action: configs.Allow, <add> Args: []*configs.Arg{}, <add> }, <ide> { <ide> Name: "lremovexattr", <ide> Action: configs.Allow,
1
Ruby
Ruby
add nodoc to hashmerger and merger
28e534136f1d4a496eb39d752896603f6ef5d6ff
<ide><path>activerecord/lib/active_record/relation/merger.rb <ide> <ide> module ActiveRecord <ide> class Relation <del> class HashMerger <add> class HashMerger # :nodoc: <ide> attr_reader :relation, :hash <ide> <ide> def initialize(relation, hash) <ide> def other <ide> end <ide> end <ide> <del> class Merger <add> class Merger # :nodoc: <ide> attr_reader :relation, :values <ide> <ide> def initialize(relation, other)
1
Go
Go
remove unused containerlistoptions.quiet field
4d3f64da6348c20f57efe01ca483b130bcf5dd9b
<ide><path>api/types/client.go <ide> type ContainerExecInspect struct { <ide> <ide> // ContainerListOptions holds parameters to list containers with. <ide> type ContainerListOptions struct { <del> Quiet bool <ide> Size bool <ide> All bool <ide> Latest bool <ide><path>testutil/environment/clean.go <ide> func getPausedContainers(ctx context.Context, t testing.TB, client client.Contai <ide> filter.Add("status", "paused") <ide> containers, err := client.ContainerList(ctx, types.ContainerListOptions{ <ide> Filters: filter, <del> Quiet: true, <ide> All: true, <ide> }) <ide> assert.Check(t, err, "failed to list containers") <ide> func deleteAllContainers(t testing.TB, apiclient client.ContainerAPIClient, prot <ide> func getAllContainers(ctx context.Context, t testing.TB, client client.ContainerAPIClient) []types.Container { <ide> t.Helper() <ide> containers, err := client.ContainerList(ctx, types.ContainerListOptions{ <del> Quiet: true, <del> All: true, <add> All: true, <ide> }) <ide> assert.Check(t, err, "failed to list containers") <ide> return containers
2
Ruby
Ruby
fix migration version in doc of #up_only
46a2f93614ccf0d1628e6fc3c4666cee476d17c8
<ide><path>activerecord/lib/active_record/migration.rb <ide> def reversible <ide> # In the following example, the new column `published` will be given <ide> # the value `true` for all existing records. <ide> # <del> # class AddPublishedToPosts < ActiveRecord::Migration[5.3] <add> # class AddPublishedToPosts < ActiveRecord::Migration[5.2] <ide> # def change <ide> # add_column :posts, :published, :boolean, default: false <ide> # up_only do
1
Python
Python
make schema in dbapihook private
04b6559f8a06363a24e70f6638df59afe43ea163
<ide><path>airflow/hooks/dbapi.py <ide> def connect(self, host: str, port: int, username: str, schema: str) -> Any: <ide> # # <ide> ######################################################################################### <ide> class DbApiHook(BaseHook): <del> """Abstract base class for sql hooks.""" <add> """ <add> Abstract base class for sql hooks. <add> <add> :param schema: Optional DB schema that overrides the schema specified in the connection. Make sure that <add> if you change the schema parameter value in the constructor of the derived Hook, such change <add> should be done before calling the ``DBApiHook.__init__()``. <add> :type schema: Optional[str] <add> """ <ide> <ide> # Override to provide the connection name. <ide> conn_name_attr = None # type: str <ide> class DbApiHook(BaseHook): <ide> # Override with the object that exposes the connect method <ide> connector = None # type: Optional[ConnectorProtocol] <ide> <del> def __init__(self, *args, **kwargs): <add> def __init__(self, *args, schema: Optional[str] = None, **kwargs): <ide> super().__init__() <ide> if not self.conn_name_attr: <ide> raise AirflowException("conn_name_attr is not defined") <ide> def __init__(self, *args, **kwargs): <ide> setattr(self, self.conn_name_attr, self.default_conn_name) <ide> else: <ide> setattr(self, self.conn_name_attr, kwargs[self.conn_name_attr]) <del> self.schema: Optional[str] = kwargs.pop("schema", None) <add> # We should not make schema available in deriving hooks for backwards compatibility <add> # If a hook deriving from DBApiHook has a need to access schema, then it should retrieve it <add> # from kwargs and store it on its own. We do not run "pop" here as we want to give the <add> # Hook deriving from the DBApiHook to still have access to the field in it's constructor <add> self.__schema = schema <ide> <ide> def get_conn(self): <ide> """Returns a connection object""" <ide> def get_uri(self) -> str: <ide> host = conn.host <ide> if conn.port is not None: <ide> host += f':{conn.port}' <del> schema = self.schema or conn.schema or '' <add> schema = self.__schema or conn.schema or '' <ide> return urlunsplit((conn.conn_type, f'{login}{host}', schema, '', '')) <ide> <ide> def get_sqlalchemy_engine(self, engine_kwargs=None): <ide><path>airflow/providers/postgres/hooks/postgres.py <ide> def __init__(self, *args, **kwargs) -> None: <ide> super().__init__(*args, **kwargs) <ide> self.connection: Optional[Connection] = kwargs.pop("connection", None) <ide> self.conn: connection = None <add> self.schema: Optional[str] = kwargs.pop("schema", None) <ide> <ide> def _get_cursor(self, raw_cursor: str) -> CursorType: <ide> _cursor = raw_cursor.lower() <ide><path>tests/hooks/test_dbapi.py <ide> def get_conn(self): <ide> return conn <ide> <ide> self.db_hook = UnitTestDbApiHook() <add> self.db_hook_schema_override = UnitTestDbApiHook(schema='schema-override') <ide> <ide> def test_get_records(self): <ide> statement = "SQL" <ide> def test_get_uri_schema_not_none(self): <ide> assert "conn_type://login:password@host:1/schema" == self.db_hook.get_uri() <ide> <ide> def test_get_uri_schema_override(self): <del> self.db_hook.get_connection = mock.MagicMock( <add> self.db_hook_schema_override.get_connection = mock.MagicMock( <ide> return_value=Connection( <ide> conn_type="conn_type", <ide> host="host", <ide> def test_get_uri_schema_override(self): <ide> port=1, <ide> ) <ide> ) <del> self.db_hook.schema = 'schema-override' <del> assert "conn_type://login:password@host:1/schema-override" == self.db_hook.get_uri() <add> assert "conn_type://login:password@host:1/schema-override" == self.db_hook_schema_override.get_uri() <ide> <ide> def test_get_uri_schema_none(self): <ide> self.db_hook.get_connection = mock.MagicMock(
3
Java
Java
expose principal in serverrequest
ba39697f2e568b27a97b2bf195b6b8c99ae1ce95
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/server/DefaultServerRequest.java <ide> import java.net.InetSocketAddress; <ide> import java.net.URI; <ide> import java.nio.charset.Charset; <add>import java.security.Principal; <ide> import java.util.Collections; <ide> import java.util.List; <ide> import java.util.Locale; <ide> public Mono<WebSession> session() { <ide> return this.exchange.getSession(); <ide> } <ide> <add> @Override <add> public Mono<? extends Principal> principal() { <add> return this.exchange.getPrincipal(); <add> } <add> <ide> private ServerHttpRequest request() { <ide> return this.exchange.getRequest(); <ide> } <ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/server/RequestPredicates.java <ide> package org.springframework.web.reactive.function.server; <ide> <ide> import java.net.URI; <add>import java.security.Principal; <ide> import java.util.Arrays; <ide> import java.util.Collections; <ide> import java.util.HashSet; <ide> public Mono<WebSession> session() { <ide> return this.request.session(); <ide> } <ide> <add> @Override <add> public Mono<? extends Principal> principal() { <add> return this.request.principal(); <add> } <add> <ide> @Override <ide> public String toString() { <ide> return method() + " " + path(); <ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/server/ServerRequest.java <ide> import java.net.InetSocketAddress; <ide> import java.net.URI; <ide> import java.nio.charset.Charset; <add>import java.security.Principal; <ide> import java.util.List; <ide> import java.util.Locale; <ide> import java.util.Map; <ide> default String pathVariable(String name) { <ide> */ <ide> Mono<WebSession> session(); <ide> <add> /** <add> * Return the authenticated user for the request, if any. <add> */ <add> Mono<? extends Principal> principal(); <add> <ide> <ide> /** <ide> * Represents the headers of the HTTP request. <ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/function/server/support/ServerRequestWrapper.java <ide> import java.net.InetSocketAddress; <ide> import java.net.URI; <ide> import java.nio.charset.Charset; <add>import java.security.Principal; <ide> import java.util.List; <ide> import java.util.Locale; <ide> import java.util.Map; <ide> public Mono<WebSession> session() { <ide> return this.delegate.session(); <ide> } <ide> <add> @Override <add> public Mono<? extends Principal> principal() { <add> return this.delegate.principal(); <add> } <ide> <ide> /** <ide> * Implementation of the {@code Headers} interface that can be subclassed <ide><path>spring-webflux/src/test/java/org/springframework/web/reactive/function/server/MockServerRequest.java <ide> import java.net.InetSocketAddress; <ide> import java.net.URI; <ide> import java.nio.charset.Charset; <add>import java.security.Principal; <ide> import java.time.Instant; <ide> import java.time.ZoneId; <ide> import java.time.ZonedDateTime; <ide> public class MockServerRequest implements ServerRequest { <ide> <ide> private final WebSession session; <ide> <add> private Principal principal; <ide> <ide> private MockServerRequest(HttpMethod method, URI uri, <ide> MockHeaders headers, Object body, Map<String, Object> attributes, <ide> MultiValueMap<String, String> queryParams, <del> Map<String, String> pathVariables, WebSession session) { <add> Map<String, String> pathVariables, WebSession session, Principal principal) { <ide> <ide> this.method = method; <ide> this.uri = uri; <ide> private MockServerRequest(HttpMethod method, URI uri, <ide> this.queryParams = queryParams; <ide> this.pathVariables = pathVariables; <ide> this.session = session; <add> this.principal = principal; <ide> } <ide> <ide> <ide> public Mono<WebSession> session() { <ide> return Mono.justOrEmpty(this.session); <ide> } <ide> <add> @Override <add> public Mono<? extends Principal> principal() { <add> return Mono.justOrEmpty(this.principal); <add> } <ide> <ide> public static Builder builder() { <ide> return new BuilderImpl(); <ide> public interface Builder { <ide> <ide> Builder session(WebSession session); <ide> <add> Builder session(Principal principal); <add> <ide> MockServerRequest body(Object body); <ide> <ide> MockServerRequest build(); <ide> private static class BuilderImpl implements Builder { <ide> <ide> private WebSession session; <ide> <add> private Principal principal; <add> <ide> @Override <ide> public Builder method(HttpMethod method) { <ide> Assert.notNull(method, "'method' must not be null"); <ide> public Builder session(WebSession session) { <ide> return this; <ide> } <ide> <add> @Override <add> public Builder session(Principal principal) { <add> Assert.notNull(principal, "'principal' must not be null"); <add> this.principal = principal; <add> return this; <add> } <add> <ide> @Override <ide> public MockServerRequest body(Object body) { <ide> this.body = body; <ide> return new MockServerRequest(this.method, this.uri, this.headers, this.body, <del> this.attributes, this.queryParams, this.pathVariables, this.session); <add> this.attributes, this.queryParams, this.pathVariables, this.session, <add> this.principal); <ide> } <ide> <ide> @Override <ide> public MockServerRequest build() { <ide> return new MockServerRequest(this.method, this.uri, this.headers, null, <del> this.attributes, this.queryParams, this.pathVariables, this.session); <add> this.attributes, this.queryParams, this.pathVariables, this.session, <add> this.principal); <ide> } <ide> } <ide>
5
Ruby
Ruby
skip directory in env_script_all_files
328fa80f413af936fb1612156208d27c42dd4697
<ide><path>Library/Homebrew/extend/pathname.rb <ide> def write_env_script target, env <ide> def env_script_all_files dst, env <ide> dst.mkpath <ide> Pathname.glob("#{self}/*") do |file| <add> next if file.directory? <ide> dst.install_p file <ide> new_file = dst+file.basename <ide> file.write_env_script(new_file, env)
1
Javascript
Javascript
use native promise in local-cli
812591ac422bcec0b2aaaa7169d3ba0d55e39acf
<ide><path>local-cli/bundle/buildBundle.js <ide> 'use strict'; <ide> <ide> const log = require('../util/log').out('bundle'); <del>const Promise = require('promise'); <ide> const Server = require('../../packager/react-packager/src/Server'); <ide> <ide> const outputBundle = require('./output/bundle'); <ide><path>local-cli/bundle/output/bundle.js <ide> */ <ide> 'use strict'; <ide> <del>const Promise = require('promise'); <del> <ide> const meta = require('./meta'); <ide> const writeFile = require('./writeFile'); <ide> <ide><path>local-cli/bundle/output/unbundle/as-assets.js <ide> 'use strict'; <ide> <ide> const MAGIC_UNBUNDLE_NUMBER = require('./magic-number'); <del>const Promise = require('promise'); <ide> <ide> const buildSourceMapWithMetaData = require('./build-unbundle-sourcemap-with-metadata'); <ide> const mkdirp = require('mkdirp'); <ide><path>local-cli/bundle/output/unbundle/as-indexed-file.js <ide> 'use strict'; <ide> <ide> const MAGIC_UNBUNDLE_FILE_HEADER = require('./magic-number'); <del>const Promise = require('promise'); <ide> <ide> const buildSourceMapWithMetaData = require('./build-unbundle-sourcemap-with-metadata'); <ide> const fs = require('fs'); <ide><path>local-cli/bundle/output/unbundle/write-sourcemap.js <ide> */ <ide> 'use strict'; <ide> <del>const Promise = require('promise'); <del> <ide> const writeFile = require('../writeFile'); <ide> <ide> function writeSourcemap(fileName, contents, log) { <ide><path>local-cli/bundle/output/writeFile.js <ide> */ <ide> 'use strict'; <ide> <del>const Promise = require('promise'); <del> <ide> const fs = require('fs'); <ide> <ide> function writeFile(file, data, encoding) { <ide><path>local-cli/cliEntry.js <ide> 'use strict'; <ide> <ide> const Config = require('./util/Config'); <del>const Promise = require('promise'); <ide> <ide> const assertRequiredOptions = require('./util/assertRequiredOptions'); <ide> const chalk = require('chalk'); <ide><path>local-cli/library/library.js <ide> const copyAndReplace = require('../util/copyAndReplace'); <ide> const fs = require('fs'); <ide> const isValidPackageName = require('../util/isValidPackageName'); <ide> const path = require('path'); <del>const Promise = require('promise'); <ide> const walk = require('../util/walk'); <ide> <ide> /** <ide><path>local-cli/logAndroid/logAndroid.js <ide> <ide> const chalk = require('chalk'); <ide> const child_process = require('child_process'); <del>const Promise = require('promise'); <ide> <ide> /** <ide> * Starts adb logcat <ide><path>local-cli/logIOS/logIOS.js <add>/** <add> * Copyright (c) 2015-present, Facebook, Inc. <add> * All rights reserved. <add> * <add> * This source code is licensed under the BSD-style license found in the <add> * LICENSE file in the root directory of this source tree. An additional grant <add> * of patent rights can be found in the PATENTS file in the same directory. <add> */ <ide> 'use strict'; <ide> <ide> const chalk = require('chalk'); <ide> const child_process = require('child_process'); <ide> const os = require('os'); <ide> const path = require('path'); <del>const Promise = require('promise'); <ide> <ide> /** <ide> * Starts iOS device syslog tail <ide><path>local-cli/runAndroid/runAndroid.js <ide> */ <ide> 'use strict'; <ide> <del>const Promise = require('promise'); <del> <ide> const adb = require('./adb'); <ide> const chalk = require('chalk'); <ide> const child_process = require('child_process'); <ide><path>local-cli/upgrade/upgrade.js <ide> */ <ide> 'use strict'; <ide> <del>const Promise = require('promise'); <del> <ide> const chalk = require('chalk'); <ide> const copyProjectTemplateAndReplace = require('../generator/copyProjectTemplateAndReplace'); <ide> const fs = require('fs');
12
Python
Python
add __f2py_numpy_version__ attribute
908d865712d423dcd6117929fca463c1708dc702
<ide><path>numpy/f2py/rules.py <ide> from . import __version__ <ide> f2py_version = __version__.version <ide> <add>from .. import version as _numpy_version <add>numpy_version = _numpy_version.version <add> <ide> import os <ide> import time <ide> import copy <ide> \t\t\"This module '#modulename#' is auto-generated with f2py (version:#f2py_version#).\\nFunctions:\\n\"\n#docs#\".\"); <ide> \tPyDict_SetItemString(d, \"__doc__\", s); <ide> \tPy_DECREF(s); <add>\ts = PyUnicode_FromString(\"""" + numpy_version + """\"); <add>\tPyDict_SetItemString(d, \"__f2py_numpy_version__\", s); <add>\tPy_DECREF(s); <ide> \t#modulename#_error = PyErr_NewException (\"#modulename#.error\", NULL, NULL); <ide> \t/* <ide> \t * Store the error object inside the dict, so that it could get deallocated. <ide><path>numpy/f2py/tests/test_regression.py <ide> import pytest <ide> <ide> import numpy as np <del>from numpy.testing import assert_raises, assert_equal <add>from numpy.testing import assert_, assert_raises, assert_equal, assert_string_equal <ide> <ide> from . import util <ide> <ide> def test_inout(self): <ide> x = np.arange(3, dtype=np.float32) <ide> self.module.foo(x) <ide> assert_equal(x, [3, 1, 2]) <add> <add> <add>class TestNumpyVersionAttribute(util.F2PyTest): <add> # Check that th attribute __f2py_numpy_version__ is present <add> # in the compiled module and that has the value np.__version__. <add> sources = [_path('src', 'regression', 'inout.f90')] <add> <add> @pytest.mark.slow <add> def test_numpy_version_attribute(self): <add> <add> # Check that self.module has an attribute named "__f2py_numpy_version__" <add> assert_(hasattr(self.module, "__f2py_numpy_version__"), <add> msg="Fortran module does not have __f2py_numpy_version__") <add> <add> # Check that the attribute __f2py_numpy_version__ is a string <add> assert_(isinstance(self.module.__f2py_numpy_version__, str), <add> msg="__f2py_numpy_version__ is not a string") <add> <add> # Check that __f2py_numpy_version__ has the value numpy.__version__ <add> assert_string_equal(np.__version__, self.module.__f2py_numpy_version__)
2
Javascript
Javascript
fix broken lang tests in argentina
26333668cf5c1158ae425af338b3dcc5bf51e645
<ide><path>test/lang/en-ca.js <ide> exports["lang:en-ca"] = { <ide> "weeks year starting tuesday" : function (test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2007, 11, 30]).week(), 1, "Dec 30 2007 should be week 1"); <add> test.equal(moment([2007, 11, 29]).week(), 52, "Dec 29 2007 should be week 52"); <ide> test.equal(moment([2008, 0, 1]).week(), 1, "Jan 1 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 5]).week(), 1, "Jan 5 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 6]).week(), 2, "Jan 6 2008 should be week 2"); <ide><path>test/lang/en.js <ide> exports["lang:en"] = { <ide> "weeks year starting tuesday" : function (test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2007, 11, 30]).week(), 1, "Dec 30 2007 should be week 1"); <add> test.equal(moment([2007, 11, 29]).week(), 52, "Dec 29 2007 should be week 52"); <ide> test.equal(moment([2008, 0, 1]).week(), 1, "Jan 1 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 5]).week(), 1, "Jan 5 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 6]).week(), 2, "Jan 6 2008 should be week 2"); <ide><path>test/lang/fr-ca.js <ide> exports["lang:fr-ca"] = { <ide> "weeks year starting tuesday" : function (test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2007, 11, 30]).week(), 1, "Dec 30 2007 should be week 1"); <add> test.equal(moment([2007, 11, 29]).week(), 52, "Dec 29 2007 should be week 52"); <ide> test.equal(moment([2008, 0, 1]).week(), 1, "Jan 1 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 5]).week(), 1, "Jan 5 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 6]).week(), 2, "Jan 6 2008 should be week 2"); <ide><path>test/lang/he.js <ide> exports["lang:he"] = { <ide> "weeks year starting tuesday" : function (test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2007, 11, 30]).week(), 1, "Dec 30 2007 should be week 1"); <add> test.equal(moment([2007, 11, 29]).week(), 52, "Dec 29 2007 should be week 52"); <ide> test.equal(moment([2008, 0, 1]).week(), 1, "Jan 1 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 5]).week(), 1, "Jan 5 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 6]).week(), 2, "Jan 6 2008 should be week 2"); <ide><path>test/lang/hi.js <ide> exports["lang:hi"] = { <ide> "weeks year starting tuesday" : function (test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2007, 11, 30]).week(), 1, "Dec 30 2007 should be week 1"); <add> test.equal(moment([2007, 11, 29]).week(), 52, "Dec 29 2007 should be week 52"); <ide> test.equal(moment([2008, 0, 1]).week(), 1, "Jan 1 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 5]).week(), 1, "Jan 5 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 6]).week(), 2, "Jan 6 2008 should be week 2"); <ide><path>test/lang/ja.js <ide> exports["lang:ja"] = { <ide> "weeks year starting tuesday" : function (test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2007, 11, 30]).week(), 1, "Dec 30 2007 should be week 1"); <add> test.equal(moment([2007, 11, 29]).week(), 52, "Dec 29 2007 should be week 52"); <ide> test.equal(moment([2008, 0, 1]).week(), 1, "Jan 1 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 5]).week(), 1, "Jan 5 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 6]).week(), 2, "Jan 6 2008 should be week 2"); <ide><path>test/lang/ko.js <ide> exports["lang:kr"] = { <ide> "weeks year starting tuesday" : function (test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2007, 11, 30]).week(), 1, "Dec 30 2007 should be week 1"); <add> test.equal(moment([2007, 11, 29]).week(), 52, "Dec 29 2007 should be week 52"); <ide> test.equal(moment([2008, 0, 1]).week(), 1, "Jan 1 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 5]).week(), 1, "Jan 5 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 6]).week(), 2, "Jan 6 2008 should be week 2"); <ide><path>test/lang/pt-br.js <ide> exports["lang:pt-br"] = { <ide> "weeks year starting tuesday" : function (test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2007, 11, 30]).week(), 1, "Dec 30 2007 should be week 1"); <add> test.equal(moment([2007, 11, 29]).week(), 52, "Dec 29 2007 should be week 52"); <ide> test.equal(moment([2008, 0, 1]).week(), 1, "Jan 1 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 5]).week(), 1, "Jan 5 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 6]).week(), 2, "Jan 6 2008 should be week 2"); <ide><path>test/lang/th.js <ide> exports["lang:th"] = { <ide> "weeks year starting tuesday" : function (test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2007, 11, 30]).week(), 1, "Dec 30 2007 should be week 1"); <add> test.equal(moment([2007, 11, 29]).week(), 52, "Dec 29 2007 should be week 52"); <ide> test.equal(moment([2008, 0, 1]).week(), 1, "Jan 1 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 5]).week(), 1, "Jan 5 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 6]).week(), 2, "Jan 6 2008 should be week 2"); <ide><path>test/lang/zh-cn.js <ide> exports["lang:zh-cn"] = { <ide> "weeks year starting tuesday" : function (test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2007, 11, 30]).week(), 1, "Dec 30 2007 should be week 1"); <add> test.equal(moment([2007, 11, 29]).week(), 52, "Dec 29 2007 should be week 52"); <ide> test.equal(moment([2008, 0, 1]).week(), 1, "Jan 1 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 5]).week(), 1, "Jan 5 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 6]).week(), 2, "Jan 6 2008 should be week 2"); <ide><path>test/lang/zh-tw.js <ide> exports["lang:zh-tw"] = { <ide> "weeks year starting tuesday" : function (test) { <ide> test.expect(6); <ide> <del> test.equal(moment([2007, 11, 30]).week(), 1, "Dec 30 2007 should be week 1"); <add> test.equal(moment([2007, 11, 29]).week(), 52, "Dec 29 2007 should be week 52"); <ide> test.equal(moment([2008, 0, 1]).week(), 1, "Jan 1 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 5]).week(), 1, "Jan 5 2008 should be week 1"); <ide> test.equal(moment([2008, 0, 6]).week(), 2, "Jan 6 2008 should be week 2");
11
PHP
PHP
fix memcached engine
4f4e8a5be8ed5acc971cf71d664ddcf7e1d4b895
<ide><path>lib/Cake/Cache/Engine/MemcachedEngine.php <ide> * @since CakePHP(tm) v 2.5.0 <ide> * @license http://www.opensource.org/licenses/mit-license.php MIT License <ide> */ <add>namespace Cake\Cache\Engine; <add> <add>use Cake\Cache\CacheEngine; <add>use Cake\Error; <add>use Cake\Utility\Inflector; <ide> <ide> /** <ide> * Memcached storage engine for cache. Memcached has some limitations in the amount of <ide> * (if memcached extension compiled with --enable-igbinary) <ide> * Compressed keys can also be incremented/decremented <ide> * <del> * @package Cake.Cache.Engine <ide> */ <ide> class MemcachedEngine extends CacheEngine { <ide> <ide> class MemcachedEngine extends CacheEngine { <ide> * <ide> * @param array $settings array of setting for the engine <ide> * @return boolean True if the engine has been successfully initialized, false if not <del> * @throws CacheException when you try use authentication without Memcached compiled with SASL support <add> * @throws Cake\Error\Exception when you try use authentication without Memcached compiled with SASL support <ide> */ <ide> public function init($settings = array()) { <ide> if (!class_exists('Memcached')) { <ide> public function init($settings = array()) { <ide> return true; <ide> } <ide> <del> $this->_Memcached = new Memcached($this->settings['persistent'] ? (string)$this->settings['persistent'] : null); <add> $this->_Memcached = new \Memcached($this->settings['persistent'] ? (string)$this->settings['persistent'] : null); <ide> $this->_setOptions(); <ide> <ide> if (count($this->_Memcached->getServerList())) { <ide> public function init($settings = array()) { <ide> <ide> if ($this->settings['login'] !== null && $this->settings['password'] !== null) { <ide> if (!method_exists($this->_Memcached, 'setSaslAuthData')) { <del> throw new CacheException( <add> throw new Error\Exception( <ide> __d('cake_dev', 'Memcached extension is not build with SASL support') <ide> ); <ide> } <ide> public function init($settings = array()) { <ide> * <ide> */ <ide> protected function _setOptions() { <del> $this->_Memcached->setOption(Memcached::OPT_LIBKETAMA_COMPATIBLE, true); <add> $this->_Memcached->setOption(\Memcached::OPT_LIBKETAMA_COMPATIBLE, true); <ide> <del> if (Memcached::HAVE_IGBINARY) { <del> $this->_Memcached->setOption(Memcached::OPT_SERIALIZER, Memcached::SERIALIZER_IGBINARY); <add> if (\Memcached::HAVE_IGBINARY) { <add> $this->_Memcached->setOption(\Memcached::OPT_SERIALIZER, \Memcached::SERIALIZER_IGBINARY); <ide> } <ide> <ide> // Check for Amazon ElastiCache instance <ide> if (defined('Memcached::OPT_CLIENT_MODE') && defined('Memcached::DYNAMIC_CLIENT_MODE')) { <del> $this->_Memcached->setOption(Memcached::OPT_CLIENT_MODE, Memcached::DYNAMIC_CLIENT_MODE); <add> $this->_Memcached->setOption(\Memcached::OPT_CLIENT_MODE, \Memcached::DYNAMIC_CLIENT_MODE); <ide> } <ide> <del> $this->_Memcached->setOption(Memcached::OPT_COMPRESSION, (bool)$this->settings['compress']); <add> $this->_Memcached->setOption(\Memcached::OPT_COMPRESSION, (bool)$this->settings['compress']); <ide> } <ide> <ide> /** <ide> public function read($key) { <ide> * @param string $key Identifier for the data <ide> * @param integer $offset How much to increment <ide> * @return New incremented value, false otherwise <del> * @throws CacheException when you try to increment with compress = true <add> * @throws Cake\Error\Exception when you try to increment with compress = true <ide> */ <ide> public function increment($key, $offset = 1) { <ide> return $this->_Memcached->increment($key, $offset); <ide> public function increment($key, $offset = 1) { <ide> * @param string $key Identifier for the data <ide> * @param integer $offset How much to subtract <ide> * @return New decremented value, false otherwise <del> * @throws CacheException when you try to decrement with compress = true <add> * @throws Cake\Error\Exception when you try to decrement with compress = true <ide> */ <ide> public function decrement($key, $offset = 1) { <ide> return $this->_Memcached->decrement($key, $offset); <add><path>lib/Cake/Test/TestCase/Cache/Engine/MemcachedEngineTest.php <del><path>lib/Cake/Test/Case/Cache/Engine/MemcachedEngineTest.php <ide> * <ide> * @copyright Copyright (c) Cake Software Foundation, Inc. (http://cakefoundation.org) <ide> * @link http://book.cakephp.org/2.0/en/development/testing.html CakePHP(tm) Tests <del> * @package Cake.Test.Case.Cache.Engine <ide> * @since CakePHP(tm) v 2.5.0 <ide> * @license http://www.opensource.org/licenses/mit-license.php MIT License <ide> */ <add>namespace Cake\Test\TestCase\Cache\Engine; <ide> <del>App::uses('Cache', 'Cache'); <del>App::uses('MemcachedEngine', 'Cache/Engine'); <add>use Cake\Cache\Cache; <add>use Cake\Cache\Engine\MemcachedEngine; <add>use Cake\Core\Configure; <add>use Cake\TestSuite\TestCase; <ide> <ide> /** <ide> * Class TestMemcachedEngine <ide> * <del> * @package Cake.Test.Case.Cache.Engine <ide> */ <ide> class TestMemcachedEngine extends MemcachedEngine { <ide> <ide> public function getMemcached() { <ide> /** <ide> * MemcachedEngineTest class <ide> * <del> * @package Cake.Test.Case.Cache.Engine <ide> */ <del>class MemcachedEngineTest extends CakeTestCase { <add>class MemcachedEngineTest extends TestCase { <ide> <ide> /** <ide> * setUp method <ide> public function tearDown() { <ide> Cache::drop('memcached'); <ide> Cache::drop('memcached_groups'); <ide> Cache::drop('memcached_helper'); <add> Cache::drop('compressed_memcached'); <ide> Cache::config('default'); <ide> } <ide> <ide> public function testCompressionSetting() { <ide> 'compress' => false <ide> )); <ide> <del> $this->assertFalse($Memcached->getMemcached()->getOption(Memcached::OPT_COMPRESSION)); <add> $this->assertFalse($Memcached->getMemcached()->getOption(\Memcached::OPT_COMPRESSION)); <ide> <ide> $MemcachedCompressed = new TestMemcachedEngine(); <ide> $MemcachedCompressed->init(array( <ide> public function testCompressionSetting() { <ide> 'compress' => true <ide> )); <ide> <del> $this->assertTrue($MemcachedCompressed->getMemcached()->getOption(Memcached::OPT_COMPRESSION)); <add> $this->assertTrue($MemcachedCompressed->getMemcached()->getOption(\Memcached::OPT_COMPRESSION)); <ide> } <ide> <ide> /** <ide> public function testSaslAuthException() { <ide> ); <ide> <ide> $this->setExpectedException( <del> 'CacheException', 'Memcached extension is not build with SASL support' <add> 'Cake\Error\Exception', 'Memcached extension is not build with SASL support' <ide> ); <ide> $Memcached->init($settings); <ide> } <ide> public function testSaslAuthException() { <ide> public function testMultipleServers() { <ide> $servers = array('127.0.0.1:11211', '127.0.0.1:11222'); <ide> $available = true; <del> $Memcached = new Memcached(); <add> $Memcached = new \Memcached(); <ide> <ide> foreach ($servers as $server) { <ide> list($host, $port) = explode(':', $server); <ide> public function testParseServerStringUnix() { <ide> * @return void <ide> */ <ide> public function testReadAndWriteCache() { <del> Cache::set(array('duration' => 1), null, 'memcached'); <add> Cache::drop('memcached'); <add> Cache::config('memcached', array( <add> 'engine' => 'Memcached', <add> 'prefix' => 'cake_', <add> 'duration' => 1 <add> )); <ide> <ide> $result = Cache::read('test', 'memcached'); <ide> $expecting = ''; <ide> public function testReadAndWriteCache() { <ide> * @return void <ide> */ <ide> public function testExpiry() { <del> Cache::set(array('duration' => 1), 'memcached'); <add> Cache::drop('memcached'); <add> Cache::config('memcached', array( <add> 'engine' => 'Memcached', <add> 'prefix' => 'cake_', <add> 'duration' => 1 <add> )); <ide> <ide> $result = Cache::read('test', 'memcached'); <ide> $this->assertFalse($result); <ide> public function testExpiry() { <ide> $result = Cache::read('other_test', 'memcached'); <ide> $this->assertFalse($result); <ide> <del> Cache::set(array('duration' => "+1 second"), 'memcached'); <add> Cache::drop('memcached'); <add> Cache::config('memcached', array( <add> 'engine' => 'Memcached', <add> 'prefix' => 'cake_', <add> 'duration' => '+1 second' <add> )); <ide> <ide> $data = 'this is a test of the emergency broadcasting system'; <ide> $result = Cache::write('other_test', $data, 'memcached'); <ide> public function testExpiry() { <ide> $result = Cache::read('other_test', 'memcached'); <ide> $this->assertFalse($result); <ide> <del> Cache::config('memcached', array('duration' => '+1 second')); <add> Cache::drop('memcached'); <add> Cache::config('memcached', array( <add> 'engine' => 'Memcached', <add> 'prefix' => 'cake_', <add> 'duration' => '+1 second' <add> )); <ide> <ide> $result = Cache::read('other_test', 'memcached'); <ide> $this->assertFalse($result); <ide> <del> Cache::config('memcached', array('duration' => '+29 days')); <add> Cache::drop('memcached'); <add> Cache::config('memcached', array( <add> 'engine' => 'Memcached', <add> 'prefix' => 'cake2_', <add> 'duration' => '+29 days' <add> )); <ide> $data = 'this is a test of the emergency broadcasting system'; <ide> $result = Cache::write('long_expiry_test', $data, 'memcached'); <ide> $this->assertTrue($result); <ide> public function testExpiry() { <ide> $result = Cache::read('long_expiry_test', 'memcached'); <ide> $expecting = $data; <ide> $this->assertEquals($expecting, $result); <del> <del> Cache::config('memcached', array('duration' => 3600)); <ide> } <ide> <ide> /** <ide> public function testClear() { <ide> * @return void <ide> */ <ide> public function testZeroDuration() { <del> Cache::config('memcached', array('duration' => 0)); <add> Cache::drop('memcached'); <add> Cache::config('memcached', array( <add> 'engine' => 'Memcached', <add> 'prefix' => 'cake2_', <add> 'duration' => 0 <add> )); <ide> $result = Cache::write('test_key', 'written!', 'memcached'); <ide> <ide> $this->assertTrue($result);
2
Java
Java
delete unused imports in spring-messaging module
9bc32ac199ad649b095ad5f8af63c82c2b9d7dc5
<ide><path>spring-messaging/src/main/java/org/springframework/messaging/simp/config/AbstractMessageBrokerConfiguration.java <ide> import org.springframework.context.ApplicationContextAware; <ide> import org.springframework.context.annotation.Bean; <ide> import org.springframework.messaging.Message; <del>import org.springframework.messaging.MessageChannel; <del>import org.springframework.messaging.SubscribableChannel; <ide> import org.springframework.messaging.converter.*; <ide> import org.springframework.messaging.handler.invocation.HandlerMethodArgumentResolver; <ide> import org.springframework.messaging.handler.invocation.HandlerMethodReturnValueHandler;
1
PHP
PHP
remove behaviors contant
66ae2396295703b6421953c1de3fb2ad057bbfed
<ide><path>lib/Cake/bootstrap.php <ide> define('APP', ROOT.DS.APP_DIR.DS); <ide> } <ide> <del>/** <del> * Path to model behaviors directory. <del> */ <del> define('BEHAVIORS', MODELS.'Behavior'.DS); <del> <ide> /** <ide> * Path to the application's libs directory. <ide> */
1
Python
Python
use official endpoint for ec2 prices
1f58fcea7943e38360267cbbfa8cb23f255d84b1
<ide><path>contrib/scrape-ec2-prices.py <ide> from collections import defaultdict, OrderedDict <ide> <ide> import requests <del>import _jsonnet # pylint: disable=import-error <del> <del>LINUX_PRICING_URLS = [ <del> # Deprecated instances (JSON format) <del> "https://aws.amazon.com/ec2/pricing/json/linux-od.json", <del> # Previous generation instances (JavaScript file) <del> "https://a0.awsstatic.com/pricing/1/ec2/previous-generation/linux-od.min.js", <del> # New generation instances (JavaScript file) <del> # Using other endpoint atm <del> # 'https://a0.awsstatic.com/pricing/1/ec2/linux-od.min.js' <del>] <add>import ijson # pylint: disable=import-error <ide> <del>EC2_REGIONS = [ <del> "us-east-1", <del> "us-east-2", <del> "us-west-1", <del> "us-west-2", <del> "us-gov-west-1", <del> "eu-west-1", <del> "eu-west-2", <del> "eu-west-3", <del> "eu-north-1", <del> "eu-south-1", <del> "eu-central-1", <del> "ca-central-1", <del> "ap-southeast-1", <del> "ap-southeast-2", <del> "ap-northeast-1", <del> "ap-northeast-2", <del> "ap-south-1", <del> "sa-east-1", <del> "cn-north-1", <del> "ap-east-1", <del> "af-south-1", <del>] <add># same URL as the one used by scrape-ec2-sizes.py, now it has official data on pricing <add>URL = "https://pricing.us-east-1.amazonaws.com/offers/v1.0/aws/AmazonEC2/current/index.json" <ide> <del>EC2_INSTANCE_TYPES = [ <del> "t1.micro", <del> "m1.small", <del> "m1.medium", <del> "m1.large", <del> "m1.xlarge", <del> "m2.xlarge", <del> "m2.2xlarge", <del> "m2.4xlarge", <del> "m3.medium", <del> "m3.large", <del> "m3.xlarge", <del> "m3.2xlarge", <del> "c1.medium", <del> "c1.xlarge", <del> "cc1.4xlarge", <del> "cc2.8xlarge", <del> "c3.large", <del> "c3.xlarge", <del> "c3.2xlarge", <del> "c3.4xlarge", <del> "c3.8xlarge", <del> "d2.xlarge", <del> "d2.2xlarge", <del> "d2.4xlarge", <del> "d2.8xlarge", <del> "cg1.4xlarge", <del> "g2.2xlarge", <del> "g2.8xlarge", <del> "cr1.8xlarge", <del> "hs1.4xlarge", <del> "hs1.8xlarge", <del> "i2.xlarge", <del> "i2.2xlarge", <del> "i2.4xlarge", <del> "i2.8xlarge", <del> "i3.large", <del> "i3.xlarge", <del> "i3.2xlarge", <del> "i3.4xlarge", <del> "i3.8xlarge", <del> "i3.16large", <del> "r3.large", <del> "r3.xlarge", <del> "r3.2xlarge", <del> "r3.4xlarge", <del> "r3.8xlarge", <del> "r4.large", <del> "r4.xlarge", <del> "r4.2xlarge", <del> "r4.4xlarge", <del> "r4.8xlarge", <del> "r4.16xlarge", <del> "t2.micro", <del> "t2.small", <del> "t2.medium", <del> "t2.large", <del> "x1.32xlarge", <del>] <add>RE_NUMERIC_OTHER = re.compile(r"(?:([0-9]+)|([-A-Z_a-z]+)|([^-0-9A-Z_a-z]+))") <add> <add>BASE_PATH = os.path.dirname(os.path.abspath(__file__)) <add>PRICING_FILE_PATH = os.path.join(BASE_PATH, "../libcloud/data/pricing.json") <add>PRICING_FILE_PATH = os.path.abspath(PRICING_FILE_PATH) <ide> <del># Maps EC2 region name to region name used in the pricing file <del>REGION_NAME_MAP = { <del> "us-east": "ec2_us_east", <del> "us-east-1": "ec2_us_east", <del> "us-east-2": "ec2_us_east_ohio", <del> "us-west": "ec2_us_west", <del> "us-west-1": "ec2_us_west", <del> "us-west-2": "ec2_us_west_oregon", <del> "eu-west-1": "ec2_eu_west", <del> "eu-west-2": "ec2_eu_west_london", <del> "eu-west-3": "ec2_eu_west_3", <del> "eu-ireland": "ec2_eu_west", <del> "eu-south-1": "ec2_eu_south", <del> "eu-central-1": "ec2_eu_central", <del> "ca-central-1": "ec2_ca_central_1", <del> "apac-sin": "ec2_ap_southeast", <del> "ap-southeast-1": "ec2_ap_southeast", <del> "apac-syd": "ec2_ap_southeast_2", <del> "ap-southeast-2": "ec2_ap_southeast_2", <del> "apac-tokyo": "ec2_ap_northeast", <del> "ap-northeast-1": "ec2_ap_northeast", <del> "ap-northeast-2": "ec2_ap_northeast", <del> "ap-south-1": "ec2_ap_south_1", <del> "sa-east-1": "ec2_sa_east", <del> "us-gov-west-1": "ec2_us_govwest", <del> "cn-north-1": "ec2_cn_north", <del> "ap-east-1": "ec2_ap_east", <del> "af-south-1": "ec2_af_south", <del>} <add>TEMPFILE = os.environ.get("TMP_JSON", "/tmp/ec.json") <ide> <ide> INSTANCE_SIZES = [ <ide> "micro", <ide> "extra-large", <ide> ] <ide> <del>RE_NUMERIC_OTHER = re.compile(r"(?:([0-9]+)|([-A-Z_a-z]+)|([^-0-9A-Z_a-z]+))") <del> <del>BASE_PATH = os.path.dirname(os.path.abspath(__file__)) <del>PRICING_FILE_PATH = os.path.join(BASE_PATH, "../libcloud/data/pricing.json") <del>PRICING_FILE_PATH = os.path.abspath(PRICING_FILE_PATH) <del> <ide> <add>def download_json(): <add> response = requests.get(URL, stream=True) <add> try: <add> return open(TEMPFILE, "r") <add> except IOError: <add> with open(TEMPFILE, "wb") as fo: <add> for chunk in response.iter_content(chunk_size=2**20): <add> if chunk: <add> fo.write(chunk) <add> return open(TEMPFILE, "r") <add> <add> <add>def get_json(): <add> try: <add> return open(TEMPFILE, "r") <add> except IOError: <add> return download_json() <add> <add> <add># Prices and sizes are in different dicts and categorized by sku <add>def get_all_prices(): <add> # return variable <add> # prices = {sku : {price: int, unit: string}} <add> prices = {} <add> current_sku = "" <add> current_rate_code = "" <add> amazonEC2_offer_code = "JRTCKXETXF" <add> json_file = get_json() <add> parser = ijson.parse(json_file) <add> # use parser because file is very large <add> for prefix, event, value in parser: <add> if "products" in prefix: <add> continue <add> if (prefix, event) == ("terms.OnDemand", "map_key"): <add> current_sku = value <add> prices[current_sku] = {} <add> elif (prefix, event) == ( <add> f"terms.OnDemand.{current_sku}.{current_sku}.{amazonEC2_offer_code}.priceDimensions", <add> "map_key", <add> ): <add> current_rate_code = value <add> elif (prefix, event) == ( <add> f"terms.OnDemand.{current_sku}.{current_sku}.{amazonEC2_offer_code}.priceDimensions" <add> f".{current_rate_code}.unit", <add> "string", <add> ): <add> prices[current_sku]["unit"] = value <add> elif (prefix, event) == ( <add> f"terms.OnDemand.{current_sku}.{current_sku}.{amazonEC2_offer_code}.priceDimensions" <add> f".{current_rate_code}.pricePerUnit.USD", <add> "string", <add> ): <add> prices[current_sku]["price"] = value <add> return prices <add> <add> <add># For each combination of location - size - os the file has a different sku. <add># For each sku we have a price <ide> def scrape_ec2_pricing(): <del> result = defaultdict(OrderedDict) <del> os_map = {"linux": "ec2_linux", "windows-std": "ec2_windows"} <del> for item in os_map.values(): <del> result[item] = {} <del> for url in LINUX_PRICING_URLS: <del> response = requests.get(url) <del> <del> if re.match(r".*?\.json$", url): <del> data = response.json() <del> print("Sample response: %s..." % (str(data)[:100])) <del> elif re.match(r".*?\.js$", url): <del> data = response.content.decode("utf-8") <del> print("Sample response: %s..." % (data[:100])) <del> match = re.match(r"^.*callback\((.*?)\);?$", data, re.MULTILINE | re.DOTALL) <del> data = match.group(1) <del> # NOTE: We used to use demjson, but it's not working under Python 3 and new version of <del> # setuptools anymore so we use jsonnet <del> # demjson supports non-strict mode and can parse unquoted objects <del> data = json.loads(_jsonnet.evaluate_snippet("snippet", data)) <del> regions = data["config"]["regions"] <del> <del> for region_data in regions: <del> region_name = region_data["region"] <del> instance_types = region_data["instanceTypes"] <del> <del> for instance_type in instance_types: <del> sizes = instance_type["sizes"] <del> for size in sizes: <del> if not result["ec2_linux"].get(size["size"], False): <del> result["ec2_linux"][size["size"]] = {} <del> price = size["valueColumns"][0]["prices"]["USD"] <del> if str(price).lower() == "n/a": <del> # Price not available <del> continue <del> <del> result["ec2_linux"][size["size"]][region_name] = float(price) <del> <del> res = defaultdict(OrderedDict) <del> url = "https://calculator.aws/pricing/1.0/" "ec2/region/{}/ondemand/{}/index.json" <del> instances = set() <del> for OS in ["linux", "windows-std"]: <del> res[os_map[OS]] = {} <del> for region in EC2_REGIONS: <del> res[os_map[OS]][region] = {} <del> full_url = url.format(region, OS) <del> response = requests.get(full_url) <del> if response.status_code != 200: <del> print( <del> "Skipping URL %s which returned non 200-status code (%s)" <del> % (full_url, response.status_code) <del> ) <del> continue <del> data = response.json() <del> <del> for entry in data["prices"]: <del> instance_type = entry["attributes"].get("aws:ec2:instanceType", "") <del> instances.add(instance_type) <del> price = entry["price"].get("USD", 0) <del> res[os_map[OS]][region][instance_type] = price <del> for item in os_map.values(): <del> for instance in instances: <del> if not result[item].get(instance, False): <del> result[item][instance] = {} <del> for region in EC2_REGIONS: <del> if res[item][region].get(instance, False): <del> result[item][instance][region] = float(res[item][region][instance]) <del> <del> return result <add> skus = {} <add> prices = get_all_prices() <add> json_file = get_json() <add> parser = ijson.parse(json_file) <add> current_sku = "" <add> <add> for prefix, event, value in parser: <add> if "terms" in prefix: <add> break <add> if (prefix, event) == ("products", "map_key"): <add> current_sku = value <add> skus[current_sku] = {"sku": value} <add> elif (prefix, event) == (f"products.{current_sku}.productFamily", "string"): <add> skus[current_sku]["family"] = value <add> elif (prefix, event) == ( <add> f"products.{current_sku}.attributes.location", <add> "string", <add> ): <add> skus[current_sku]["locationName"] = value <add> elif (prefix, event) == ( <add> f"products.{current_sku}.attributes.locationType", <add> "string", <add> ): <add> skus[current_sku]["locationType"] = value <add> elif (prefix, event) == ( <add> f"products.{current_sku}.attributes.instanceType", <add> "string", <add> ): <add> skus[current_sku]["size"] = value <add> elif (prefix, event) == ( <add> f"products.{current_sku}.attributes.operatingSystem", <add> "string", <add> ): <add> skus[current_sku]["os"] = value <add> elif (prefix, event) == ( <add> f"products.{current_sku}.attributes.regionCode", <add> "string", <add> ): <add> skus[current_sku]["location"] = value <add> # only get prices of compute instances atm <add> elif (prefix, event) == (f"products.{current_sku}", "end_map"): <add> if ( <add> "Compute Instance" not in skus[current_sku]["family"] <add> and "Dedicated Host" not in skus[current_sku]["family"] <add> ): <add> del skus[current_sku] <add> ec2_linux = defaultdict(OrderedDict) <add> ec2_windows = defaultdict(OrderedDict) <add> ec2_rhel = defaultdict(OrderedDict) <add> ec2_rhel_ha = defaultdict(OrderedDict) <add> ec2_suse = defaultdict(OrderedDict) <add> <add> os_map = { <add> "Linux": ec2_linux, <add> "Windows": ec2_windows, <add> "RHEL": ec2_rhel, <add> "SUSE": ec2_suse, <add> "Red Hat Enterprise Linux with HA": ec2_rhel_ha, <add> } <add> for sku in skus: <add> if skus[sku]["locationType"] != "AWS Region": <add> continue <add> os = skus[sku]["os"] <add> if os == "NA": <add> os = "Linux" # linux is free <add> os_dict = os_map.get(os) <add> # new OS, until it is documented skip it <add> if os_dict is None: <add> print(f"Unexpected OS {os}") <add> continue <add> size = skus[sku]["size"] <add> location = skus[sku]["location"] <add> # size is first seen <add> if not os_dict.get(size): <add> os_dict[size] = {} <add> # if price is not a number then label it as not available <add> try: <add> price = float(prices[sku]["price"]) <add> if os_dict[size].get(location) and os_dict[size][location] > price: <add> # do nothing, keep the highest price <add> pass <add> else: <add> os_dict[size][location] = price <add> except ValueError: <add> os_dict[size][location] = "n/a" <add> except KeyError: <add> # size is available only reserved <add> del os_dict[size] <add> return { <add> "ec2_linux": ec2_linux, <add> "ec2_windows": ec2_windows, <add> "ec2_rhel": ec2_rhel, <add> "ec2_suse": ec2_suse, <add> "ec2_rhel_ha": ec2_rhel_ha, <add> } <ide> <ide> <ide> def update_pricing_file(pricing_file_path, pricing_data): <ide> def sort_key_by_numeric_other(key_value): <ide> <ide> <ide> def main(): <del> print("Scraping EC2 pricing data (this may take up to 2 minutes)....") <add> print( <add> "Scraping EC2 pricing data (if this runs for the first time " <add> "it has to download a 3GB file, depending on your bandwith " <add> "it might take a while)...." <add> ) <ide> <ide> pricing_data = scrape_ec2_pricing() <ide> update_pricing_file(pricing_file_path=PRICING_FILE_PATH, pricing_data=pricing_data)
1