| content_type (stringclasses, 8 values) | main_lang (stringclasses, 7 values) | message (stringlengths, 1-50) | sha (stringlengths, 40) | patch (stringlengths, 52-962k) | file_count (int64, 1-300) |
|---|---|---|---|---|---|
| Javascript | Javascript | fix typos, wrong closing tags | f2b824e987fe3a16f744c0286667084428c5b742 |
<ide><path>Libraries/Components/Touchable/TouchableHighlight.js
<ide> var DEFAULT_PROPS = {
<ide> * style={styles.button}
<ide> * source={require('image!myButton')}
<ide> * />
<del> * </View>
<add> * </TouchableHighlight>
<ide> * );
<ide> * },
<ide> * ```
<ide><path>Libraries/Components/Touchable/TouchableOpacity.js
<ide> var onlyChild = require('onlyChild');
<ide> * style={styles.button}
<ide> * source={require('image!myButton')}
<ide> * />
<del> * </View>
<add> * </TouchableOpacity>
<ide> * );
<ide> * },
<ide> * ```
| file_count: 2 |
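The two hunks above fix copy-pasted documentation in which the example JSX closed a `<TouchableHighlight>` / `<TouchableOpacity>` wrapper with a stray `</View>`. A minimal sketch of the corrected doc fragment (the press handler, style, and image asset names are placeholders carried over from the surrounding doc comment, not a runnable app):

```js
// Doc-example fragment only; _onPressButton, styles.button and the
// 'image!myButton' asset are illustrative placeholders.
<TouchableHighlight onPress={this._onPressButton}>
  <Image
    style={styles.button}
    source={require('image!myButton')}
  />
</TouchableHighlight>
```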
| Javascript | Javascript | improve coverage for `question` in readline | ccde7fc2a6287c57d7402fd08aebcfbe3678f786 |
<ide><path>test/parallel/test-readline-interface.js
<ide> for (let i = 0; i < 12; i++) {
<ide> rli.close();
<ide> }
<ide>
<add> // Calling the question multiple times
<add> {
<add> const [rli] = getInterface({ terminal });
<add> rli.question('foo?', common.mustCall((answer) => {
<add> assert.strictEqual(answer, 'baz');
<add> }));
<add> rli.question('bar?', common.mustNotCall(() => {
<add> }));
<add> rli.write('baz\n');
<add> rli.close();
<add> }
<add>
<ide> // Calling the promisified question
<ide> {
<ide> const [rli] = getInterface({ terminal });
| file_count: 1 |
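The added block covers calling `question` a second time while the first prompt is still pending: after `rli.write('baz\n')` only the first callback fires with `'baz'`, and the second is wrapped in `common.mustNotCall`. For reference, a minimal stand-alone sketch of the same callback-style API outside the test harness (prompt text is illustrative):

```js
'use strict';
const readline = require('readline');

const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout,
});

rl.question('foo? ', (answer) => {
  // Fires once a full line of input has been received.
  console.log(`answer: ${answer}`);
  rl.close();
});
```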
| Javascript | Javascript | use explicit `/index` for importing from folder | 3ea74706473b7f4d14fa14b521e979ea8b72d820 |
<ide><path>packages/ember-template-compiler/lib/index.js
<ide> export {
<ide> registerPlugin,
<ide> unregisterPlugin
<ide> } from './system/compile-options';
<del>export { default as defaultPlugins } from './plugins';
<add>export { default as defaultPlugins } from './plugins/index';
<ide>
<ide> // used for adding Ember.Handlebars.compile for backwards compat
<ide> import './compat';
<ide> import './system/bootstrap';
<ide>
<ide> // add domTemplates initializer (only does something if `ember-template-compiler`
<ide> // is loaded already)
<del>import './system/initializer';
<ide>\ No newline at end of file
<add>import './system/initializer';
<ide><path>packages/ember-template-compiler/lib/system/compile-options.js
<ide> import { assign } from 'ember-utils';
<del>import PLUGINS from '../plugins';
<add>import PLUGINS from '../plugins/index';
<ide>
<ide> let USER_PLUGINS = [];
<ide>
| file_count: 2 |
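Both hunks swap a bare folder specifier for an explicit file path; the module being imported is unchanged, the resolution just stops relying on the directory-index convention. A short sketch of the distinction (the specifier is taken from the patch, the import form is illustrative):

```js
// Resolved through the bundler's directory-index convention:
// import defaultPlugins from './plugins';
// Same module, but the index file is named explicitly:
import defaultPlugins from './plugins/index';
```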
| Go | Go | parse storage-opt in graphdriver init on windows | db7b7f6df9151f0eca88af087514825b1a493f32 |
<ide><path>daemon/create.go
<ide> func (daemon *Daemon) create(opts createOpts) (retC *container.Container, retErr
<ide>
<ide> ctr.HostConfig.StorageOpt = opts.params.HostConfig.StorageOpt
<ide>
<del> // Fixes: https://github.com/moby/moby/issues/34074 and
<del> // https://github.com/docker/for-win/issues/999.
<del> // Merge the daemon's storage options if they aren't already present. We only
<del> // do this on Windows as there's no effective sandbox size limit other than
<del> // physical on Linux.
<del> if isWindows {
<del> if ctr.HostConfig.StorageOpt == nil {
<del> ctr.HostConfig.StorageOpt = make(map[string]string)
<del> }
<del> for _, v := range daemon.configStore.GraphOptions {
<del> opt := strings.SplitN(v, "=", 2)
<del> if _, ok := ctr.HostConfig.StorageOpt[opt[0]]; !ok {
<del> ctr.HostConfig.StorageOpt[opt[0]] = opt[1]
<del> }
<del> }
<del> }
<del>
<ide> // Set RWLayer for container after mount labels have been set
<ide> rwLayer, err := daemon.imageService.CreateLayer(ctr, setupInitLayer(daemon.idMapping))
<ide> if err != nil {
<ide><path>daemon/graphdriver/lcow/lcow.go
<ide> type Driver struct {
<ide> cachedScratchMutex sync.Mutex // Protects race conditions from multiple threads creating the cached scratch.
<ide> options []string // Graphdriver options we are initialised with.
<ide> globalMode bool // Indicates if running in an unsafe/global service VM mode.
<add> defaultSandboxSize uint64 // The default sandbox size to use if one is not specified
<ide>
<ide> // NOTE: It is OK to use a cache here because Windows does not support
<ide> // restoring containers when the daemon dies.
<ide> func InitDriver(dataRoot string, options []string, _, _ []idtools.IDMap) (graphd
<ide> serviceVms: &serviceVMMap{
<ide> svms: make(map[string]*serviceVMMapItem),
<ide> },
<del> globalMode: false,
<add> globalMode: false,
<add> defaultSandboxSize: client.DefaultVhdxSizeGB,
<ide> }
<ide>
<ide> // Looks for relevant options
<ide> func InitDriver(dataRoot string, options []string, _, _ []idtools.IDMap) (graphd
<ide> return nil, fmt.Errorf("%s failed to parse value for 'lcow.globalmode' - must be 'true' or 'false'", title)
<ide> }
<ide> break
<add> case "lcow.sandboxsize":
<add> var err error
<add> d.defaultSandboxSize, err = strconv.ParseUint(opt[1], 10, 32)
<add> if err != nil {
<add> return nil, fmt.Errorf("%s failed to parse value '%s' for 'lcow.sandboxsize'", title, v)
<add> }
<add> if d.defaultSandboxSize < client.DefaultVhdxSizeGB {
<add> return nil, fmt.Errorf("%s 'lcow.sandboxsize' option cannot be less than %d", title, client.DefaultVhdxSizeGB)
<add> }
<add> break
<ide> }
<ide> }
<ide> }
<ide> func (d *Driver) CreateReadWrite(id, parent string, opts *graphdriver.CreateOpts
<ide> }
<ide>
<ide> // Look for an explicit sandbox size option.
<del> sandboxSize := uint64(client.DefaultVhdxSizeGB)
<add> sandboxSize := d.defaultSandboxSize
<ide> for k, v := range opts.StorageOpt {
<ide> switch strings.ToLower(k) {
<ide> case "lcow.sandboxsize":
<ide><path>daemon/graphdriver/windows/windows.go
<ide> func InitFilter(home string, options []string, uidMaps, gidMaps []idtools.IDMap)
<ide> return nil, fmt.Errorf("windowsfilter failed to create '%s': %v", home, err)
<ide> }
<ide>
<del> size, err := units.RAMInBytes(defaultSandboxSize)
<add> storageOpt := make(map[string]string)
<add> storageOpt["size"] = defaultSandboxSize
<add>
<add> for _, v := range options {
<add> opt := strings.SplitN(v, "=", 2)
<add> storageOpt[strings.ToLower(opt[0])] = opt[1]
<add> }
<add>
<add> storageOptions, err := parseStorageOpt(storageOpt)
<ide> if err != nil {
<del> return nil, fmt.Errorf("windowsfilter failed to parse default size '%s': %v", defaultSandboxSize, err)
<add> return nil, fmt.Errorf("windowsfilter failed to parse default storage options - %s", err)
<ide> }
<ide>
<ide> d := &Driver{
<ide> info: hcsshim.DriverInfo{
<ide> HomeDir: home,
<ide> Flavour: filterDriver,
<ide> },
<del> cache: make(map[string]string),
<del> ctr: graphdriver.NewRefCounter(&checker{}),
<del> defaultStorageOpts: &storageOptions{
<del> size: uint64(size),
<del> },
<add> cache: make(map[string]string),
<add> ctr: graphdriver.NewRefCounter(&checker{}),
<add> defaultStorageOpts: storageOptions,
<ide> }
<ide> return d, nil
<ide> }
| file_count: 3 |
| Text | Text | add docs about locale | 84cba20ae77527dbfadf470d771f9934c9b7c56a |
<ide><path>docs/api-reference/data-fetching/get-static-paths.md
<ide> The `paths` key determines which paths will be pre-rendered. For example, suppos
<ide> ```js
<ide> return {
<ide> paths: [
<del> { params: { id: '1' } },
<del> { params: { id: '2' } }
<add> { params: { id: '1' }},
<add> {
<add> params: { id: '2' },
<add> // with i18n configured the locale for the path can be returned as well
<add> locale: "en",
<add> },
<ide> ],
<ide> fallback: ...
<ide> }
<ide> The value for each `params` object must match the parameters used in the page na
<ide> - If the page name uses [catch-all routes](/docs/routing/dynamic-routes.md#catch-all-routes) like `pages/[...slug]`, then `params` should contain `slug` (which is an array). If this array is `['hello', 'world']`, then Next.js will statically generate the page at `/hello/world`.
<ide> - If the page uses an [optional catch-all route](/docs/routing/dynamic-routes.md#optional-catch-all-routes), use `null`, `[]`, `undefined` or `false` to render the root-most route. For example, if you supply `slug: false` for `pages/[[...slug]]`, Next.js will statically generate the page `/`.
<ide>
<add>Separate of the `params` object a `locale` field can be returned when [i18n is configured](/docs/advanced-features/i18n-routing.md), which configures the locale for the path being generated.
<add>
<ide> ### `fallback: false`
<ide>
<ide> If `fallback` is `false`, then any paths not returned by `getStaticPaths` will result in a **404 page**.
| file_count: 1 |
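The prose added at the end of the hunk is the substantive change: when i18n routing is configured, each entry in `paths` may carry a `locale` field alongside `params`. A minimal sketch of a `getStaticPaths` that pins one path to a locale (the `pages/posts/[id].js` route implied here and the `'en'` locale are illustrative):

```js
// Assumes an i18n block in next.config.js that includes the 'en' locale.
export async function getStaticPaths() {
  return {
    paths: [
      { params: { id: '1' } },
      // With i18n configured, the locale for this path can be set explicitly.
      { params: { id: '2' }, locale: 'en' },
    ],
    fallback: false,
  }
}
```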
| Text | Text | update description of `to_prepare` event | 4dc21f0a28259bb6268bab9c4b95be0d91a062f6 |
<ide><path>guides/source/configuring.md
<ide> Rails has 5 initialization events which can be hooked into (listed in the order
<ide>
<ide> * `before_initialize`: This is run directly before the initialization process of the application occurs with the `:bootstrap_hook` initializer near the beginning of the Rails initialization process.
<ide>
<del>* `to_prepare`: Run after the initializers are run for all Railties (including the application itself), but before eager loading and the middleware stack is built. More importantly, will run upon every request in `development`, but only once (during boot-up) in `production` and `test`.
<add>* `to_prepare`: Run after the initializers are run for all Railties (including the application itself), but before eager loading and the middleware stack is built. More importantly, will run upon every code reload in `development`, but only once (during boot-up) in `production` and `test`.
<ide>
<ide> * `before_eager_load`: This is run directly before eager loading occurs, which is the default behavior for the `production` environment and not for the `development` environment.
<ide>
| file_count: 1 |
| Java | Java | remove code for api level 20 and below | 49f10fd2e526b64294777357ab2fef8880739f26 |
<ide><path>ReactAndroid/src/main/java/com/facebook/react/modules/accessibilityinfo/AccessibilityInfoModule.java
<ide> public AccessibilityInfoModule(ReactApplicationContext context) {
<ide> mContentResolver = getReactApplicationContext().getContentResolver();
<ide> mTouchExplorationEnabled = mAccessibilityManager.isTouchExplorationEnabled();
<ide> mReduceMotionEnabled = this.getIsReduceMotionEnabledValue();
<del>
<del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
<del> mTouchExplorationStateChangeListener = new ReactTouchExplorationStateChangeListener();
<del> }
<add> mTouchExplorationStateChangeListener = new ReactTouchExplorationStateChangeListener();
<ide> }
<ide>
<ide> @Override
<ide> public void onHostResume() {
<ide> mAccessibilityManager.addTouchExplorationStateChangeListener(
<ide> mTouchExplorationStateChangeListener);
<ide>
<del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
<del> Uri transitionUri = Settings.Global.getUriFor(Settings.Global.TRANSITION_ANIMATION_SCALE);
<del> mContentResolver.registerContentObserver(transitionUri, false, animationScaleObserver);
<del> }
<add> Uri transitionUri = Settings.Global.getUriFor(Settings.Global.TRANSITION_ANIMATION_SCALE);
<add> mContentResolver.registerContentObserver(transitionUri, false, animationScaleObserver);
<ide>
<ide> updateAndSendTouchExplorationChangeEvent(mAccessibilityManager.isTouchExplorationEnabled());
<ide> updateAndSendReduceMotionChangeEvent();
<ide><path>ReactAndroid/src/main/java/com/facebook/react/modules/datepicker/DatePickerDialogFragment.java
<ide> import android.content.DialogInterface;
<ide> import android.content.DialogInterface.OnDismissListener;
<ide> import android.graphics.drawable.ColorDrawable;
<del>import android.os.Build;
<ide> import android.os.Bundle;
<ide> import android.widget.DatePicker;
<ide> import androidx.annotation.Nullable;
<ide> public Dialog onCreateDialog(Bundle savedInstanceState) {
<ide>
<ide> DatePickerDialog dialog = null;
<ide>
<del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
<del> switch (mode) {
<del> case CALENDAR:
<del> dialog =
<del> new DismissableDatePickerDialog(
<del> activityContext,
<del> activityContext
<del> .getResources()
<del> .getIdentifier(
<del> "CalendarDatePickerDialog", "style", activityContext.getPackageName()),
<del> onDateSetListener,
<del> year,
<del> month,
<del> day);
<del> break;
<del> case SPINNER:
<del> dialog =
<del> new DismissableDatePickerDialog(
<del> activityContext,
<del> android.R.style.Theme_Holo_Light_Dialog,
<del> onDateSetListener,
<del> year,
<del> month,
<del> day);
<del> dialog
<del> .getWindow()
<del> .setBackgroundDrawable(new ColorDrawable(android.graphics.Color.TRANSPARENT));
<del> break;
<del> case DEFAULT:
<del> dialog =
<del> new DismissableDatePickerDialog(activityContext, onDateSetListener, year, month, day);
<del> break;
<del> }
<del> } else {
<del> dialog =
<del> new DismissableDatePickerDialog(activityContext, onDateSetListener, year, month, day);
<del>
<del> switch (mode) {
<del> case CALENDAR:
<del> dialog.getDatePicker().setCalendarViewShown(true);
<del> dialog.getDatePicker().setSpinnersShown(false);
<del> break;
<del> case SPINNER:
<del> dialog.getDatePicker().setCalendarViewShown(false);
<del> break;
<del> }
<add> switch (mode) {
<add> case CALENDAR:
<add> dialog =
<add> new DismissableDatePickerDialog(
<add> activityContext,
<add> activityContext
<add> .getResources()
<add> .getIdentifier(
<add> "CalendarDatePickerDialog", "style", activityContext.getPackageName()),
<add> onDateSetListener,
<add> year,
<add> month,
<add> day);
<add> break;
<add> case SPINNER:
<add> dialog =
<add> new DismissableDatePickerDialog(
<add> activityContext,
<add> android.R.style.Theme_Holo_Light_Dialog,
<add> onDateSetListener,
<add> year,
<add> month,
<add> day);
<add> dialog
<add> .getWindow()
<add> .setBackgroundDrawable(new ColorDrawable(android.graphics.Color.TRANSPARENT));
<add> break;
<add> case DEFAULT:
<add> dialog =
<add> new DismissableDatePickerDialog(activityContext, onDateSetListener, year, month, day);
<add> break;
<ide> }
<ide>
<ide> final DatePicker datePicker = dialog.getDatePicker();
<ide><path>ReactAndroid/src/main/java/com/facebook/react/modules/datepicker/DismissableDatePickerDialog.java
<ide> public DismissableDatePickerDialog(
<ide> protected void onStop() {
<ide> // do *not* call super.onStop() on KitKat on lower, as that would erroneously call the
<ide> // OnDateSetListener when the dialog is dismissed, or call it twice when "OK" is pressed.
<del> if (Build.VERSION.SDK_INT > Build.VERSION_CODES.KITKAT) {
<del> super.onStop();
<del> }
<add> super.onStop();
<ide> }
<ide>
<ide> private void fixSpinner(Context context, int year, int month, int dayOfMonth) {
<ide><path>ReactAndroid/src/main/java/com/facebook/react/modules/network/ForwardingCookieHandler.java
<ide> import android.os.Message;
<ide> import android.text.TextUtils;
<ide> import android.webkit.CookieManager;
<del>import android.webkit.CookieSyncManager;
<ide> import android.webkit.ValueCallback;
<ide> import androidx.annotation.Nullable;
<ide> import com.facebook.react.bridge.Callback;
<ide> import com.facebook.react.bridge.GuardedAsyncTask;
<del>import com.facebook.react.bridge.GuardedResultAsyncTask;
<ide> import com.facebook.react.bridge.ReactContext;
<ide> import java.io.IOException;
<ide> import java.net.CookieHandler;
<ide> public class ForwardingCookieHandler extends CookieHandler {
<ide> private static final String VERSION_ONE_HEADER = "Set-cookie2";
<ide> private static final String COOKIE_HEADER = "Cookie";
<ide>
<del> // As CookieManager was synchronous before API 21 this class emulates the async behavior on < 21.
<del> private static final boolean USES_LEGACY_STORE =
<del> Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP;
<del>
<ide> private final CookieSaver mCookieSaver;
<ide> private final ReactContext mContext;
<ide> private @Nullable CookieManager mCookieManager;
<ide> public void put(URI uri, Map<String, List<String>> headers) throws IOException {
<ide> }
<ide>
<ide> public void clearCookies(final Callback callback) {
<del> if (USES_LEGACY_STORE) {
<del> new GuardedResultAsyncTask<Boolean>(mContext) {
<del> @Override
<del> protected Boolean doInBackgroundGuarded() {
<del> CookieManager cookieManager = getCookieManager();
<del> if (cookieManager != null) {
<del> cookieManager.removeAllCookie();
<del> }
<del> mCookieSaver.onCookiesModified();
<del> return true;
<del> }
<del>
<del> @Override
<del> protected void onPostExecuteGuarded(Boolean result) {
<del> callback.invoke(result);
<del> }
<del> }.execute();
<del> } else {
<del> clearCookiesAsync(callback);
<del> }
<add> clearCookiesAsync(callback);
<ide> }
<ide>
<ide> @TargetApi(Build.VERSION_CODES.LOLLIPOP)
<ide> public void onReceiveValue(Boolean value) {
<ide> }
<ide> }
<ide>
<del> public void destroy() {
<del> if (USES_LEGACY_STORE) {
<del> CookieManager cookieManager = getCookieManager();
<del> if (cookieManager != null) {
<del> cookieManager.removeExpiredCookie();
<del> }
<del> mCookieSaver.persistCookies();
<del> }
<del> }
<add> public void destroy() {}
<ide>
<ide> public void addCookies(final String url, final List<String> cookies) {
<ide> final CookieManager cookieManager = getCookieManager();
<ide> if (cookieManager == null) return;
<ide>
<del> if (USES_LEGACY_STORE) {
<del> runInBackground(
<del> new Runnable() {
<del> @Override
<del> public void run() {
<del> for (String cookie : cookies) {
<del> cookieManager.setCookie(url, cookie);
<del> }
<del> mCookieSaver.onCookiesModified();
<del> }
<del> });
<del> } else {
<del> for (String cookie : cookies) {
<del> addCookieAsync(url, cookie);
<del> }
<del> cookieManager.flush();
<del> mCookieSaver.onCookiesModified();
<add> for (String cookie : cookies) {
<add> addCookieAsync(url, cookie);
<ide> }
<add> cookieManager.flush();
<add> mCookieSaver.onCookiesModified();
<ide> }
<ide>
<ide> @TargetApi(Build.VERSION_CODES.LOLLIPOP)
<ide> protected void doInBackgroundGuarded(Void... params) {
<ide> throw exception;
<ide> }
<ide> }
<del>
<del> if (USES_LEGACY_STORE) {
<del> mCookieManager.removeExpiredCookie();
<del> }
<ide> }
<ide>
<ide> return mCookieManager;
<ide> }
<ide>
<del> private static void possiblyWorkaroundSyncManager(Context context) {
<del> if (USES_LEGACY_STORE) {
<del> // This is to work around a bug where CookieManager may fail to instantiate if
<del> // CookieSyncManager has never been created. Note that the sync() may not be required but is
<del> // here of legacy reasons.
<del> CookieSyncManager syncManager = CookieSyncManager.createInstance(context);
<del> syncManager.sync();
<del> }
<del> }
<add> private static void possiblyWorkaroundSyncManager(Context context) {}
<ide>
<ide> /**
<ide> * Responsible for flushing cookies to disk. Flushes to disk with a maximum delay of 30 seconds.
<ide> public boolean handleMessage(Message msg) {
<ide> });
<ide> }
<ide>
<del> public void onCookiesModified() {
<del> if (USES_LEGACY_STORE) {
<del> mHandler.sendEmptyMessageDelayed(MSG_PERSIST_COOKIES, TIMEOUT);
<del> }
<del> }
<add> public void onCookiesModified() {}
<ide>
<ide> public void persistCookies() {
<ide> mHandler.removeMessages(MSG_PERSIST_COOKIES);
<ide> runInBackground(
<ide> new Runnable() {
<ide> @Override
<ide> public void run() {
<del> if (USES_LEGACY_STORE) {
<del> CookieSyncManager syncManager = CookieSyncManager.getInstance();
<del> syncManager.sync();
<del> } else {
<del> flush();
<del> }
<add> flush();
<ide> }
<ide> });
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/react/modules/network/OkHttpClientProvider.java
<ide> package com.facebook.react.modules.network;
<ide>
<ide> import android.content.Context;
<del>import android.os.Build;
<ide> import androidx.annotation.Nullable;
<del>import com.facebook.common.logging.FLog;
<ide> import java.io.File;
<ide> import java.security.Provider;
<ide> import java.security.Security;
<del>import java.util.ArrayList;
<del>import java.util.List;
<ide> import java.util.concurrent.TimeUnit;
<ide> import okhttp3.Cache;
<del>import okhttp3.ConnectionSpec;
<ide> import okhttp3.OkHttpClient;
<del>import okhttp3.TlsVersion;
<ide>
<ide> /**
<ide> * Helper class that provides the same OkHttpClient instance that will be used for all networking
<ide> public static OkHttpClient.Builder createClientBuilder(Context context, int cach
<ide> enables it.
<ide> */
<ide> public static OkHttpClient.Builder enableTls12OnPreLollipop(OkHttpClient.Builder client) {
<del> if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT) {
<del> try {
<del> client.sslSocketFactory(new TLSSocketFactory());
<del>
<del> ConnectionSpec cs =
<del> new ConnectionSpec.Builder(ConnectionSpec.MODERN_TLS)
<del> .tlsVersions(TlsVersion.TLS_1_2)
<del> .build();
<del>
<del> List<ConnectionSpec> specs = new ArrayList<>();
<del> specs.add(cs);
<del> specs.add(ConnectionSpec.COMPATIBLE_TLS);
<del> specs.add(ConnectionSpec.CLEARTEXT);
<del>
<del> client.connectionSpecs(specs);
<del> } catch (Exception ex) {
<del> FLog.e("OkHttpClientProvider", "Error while enabling TLS 1.2", ex);
<del> }
<del> }
<del>
<ide> return client;
<ide> }
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/react/modules/statusbar/StatusBarModule.java
<ide> public String getName() {
<ide> : 0;
<ide> String statusBarColorString = "black";
<ide>
<del> if (activity != null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
<add> if (activity != null) {
<ide> final int statusBarColor = activity.getWindow().getStatusBarColor();
<ide> statusBarColorString = String.format("#%06X", (0xFFFFFF & statusBarColor));
<ide> }
<ide> public void setColor(final double colorDouble, final boolean animated) {
<ide> return;
<ide> }
<ide>
<del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
<del>
<del> UiThreadUtil.runOnUiThread(
<del> new GuardedRunnable(getReactApplicationContext()) {
<del> @TargetApi(Build.VERSION_CODES.LOLLIPOP)
<del> @Override
<del> public void runGuarded() {
<del> activity
<del> .getWindow()
<del> .addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
<del> if (animated) {
<del> int curColor = activity.getWindow().getStatusBarColor();
<del> ValueAnimator colorAnimation =
<del> ValueAnimator.ofObject(new ArgbEvaluator(), curColor, color);
<del>
<del> colorAnimation.addUpdateListener(
<del> new ValueAnimator.AnimatorUpdateListener() {
<del> @Override
<del> public void onAnimationUpdate(ValueAnimator animator) {
<del> activity
<del> .getWindow()
<del> .setStatusBarColor((Integer) animator.getAnimatedValue());
<del> }
<del> });
<del> colorAnimation.setDuration(300).setStartDelay(0);
<del> colorAnimation.start();
<del> } else {
<del> activity.getWindow().setStatusBarColor(color);
<del> }
<add> UiThreadUtil.runOnUiThread(
<add> new GuardedRunnable(getReactApplicationContext()) {
<add> @TargetApi(Build.VERSION_CODES.LOLLIPOP)
<add> @Override
<add> public void runGuarded() {
<add> activity
<add> .getWindow()
<add> .addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
<add> if (animated) {
<add> int curColor = activity.getWindow().getStatusBarColor();
<add> ValueAnimator colorAnimation =
<add> ValueAnimator.ofObject(new ArgbEvaluator(), curColor, color);
<add>
<add> colorAnimation.addUpdateListener(
<add> new ValueAnimator.AnimatorUpdateListener() {
<add> @Override
<add> public void onAnimationUpdate(ValueAnimator animator) {
<add> activity.getWindow().setStatusBarColor((Integer) animator.getAnimatedValue());
<add> }
<add> });
<add> colorAnimation.setDuration(300).setStartDelay(0);
<add> colorAnimation.start();
<add> } else {
<add> activity.getWindow().setStatusBarColor(color);
<ide> }
<del> });
<del> }
<add> }
<add> });
<ide> }
<ide>
<ide> @Override
<ide> public void setTranslucent(final boolean translucent) {
<ide> return;
<ide> }
<ide>
<del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
<del> UiThreadUtil.runOnUiThread(
<del> new GuardedRunnable(getReactApplicationContext()) {
<del> @TargetApi(Build.VERSION_CODES.LOLLIPOP)
<del> @Override
<del> public void runGuarded() {
<del> // If the status bar is translucent hook into the window insets calculations
<del> // and consume all the top insets so no padding will be added under the status bar.
<del> View decorView = activity.getWindow().getDecorView();
<del> if (translucent) {
<del> decorView.setOnApplyWindowInsetsListener(
<del> new View.OnApplyWindowInsetsListener() {
<del> @Override
<del> public WindowInsets onApplyWindowInsets(View v, WindowInsets insets) {
<del> WindowInsets defaultInsets = v.onApplyWindowInsets(insets);
<del> return defaultInsets.replaceSystemWindowInsets(
<del> defaultInsets.getSystemWindowInsetLeft(),
<del> 0,
<del> defaultInsets.getSystemWindowInsetRight(),
<del> defaultInsets.getSystemWindowInsetBottom());
<del> }
<del> });
<del> } else {
<del> decorView.setOnApplyWindowInsetsListener(null);
<del> }
<del>
<del> ViewCompat.requestApplyInsets(decorView);
<add> UiThreadUtil.runOnUiThread(
<add> new GuardedRunnable(getReactApplicationContext()) {
<add> @TargetApi(Build.VERSION_CODES.LOLLIPOP)
<add> @Override
<add> public void runGuarded() {
<add> // If the status bar is translucent hook into the window insets calculations
<add> // and consume all the top insets so no padding will be added under the status bar.
<add> View decorView = activity.getWindow().getDecorView();
<add> if (translucent) {
<add> decorView.setOnApplyWindowInsetsListener(
<add> new View.OnApplyWindowInsetsListener() {
<add> @Override
<add> public WindowInsets onApplyWindowInsets(View v, WindowInsets insets) {
<add> WindowInsets defaultInsets = v.onApplyWindowInsets(insets);
<add> return defaultInsets.replaceSystemWindowInsets(
<add> defaultInsets.getSystemWindowInsetLeft(),
<add> 0,
<add> defaultInsets.getSystemWindowInsetRight(),
<add> defaultInsets.getSystemWindowInsetBottom());
<add> }
<add> });
<add> } else {
<add> decorView.setOnApplyWindowInsetsListener(null);
<ide> }
<del> });
<del> }
<add>
<add> ViewCompat.requestApplyInsets(decorView);
<add> }
<add> });
<ide> }
<ide>
<ide> @Override
<ide><path>ReactAndroid/src/main/java/com/facebook/react/uimanager/BaseViewManager.java
<ide> public void setViewState(@NonNull T view, @Nullable ReadableMap accessibilitySta
<ide> && accessibilityState.getType(STATE_CHECKED) == ReadableType.String)) {
<ide> updateViewContentDescription(view);
<ide> break;
<del> } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
<del> && view.isAccessibilityFocused()) {
<add> } else if (view.isAccessibilityFocused()) {
<ide> // Internally Talkback ONLY uses TYPE_VIEW_CLICKED for "checked" and
<ide> // "selected" announcements. Send a click event to make sure Talkback
<ide> // get notified for the state changes that don't happen upon users' click.
<ide><path>ReactAndroid/src/main/java/com/facebook/react/uimanager/DisplayMetricsHolder.java
<ide> package com.facebook.react.uimanager;
<ide>
<ide> import android.content.Context;
<del>import android.os.Build;
<ide> import android.util.DisplayMetrics;
<ide> import android.view.Display;
<ide> import android.view.WindowManager;
<ide> import androidx.annotation.Nullable;
<del>import com.facebook.common.logging.FLog;
<ide> import com.facebook.infer.annotation.Assertions;
<ide> import com.facebook.react.bridge.WritableNativeMap;
<del>import com.facebook.react.common.ReactConstants;
<del>import java.lang.reflect.InvocationTargetException;
<del>import java.lang.reflect.Method;
<ide> import java.util.HashMap;
<ide> import java.util.Map;
<ide>
<ide> public static void initDisplayMetrics(Context context) {
<ide> //
<ide> // See:
<ide> // http://developer.android.com/reference/android/view/Display.html#getRealMetrics(android.util.DisplayMetrics)
<del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
<del> display.getRealMetrics(screenDisplayMetrics);
<del> } else {
<del> // For 14 <= API level <= 16, we need to invoke getRawHeight and getRawWidth to get the real
<del> // dimensions.
<del> // Since react-native only supports API level 16+ we don't have to worry about other cases.
<del> //
<del> // Reflection exceptions are rethrown at runtime.
<del> //
<del> // See:
<del> // http://stackoverflow.com/questions/14341041/how-to-get-real-screen-height-and-width/23861333#23861333
<del> try {
<del> Method mGetRawH = Display.class.getMethod("getRawHeight");
<del> Method mGetRawW = Display.class.getMethod("getRawWidth");
<del> screenDisplayMetrics.widthPixels = (Integer) mGetRawW.invoke(display);
<del> screenDisplayMetrics.heightPixels = (Integer) mGetRawH.invoke(display);
<del> } catch (InvocationTargetException | IllegalAccessException | NoSuchMethodException e) {
<del> // this may not be 100% accurate, but it's all we've got
<del> screenDisplayMetrics.widthPixels = display.getWidth();
<del> screenDisplayMetrics.heightPixels = display.getHeight();
<del> FLog.e(
<del> ReactConstants.TAG,
<del> "Unable to access getRawHeight and getRawWidth to get real dimensions.",
<del> e);
<del> }
<del> }
<add> display.getRealMetrics(screenDisplayMetrics);
<ide> DisplayMetricsHolder.setScreenDisplayMetrics(screenDisplayMetrics);
<ide> }
<ide>
<ide><path>ReactAndroid/src/main/java/com/facebook/react/views/text/CustomStyleSpan.java
<ide> import android.content.res.AssetManager;
<ide> import android.graphics.Paint;
<ide> import android.graphics.Typeface;
<del>import android.os.Build;
<ide> import android.text.TextPaint;
<ide> import android.text.style.MetricAffectingSpan;
<ide> import androidx.annotation.NonNull;
<ide> private static void apply(
<ide> AssetManager assetManager) {
<ide> Typeface typeface =
<ide> ReactTypefaceUtils.applyStyles(paint.getTypeface(), style, weight, family, assetManager);
<del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
<del> paint.setFontFeatureSettings(fontFeatureSettings);
<del> }
<add> paint.setFontFeatureSettings(fontFeatureSettings);
<ide> paint.setTypeface(typeface);
<ide> paint.setSubpixelText(true);
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/react/views/view/ReactViewBackgroundDrawable.java
<ide> import android.graphics.RectF;
<ide> import android.graphics.Region;
<ide> import android.graphics.drawable.Drawable;
<del>import android.os.Build;
<ide> import android.view.View;
<ide> import androidx.annotation.Nullable;
<ide> import com.facebook.react.common.annotations.VisibleForTesting;
<ide> public int getOpacity() {
<ide> /* Android's elevation implementation requires this to be implemented to know where to draw the shadow. */
<ide> @Override
<ide> public void getOutline(Outline outline) {
<del> if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
<del> super.getOutline(outline);
<del> return;
<del> }
<ide> if ((!YogaConstants.isUndefined(mBorderRadius) && mBorderRadius > 0)
<ide> || mBorderCornerRadii != null) {
<ide> updatePath();
<ide> private void updatePath() {
<ide> float bottomRightRadius =
<ide> getBorderRadiusOrDefaultTo(borderRadius, BorderRadiusLocation.BOTTOM_RIGHT);
<ide>
<del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
<del> final boolean isRTL = getResolvedLayoutDirection() == View.LAYOUT_DIRECTION_RTL;
<del> float topStartRadius = getBorderRadius(BorderRadiusLocation.TOP_START);
<del> float topEndRadius = getBorderRadius(BorderRadiusLocation.TOP_END);
<del> float bottomStartRadius = getBorderRadius(BorderRadiusLocation.BOTTOM_START);
<del> float bottomEndRadius = getBorderRadius(BorderRadiusLocation.BOTTOM_END);
<add> final boolean isRTL = getResolvedLayoutDirection() == View.LAYOUT_DIRECTION_RTL;
<add> float topStartRadius = getBorderRadius(BorderRadiusLocation.TOP_START);
<add> float topEndRadius = getBorderRadius(BorderRadiusLocation.TOP_END);
<add> float bottomStartRadius = getBorderRadius(BorderRadiusLocation.BOTTOM_START);
<add> float bottomEndRadius = getBorderRadius(BorderRadiusLocation.BOTTOM_END);
<ide>
<del> if (I18nUtil.getInstance().doLeftAndRightSwapInRTL(mContext)) {
<del> if (YogaConstants.isUndefined(topStartRadius)) {
<del> topStartRadius = topLeftRadius;
<del> }
<add> if (I18nUtil.getInstance().doLeftAndRightSwapInRTL(mContext)) {
<add> if (YogaConstants.isUndefined(topStartRadius)) {
<add> topStartRadius = topLeftRadius;
<add> }
<ide>
<del> if (YogaConstants.isUndefined(topEndRadius)) {
<del> topEndRadius = topRightRadius;
<del> }
<add> if (YogaConstants.isUndefined(topEndRadius)) {
<add> topEndRadius = topRightRadius;
<add> }
<ide>
<del> if (YogaConstants.isUndefined(bottomStartRadius)) {
<del> bottomStartRadius = bottomLeftRadius;
<del> }
<add> if (YogaConstants.isUndefined(bottomStartRadius)) {
<add> bottomStartRadius = bottomLeftRadius;
<add> }
<ide>
<del> if (YogaConstants.isUndefined(bottomEndRadius)) {
<del> bottomEndRadius = bottomRightRadius;
<del> }
<add> if (YogaConstants.isUndefined(bottomEndRadius)) {
<add> bottomEndRadius = bottomRightRadius;
<add> }
<ide>
<del> final float directionAwareTopLeftRadius = isRTL ? topEndRadius : topStartRadius;
<del> final float directionAwareTopRightRadius = isRTL ? topStartRadius : topEndRadius;
<del> final float directionAwareBottomLeftRadius = isRTL ? bottomEndRadius : bottomStartRadius;
<del> final float directionAwareBottomRightRadius = isRTL ? bottomStartRadius : bottomEndRadius;
<add> final float directionAwareTopLeftRadius = isRTL ? topEndRadius : topStartRadius;
<add> final float directionAwareTopRightRadius = isRTL ? topStartRadius : topEndRadius;
<add> final float directionAwareBottomLeftRadius = isRTL ? bottomEndRadius : bottomStartRadius;
<add> final float directionAwareBottomRightRadius = isRTL ? bottomStartRadius : bottomEndRadius;
<ide>
<del> topLeftRadius = directionAwareTopLeftRadius;
<del> topRightRadius = directionAwareTopRightRadius;
<del> bottomLeftRadius = directionAwareBottomLeftRadius;
<del> bottomRightRadius = directionAwareBottomRightRadius;
<del> } else {
<del> final float directionAwareTopLeftRadius = isRTL ? topEndRadius : topStartRadius;
<del> final float directionAwareTopRightRadius = isRTL ? topStartRadius : topEndRadius;
<del> final float directionAwareBottomLeftRadius = isRTL ? bottomEndRadius : bottomStartRadius;
<del> final float directionAwareBottomRightRadius = isRTL ? bottomStartRadius : bottomEndRadius;
<add> topLeftRadius = directionAwareTopLeftRadius;
<add> topRightRadius = directionAwareTopRightRadius;
<add> bottomLeftRadius = directionAwareBottomLeftRadius;
<add> bottomRightRadius = directionAwareBottomRightRadius;
<add> } else {
<add> final float directionAwareTopLeftRadius = isRTL ? topEndRadius : topStartRadius;
<add> final float directionAwareTopRightRadius = isRTL ? topStartRadius : topEndRadius;
<add> final float directionAwareBottomLeftRadius = isRTL ? bottomEndRadius : bottomStartRadius;
<add> final float directionAwareBottomRightRadius = isRTL ? bottomStartRadius : bottomEndRadius;
<ide>
<del> if (!YogaConstants.isUndefined(directionAwareTopLeftRadius)) {
<del> topLeftRadius = directionAwareTopLeftRadius;
<del> }
<add> if (!YogaConstants.isUndefined(directionAwareTopLeftRadius)) {
<add> topLeftRadius = directionAwareTopLeftRadius;
<add> }
<ide>
<del> if (!YogaConstants.isUndefined(directionAwareTopRightRadius)) {
<del> topRightRadius = directionAwareTopRightRadius;
<del> }
<add> if (!YogaConstants.isUndefined(directionAwareTopRightRadius)) {
<add> topRightRadius = directionAwareTopRightRadius;
<add> }
<ide>
<del> if (!YogaConstants.isUndefined(directionAwareBottomLeftRadius)) {
<del> bottomLeftRadius = directionAwareBottomLeftRadius;
<del> }
<add> if (!YogaConstants.isUndefined(directionAwareBottomLeftRadius)) {
<add> bottomLeftRadius = directionAwareBottomLeftRadius;
<add> }
<ide>
<del> if (!YogaConstants.isUndefined(directionAwareBottomRightRadius)) {
<del> bottomRightRadius = directionAwareBottomRightRadius;
<del> }
<add> if (!YogaConstants.isUndefined(directionAwareBottomRightRadius)) {
<add> bottomRightRadius = directionAwareBottomRightRadius;
<ide> }
<ide> }
<ide>
<ide> private void drawRectangularBackgroundWithBorders(Canvas canvas) {
<ide> int colorRight = getBorderColor(Spacing.RIGHT);
<ide> int colorBottom = getBorderColor(Spacing.BOTTOM);
<ide>
<del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
<del> final boolean isRTL = getResolvedLayoutDirection() == View.LAYOUT_DIRECTION_RTL;
<del> int colorStart = getBorderColor(Spacing.START);
<del> int colorEnd = getBorderColor(Spacing.END);
<add> final boolean isRTL = getResolvedLayoutDirection() == View.LAYOUT_DIRECTION_RTL;
<add> int colorStart = getBorderColor(Spacing.START);
<add> int colorEnd = getBorderColor(Spacing.END);
<ide>
<del> if (I18nUtil.getInstance().doLeftAndRightSwapInRTL(mContext)) {
<del> if (!isBorderColorDefined(Spacing.START)) {
<del> colorStart = colorLeft;
<del> }
<add> if (I18nUtil.getInstance().doLeftAndRightSwapInRTL(mContext)) {
<add> if (!isBorderColorDefined(Spacing.START)) {
<add> colorStart = colorLeft;
<add> }
<ide>
<del> if (!isBorderColorDefined(Spacing.END)) {
<del> colorEnd = colorRight;
<del> }
<add> if (!isBorderColorDefined(Spacing.END)) {
<add> colorEnd = colorRight;
<add> }
<ide>
<del> final int directionAwareColorLeft = isRTL ? colorEnd : colorStart;
<del> final int directionAwareColorRight = isRTL ? colorStart : colorEnd;
<add> final int directionAwareColorLeft = isRTL ? colorEnd : colorStart;
<add> final int directionAwareColorRight = isRTL ? colorStart : colorEnd;
<ide>
<del> colorLeft = directionAwareColorLeft;
<del> colorRight = directionAwareColorRight;
<del> } else {
<del> final int directionAwareColorLeft = isRTL ? colorEnd : colorStart;
<del> final int directionAwareColorRight = isRTL ? colorStart : colorEnd;
<add> colorLeft = directionAwareColorLeft;
<add> colorRight = directionAwareColorRight;
<add> } else {
<add> final int directionAwareColorLeft = isRTL ? colorEnd : colorStart;
<add> final int directionAwareColorRight = isRTL ? colorStart : colorEnd;
<ide>
<del> final boolean isColorStartDefined = isBorderColorDefined(Spacing.START);
<del> final boolean isColorEndDefined = isBorderColorDefined(Spacing.END);
<del> final boolean isDirectionAwareColorLeftDefined =
<del> isRTL ? isColorEndDefined : isColorStartDefined;
<del> final boolean isDirectionAwareColorRightDefined =
<del> isRTL ? isColorStartDefined : isColorEndDefined;
<add> final boolean isColorStartDefined = isBorderColorDefined(Spacing.START);
<add> final boolean isColorEndDefined = isBorderColorDefined(Spacing.END);
<add> final boolean isDirectionAwareColorLeftDefined =
<add> isRTL ? isColorEndDefined : isColorStartDefined;
<add> final boolean isDirectionAwareColorRightDefined =
<add> isRTL ? isColorStartDefined : isColorEndDefined;
<ide>
<del> if (isDirectionAwareColorLeftDefined) {
<del> colorLeft = directionAwareColorLeft;
<del> }
<add> if (isDirectionAwareColorLeftDefined) {
<add> colorLeft = directionAwareColorLeft;
<add> }
<ide>
<del> if (isDirectionAwareColorRightDefined) {
<del> colorRight = directionAwareColorRight;
<del> }
<add> if (isDirectionAwareColorRightDefined) {
<add> colorRight = directionAwareColorRight;
<ide> }
<ide> }
<ide>
<ide><path>ReactAndroid/src/main/java/com/facebook/react/views/view/ReactViewManager.java
<ide> private void handleHotspotUpdate(ReactViewGroup root, @Nullable ReadableArray ar
<ide> throw new JSApplicationIllegalArgumentException(
<ide> "Illegal number of arguments for 'updateHotspot' command");
<ide> }
<del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
<del> float x = PixelUtil.toPixelFromDIP(args.getDouble(0));
<del> float y = PixelUtil.toPixelFromDIP(args.getDouble(1));
<del> root.drawableHotspotChanged(x, y);
<del> }
<add>
<add> float x = PixelUtil.toPixelFromDIP(args.getDouble(0));
<add> float y = PixelUtil.toPixelFromDIP(args.getDouble(1));
<add> root.drawableHotspotChanged(x, y);
<ide> }
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/systrace/Systrace.java
<ide>
<ide> package com.facebook.systrace;
<ide>
<del>import android.os.Build;
<ide> import android.os.Trace;
<ide>
<ide> /**
<ide> public static boolean isTracing(long tag) {
<ide> public static void traceInstant(long tag, final String title, EventScope scope) {}
<ide>
<ide> public static void beginSection(long tag, final String sectionName) {
<del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
<del> Trace.beginSection(sectionName);
<del> }
<add> Trace.beginSection(sectionName);
<ide> }
<ide>
<ide> public static void endSection(long tag) {
<del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) {
<del> Trace.endSection();
<del> }
<add> Trace.endSection();
<ide> }
<ide>
<ide> public static void beginAsyncSection(long tag, final String sectionName, final int cookie) {}
| file_count: 12 |
| Ruby | Ruby | remove pyc files before bottling | 0e52cc6188b13b80ac6059361b628ff7e998f987 |
<ide><path>Library/Homebrew/cmd/bottle.rb
<ide> def bottle_formula f
<ide> begin
<ide> keg.relocate_install_names prefix, Keg::PREFIX_PLACEHOLDER,
<ide> cellar, Keg::CELLAR_PLACEHOLDER, :keg_only => f.keg_only?
<add> keg.delete_pyc_files!
<ide>
<ide> HOMEBREW_CELLAR.cd do
<ide> # Use gzip, faster to compress than bzip2, faster to uncompress than bzip2
| file_count: 1 |
| Javascript | Javascript | use descriptive name for destination file | 2a8576dfa5138254d78d47de0f03355984507f67 |
<ide><path>test/parallel/test-http2-pipe-named-pipe.js
<ide> const path = require('path');
<ide> const tmpdir = require('../common/tmpdir');
<ide> tmpdir.refresh();
<ide> const loc = fixtures.path('person-large.jpg');
<del>const fn = path.join(tmpdir.path, 'http2-url-tests.js');
<add>const fn = path.join(tmpdir.path, 'person-large.jpg');
<ide>
<ide> const server = http2.createServer();
<ide>
| file_count: 1 |
| Python | Python | add test for ticket #302 | d79e367279555247cfd3153a7cafac0c1733673d |
<ide><path>numpy/core/tests/test_regression.py
<ide> def check_mem_lexsort_strings(self, level=rlevel):
<ide> lst = ['abc','cde','fgh']
<ide> N.lexsort((lst,))
<ide>
<add> def check_fancy_index(self, level=rlevel):
<add> """Ticket #302"""
<add> x = N.array([1,2])[N.array([0])]
<add> assert_equal(x.shape,(1,))
<add>
<ide> def check_recarray_copy(self, level=rlevel):
<ide> """Ticket #312"""
<ide> dt = [('x',N.int16),('y',N.float64)]
| file_count: 1 |
| Python | Python | add some tests for pep 3118 buffer interface | efc06019c3fda0b06f16e4a3affe264c712fc92b |
<ide><path>numpy/core/tests/test_multiarray.py
<ide> def test_complex_warning(self):
<ide> assert_raises(np.ComplexWarning, x.__setitem__, slice(None), y)
<ide> warnings.simplefilter("default", np.ComplexWarning)
<ide>
<del>if sys.version_info >= (2, 6):
<add>if sys.version_info >= (2, 7):
<ide> class TestNewBufferProtocol(object):
<del> @dec.knownfailureif(True, "No tests for the new buffer interface yet.")
<del> def test_there_are_no_tests_yet_so_fail(self):
<del> raise AssertionError("Need tests for the new buffer interface! "
<del> "For arrays and scalars.")
<add> def _check_roundtrip(self, obj):
<add> obj = np.asarray(obj)
<add> x = memoryview(obj)
<add> y = np.asarray(x)
<add> assert y.dtype == obj.dtype, (obj, y)
<add> assert_array_equal(obj, y)
<add>
<add> def test_roundtrip(self):
<add> x = np.array([1,2,3,4,5], dtype='i4')
<add> self._check_roundtrip(x)
<add>
<add> x = np.array([[1,2],[3,4]], dtype=np.float64)
<add> self._check_roundtrip(x)
<add>
<add> x = np.zeros((3,3,3), dtype=np.float32)[:,0,:]
<add> self._check_roundtrip(x)
<add>
<add> dt = [('a', np.int8),
<add> ('b', np.int16),
<add> ('c', np.int32),
<add> ('d', np.int64),
<add> ('e', np.uint8),
<add> ('f', np.uint16),
<add> ('g', np.uint32),
<add> ('h', np.uint64),
<add> ('i', np.float),
<add> ('j', np.double),
<add> ('k', np.longdouble),
<add> ('l', 'S4'),
<add> ('m', 'U4')]
<add> x = np.array([(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
<add> asbytes('aaaa'), 'bbbb')],
<add> dtype=dt)
<add> self._check_roundtrip(x)
<add>
<add> x = np.array(([[1,2],[3,4]],), dtype=[('a', (int, (2,2)))])
<add> self._check_roundtrip(x)
<add>
<add> x = np.array([1,2,3], dtype='>i4')
<add> self._check_roundtrip(x)
<add>
<add> x = np.array([1,2,3], dtype='<i4')
<add> self._check_roundtrip(x)
<add>
<add> def test_export_simple_1d(self):
<add> x = np.array([1,2,3,4,5], dtype='i4')
<add> y = memoryview(x)
<add> assert y.format == '=l'
<add> assert y.shape == (5,)
<add> assert y.ndim == 1
<add> assert y.strides == (4,)
<add> assert y.suboffsets is None
<add> assert y.itemsize == 4
<add>
<add> def test_export_simple_nd(self):
<add> x = np.array([[1,2],[3,4]], dtype=np.float64)
<add> y = memoryview(x)
<add> assert y.format == '=d'
<add> assert y.shape == (2, 2)
<add> assert y.ndim == 2
<add> assert y.strides == (16, 8)
<add> assert y.suboffsets is None
<add> assert y.itemsize == 8
<add>
<add> def test_export_discontiguous(self):
<add> x = np.zeros((3,3,3), dtype=np.float32)[:,0,:]
<add> y = memoryview(x)
<add> assert y.format == '=f'
<add> assert y.shape == (3, 3)
<add> assert y.ndim == 2
<add> assert y.strides == (36, 4)
<add> assert y.suboffsets is None
<add> assert y.itemsize == 4
<add>
<add> def test_export_record(self):
<add> dt = [('a', np.int8),
<add> ('b', np.int16),
<add> ('c', np.int32),
<add> ('d', np.int64),
<add> ('e', np.uint8),
<add> ('f', np.uint16),
<add> ('g', np.uint32),
<add> ('h', np.uint64),
<add> ('i', np.float),
<add> ('j', np.double),
<add> ('k', np.longdouble),
<add> ('l', 'S4'),
<add> ('m', 'U4')]
<add> x = np.array([(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
<add> asbytes('aaaa'), 'bbbb')],
<add> dtype=dt)
<add> y = memoryview(x)
<add> assert y.format == 'T{b:a:=h:b:=l:c:=q:d:B:e:=H:f:=L:g:=Q:h:=d:i:=d:j:=g:k:4s:l:=4w:m:}'
<add> assert y.shape == (1,)
<add> assert y.ndim == 1
<add> assert y.strides == (78,)
<add> assert y.suboffsets is None
<add> assert y.itemsize == 78
<add>
<add> def test_export_subarray(self):
<add> x = np.array(([[1,2],[3,4]],), dtype=[('a', (int, (2,2)))])
<add> y = memoryview(x)
<add> assert y.format == 'T{(2,2)=l:a:}'
<add> assert y.shape == ()
<add> assert y.ndim == 0
<add> assert y.strides == ()
<add> assert y.suboffsets is None
<add> assert y.itemsize == 16
<add>
<add> def test_export_endian(self):
<add> x = np.array([1,2,3], dtype='>i4')
<add> y = memoryview(x)
<add> if sys.byteorder == 'little':
<add> assert y.format in '>l'
<add> else:
<add> assert y.format == '=l'
<add>
<add> x = np.array([1,2,3], dtype='<i4')
<add> y = memoryview(x)
<add> if sys.byteorder == 'little':
<add> assert y.format in '=l'
<add> else:
<add> assert y.format == '<l'
<ide>
<ide> if __name__ == "__main__":
<ide> run_module_suite()
| file_count: 1 |
| Javascript | Javascript | use fallback swc version when binary publish fails | d43e05c61c025d66be51b2bb929bd07347bbec7c |
<ide><path>scripts/publish-native.js
<ide> const cwd = process.cwd()
<ide> let platforms = (await readdir(nativePackagesDir)).filter(
<ide> (name) => !name.startsWith('.')
<ide> )
<add> const publishedPkgs = new Set()
<add> // TODO: update to latest version where all pacakges were
<add> // successfully published
<add> const fallbackVersion = `12.0.1`
<add>
<ide> for (let platform of platforms) {
<ide> try {
<ide> let binaryName = `next-swc.${platform}.node`
<ide> const cwd = process.cwd()
<ide> gitref.includes('canary') ? ' --tag canary' : ''
<ide> }`
<ide> )
<add> publishedPkgs.add(platform)
<ide> } catch (err) {
<ide> // don't block publishing other versions on single platform error
<ide> console.error(`Failed to publish`, platform, err)
<ide> const cwd = process.cwd()
<ide> )
<ide> for (let platform of platforms) {
<ide> let optionalDependencies = nextPkg.optionalDependencies || {}
<del> optionalDependencies['@next/swc-' + platform] = version
<add> optionalDependencies['@next/swc-' + platform] = publishedPkgs.has(
<add> platform
<add> )
<add> ? version
<add> : fallbackVersion
<ide> nextPkg.optionalDependencies = optionalDependencies
<ide> }
<ide> await writeFile(
| file_count: 1 |
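The effect of tracking `publishedPkgs` is that only platforms whose native binary actually published get pinned to the new `version`; any platform whose publish failed falls back to the hard-coded `fallbackVersion`. A hypothetical sketch of the resulting `optionalDependencies` (the platform names and the current version number are illustrative, not taken from the patch):

```js
// Hypothetical outcome of the loop above, assuming one platform published
// successfully and one failed.
const nextPkg = {
  optionalDependencies: {
    '@next/swc-linux-x64-gnu': '12.0.2', // publish succeeded, current version
    '@next/swc-win32-arm64-msvc': '12.0.1', // publish failed, fallbackVersion
  },
};
```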
| Javascript | Javascript | add failing tests for -addon blueprints with --pod | 5c6eef38fd59b58e1bf39ef3b8e1fc4e4064ab8f |
<ide><path>node-tests/blueprints/helper-addon-test.js
<ide> describe('Blueprint: helper-addon', function() {
<ide> .to.equal(fixture('helper-addon.js'));
<ide> });
<ide> });
<add>
<add> it('helper-addon foo/bar-baz --pod', function() {
<add> return emberGenerateDestroy(['helper-addon', 'foo/bar-baz', '--pod'], _file => {
<add> expect(_file('app/helpers/foo/bar-baz.js'))
<add> .to.equal(fixture('helper-addon.js'));
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/initializer-addon-test.js
<ide> describe('Blueprint: initializer-addon', function() {
<ide> .to.contain("export { default, initialize } from 'my-addon/initializers/foo';");
<ide> });
<ide> });
<add>
<add> it('initializer-addon foo --pod', function() {
<add> return emberGenerateDestroy(['initializer-addon', 'foo', '--pod'], _file => {
<add> expect(_file('app/initializers/foo.js'))
<add> .to.contain("export { default, initialize } from 'my-addon/initializers/foo';");
<add> });
<add> });
<ide> });
<ide> });
<ide><path>node-tests/blueprints/instance-initializer-addon-test.js
<ide> describe('Blueprint: instance-initializer-addon', function() {
<ide> .to.contain("export { default, initialize } from 'my-addon/instance-initializers/foo';");
<ide> });
<ide> });
<add>
<add> it('instance-initializer-addon foo --pod', function() {
<add> return emberGenerateDestroy(['instance-initializer-addon', 'foo', '--pod'], _file => {
<add> expect(_file('app/instance-initializers/foo.js'))
<add> .to.contain("export { default, initialize } from 'my-addon/instance-initializers/foo';");
<add> });
<add> });
<ide> });
<ide> });
| file_count: 3 |
| Java | Java | update javadoc on antpathmatcher | b542b5277500cd6261a6126bbd73fec2c9250e5b |
<ide><path>spring-core/src/main/java/org/springframework/util/AntPathMatcher.java
<ide> * {@code org/springframework/testing/servlet/bla.jsp} and {@code org/servlet/bla.jsp}</li>
<ide> * </ul>
<ide> *
<add> * <p><strong>Note:</strong> a pattern and a path must both be absolute or must
<add> * both be relative in order for the two to match. Therefore it is recommended
<add> * that users of this implementation to sanitize patterns in order to prefix
<add> * them with "/" as it makes sense in the context in which they're used.
<add> *
<ide> * @author Alef Arendsen
<ide> * @author Juergen Hoeller
<ide> * @author Rob Harrop
| file_count: 1 |
| Python | Python | fix rstrip for character arrays (ticket #222) | 33d3fde298fb6ba2da8087f0354f8b6ec9b0d056 |
<ide><path>numpy/core/defchararray.py
<ide> def _generalmethod(self, name, myiter):
<ide> for k, val in enumerate(myiter):
<ide> newval = []
<ide> for chk in val[1:]:
<del> if chk.dtype is object_ and chk.item() is None:
<add> if not chk or (chk.dtype is object_ and chk.item() is None):
<ide> break
<ide> newval.append(chk)
<ide> newitem = getattr(val[0],name)(*newval)
<ide> maxsize = max(len(newitem), maxsize)
<ide> res[k] = newitem
<ide> newarr = chararray(myiter.shape, maxsize, self.dtype is unicode_)
<del> print res, maxsize
<ide> newarr[:] = res
<ide> return newarr
<ide>
<ide><path>numpy/core/tests/test_regression.py
<ide> def index_tmp(): tmp[N.array(10)]
<ide> def check_unique_zero_sized(self,level=rlevel):
<ide> """Ticket #205"""
<ide> assert_array_equal([], N.unique(N.array([])))
<add>
<add> def check_chararray_rstrip(self,level=rlevel):
<add> """Ticket #222"""
<add> x = N.chararray((1,),5)
<add> x[0] = 'a '
<add> x = x.rstrip()
<add> assert_equal(x[0], 'a')
<ide>
<ide> if __name__ == "__main__":
<ide> NumpyTest().run()
| file_count: 2 |
| Go | Go | add test to reproduce issue #306 | 7ad2e022fb737286b5803066bde6919e2d544c4e |
<ide><path>graph_test.go
<ide> func TestMount(t *testing.T) {
<ide> }()
<ide> }
<ide>
<add>// Test that an image can be deleted by its shorthand prefix
<add>func TestDeletePrefix(t *testing.T) {
<add> graph := tempGraph(t)
<add> defer os.RemoveAll(graph.Root)
<add> img := createTestImage(graph, t)
<add> if err := graph.Delete(TruncateId(img.Id)); err != nil {
<add> t.Fatal(err)
<add> }
<add> assertNImages(graph, t, 0)
<add>}
<add>
<add>func createTestImage(graph *Graph, t *testing.T) *Image {
<add> archive, err := fakeTar()
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> img, err := graph.Create(archive, nil, "Test image")
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> return img
<add>}
<add>
<ide> func TestDelete(t *testing.T) {
<ide> graph := tempGraph(t)
<ide> defer os.RemoveAll(graph.Root)
| file_count: 1 |
| Javascript | Javascript | simplify deformer parsing | 3e9f292bcd82a24b90bfa0cc5bda175758b88570 |
<ide><path>examples/js/loaders/FBXLoader.js
<ide> var images = parseImages( FBXTree );
<ide> var textures = parseTextures( FBXTree, new THREE.TextureLoader( this.manager ).setPath( resourceDirectory ), images, connections );
<ide> var materials = parseMaterials( FBXTree, textures, connections );
<del> var deformers = parseDeformers( FBXTree, connections );
<del> var geometryMap = parseGeometries( FBXTree, connections, deformers );
<del> var sceneGraph = parseScene( FBXTree, connections, deformers, geometryMap, materials );
<add> var skeletons = parseDeformers( FBXTree, connections );
<add> var geometryMap = parseGeometries( FBXTree, connections, skeletons );
<add> var sceneGraph = parseScene( FBXTree, connections, skeletons, geometryMap, materials );
<ide>
<ide> return sceneGraph;
<ide>
<ide>
<ide> var texture = loadTexture( textureNode, loader, imageMap, connections );
<ide>
<del> texture.FBX_ID = textureNode.id;
<add> texture.ID = textureNode.id;
<ide>
<ide> texture.name = textureNode.attrName;
<ide>
<ide> // FBX format currently only supports Lambert and Phong shading models
<ide> function parseMaterial( FBXTree, materialNode, textureMap, connections ) {
<ide>
<del> var FBX_ID = materialNode.id;
<add> var ID = materialNode.id;
<ide> var name = materialNode.attrName;
<ide> var type = materialNode.properties.ShadingModel;
<ide>
<ide> }
<ide>
<ide> // Ignore unused materials which don't have any connections.
<del> if ( ! connections.has( FBX_ID ) ) return null;
<add> if ( ! connections.has( ID ) ) return null;
<ide>
<del> var parameters = parseParameters( FBXTree, materialNode.properties, textureMap, FBX_ID, connections );
<add> var parameters = parseParameters( FBXTree, materialNode.properties, textureMap, ID, connections );
<ide>
<ide> var material;
<ide>
<ide>
<ide> // Parse FBX material and return parameters suitable for a three.js material
<ide> // Also parse the texture map and return any textures associated with the material
<del> function parseParameters( FBXTree, properties, textureMap, FBX_ID, connections ) {
<add> function parseParameters( FBXTree, properties, textureMap, ID, connections ) {
<ide>
<ide>
<ide> var parameters = {};
<ide>
<ide> }
<ide>
<del> connections.get( FBX_ID ).children.forEach( function ( child ) {
<add> connections.get( ID ).children.forEach( function ( child ) {
<ide>
<ide> var type = child.relationship;
<ide>
<ide> // Generates map of Skeleton-like objects for use later when generating and binding skeletons.
<ide> function parseDeformers( FBXTree, connections ) {
<ide>
<del> var deformers = {};
<add> var skeletons = {};
<ide>
<ide> if ( 'Deformer' in FBXTree.Objects.subNodes ) {
<ide>
<ide> if ( deformerNode.attrType === 'Skin' ) {
<ide>
<ide> var relationships = connections.get( parseInt( nodeID ) );
<add>
<ide> var skeleton = parseSkeleton( relationships, DeformerNodes );
<del> skeleton.FBX_ID = parseInt( nodeID );
<add> skeleton.ID = nodeID;
<add>
<add> if ( relationships.parents.length > 1 ) console.warn( 'THREE.FBXLoader: skeleton attached to more than one geometry is not supported.' );
<add> skeleton.geometryID = relationships.parents[ 0 ].ID;
<ide>
<del> deformers[ nodeID ] = skeleton;
<add> skeletons[ nodeID ] = skeleton;
<ide>
<ide> }
<ide>
<ide> }
<ide>
<ide> }
<ide>
<del> return deformers;
<add> return skeletons;
<ide>
<ide> }
<ide>
<ide> // Parse single nodes in FBXTree.Objects.subNodes.Deformer
<del> // Generates a "Skeleton Representation" of FBX nodes based on an FBX Skin Deformer's connections
<del> // and an object containing SubDeformer nodes.
<del> function parseSkeleton( connections, DeformerNodes ) {
<add> // The top level deformer nodes have type 'Skin' and subDeformer nodes have type 'Cluster'
<add> // Each skin node represents a skeleton and each cluster node represents a bone
<add> function parseSkeleton( connections, deformerNodes ) {
<add>
<add> var rawBones = [];
<ide>
<del> var subDeformers = {};
<add> connections.children.forEach( function ( child ) {
<ide>
<del> connections.children.forEach( function ( child, i ) {
<add> var subDeformerNode = deformerNodes[ child.ID ];
<ide>
<del> var subDeformerNode = DeformerNodes[ child.ID ];
<add> if ( subDeformerNode.attrType !== 'Cluster' ) return;
<ide>
<del> var subDeformer = {
<add> var rawBone = {
<ide>
<del> FBX_ID: child.ID,
<del> index: i,
<add> ID: child.ID,
<ide> indices: [],
<ide> weights: [],
<ide> transform: new THREE.Matrix4().fromArray( subDeformerNode.subNodes.Transform.properties.a ),
<del> transformLink: new THREE.Matrix4().fromArray( subDeformerNode.subNodes.TransformLink.properties.a ),
<del> linkMode: subDeformerNode.properties.Mode,
<add>
<add> //currently not used
<add> // transformLink: new THREE.Matrix4().fromArray( subDeformerNode.subNodes.TransformLink.properties.a ),
<ide>
<ide> };
<ide>
<ide> if ( 'Indexes' in subDeformerNode.subNodes ) {
<ide>
<del> subDeformer.indices = subDeformerNode.subNodes.Indexes.properties.a;
<del> subDeformer.weights = subDeformerNode.subNodes.Weights.properties.a;
<add> rawBone.indices = subDeformerNode.subNodes.Indexes.properties.a;
<add> rawBone.weights = subDeformerNode.subNodes.Weights.properties.a;
<ide>
<ide> }
<ide>
<del> subDeformers[ child.ID ] = subDeformer;
<add> rawBones.push( rawBone );
<ide>
<ide> } );
<ide>
<ide> return {
<ide>
<del> map: subDeformers,
<add> rawBones: rawBones,
<ide> bones: []
<ide>
<ide> };
<ide>
<ide> }
<ide>
<ide> // Parse nodes in FBXTree.Objects.subNodes.Geometry
<del> function parseGeometries( FBXTree, connections, deformers ) {
<add> function parseGeometries( FBXTree, connections, skeletons ) {
<ide>
<ide> var geometryMap = new Map();
<ide>
<ide> for ( var nodeID in geometryNodes ) {
<ide>
<ide> var relationships = connections.get( parseInt( nodeID ) );
<del> var geo = parseGeometry( FBXTree, relationships, geometryNodes[ nodeID ], deformers );
<add> var geo = parseGeometry( FBXTree, relationships, geometryNodes[ nodeID ], skeletons );
<ide> geometryMap.set( parseInt( nodeID ), geo );
<ide>
<ide> }
<ide> }
<ide>
<ide> // Parse single node in FBXTree.Objects.subNodes.Geometry
<del> function parseGeometry( FBXTree, relationships, geometryNode, deformers ) {
<add> function parseGeometry( FBXTree, relationships, geometryNode, skeletons ) {
<ide>
<ide> switch ( geometryNode.attrType ) {
<ide>
<ide> case 'Mesh':
<del> return parseMeshGeometry( FBXTree, relationships, geometryNode, deformers );
<add> return parseMeshGeometry( FBXTree, relationships, geometryNode, skeletons );
<ide> break;
<ide>
<ide> case 'NurbsCurve':
<ide> }
<ide>
<ide> // Parse single node mesh geometry in FBXTree.Objects.subNodes.Geometry
<del> function parseMeshGeometry( FBXTree, relationships, geometryNode, deformers ) {
<del>
<del> var deformer = relationships.children.reduce( function ( deformer, child ) {
<del>
<del> if ( deformers[ child.ID ] !== undefined ) deformer = deformers[ child.ID ];
<del>
<del> return deformer;
<del>
<del> }, null );
<add> function parseMeshGeometry( FBXTree, relationships, geometryNode, skeletons ) {
<ide>
<ide> var modelNodes = relationships.parents.map( function ( parent ) {
<ide>
<del> var modelNode = FBXTree.Objects.subNodes.Model[ parent.ID ];
<del>
<del> return modelNode;
<add> return FBXTree.Objects.subNodes.Model[ parent.ID ];
<ide>
<ide> } );
<ide>
<ide> // don't create geometry if it is not associated with any models
<ide> if ( modelNodes.length === 0 ) return;
<ide>
<add> var skeleton = relationships.children.reduce( function ( skeleton, child ) {
<add>
<add> if ( skeletons[ child.ID ] !== undefined ) skeleton = skeletons[ child.ID ];
<add>
<add> return skeleton;
<add>
<add> }, null );
<add>
<ide> var preTransform = new THREE.Matrix4();
<ide>
<ide> // TODO: if there is more than one model associated with the geometry, AND the models have
<ide>
<ide> }
<ide>
<del> return genGeometry( FBXTree, relationships, geometryNode, deformer, preTransform );
<add> return genGeometry( FBXTree, relationships, geometryNode, skeleton, preTransform );
<ide>
<ide> }
<ide>
<ide> // Generate a THREE.BufferGeometry from a node in FBXTree.Objects.subNodes.Geometry
<del> function genGeometry( FBXTree, relationships, geometryNode, deformer, preTransform ) {
<add> function genGeometry( FBXTree, relationships, geometryNode, skeleton, preTransform ) {
<ide>
<ide> var subNodes = geometryNode.subNodes;
<ide>
<ide>
<ide> var weightTable = {};
<ide>
<del> if ( deformer ) {
<add> if ( skeleton !== null ) {
<ide>
<del> var subDeformers = deformer.map;
<add> skeleton.rawBones.forEach( function ( rawBone, i ) {
<ide>
<del> for ( var key in subDeformers ) {
<del>
<del> var subDeformer = subDeformers[ key ];
<del>
<del> subDeformer.indices.forEach( function ( index, i ) {
<del>
<del> var weight = subDeformer.weights[ i ];
<add> // loop over the bone's vertex indices and weights
<add> rawBone.indices.forEach( function ( index, j ) {
<ide>
<ide> if ( weightTable[ index ] === undefined ) weightTable[ index ] = [];
<ide>
<ide> weightTable[ index ].push( {
<ide>
<del> id: subDeformer.index,
<del> weight: weight
<add> id: i,
<add> weight: rawBone.weights[ j ],
<ide>
<ide> } );
<ide>
<ide> } );
<ide>
<del> }
<add> } );
<ide>
<ide> }
<ide>
<ide>
<ide> }
<ide>
<del> if ( deformer ) {
<add> if ( skeleton ) {
<ide>
<ide> if ( weightTable[ vertexIndex ] !== undefined ) {
<ide>
<ide> vertexBuffer.push( vertexPositions[ vertexPositionIndexes[ i * 3 + 1 ] ] );
<ide> vertexBuffer.push( vertexPositions[ vertexPositionIndexes[ i * 3 + 2 ] ] );
<ide>
<del> if ( deformer ) {
<add> if ( skeleton ) {
<ide>
<ide> vertexWeightsBuffer.push( faceWeights[ 0 ] );
<ide> vertexWeightsBuffer.push( faceWeights[ 1 ] );
<ide>
<ide> }
<ide>
<del> if ( deformer ) {
<add> if ( skeleton ) {
<ide>
<ide> geo.addAttribute( 'skinIndex', new THREE.Float32BufferAttribute( weightsIndicesBuffer, 4 ) );
<ide>
<ide> geo.addAttribute( 'skinWeight', new THREE.Float32BufferAttribute( vertexWeightsBuffer, 4 ) );
<ide>
<ide> // used later to bind the skeleton to the model
<del> geo.FBX_Deformer = deformer;
<add> geo.FBX_Deformer = skeleton;
<ide>
<ide> }
<ide>
<ide> }
<ide>
<ide> // create the main THREE.Group() to be returned by the loader
<del> function parseScene( FBXTree, connections, deformers, geometryMap, materialMap ) {
<add> function parseScene( FBXTree, connections, skeletons, geometryMap, materialMap ) {
<ide>
<ide> var sceneGraph = new THREE.Group();
<ide>
<del> var modelMap = parseModels( FBXTree, deformers, geometryMap, materialMap, connections );
<add> var modelMap = parseModels( FBXTree, skeletons, geometryMap, materialMap, connections );
<ide>
<ide> var modelNodes = FBXTree.Objects.subNodes.Model;
<ide>
<ide> modelMap.forEach( function ( model ) {
<ide>
<del> var modelNode = modelNodes[ model.FBX_ID ];
<add> var modelNode = modelNodes[ model.ID ];
<add> setLookAtProperties( FBXTree, model, modelNode, connections, sceneGraph );
<ide>
<del> setModelTransforms( FBXTree, model, modelNode, connections, sceneGraph );
<del>
<del> var parentConnections = connections.get( model.FBX_ID ).parents;
<add> var parentConnections = connections.get( model.ID ).parents;
<ide>
<ide> parentConnections.forEach( function ( connection ) {
<ide>
<ide>
<ide> }
<ide>
<add>
<ide> } );
<ide>
<del> bindSkeleton( FBXTree, deformers, geometryMap, modelMap, connections, sceneGraph );
<add>
<add> bindSkeleton( FBXTree, skeletons, geometryMap, modelMap, connections, sceneGraph );
<ide>
<ide> addAnimations( FBXTree, connections, sceneGraph, modelMap );
<ide>
<ide> }
<ide>
<ide> // parse nodes in FBXTree.Objects.subNodes.Model
<del> function parseModels( FBXTree, deformers, geometryMap, materialMap, connections ) {
<add> function parseModels( FBXTree, skeletons, geometryMap, materialMap, connections ) {
<ide>
<ide> var modelMap = new Map();
<ide> var modelNodes = FBXTree.Objects.subNodes.Model;
<ide> var id = parseInt( nodeID );
<ide> var node = modelNodes[ nodeID ];
<ide> var relationships = connections.get( id );
<del> var model = null;
<del>
<del> // create bones
<del> relationships.parents.forEach( function ( parent ) {
<del>
<del> for ( var FBX_ID in deformers ) {
<ide>
<del> var deformer = deformers[ FBX_ID ];
<del> var subDeformers = deformer.map;
<del> var subDeformer = subDeformers[ parent.ID ];
<del>
<del> if ( subDeformer ) {
<del>
<del> var model2 = model;
<del> model = new THREE.Bone();
<del> deformer.bones[ subDeformer.index ] = model;
<del>
<del> // In cases where a bone is shared between multiple meshes
<del> // model will already be defined and we'll hit this case
<del> // TODO: currently doesn't work correctly
<del> if ( model2 !== null ) {
<del>
<del> model.add( model2 );
<del>
<del> }
<del>
<del> }
<del>
<del> }
<del>
<del> } );
<add> var model = buildSkeleton( relationships, skeletons );
<ide>
<ide> if ( ! model ) {
<ide>
<ide> case 'NurbsCurve':
<ide> model = createCurve( relationships, geometryMap );
<ide> break;
<add> case 'LimbNode':
<add> case 'Null':
<ide> default:
<ide> model = new THREE.Group();
<ide> break;
<ide>
<ide> }
<ide>
<del> model.name = THREE.PropertyBinding.sanitizeNodeName( node.attrName );
<del> model.FBX_ID = id;
<add> if ( model ) {
<ide>
<del> modelMap.set( id, model );
<add> setModelTransforms( FBXTree, model, node );
<add>
<add> model.name = THREE.PropertyBinding.sanitizeNodeName( node.attrName );
<add> model.ID = id;
<add>
<add> modelMap.set( id, model );
<add>
<add> }
<ide>
<ide> }
<ide>
<ide> return modelMap;
<ide>
<ide> }
<ide>
<add> function buildSkeleton( relationships, skeletons ) {
<add>
<add> var model = null;
<add>
<add> relationships.parents.forEach( function ( parent ) {
<add>
<add> for ( var ID in skeletons ) {
<add>
<add> var skeleton = skeletons[ ID ];
<add>
<add> skeleton.rawBones.forEach( function ( rawBone, i ) {
<add>
<add> if ( rawBone.ID === parent.ID ) {
<add>
<add> var model2 = model;
<add> model = new THREE.Bone();
<add> skeleton.bones[ i ] = model;
<add>
<add> // In cases where a bone is shared between multiple meshes
<add> // model will already be defined and we'll hit this case
<add> // TODO: currently doesn't work correctly
<add> if ( model2 !== null ) {
<add>
<add> model.add( model2 );
<add>
<add> }
<add>
<add> }
<add>
<add> } );
<add>
<add> }
<add>
<add> } );
<add>
<add> return model;
<add>
<add> }
<add>
<ide> // create a THREE.PerspectiveCamera or THREE.OrthographicCamera
<ide> function createCamera( FBXTree, relationships ) {
<ide>
<ide>
<ide> }
<ide>
<add> function setLookAtProperties( FBXTree, model, modelNode, connections, sceneGraph ) {
<add>
<add> if ( 'LookAtProperty' in modelNode.properties ) {
<add>
<add> var children = connections.get( model.ID ).children;
<add>
<add> children.forEach( function ( child ) {
<add>
<add> if ( child.relationship === 'LookAtProperty' ) {
<add>
<add> var lookAtTarget = FBXTree.Objects.subNodes.Model[ child.ID ];
<add>
<add> if ( 'Lcl_Translation' in lookAtTarget.properties ) {
<add>
<add> var pos = lookAtTarget.properties.Lcl_Translation.value;
<add>
<add> // DirectionalLight, SpotLight
<add> if ( model.target !== undefined ) {
<add>
<add> model.target.position.fromArray( pos );
<add> sceneGraph.add( model.target );
<add>
<add> } else { // Cameras and other Object3Ds
<add>
<add> model.lookAt( new THREE.Vector3().fromArray( pos ) );
<add>
<add> }
<add>
<add> }
<add>
<add> }
<add>
<add> } );
<add>
<add> }
<add>
<add> }
<add>
<ide> // parse the model node for transform details and apply them to the model
<del> function setModelTransforms( FBXTree, model, modelNode, connections, sceneGraph ) {
<add> function setModelTransforms( FBXTree, model, modelNode ) {
<ide>
<ide> // http://help.autodesk.com/view/FBX/2017/ENU/?guid=__cpp_ref_class_fbx_euler_html
<ide> if ( 'RotationOrder' in modelNode.properties ) {
<ide>
<ide> }
<ide>
<del> if ( 'LookAtProperty' in modelNode.properties ) {
<del>
<del> var children = connections.get( model.FBX_ID ).children;
<del>
<del> children.forEach( function ( child ) {
<del>
<del> if ( child.relationship === 'LookAtProperty' ) {
<del>
<del> var lookAtTarget = FBXTree.Objects.subNodes.Model[ child.ID ];
<del>
<del> if ( 'Lcl_Translation' in lookAtTarget.properties ) {
<del>
<del> var pos = lookAtTarget.properties.Lcl_Translation.value;
<del>
<del> // DirectionalLight, SpotLight
<del> if ( model.target !== undefined ) {
<del>
<del> model.target.position.fromArray( pos );
<del> sceneGraph.add( model.target );
<del>
<del> } else { // Cameras and other Object3Ds
<del>
<del> model.lookAt( new THREE.Vector3().fromArray( pos ) );
<del>
<del> }
<del>
<del> }
<del>
<del> }
<del>
<del> } );
<del>
<del> }
<del>
<ide> }
<ide>
<del> function bindSkeleton( FBXTree, deformers, geometryMap, modelMap, connections, sceneGraph ) {
<add> function bindSkeleton( FBXTree, skeletons, geometryMap, modelMap, connections, sceneGraph ) {
<ide>
<ide> // Now with the bones created, we can update the skeletons and bind them to the skinned meshes.
<ide> sceneGraph.updateMatrixWorld( true );
<ide>
<ide> }
<ide>
<del> for ( var FBX_ID in deformers ) {
<add> for ( var ID in skeletons ) {
<ide>
<del> var deformer = deformers[ FBX_ID ];
<del> var subDeformers = deformer.map;
<add> var skeleton = skeletons[ ID ];
<ide>
<del> for ( var key in subDeformers ) {
<add> skeleton.bones.forEach( function ( bone ) {
<ide>
<del> var subDeformer = subDeformers[ key ];
<del> var subDeformerIndex = subDeformer.index;
<add> if ( worldMatrices.has( bone.ID ) ) {
<ide>
<del> var bone = deformer.bones[ subDeformerIndex ];
<del> if ( ! worldMatrices.has( bone.FBX_ID ) ) {
<del>
<del> break;
<add> var mat = worldMatrices.get( bone.ID );
<add> bone.matrixWorld.copy( mat );
<ide>
<ide> }
<del> var mat = worldMatrices.get( bone.FBX_ID );
<del> bone.matrixWorld.copy( mat );
<ide>
<del> }
<add> } );
<ide>
<ide> // Now that skeleton is in bind pose, bind to model.
<del> deformer.skeleton = new THREE.Skeleton( deformer.bones );
<del>
<del> var relationships = connections.get( deformer.FBX_ID );
<del> var parents = relationships.parents;
<add> var parents = connections.get( parseInt( skeleton.ID ) ).parents;
<ide>
<ide> parents.forEach( function ( parent ) {
<ide>
<ide> if ( geometryMap.has( parent.ID ) ) {
<ide>
<ide> var geoID = parent.ID;
<del> var georelationships = connections.get( geoID );
<add> var geoRelationships = connections.get( geoID );
<ide>
<del> georelationships.parents.forEach( function ( geoConnParent ) {
<add> geoRelationships.parents.forEach( function ( geoConnParent ) {
<ide>
<ide> if ( modelMap.has( geoConnParent.ID ) ) {
<ide>
<ide> var model = modelMap.get( geoConnParent.ID );
<ide>
<del> model.bind( deformer.skeleton, model.matrixWorld );
<add> model.bind( new THREE.Skeleton( skeleton.bones ), model.matrixWorld );
<ide>
<ide> }
<ide>
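The refactor above replaces the keyed sub-deformer map with an ordered `rawBones` array, so skin weights can be grouped per vertex with the bone's array index as the skin index. A minimal sketch of that weight-table step in plain JavaScript, using made-up sample data rather than a real FBX tree:

```js
// Build a vertexIndex -> [ { id: boneIndex, weight } ] table, mirroring the
// loop the loader runs over skeleton.rawBones before assembling skin attributes.
function buildWeightTable( rawBones ) {

	var weightTable = {};

	rawBones.forEach( function ( rawBone, boneIndex ) {

		rawBone.indices.forEach( function ( vertexIndex, j ) {

			if ( weightTable[ vertexIndex ] === undefined ) weightTable[ vertexIndex ] = [];

			weightTable[ vertexIndex ].push( { id: boneIndex, weight: rawBone.weights[ j ] } );

		} );

	} );

	return weightTable;

}

// Hypothetical data: two bones influencing three vertices.
var table = buildWeightTable( [
	{ indices: [ 0, 1 ], weights: [ 1.0, 0.5 ] },
	{ indices: [ 1, 2 ], weights: [ 0.5, 1.0 ] }
] );

console.log( table[ 1 ] ); // [ { id: 0, weight: 0.5 }, { id: 1, weight: 0.5 } ]
```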
| 1
|
Ruby
|
Ruby
|
suggest full_name when appropriate
|
020d6e28438109849511d00dfb20383c1a8f5619
|
<ide><path>Library/Homebrew/cmd/install.rb
<ide> def install
<ide> optlinked_version = Keg.for(f.opt_prefix).version
<ide> onoe <<~EOS
<ide> #{f.full_name} #{optlinked_version} is already installed
<del> To upgrade to #{f.version}, run `brew upgrade #{f.name}`
<add> To upgrade to #{f.version}, run `brew upgrade #{f.full_name}`
<ide> EOS
<ide> elsif args.only_dependencies?
<ide> formulae << f
<ide><path>Library/Homebrew/formula_installer.rb
<ide> def install
<ide> EOS
<ide> if formula.outdated? && !formula.head?
<ide> message += <<~EOS
<del> To upgrade to #{formula.pkg_version}, run `brew upgrade #{formula.name}`.
<add> To upgrade to #{formula.pkg_version}, run `brew upgrade #{formula.full_name}`.
<ide> EOS
<ide> elsif only_deps?
<ide> message = nil
| 2
|
Text
|
Text
|
add changelogs for repl
|
3115904595249b0e308498698fe0fae7bf972346
|
<ide><path>doc/api/repl.md
<ide> within the action function for commands registered using the
<ide> ## repl.start([options])
<ide> <!-- YAML
<ide> added: v0.1.91
<add>changes:
<add> - version: v5.8.0
<add> pr-url: https://github.com/nodejs/node/pull/5388
<add> description: The `options` parameter is optional now.
<ide> -->
<ide>
<ide> * `options` {Object | String}
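Since the changelog entry records that `options` became optional in v5.8.0, both call styles below are valid; a small sketch (the prompt string is only an example):

```js
const repl = require('repl');

// The options argument may be omitted entirely, which behaves like passing {}.
repl.start();

// A string is still accepted and used as the prompt:
//   repl.start('demo> ');
// as is a full options object:
//   repl.start({ prompt: 'demo> ', useColors: true });
```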
| 1
|
Javascript
|
Javascript
|
remove usage of require('util') in `repl.js`
|
415a825dc0f53710394d51482ff9b7b65473d5e2
|
<ide><path>lib/repl.js
<ide> const {
<ide> isIdentifierStart,
<ide> isIdentifierChar
<ide> } = require('internal/deps/acorn/acorn/dist/acorn');
<del>const internalUtil = require('internal/util');
<del>const util = require('util');
<add>const {
<add> decorateErrorStack,
<add> isError,
<add> deprecate
<add>} = require('internal/util');
<add>const { inspect } = require('internal/util/inspect');
<ide> const Stream = require('stream');
<ide> const vm = require('vm');
<ide> const path = require('path');
<ide> const { Interface } = require('readline');
<ide> const { Console } = require('console');
<ide> const CJSModule = require('internal/modules/cjs/loader');
<ide> const domain = require('domain');
<del>const debug = util.debuglog('repl');
<add>const debug = require('internal/util/debuglog').debuglog('repl');
<ide> const {
<ide> ERR_CANNOT_WATCH_SIGINT,
<ide> ERR_INVALID_ARG_TYPE,
<ide> function hasOwnProperty(obj, prop) {
<ide> // This is the default "writer" value, if none is passed in the REPL options,
<ide> // and it can be overridden by custom print functions, such as `probe` or
<ide> // `eyes.js`.
<del>const writer = exports.writer = (obj) => util.inspect(obj, writer.options);
<del>writer.options = { ...util.inspect.defaultOptions, showProxy: true };
<add>const writer = exports.writer = (obj) => inspect(obj, writer.options);
<add>writer.options = { ...inspect.defaultOptions, showProxy: true };
<ide>
<ide> exports._builtinLibs = builtinLibs;
<ide>
<ide> function REPLServer(prompt,
<ide>
<ide> let rli = this;
<ide> Object.defineProperty(this, 'rli', {
<del> get: util.deprecate(() => rli,
<del> 'REPLServer.rli is deprecated', 'DEP0124'),
<del> set: util.deprecate((val) => rli = val,
<del> 'REPLServer.rli is deprecated', 'DEP0124'),
<add> get: deprecate(() => rli,
<add> 'REPLServer.rli is deprecated', 'DEP0124'),
<add> set: deprecate((val) => rli = val,
<add> 'REPLServer.rli is deprecated', 'DEP0124'),
<ide> enumerable: true,
<ide> configurable: true
<ide> });
<ide> function REPLServer(prompt,
<ide> if (typeof e === 'object' && e !== null) {
<ide> const pstrace = Error.prepareStackTrace;
<ide> Error.prepareStackTrace = prepareStackTrace(pstrace);
<del> internalUtil.decorateErrorStack(e);
<add> decorateErrorStack(e);
<ide> Error.prepareStackTrace = pstrace;
<ide>
<ide> if (e.domainThrown) {
<ide> delete e.domain;
<ide> delete e.domainThrown;
<ide> }
<ide>
<del> if (internalUtil.isError(e)) {
<add> if (isError(e)) {
<ide> if (e.stack) {
<ide> if (e.name === 'SyntaxError') {
<ide> // Remove stack trace.
<ide> function REPLServer(prompt,
<ide>
<ide> self.clearBufferedCommand();
<ide> Object.defineProperty(this, 'bufferedCommand', {
<del> get: util.deprecate(() => self[kBufferedCommandSymbol],
<del> 'REPLServer.bufferedCommand is deprecated', 'DEP0074'),
<del> set: util.deprecate((val) => self[kBufferedCommandSymbol] = val,
<del> 'REPLServer.bufferedCommand is deprecated', 'DEP0074'),
<add> get: deprecate(() => self[kBufferedCommandSymbol],
<add> 'REPLServer.bufferedCommand is deprecated',
<add> 'DEP0074'),
<add> set: deprecate((val) => self[kBufferedCommandSymbol] = val,
<add> 'REPLServer.bufferedCommand is deprecated',
<add> 'DEP0074'),
<ide> enumerable: true
<ide> });
<ide>
<ide> function REPLServer(prompt,
<ide> writer.options.colors = self.useColors;
<ide>
<ide> if (options[kStandaloneREPL]) {
<del> Object.defineProperty(util.inspect, 'replDefaults', {
<add> Object.defineProperty(inspect, 'replDefaults', {
<ide> get() {
<ide> return writer.options;
<ide> },
<ide> function REPLServer(prompt,
<ide> return false;
<ide> }
<ide>
<del> self.parseREPLKeyword = util.deprecate(
<add> self.parseREPLKeyword = deprecate(
<ide> _parseREPLKeyword,
<ide> 'REPLServer.parseREPLKeyword() is deprecated',
<ide> 'DEP0075');
<ide> REPLServer.prototype.setPrompt = function setPrompt(prompt) {
<ide> Interface.prototype.setPrompt.call(this, prompt);
<ide> };
<ide>
<del>REPLServer.prototype.turnOffEditorMode = util.deprecate(
<add>REPLServer.prototype.turnOffEditorMode = deprecate(
<ide> function() { _turnOffEditorMode(this); },
<ide> 'REPLServer.turnOffEditorMode() is deprecated',
<ide> 'DEP0078');
<ide> REPLServer.prototype.defineCommand = function(keyword, cmd) {
<ide> this.commands[keyword] = cmd;
<ide> };
<ide>
<del>REPLServer.prototype.memory = util.deprecate(
<add>REPLServer.prototype.memory = deprecate(
<ide> _memory,
<ide> 'REPLServer.memory() is deprecated',
<ide> 'DEP0082');
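The default writer wired up above is internal to the REPL, but the same behaviour can be reproduced from userland with the public `writer` option; a hedged sketch (prompt and inspect options are illustrative):

```js
const repl = require('repl');
const util = require('util');

// A custom writer mirrors the built-in one: format each result with
// util.inspect and a fixed set of options instead of the REPL defaults.
const writer = (output) => util.inspect(output, { showProxy: true, depth: 2, colors: true });

repl.start({ prompt: 'inspect> ', writer });
```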
| 1
|
Javascript
|
Javascript
|
fix length/end of stream handling
|
267f312f135a29c809cdee905624e19700cd927c
|
<ide><path>pdf.js
<ide> var Stream = (function() {
<ide> this.bytes = new Uint8Array(arrayBuffer);
<ide> this.start = start || 0;
<ide> this.pos = this.start;
<del> this.length = (start + length) || arrayBuffer.byteLength;
<add> this.end = (start + length) || arrayBuffer.byteLength;
<ide> this.dict = dict;
<ide> }
<ide>
<ide> constructor.prototype = {
<add> get length() {
<add> return this.end - this.start;
<add> },
<ide> getByte: function() {
<ide> var bytes = this.bytes;
<del> if (this.pos >= this.length)
<add> if (this.pos >= this.end)
<ide> return -1;
<ide> return bytes[this.pos++];
<ide> },
<ide> lookChar: function() {
<ide> var bytes = this.bytes;
<del> if (this.pos >= this.length)
<add> if (this.pos >= this.end)
<ide> return;
<ide> return String.fromCharCode(bytes[this.pos]);
<ide> },
<ide> var PDFDoc = (function() {
<ide> }
<ide>
<ide> function find(stream, needle, limit, backwards) {
<del> var length = stream.length;
<ide> var pos = stream.pos;
<add> var end = stream.end;
<ide> var str = "";
<del> if (pos + limit > length)
<del> limit = length - pos;
<add> if (pos + limit > end)
<add> limit = end - pos;
<ide> for (var n = 0; n < limit; ++n)
<ide> str += stream.getChar();
<ide> stream.pos = pos;
<ide> var PDFDoc = (function() {
<ide> startXRef = stream.pos + 6;
<ide> } else {
<ide> // Find startxref at the end of the file.
<del> var start = stream.length - 1024;
<add> var start = stream.end - 1024;
<ide> if (start < 0)
<ide> start = 0;
<ide> stream.pos = start;
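The fix keeps `start` and `end` as absolute bounds and derives `length` from them, so a sub-stream beginning at a non-zero offset no longer reads past its real end. A standalone sketch of that shape (not the full pdf.js `Stream`; the sample bytes are arbitrary):

```js
function ByteStream(arrayBuffer, start, length) {
  this.bytes = new Uint8Array(arrayBuffer);
  this.start = start || 0;
  this.pos = this.start;
  // end is an absolute offset; length is derived from it via the getter below.
  this.end = (this.start + length) || arrayBuffer.byteLength;
}

Object.defineProperty(ByteStream.prototype, 'length', {
  get: function () { return this.end - this.start; }
});

ByteStream.prototype.getByte = function () {
  if (this.pos >= this.end) return -1; // bounds check uses end, not length
  return this.bytes[this.pos++];
};

// A 4-byte window starting at offset 2 of an 8-byte buffer.
var buf = new Uint8Array([10, 11, 12, 13, 14, 15, 16, 17]).buffer;
var sub = new ByteStream(buf, 2, 4);
console.log(sub.length);    // 4
console.log(sub.getByte()); // 12
```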
| 1
|
Ruby
|
Ruby
|
fix incorrect replace order
|
6cdc501a75582613b675acd5294ce7d652854c0e
|
<ide><path>Library/Homebrew/keg_relocate.rb
<ide> def replace_text_in_files(relocation, files: nil)
<ide> relocation.old_cellar => relocation.new_cellar,
<ide> relocation.old_repository => relocation.new_repository,
<ide> }
<del> changed = s.gsub!(Regexp.union(replacements.keys), replacements)
<add> changed = s.gsub!(Regexp.union(replacements.keys.sort_by(&:length).reverse), replacements)
<ide> next unless changed
<ide> changed_files += [first, *rest].map { |file| file.relative_path_from(path) }
<ide>
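The one-line fix sorts the replacement keys longest-first so a key that is a prefix of another (the old prefix versus the old Cellar path, for example) cannot match too early. The same ordering pitfall is easy to demonstrate in JavaScript; the paths below are purely illustrative:

```js
// Replace several old prefixes in one pass, trying the longest key first.
function replacePrefixes(text, replacements) {
  const keys = Object.keys(replacements).sort((a, b) => b.length - a.length);
  const escaped = keys.map(k => k.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'));
  return text.replace(new RegExp(escaped.join('|'), 'g'), match => replacements[match]);
}

// Without the sort, the shorter '/usr/local' alternative would win against
// '/usr/local/Cellar' and produce a mangled path.
const replacements = {
  '/usr/local': '/opt/homebrew',
  '/usr/local/Cellar': '/opt/homebrew/Cellar',
};
console.log(replacePrefixes('lib under /usr/local/Cellar/foo', replacements));
// -> 'lib under /opt/homebrew/Cellar/foo'
```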
| 1
|
Python
|
Python
|
remove unnecessary parentheses in numpy.ma.core
|
f31de1fbef7fb5cbec8f791878bfbd7688340d4d
|
<ide><path>numpy/ma/core.py
<ide> class _DomainCheckInterval(object):
<ide>
<ide> def __init__(self, a, b):
<ide> "domain_check_interval(a,b)(x) = true where x < a or y > b"
<del> if (a > b):
<add> if a > b:
<ide> (a, b) = (b, a)
<ide> self.a = a
<ide> self.b = b
<ide> def __call__(self, a, b, *args, **kwargs):
<ide> if domain is not None:
<ide> m |= domain(da, db)
<ide> # Take care of the scalar case first
<del> if (not m.ndim):
<add> if not m.ndim:
<ide> if m:
<ide> return masked
<ide> else:
<ide> def _recursive_mask_or(self, m1, m2, newmask):
<ide> if m1 is m2 and is_mask(m1):
<ide> return m1
<ide> (dtype1, dtype2) = (getattr(m1, 'dtype', None), getattr(m2, 'dtype', None))
<del> if (dtype1 != dtype2):
<add> if dtype1 != dtype2:
<ide> raise ValueError("Incompatible dtypes '%s'<>'%s'" % (dtype1, dtype2))
<ide> if dtype1.names is not None:
<ide> # Allocate an output mask array with the properly broadcast shape.
<ide> def view(self, dtype=None, type=None, fill_value=None):
<ide> # also make the mask be a view (so attr changes to the view's
<ide> # mask do not affect original object's mask)
<ide> # (especially important to avoid affecting np.masked singleton)
<del> if (getmask(output) is not nomask):
<add> if getmask(output) is not nomask:
<ide> output._mask = output._mask.view()
<ide>
<ide> # Make sure to reset the _fill_value if needed
<ide> def __setmask__(self, mask, copy=False):
<ide> if mask is masked:
<ide> mask = True
<ide>
<del> if (current_mask is nomask):
<add> if current_mask is nomask:
<ide> # Make sure the mask is set
<ide> # Just don't do anything if there's nothing to do.
<ide> if mask is nomask:
<ide> def sum(self, axis=None, dtype=None, out=None, keepdims=np._NoValue):
<ide> result = self.filled(0).sum(axis, dtype=dtype, out=out, **kwargs)
<ide> if isinstance(out, MaskedArray):
<ide> outmask = getmask(out)
<del> if (outmask is nomask):
<add> if outmask is nomask:
<ide> outmask = out._mask = make_mask_none(out.shape)
<ide> outmask.flat = newmask
<ide> return out
<ide> def prod(self, axis=None, dtype=None, out=None, keepdims=np._NoValue):
<ide> result = self.filled(1).prod(axis, dtype=dtype, out=out, **kwargs)
<ide> if isinstance(out, MaskedArray):
<ide> outmask = getmask(out)
<del> if (outmask is nomask):
<add> if outmask is nomask:
<ide> outmask = out._mask = make_mask_none(out.shape)
<ide> outmask.flat = newmask
<ide> return out
<ide> def mean(self, axis=None, dtype=None, out=None, keepdims=np._NoValue):
<ide> out.flat = result
<ide> if isinstance(out, MaskedArray):
<ide> outmask = getmask(out)
<del> if (outmask is nomask):
<add> if outmask is nomask:
<ide> outmask = out._mask = make_mask_none(out.shape)
<ide> outmask.flat = getmask(result)
<ide> return out
<ide> def anom(self, axis=None, dtype=None):
<ide> return m
<ide>
<ide> if not axis:
<del> return (self - m)
<add> return self - m
<ide> else:
<del> return (self - expand_dims(m, axis))
<add> return self - expand_dims(m, axis)
<ide>
<ide> def var(self, axis=None, dtype=None, out=None, ddof=0,
<ide> keepdims=np._NoValue):
<ide> def min(self, axis=None, out=None, fill_value=None, keepdims=np._NoValue):
<ide> result = self.filled(fill_value).min(axis=axis, out=out, **kwargs)
<ide> if isinstance(out, MaskedArray):
<ide> outmask = getmask(out)
<del> if (outmask is nomask):
<add> if outmask is nomask:
<ide> outmask = out._mask = make_mask_none(out.shape)
<ide> outmask.flat = newmask
<ide> else:
<ide> def max(self, axis=None, out=None, fill_value=None, keepdims=np._NoValue):
<ide> result = self.filled(fill_value).max(axis=axis, out=out, **kwargs)
<ide> if isinstance(out, MaskedArray):
<ide> outmask = getmask(out)
<del> if (outmask is nomask):
<add> if outmask is nomask:
<ide> outmask = out._mask = make_mask_none(out.shape)
<ide> outmask.flat = newmask
<ide> else:
<ide> def power(a, b, third=None):
<ide> invalid = np.logical_not(np.isfinite(result.view(ndarray)))
<ide> # Add the initial mask
<ide> if m is not nomask:
<del> if not (result.ndim):
<add> if not result.ndim:
<ide> return masked
<ide> result._mask = np.logical_or(m, invalid)
<ide> # Fix the invalid parts
| 1
|
Python
|
Python
|
use better names for filter variable
|
32bd1a2f0bc29480259fcb5ffa17dcb1aa9710a4
|
<ide><path>rest_framework/filters.py
<ide> def filter_queryset(self, request, queryset, view):
<ide> return queryset
<ide>
<ide> def to_html(self, request, queryset, view):
<del> cls = self.get_filter_class(view, queryset)
<del> if cls:
<del> filter_instance = cls(request.query_params, queryset=queryset)
<add> filter_class = self.get_filter_class(view, queryset)
<add> if filter_class:
<add> filter_instance = filter_class(request.query_params, queryset=queryset)
<ide> else:
<ide> filter_instance = None
<ide> context = Context({
| 1
|
Javascript
|
Javascript
|
fix path to module for repl test on windows
|
c0bac9514795346a9d5e2cc37470cdc5caeff234
|
<ide><path>test/addons/repl-domain-abort/test.js
<ide> 'use strict';
<del>require('../../common');
<add>var common = require('../../common');
<ide> var assert = require('assert');
<ide> var repl = require('repl');
<ide> var stream = require('stream');
<add>var path = require('path');
<ide> var buildType = process.config.target_defaults.default_configuration;
<del>var buildPath = __dirname + '/build/' + buildType + '/binding';
<add>var buildPath = path.join(__dirname, 'build', buildType, 'binding');
<add>// On Windows, escape backslashes in the path before passing it to REPL.
<add>if (common.isWindows)
<add> buildPath = buildPath.replace(/\\/g, '/');
<ide> var cb_ran = false;
<ide>
<ide> process.on('exit', function() {
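The fix builds the addon path with `path.join()` and then flattens backslashes before the path is interpolated into REPL input, where a bare `\` would be consumed as a string escape. A hedged sketch of the same normalization with a hypothetical module path:

```js
const path = require('path');

// On Windows path.join() yields backslash separators, which would be
// swallowed as escape sequences once embedded in a REPL command string.
const buildPath = path.join(__dirname, 'build', 'Release', 'binding');
const replSafePath = process.platform === 'win32'
  ? buildPath.replace(/\\/g, '/')
  : buildPath;

// Safe to interpolate into a line fed to the REPL.
const command = `require('${replSafePath}')\n`;
console.log(command);
```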
| 1
|
Text
|
Text
|
fix small typos
|
aad27f5925a63ca967b5bee0c53b4ffd360f5c60
|
<ide><path>docs/faq/DesignDecisions.md
<ide> The default utility combineReducers is only one way to build a complex reducer.
<ide>
<ide> <a id="no-asynch-in-mapDispatchToProps"></a>
<ide> ### Why doesn't mapDispatchToProps allow use of return values from getState() or mapStateToProps()?
<del>In general, connect provides some way to generate a props object out of a closure that is injected with both the current state and dispatch. Asynchronous logic does not belong in the mapStateToProps and mapDispatchToProps functions at all. They should be only pure functions which transform the state to props and bind action creators to dispatch.
<add>In general, connect generates a props object out of a closure that is injected with both the current state and dispatch. Asynchronous logic does not belong in the mapStateToProps and mapDispatchToProps functions at all. They should be only pure functions which transform the state to props and bind action creators to dispatch.
<ide>
<del>You cannot modify the state during the execution of mapStateToProps, because modifying the state from these functions could lead to infinite loops because every update would reinvoke the map functions. Calling getState() inside mapStateToProps would always just return the same state that is passed to the function.
<add>You cannot modify the state during the execution of mapStateToProps. Modifying the state from these functions could lead to infinite loops because every update would reinvoke the map functions. Calling getState inside mapStateToProps would always just return the same state that is passed to the function.
<ide>
<del>The designed way to handle this use-case (needing to alter props based on the current state and mapDispatchToProps functions) is to work from the third argument to the connect function, mergeProps. If specified, it is passed the result of mapStateToProps(), mapDispatchToProps(), and the container component's props. The plain object you return from it will be passed as props to the wrapped component.
<add>The preferred way to handle this use-case (needing to alter props based on the current state and mapDispatchToProps functions) is to work from mergeProps, the third argument to the connect function. If specified, it is passed the result of mapStateToProps(), mapDispatchToProps(), and the container component's props. The plain object returned from mergeProps will be passed as props to the wrapped component.
<ide>
<ide> #### Further information
<ide> **Discussions**
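As the corrected paragraph says, props that depend on both state and dispatch belong in `mergeProps`, the third argument to connect. A hedged sketch of that signature; the component, state fields and action names are made up:

```js
import { connect } from 'react-redux';
import { saveNote } from './actions';
import NoteEditor from './NoteEditor';

const mapStateToProps = (state) => ({ note: state.currentNote });
const mapDispatchToProps = { saveNote };

// mergeProps receives the results of the two map functions plus ownProps;
// the plain object it returns becomes the wrapped component's props.
const mergeProps = (stateProps, dispatchProps, ownProps) => ({
  ...ownProps,
  note: stateProps.note,
  // the bound action can be specialized with values taken from state
  save: () => dispatchProps.saveNote(stateProps.note.id),
});

export default connect(mapStateToProps, mapDispatchToProps, mergeProps)(NoteEditor);
```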
| 1
|
Python
|
Python
|
add authprovider for cassandra export
|
f5e53e45667bac10cfd556a7d5562336616c6760
|
<ide><path>glances/exports/glances_cassandra.py
<ide> from glances.exports.glances_export import GlancesExport
<ide> from glances.compat import iteritems
<ide>
<add>from cassandra.auth import PlainTextAuthProvider
<ide> from cassandra.cluster import Cluster
<ide> from cassandra.util import uuid_from_time
<ide> from cassandra import InvalidRequest
<ide> def __init__(self, config=None, args=None):
<ide> self.protocol_version = 3
<ide> self.replication_factor = 2
<ide> self.table = None
<add> self.username = None
<add> self.password = None
<ide>
<ide> # Load the Cassandra configuration file section
<ide> self.export_enable = self.load_conf('cassandra',
<ide> mandatories=['host', 'port', 'keyspace'],
<ide> options=['protocol_version',
<ide> 'replication_factor',
<del> 'table'])
<add> 'table',
<add> 'username',
<add> 'password'])
<ide> if not self.export_enable:
<ide> sys.exit(2)
<ide>
<ide> # Init the Cassandra client
<ide> self.cluster, self.session = self.init()
<ide>
<ide> def init(self):
<del> """Init the connection to the InfluxDB server."""
<add> """Init the connection to the Cassandra server."""
<ide> if not self.export_enable:
<ide> return None
<ide>
<add> # if username and/or password are not set the connection will try to connect with no auth
<add> auth_provider = PlainTextAuthProvider(
<add> username=self.username, password=self.password)
<add>
<ide> # Cluster
<ide> try:
<ide> cluster = Cluster([self.host],
<ide> port=int(self.port),
<del> protocol_version=int(self.protocol_version))
<add> protocol_version=int(self.protocol_version),
<add> auth_provider=auth_provider)
<ide> session = cluster.connect()
<ide> except Exception as e:
<ide> logger.critical("Cannot connect to Cassandra cluster '%s:%s' (%s)" % (self.host, self.port, e))
| 1
|
Javascript
|
Javascript
|
add packager worker for buck
|
fa44607bf6092898b9a27cf69d4d78a132ddfc13
|
<ide><path>local-cli/bundle/buildBundle.js
<ide> * LICENSE file in the root directory of this source tree. An additional grant
<ide> * of patent rights can be found in the PATENTS file in the same directory.
<ide> */
<del>'use strict';
<ide>
<ide> const log = require('../util/log').out('bundle');
<ide> const outputBundle = require('./output/bundle');
<ide> const Promise = require('promise');
<ide> const ReactPackager = require('../../packager/react-packager');
<ide> const saveAssets = require('./saveAssets');
<ide>
<del>function buildBundle(args, config, output = outputBundle) {
<add>function buildBundle(args, config, output = outputBundle, packagerInstance) {
<ide> return new Promise((resolve, reject) => {
<ide>
<ide> // This is used by a bazillion of npm modules we don't control so we don't
<ide> function buildBundle(args, config, output = outputBundle) {
<ide> platform: args.platform,
<ide> };
<ide>
<del> const clientPromise = ReactPackager.createClientFor(options);
<add> var bundlePromise;
<add> if (packagerInstance) {
<add> bundlePromise = output.build(packagerInstance, requestOpts)
<add> .then(bundle => {
<add> output.save(bundle, args, log);
<add> return bundle;
<add> });
<add> } else {
<add> const clientPromise = ReactPackager.createClientFor(options);
<ide>
<del> // Build and save the bundle
<del> const bundlePromise = clientPromise
<del> .then(client => {
<del> log('Created ReactPackager');
<del> return output.build(client, requestOpts);
<del> })
<del> .then(bundle => {
<del> output.save(bundle, args, log);
<del> return bundle;
<del> });
<add> // Build and save the bundle
<add> bundlePromise = clientPromise
<add> .then(client => {
<add> log('Created ReactPackager');
<add> return output.build(client, requestOpts);
<add> })
<add> .then(bundle => {
<add> output.save(bundle, args, log);
<add> return bundle;
<add> });
<ide>
<del> // When we're done bundling, close the client
<del> Promise.all([clientPromise, bundlePromise])
<del> .then(([client]) => {
<del> log('Closing client');
<del> client.close();
<del> });
<add> // When we're done bundling, close the client
<add> Promise.all([clientPromise, bundlePromise])
<add> .then(([client]) => {
<add> log('Closing client');
<add> client.close();
<add> });
<add> }
<ide>
<ide> // Save the assets of the bundle
<ide> const assets = bundlePromise
<ide><path>local-cli/bundle/bundle.js
<ide> * LICENSE file in the root directory of this source tree. An additional grant
<ide> * of patent rights can be found in the PATENTS file in the same directory.
<ide> */
<del>'use strict';
<ide>
<ide> const buildBundle = require('./buildBundle');
<ide> const bundleCommandLineArgs = require('./bundleCommandLineArgs');
<ide> const outputPrepack = require('./output/prepack');
<ide> /**
<ide> * Builds the bundle starting to look for dependencies at the given entry path.
<ide> */
<del>function bundleWithOutput(argv, config, output) {
<add>function bundleWithOutput(argv, config, output, packagerInstance) {
<ide> const args = parseCommandLine(bundleCommandLineArgs, argv);
<ide> if (!output) {
<ide> output = args.prepack ? outputPrepack : outputBundle;
<ide> }
<del> return buildBundle(args, config, output);
<add> return buildBundle(args, config, output, packagerInstance);
<ide>
<ide> }
<ide>
<del>function bundle(argv, config) {
<del> return bundleWithOutput(argv, config);
<add>function bundle(argv, config, packagerInstance) {
<add> return bundleWithOutput(argv, config, undefined, packagerInstance);
<ide> }
<ide>
<ide> module.exports = bundle;
<ide><path>local-cli/bundle/unbundle.js
<ide> * LICENSE file in the root directory of this source tree. An additional grant
<ide> * of patent rights can be found in the PATENTS file in the same directory.
<ide> */
<del>'use strict';
<ide>
<ide> const bundleWithOutput = require('./bundle').withOutput;
<ide> const outputUnbundle = require('./output/unbundle');
<ide>
<ide> /**
<ide> * Builds the bundle starting to look for dependencies at the given entry path.
<ide> */
<del>function unbundle(argv, config) {
<del> return bundleWithOutput(argv, config, outputUnbundle);
<add>function unbundle(argv, config, packagerInstance) {
<add> return bundleWithOutput(argv, config, outputUnbundle, packagerInstance);
<ide> }
<ide>
<ide> module.exports = unbundle;
<ide><path>local-cli/dependencies/dependencies.js
<ide> * LICENSE file in the root directory of this source tree. An additional grant
<ide> * of patent rights can be found in the PATENTS file in the same directory.
<ide> */
<del>'use strict';
<ide>
<ide> const fs = require('fs');
<del>const log = require('../util/log').out('dependencies');
<ide> const parseCommandLine = require('../util/parseCommandLine');
<ide> const path = require('path');
<ide> const Promise = require('promise');
<ide> const ReactPackager = require('../../packager/react-packager');
<ide> /**
<ide> * Returns the dependencies an entry path has.
<ide> */
<del>function dependencies(argv, config) {
<add>function dependencies(argv, config, packagerInstance) {
<ide> return new Promise((resolve, reject) => {
<del> _dependencies(argv, config, resolve, reject);
<add> _dependencies(argv, config, resolve, reject, packagerInstance);
<ide> });
<ide> }
<ide>
<del>function _dependencies(argv, config, resolve, reject) {
<add>function _dependencies(argv, config, resolve, reject, packagerInstance) {
<ide> const args = parseCommandLine([
<ide> {
<ide> command: 'entry-file',
<ide> function _dependencies(argv, config, resolve, reject) {
<ide> ? fs.createWriteStream(args.output)
<ide> : process.stdout;
<ide>
<del> // TODO: allow to configure which logging namespaces should get logged
<del> // log('Running ReactPackager');
<del> // log('Waiting for the packager.');
<del> resolve(ReactPackager.createClientFor(packageOpts).then(client => {
<del> // log('Packager client was created');
<del> return client.getOrderedDependencyPaths(options)
<del> .then(deps => {
<del> // log('Packager returned dependencies');
<del> client.close();
<add> if (packagerInstance) {
<add> resolve(packagerInstance.getOrderedDependencyPaths(options).then(
<add> deps => {
<add> return _dependenciesHandler(
<add> deps,
<add> packageOpts.projectRoots,
<add> outStream,
<add> writeToFile
<add> );
<add> }
<add> ));
<add> } else {
<add> resolve(ReactPackager.createClientFor(packageOpts).then(client => {
<add> return client.getOrderedDependencyPaths(options)
<add> .then(deps => {
<add> client.close();
<add> return _dependenciesHandler(
<add> deps,
<add> packageOpts.projectRoots,
<add> outStream,
<add> writeToFile
<add> );
<add> });
<add> }));
<add> }
<add>}
<ide>
<del> deps.forEach(modulePath => {
<del> // Temporary hack to disable listing dependencies not under this directory.
<del> // Long term, we need either
<del> // (a) JS code to not depend on anything outside this directory, or
<del> // (b) Come up with a way to declare this dependency in Buck.
<del> const isInsideProjectRoots = packageOpts.projectRoots.filter(
<del> root => modulePath.startsWith(root)
<del> ).length > 0;
<add>function _dependenciesHandler(deps, projectRoots, outStream, writeToFile) {
<add> deps.forEach(modulePath => {
<add> // Temporary hack to disable listing dependencies not under this directory.
<add> // Long term, we need either
<add> // (a) JS code to not depend on anything outside this directory, or
<add> // (b) Come up with a way to declare this dependency in Buck.
<add> const isInsideProjectRoots = projectRoots.filter(
<add> root => modulePath.startsWith(root)
<add> ).length > 0;
<ide>
<del> if (isInsideProjectRoots) {
<del> outStream.write(modulePath + '\n');
<del> }
<del> });
<del> return writeToFile
<del> ? Promise.denodeify(outStream.end).bind(outStream)()
<del> : Promise.resolve();
<del> // log('Wrote dependencies to output file');
<del> });
<del> }));
<add> if (isInsideProjectRoots) {
<add> outStream.write(modulePath + '\n');
<add> }
<add> });
<add> return writeToFile
<add> ? Promise.denodeify(outStream.end).bind(outStream)()
<add> : Promise.resolve();
<ide> }
<ide>
<ide> module.exports = dependencies;
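The change threads an optional, already-running packager instance through the bundle and dependencies commands so Buck can reuse one server instead of paying client start-up and teardown per invocation. A stripped-down sketch of that branching pattern; `createClientFor` and `output.build` follow the diff, everything else is illustrative:

```js
// Build a bundle either with a caller-supplied packager instance or with a
// throwaway client that is closed once the bundle has been produced.
function buildWithOptionalInstance(output, requestOpts, options, ReactPackager, packagerInstance) {
  if (packagerInstance) {
    // Reuse the long-lived instance; the caller owns its lifecycle.
    return output.build(packagerInstance, requestOpts);
  }

  const clientPromise = ReactPackager.createClientFor(options);
  const bundlePromise = clientPromise.then(client => output.build(client, requestOpts));

  // Close the short-lived client when the bundle is done.
  Promise.all([clientPromise, bundlePromise]).then(([client]) => client.close());

  return bundlePromise;
}
```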
| 4
|
Python
|
Python
|
add even tree problem
|
e0733c26f211638dcaadabaa38e010d27acce854
|
<ide><path>data_structures/Graph/even_tree.py
<add>"""
<add>You are given a tree (a simple connected graph with no cycles). The tree has N
<add>nodes numbered from 1 to N and is rooted at node 1.
<add>
<add>Find the maximum number of edges you can remove from the tree to get a forest
<add>such that each connected component of the forest contains an even number of
<add>nodes.
<add>
<add>Constraints
<add>2 <= N <= 100
<add>
<add>Note: The tree input will be such that it can always be decomposed into
<add>components containing an even number of nodes.
<add>"""
<add># pylint: disable=invalid-name
<add>from collections import defaultdict
<add>
<add>
<add>def dfs(start):
<add> """DFS traversal"""
<add> # pylint: disable=redefined-outer-name
<add> ret = 1
<add> visited[start] = True
<add> for v in tree.get(start):
<add> if v not in visited:
<add> ret += dfs(v)
<add> if ret % 2 == 0:
<add> cuts.append(start)
<add> return ret
<add>
<add>
<add>def even_tree():
<add> """
<add> 2 1
<add> 3 1
<add> 4 3
<add> 5 2
<add> 6 1
<add> 7 2
<add> 8 6
<add> 9 8
<add> 10 8
<add> On removing edges (1,3) and (1,6), we can get the desired result 2.
<add> """
<add> dfs(1)
<add>
<add>
<add>if __name__ == '__main__':
<add> n, m = 10, 9
<add> tree = defaultdict(list)
<add> visited = {}
<add> cuts = []
<add> count = 0
<add> edges = [
<add> (2, 1),
<add> (3, 1),
<add> (4, 3),
<add> (5, 2),
<add> (6, 1),
<add> (7, 2),
<add> (8, 6),
<add> (9, 8),
<add> (10, 8),
<add> ]
<add> for u, v in edges:
<add> tree[u].append(v)
<add> tree[v].append(u)
<add> even_tree()
<add> print len(cuts) - 1
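The script computes subtree sizes with a DFS and records a cut wherever a subtree has an even node count; because the root's own size (the whole tree) is also even, it prints `len(cuts) - 1`. A hedged JavaScript rendering of the same idea that skips the root instead, checked against the sample edges from the docstring:

```js
function evenTreeCuts(n, edges) {
  const adj = Array.from({ length: n + 1 }, () => []);
  for (const [u, v] of edges) {
    adj[u].push(v);
    adj[v].push(u);
  }

  let cuts = 0;
  const visited = new Array(n + 1).fill(false);

  // Returns the size of the subtree rooted at `node`; every even-sized
  // subtree below the root marks one removable edge.
  function dfs(node) {
    visited[node] = true;
    let size = 1;
    for (const next of adj[node]) {
      if (!visited[next]) size += dfs(next);
    }
    if (size % 2 === 0 && node !== 1) cuts += 1;
    return size;
  }

  dfs(1);
  return cuts;
}

const edges = [[2, 1], [3, 1], [4, 3], [5, 2], [6, 1], [7, 2], [8, 6], [9, 8], [10, 8]];
console.log(evenTreeCuts(10, edges)); // 2
```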
| 1
|
Ruby
|
Ruby
|
move md5 deprecation warning into pathname#md5
|
c166ce3f581bda226b8fde4852dcf5775255d713
|
<ide><path>Library/Homebrew/compat/compatibility.rb
<ide> def md5(val=nil)
<ide> if val.nil?
<ide> @checksum if checksum.nil? or @checksum.hash_type == :md5
<ide> else
<del> opoo <<-EOS.undent
<del> MD5 support is deprecated and will be removed in a future version.
<del> Please switch this formula to #{Checksum::TYPES.map { |t| t.to_s.upcase } * ' or '}.
<del> EOS
<ide> @checksum = Checksum.new(:md5, val)
<ide> end
<ide> end
<ide> def md5(val=nil)
<ide> class Pathname
<ide> def md5
<ide> require 'digest/md5'
<add> opoo <<-EOS.undent
<add> MD5 support is deprecated and will be removed in a future version.
<add> Please switch this formula to #{Checksum::TYPES.map { |t| t.to_s.upcase } * ' or '}.
<add> EOS
<ide> incremental_hash(Digest::MD5)
<ide> end
<ide> end
| 1
|
Go
|
Go
|
remove obsolete comment
|
3dc8829a83d72d3a8e1d9b6a88c9e6ff9ecaf4a0
|
<ide><path>api/server/server.go
<ide> func (s *Server) addRouter(r router.Router) {
<ide> }
<ide>
<ide> // createMux initializes the main router the server uses.
<del>// we keep enableCors just for legacy usage, need to be removed in the future
<ide> func (s *Server) createMux() *mux.Router {
<ide> m := mux.NewRouter()
<ide> if utils.IsDebugEnabled() {
| 1
|
Text
|
Text
|
revert last grammatical edit
|
7a227276e653b481b73ed9e9178244b53d61f730
|
<ide><path>CODE_OF_CONDUCT.md
<ide> Examples of unacceptable behavior by participants include:
<ide> advances
<ide> * Trolling, insulting/derogatory comments, and personal or political attacks
<ide> * Public or private harassment
<del>* Publishing other's private information, such as a physical or electronic
<add>* Publishing others' private information, such as a physical or electronic
<ide> address, without explicit permission
<ide> * Other conduct which could reasonably be considered inappropriate in a
<ide> professional setting
| 1
|
PHP
|
PHP
|
add tests for complex input rendering
|
e8ed1f58f5e39d79182fda355da30cd35ca4e464
|
<ide><path>tests/TestCase/View/Input/MultiCheckboxTest.php
<ide> public function testRenderSimple() {
<ide> $this->assertTags($result, $expected);
<ide> }
<ide>
<add>/**
<add> * Test render complex and additional attributes.
<add> *
<add> * @return void
<add> */
<add> public function testRenderComplex() {
<add> $input = new MultiCheckbox($this->templates);
<add> $data = [
<add> 'name' => 'Tags[id]',
<add> 'options' => [
<add> ['value' => '1', 'text' => 'CakePHP', 'data-test' => 'val'],
<add> ['value' => '2', 'text' => 'Development', 'class' => 'custom'],
<add> ]
<add> ];
<add> $result = $input->render($data);
<add> $expected = [
<add> ['div' => ['class' => 'checkbox']],
<add> ['input' => [
<add> 'type' => 'checkbox',
<add> 'name' => 'Tags[id][]',
<add> 'value' => 1,
<add> 'id' => 'tags-id-1',
<add> 'data-test' => 'val',
<add> ]],
<add> ['label' => ['for' => 'tags-id-1']],
<add> 'CakePHP',
<add> '/label',
<add> '/div',
<add> ['div' => ['class' => 'checkbox']],
<add> ['input' => [
<add> 'type' => 'checkbox',
<add> 'name' => 'Tags[id][]',
<add> 'value' => 2,
<add> 'id' => 'tags-id-2',
<add> 'class' => 'custom',
<add> ]],
<add> ['label' => ['for' => 'tags-id-2']],
<add> 'Development',
<add> '/label',
<add> '/div',
<add> ];
<add> $this->assertTags($result, $expected);
<add> }
<add>
<ide> /**
<ide> * Test render escaping options.
<ide> *
| 1
|
Text
|
Text
|
clarify config order of precedence
|
88c24d85ee0d18a9d0c09ef229a76c4a796ccf89
|
<ide><path>README.md
<ide> axios.get('/user', {
<ide> .catch(function (error) {
<ide> console.log(error);
<ide> });
<del>
<add>
<ide> // Want to use async/await? Add the `async` keyword to your outer function/method.
<ide> async function getUser() {
<ide> try {
<ide> async function getUser() {
<ide> }
<ide> ```
<ide>
<del>> **NOTE:** `async/await` is part of ECMAScript 2017 and is not supported in Internet
<add>> **NOTE:** `async/await` is part of ECMAScript 2017 and is not supported in Internet
<ide> > Explorer and older browsers, so use with caution.
<ide>
<ide> Performing a `POST` request
<ide> Config will be merged with an order of precedence. The order is library defaults
<ide> var instance = axios.create();
<ide>
<ide> // Override timeout default for the library
<del>// Now all requests will wait 2.5 seconds before timing out
<add>// Now all requests using this instance will wait 2.5 seconds before timing out
<ide> instance.defaults.timeout = 2500;
<ide>
<ide> // Override timeout for this request as it's known to take a long time
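A compact sketch of the precedence the section describes, with a per-request option overriding the instance default; URL and timeout values are illustrative:

```js
const axios = require('axios');

// Library default -> instance default -> per-request config; the last one wins.
const instance = axios.create();
instance.defaults.timeout = 2500;

// This single request overrides the instance default and waits up to 5 seconds.
instance.get('https://example.com/longRequest', { timeout: 5000 })
  .then(response => console.log(response.status))
  .catch(error => console.log(error.message));
```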
| 1
|
Text
|
Text
|
update a link of npm repository
|
450ab14d4b1c211358df00ddd992023a78ef92f2
|
<ide><path>doc/guides/maintaining-npm.md
<ide> changes can be reviewed and landed via the normal consensus seeking process.
<ide> ## Step 1: Clone npm
<ide>
<ide> ```console
<del>$ git clone https://github.com/npm/npm.git
<add>$ git clone https://github.com/npm/cli.git npm
<ide> $ cd npm
<ide> ```
<ide>
| 1
|
Go
|
Go
|
fix goroutine leak on pull
|
3037e4f7c4eec06cf3a5afca6f58ec58b4bdd63e
|
<ide><path>graph/pull_v2.go
<ide> func (p *v2Puller) pullV2Tag(out io.Writer, tag, taggedName string) (verified bo
<ide> // set the error. All successive reads/writes will return with this
<ide> // error.
<ide> pipeWriter.CloseWithError(errors.New("download canceled"))
<add> } else {
<add> // If no error then just close the pipe.
<add> pipeWriter.Close()
<ide> }
<ide> }()
<ide>
| 1
|
PHP
|
PHP
|
fix resource type in docblocks
|
72da4ec576fb195536a488b1fbe3234bfacbd944
|
<ide><path>src/Illuminate/Http/Client/PendingRequest.php
<ide> public function baseUrl(string $url)
<ide> /**
<ide> * Attach a raw body to the request.
<ide> *
<del> * @param resource|string $content
<add> * @param string $content
<ide> * @param string $contentType
<ide> * @return $this
<ide> */
<ide> public function asForm()
<ide> * Attach a file to the request.
<ide> *
<ide> * @param string|array $name
<del> * @param string $contents
<add> * @param string|resource $contents
<ide> * @param string|null $filename
<ide> * @param array $headers
<ide> * @return $this
| 1
|
Javascript
|
Javascript
|
pass request to .createconnection()
|
1ccdde9a6986e7e258e9aa0de10ca51fc0c5a9d4
|
<ide><path>lib/http.js
<ide> Agent.prototype.addRequest = function(req, host, port, localAddress) {
<ide> }
<ide> if (this.sockets[name].length < this.maxSockets) {
<ide> // If we are under maxSockets create a new one.
<del> req.onSocket(this.createSocket(name, host, port, localAddress, req));
<add> req.onSocket(this.createSocket(name, host, port, localAddress));
<ide> } else {
<ide> // We are over limit so we'll add it to the queue.
<ide> if (!this.requests[name]) {
<ide> Agent.prototype.addRequest = function(req, host, port, localAddress) {
<ide> this.requests[name].push(req);
<ide> }
<ide> };
<del>Agent.prototype.createSocket = function(name, host, port, localAddress, req) {
<add>Agent.prototype.createSocket = function(name, host, port, localAddress) {
<ide> var self = this;
<ide> var options = util._extend({}, self.options);
<ide> options.port = port;
<ide> options.host = host;
<ide> options.localAddress = localAddress;
<del> var s = self.createConnection.call(req, options);
<add> var s = self.createConnection(options);
<ide> if (!self.sockets[name]) {
<ide> self.sockets[name] = [];
<ide> }
<ide> Agent.prototype.removeSocket = function(s, name, host, port, localAddress) {
<ide> }
<ide> if (this.requests[name] && this.requests[name].length) {
<ide> // If we have pending requests and a socket gets closed a new one
<del> this.createSocket(name,
<del> host,
<del> port,
<del> localAddress,
<del> this.requests[name][0]).emit('free');
<add> this.createSocket(name, host, port, localAddress).emit('free');
<ide> }
<ide> };
<ide>
<ide> function ClientRequest(options, cb) {
<ide> var self = this;
<ide> OutgoingMessage.call(self);
<ide>
<del> this.options = util._extend({}, options);
<ide> self.agent = options.agent === undefined ? globalAgent : options.agent;
<ide>
<ide> var defaultPort = options.defaultPort || 80;
<ide> function ClientRequest(options, cb) {
<ide> self._last = true;
<ide> self.shouldKeepAlive = false;
<ide> if (options.createConnection) {
<del> self.onSocket(options.createConnection.call(self, self.socketPath));
<add> self.onSocket(options.createConnection(self.socketPath));
<ide> } else {
<ide> self.onSocket(net.createConnection(self.socketPath));
<ide> }
<ide> function ClientRequest(options, cb) {
<ide> if (options.createConnection) {
<ide> options.port = port;
<ide> options.host = host;
<del> var conn = options.createConnection.call(self, options);
<add> var conn = options.createConnection(options);
<ide> } else {
<ide> var conn = net.createConnection({
<ide> port: port,
<ide><path>lib/https.js
<ide>
<ide> var tls = require('tls');
<ide> var http = require('http');
<del>var util = require('util');
<del>var inherits = util.inherits;
<add>var inherits = require('util').inherits;
<ide>
<ide> function Server(opts, requestListener) {
<ide> if (!(this instanceof Server)) return new Server(opts, requestListener);
<ide> exports.createServer = function(opts, requestListener) {
<ide> // HTTPS agents.
<ide>
<ide> function createConnection(/* [port, host, options] */) {
<del> var options = util._extend({}, this.options);
<add> var options = {};
<ide>
<ide> if (typeof arguments[0] === 'object') {
<del> options = util._extend(options, arguments[0]);
<add> options = arguments[0];
<ide> } else if (typeof arguments[1] === 'object') {
<del> options = util._extend(options, arguments[1]);
<add> options = arguments[1];
<ide> options.port = arguments[0];
<ide> } else if (typeof arguments[2] === 'object') {
<del> options = util._extend(options, arguments[2]);
<add> options = arguments[2];
<ide> options.port = arguments[0];
<ide> options.host = arguments[1];
<ide> } else {
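After the revert, `createConnection` is invoked as a plain method of the agent, so a custom implementation sees only the merged options object rather than being bound to the request. A hedged sketch of overriding it on an agent; host, port and path are placeholders:

```js
const http = require('http');
const net = require('net');

// A custom agent whose createConnection receives just the options object,
// matching the post-revert call in Agent.prototype.createSocket.
const agent = new http.Agent();
agent.createConnection = function (options) {
  console.log('connecting to %s:%d', options.host, options.port);
  return net.createConnection(options.port, options.host);
};

http.get({ host: 'example.com', port: 80, path: '/', agent: agent }, (res) => {
  console.log('status', res.statusCode);
  res.resume();
});
```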
| 2
|
Go
|
Go
|
update integration tests for server pkg
|
8cf0b80a7843633018b66a35d9a55f30814a56b6
|
<ide><path>integration/buildfile_test.go
<ide> package docker
<ide>
<ide> import (
<ide> "fmt"
<del> "github.com/dotcloud/docker"
<ide> "github.com/dotcloud/docker/archive"
<ide> "github.com/dotcloud/docker/engine"
<ide> "github.com/dotcloud/docker/image"
<ide> "github.com/dotcloud/docker/nat"
<add> "github.com/dotcloud/docker/server"
<ide> "github.com/dotcloud/docker/utils"
<ide> "io/ioutil"
<ide> "net"
<ide> func buildImage(context testContextTemplate, t *testing.T, eng *engine.Engine, u
<ide> }
<ide> dockerfile := constructDockerfile(context.dockerfile, ip, port)
<ide>
<del> buildfile := docker.NewBuildFile(srv, ioutil.Discard, ioutil.Discard, false, useCache, false, ioutil.Discard, utils.NewStreamFormatter(false), nil, nil)
<add> buildfile := server.NewBuildFile(srv, ioutil.Discard, ioutil.Discard, false, useCache, false, ioutil.Discard, utils.NewStreamFormatter(false), nil, nil)
<ide> id, err := buildfile.Build(context.Archive(dockerfile, t))
<ide> if err != nil {
<ide> return nil, err
<ide> func TestForbiddenContextPath(t *testing.T) {
<ide> }
<ide> dockerfile := constructDockerfile(context.dockerfile, ip, port)
<ide>
<del> buildfile := docker.NewBuildFile(srv, ioutil.Discard, ioutil.Discard, false, true, false, ioutil.Discard, utils.NewStreamFormatter(false), nil, nil)
<add> buildfile := server.NewBuildFile(srv, ioutil.Discard, ioutil.Discard, false, true, false, ioutil.Discard, utils.NewStreamFormatter(false), nil, nil)
<ide> _, err = buildfile.Build(context.Archive(dockerfile, t))
<ide>
<ide> if err == nil {
<ide> func TestBuildADDFileNotFound(t *testing.T) {
<ide> }
<ide> dockerfile := constructDockerfile(context.dockerfile, ip, port)
<ide>
<del> buildfile := docker.NewBuildFile(mkServerFromEngine(eng, t), ioutil.Discard, ioutil.Discard, false, true, false, ioutil.Discard, utils.NewStreamFormatter(false), nil, nil)
<add> buildfile := server.NewBuildFile(mkServerFromEngine(eng, t), ioutil.Discard, ioutil.Discard, false, true, false, ioutil.Discard, utils.NewStreamFormatter(false), nil, nil)
<ide> _, err = buildfile.Build(context.Archive(dockerfile, t))
<ide>
<ide> if err == nil {
<ide> func TestBuildFails(t *testing.T) {
<ide> func TestBuildFailsDockerfileEmpty(t *testing.T) {
<ide> _, err := buildImage(testContextTemplate{``, nil, nil}, t, nil, true)
<ide>
<del> if err != docker.ErrDockerfileEmpty {
<del> t.Fatal("Expected: %v, got: %v", docker.ErrDockerfileEmpty, err)
<add> if err != server.ErrDockerfileEmpty {
<add> t.Fatal("Expected: %v, got: %v", server.ErrDockerfileEmpty, err)
<ide> }
<ide> }
<ide>
<ide><path>integration/server_test.go
<ide> package docker
<ide>
<ide> import (
<del> "github.com/dotcloud/docker"
<ide> "github.com/dotcloud/docker/engine"
<ide> "github.com/dotcloud/docker/runconfig"
<add> "github.com/dotcloud/docker/server"
<ide> "strings"
<ide> "testing"
<ide> "time"
<ide> func TestListContainers(t *testing.T) {
<ide> }
<ide> }
<ide>
<del>func assertContainerList(srv *docker.Server, all bool, limit int, since, before string, expected []string) bool {
<add>func assertContainerList(srv *server.Server, all bool, limit int, since, before string, expected []string) bool {
<ide> job := srv.Eng.Job("containers")
<ide> job.SetenvBool("all", all)
<ide> job.SetenvInt("limit", limit)
<ide><path>integration/utils_test.go
<ide> import (
<ide> "testing"
<ide> "time"
<ide>
<del> "github.com/dotcloud/docker"
<ide> "github.com/dotcloud/docker/builtins"
<ide> "github.com/dotcloud/docker/engine"
<ide> "github.com/dotcloud/docker/runconfig"
<ide> "github.com/dotcloud/docker/runtime"
<add> "github.com/dotcloud/docker/server"
<ide> "github.com/dotcloud/docker/utils"
<ide> )
<ide>
<ide> func getContainer(eng *engine.Engine, id string, t utils.Fataler) *runtime.Conta
<ide> return c
<ide> }
<ide>
<del>func mkServerFromEngine(eng *engine.Engine, t utils.Fataler) *docker.Server {
<add>func mkServerFromEngine(eng *engine.Engine, t utils.Fataler) *server.Server {
<ide> iSrv := eng.Hack_GetGlobalVar("httpapi.server")
<ide> if iSrv == nil {
<ide> panic("Legacy server field not set in engine")
<ide> }
<del> srv, ok := iSrv.(*docker.Server)
<add> srv, ok := iSrv.(*server.Server)
<ide> if !ok {
<del> panic("Legacy server field in engine does not cast to *docker.Server")
<add> panic("Legacy server field in engine does not cast to *server.Server")
<ide> }
<ide> return srv
<ide> }
| 3
|
Go
|
Go
|
fix error handling for kill/process not found
|
e55bead518e4c72cdecf7de2e49db6c477cb58eb
|
<ide><path>daemon/kill.go
<ide> import (
<ide> "context"
<ide> "fmt"
<ide> "runtime"
<del> "strings"
<ide> "syscall"
<ide> "time"
<ide>
<add> "github.com/docker/docker/api/errdefs"
<ide> containerpkg "github.com/docker/docker/container"
<ide> "github.com/docker/docker/libcontainerd"
<ide> "github.com/docker/docker/pkg/signal"
<ide> func (daemon *Daemon) killWithSignal(container *containerpkg.Container, sig int)
<ide> }
<ide>
<ide> if err := daemon.kill(container, sig); err != nil {
<del> err = errors.Wrapf(err, "Cannot kill container %s", container.ID)
<del> // if container or process not exists, ignore the error
<del> // TODO: we shouldn't have to parse error strings from containerd
<del> if strings.Contains(err.Error(), "container not found") ||
<del> strings.Contains(err.Error(), "no such process") {
<del> logrus.Warnf("container kill failed because of 'container not found' or 'no such process': %s", err.Error())
<add> if errdefs.IsNotFound(err) {
<ide> unpause = false
<add> logrus.WithError(err).WithField("container", container.ID).WithField("action", "kill").Debug("container kill failed because of 'container not found' or 'no such process'")
<ide> } else {
<del> return err
<add> return errors.Wrapf(err, "Cannot kill container %s", container.ID)
<ide> }
<ide> }
<ide>
<ide> func (daemon *Daemon) Kill(container *containerpkg.Container) error {
<ide> // killPossibleDeadProcess is a wrapper around killSig() suppressing "no such process" error.
<ide> func (daemon *Daemon) killPossiblyDeadProcess(container *containerpkg.Container, sig int) error {
<ide> err := daemon.killWithSignal(container, sig)
<del> if err == syscall.ESRCH {
<add> if errdefs.IsNotFound(err) {
<ide> e := errNoSuchProcess{container.GetPID(), sig}
<ide> logrus.Debug(e)
<ide> return e
<ide><path>libcontainerd/client_daemon.go
<ide> import (
<ide> "github.com/containerd/containerd/archive"
<ide> "github.com/containerd/containerd/cio"
<ide> "github.com/containerd/containerd/content"
<add> "github.com/containerd/containerd/errdefs"
<ide> "github.com/containerd/containerd/images"
<ide> "github.com/containerd/containerd/linux/runctypes"
<ide> "github.com/containerd/typeurl"
<ide> func (c *client) SignalProcess(ctx context.Context, containerID, processID strin
<ide> if err != nil {
<ide> return err
<ide> }
<del> return p.Kill(ctx, syscall.Signal(signal))
<add> return wrapError(p.Kill(ctx, syscall.Signal(signal)))
<ide> }
<ide>
<ide> func (c *client) ResizeTerminal(ctx context.Context, containerID, processID string, width, height int) error {
<ide> func (c *client) writeContent(ctx context.Context, mediaType, ref string, r io.R
<ide> }
<ide>
<ide> func wrapError(err error) error {
<del> if err != nil {
<del> msg := err.Error()
<del> for _, s := range []string{"container does not exist", "not found", "no such container"} {
<del> if strings.Contains(msg, s) {
<del> return wrapNotFoundError(err)
<del> }
<add> if err == nil {
<add> return nil
<add> }
<add>
<add> switch {
<add> case errdefs.IsNotFound(err):
<add> return wrapNotFoundError(err)
<add> }
<add>
<add> msg := err.Error()
<add> for _, s := range []string{"container does not exist", "not found", "no such container"} {
<add> if strings.Contains(msg, s) {
<add> return wrapNotFoundError(err)
<ide> }
<ide> }
<ide> return err
| 2 |
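The kill/process-not-found patch above moves from matching containerd error message strings to a typed not-found check (`errdefs.IsNotFound`). Below is a minimal Go sketch of that pattern, assuming a behavioural `NotFound()` interface; the `notFoundError` type and `IsNotFound` helper are illustrative stand-ins, not the actual `errdefs` implementation.

```go
package main

import (
	"errors"
	"fmt"
)

// notFoundError marks an error as "not found" via a behavioural interface,
// so callers can classify it without parsing message strings.
type notFoundError struct{ cause error }

func (e notFoundError) Error() string { return e.cause.Error() }
func (e notFoundError) NotFound()     {}
func (e notFoundError) Unwrap() error { return e.cause }

// IsNotFound reports whether any error in the chain declares NotFound().
func IsNotFound(err error) bool {
	var nf interface{ NotFound() }
	return errors.As(err, &nf)
}

// kill pretends the runtime reported a missing process.
func kill(containerID string) error {
	return notFoundError{cause: fmt.Errorf("container %s: no such process", containerID)}
}

func main() {
	if err := kill("abc123"); IsNotFound(err) {
		fmt.Println("ignoring kill failure, process already gone:", err)
	} else if err != nil {
		fmt.Println("fatal:", err)
	}
}
```

The benefit over the removed `strings.Contains` checks is that classification keeps working even if the underlying runtime changes its message wording.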
Text | Text | fix links in test/common/readme.md | 5570df407aae1e329955b1093e0e5b8bde270213 |
<ide><path>test/common/README.md
<ide> See `common.expectWarning()` for usage.
<ide> Indicates whether 'opensslCli' is supported.
<ide>
<ide> ### platformTimeout(ms)
<del>* `ms` [<number>|<bigint>]
<del>* return [<number>|<bigint>]
<add>* `ms` [<number>] | [<bigint>]
<add>* return [<number>] | [<bigint>]
<ide>
<ide> Returns a timeout value based on detected conditions. For example, a debug build
<ide> may need extra time so the returned value will be larger than on a release
<ide> See [the WPT tests README][] for details.
<ide> [<Function>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function
<ide> [<Object>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object
<ide> [<RegExp>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/RegExp
<add>[<bigint>]: https://github.com/tc39/proposal-bigint
<ide> [<boolean>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type
<ide> [<number>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type
<ide> [<string>]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type
<add>[Web Platform Tests]: https://github.com/web-platform-tests/wpt
<ide> [`hijackstdio.hijackStdErr()`]: #hijackstderrlistener
<ide> [`hijackstdio.hijackStdOut()`]: #hijackstdoutlistener
<ide> [internationalization]: https://github.com/nodejs/node/wiki/Intl
<del>[Web Platform Tests]: https://github.com/web-platform-tests/wpt
<ide> [the WPT tests README]: ../wpt/README.md
| 1 |
Mixed | Javascript | adjust minimum length in generatekey('hmac', ...) | a5b87305255f4757545b1d9f60a5e6f94cb742c8 |
<ide><path>doc/api/crypto.md
<ide> changes:
<ide> * `options`: {Object}
<ide> * `length`: {number} The bit length of the key to generate. This must be a
<ide> value greater than 0.
<del> * If `type` is `'hmac'`, the minimum is 1, and the maximum length is
<add> * If `type` is `'hmac'`, the minimum is 8, and the maximum length is
<ide> 2<sup>31</sup>-1. If the value is not a multiple of 8, the generated
<ide> key will be truncated to `Math.floor(length / 8)`.
<ide> * If `type` is `'aes'`, the length must be one of `128`, `192`, or `256`.
<ide> added: v15.0.0
<ide> accepted values are `'hmac'` and `'aes'`.
<ide> * `options`: {Object}
<ide> * `length`: {number} The bit length of the key to generate.
<del> * If `type` is `'hmac'`, the minimum is 1, and the maximum length is
<add> * If `type` is `'hmac'`, the minimum is 8, and the maximum length is
<ide> 2<sup>31</sup>-1. If the value is not a multiple of 8, the generated
<ide> key will be truncated to `Math.floor(length / 8)`.
<ide> * If `type` is `'aes'`, the length must be one of `128`, `192`, or `256`.
<ide><path>lib/internal/crypto/keygen.js
<ide> function generateKeyJob(mode, keyType, options) {
<ide> const { length } = options;
<ide> switch (keyType) {
<ide> case 'hmac':
<del> validateInteger(length, 'options.length', 1, 2 ** 31 - 1);
<add> validateInteger(length, 'options.length', 8, 2 ** 31 - 1);
<ide> break;
<ide> case 'aes':
<ide> validateOneOf(length, 'options.length', kAesKeyLengths);
<ide><path>test/parallel/test-crypto-secret-keygen.js
<ide> assert.throws(() => generateKey('hmac', { length: -1 }, common.mustNotCall()), {
<ide> code: 'ERR_OUT_OF_RANGE'
<ide> });
<ide>
<add>assert.throws(() => generateKey('hmac', { length: 4 }, common.mustNotCall()), {
<add> code: 'ERR_OUT_OF_RANGE'
<add>});
<add>
<add>assert.throws(() => generateKey('hmac', { length: 7 }, common.mustNotCall()), {
<add> code: 'ERR_OUT_OF_RANGE'
<add>});
<add>
<ide> assert.throws(
<ide> () => generateKey('hmac', { length: 2 ** 31 }, common.mustNotCall()), {
<ide> code: 'ERR_OUT_OF_RANGE'
<ide> assert.throws(() => generateKeySync('hmac', { length: -1 }), {
<ide> code: 'ERR_OUT_OF_RANGE'
<ide> });
<ide>
<add>assert.throws(() => generateKeySync('hmac', { length: 4 }), {
<add> code: 'ERR_OUT_OF_RANGE'
<add>});
<add>
<add>assert.throws(() => generateKeySync('hmac', { length: 7 }), {
<add> code: 'ERR_OUT_OF_RANGE'
<add>});
<add>
<ide> assert.throws(
<ide> () => generateKeySync('hmac', { length: 2 ** 31 }), {
<ide> code: 'ERR_OUT_OF_RANGE'
| 3 |
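The generatekey('hmac', ...) patch above raises the minimum bit length from 1 to 8. A small Node.js sketch of the resulting behaviour, assuming Node 15+ where `crypto.generateKeySync` exists; the printed size follows the `Math.floor(length / 8)` truncation described in the quoted docs.

```js
'use strict';
const { generateKeySync } = require('crypto');

// 64 bits of key material yields an 8-byte HMAC secret.
const key = generateKeySync('hmac', { length: 64 });
console.log(key.export().byteLength); // 8

// Anything below the new minimum of 8 bits is rejected up front.
try {
  generateKeySync('hmac', { length: 7 });
} catch (err) {
  console.log(err.code); // ERR_OUT_OF_RANGE
}
```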
Text | Text | add version note for `has` property | 73e1fb4da4f8db524a572b5391dff65735c7560b |
<ide><path>docs/api-reference/next.config.js/headers.md
<ide> description: Add custom HTTP headers to your Next.js app.
<ide> </ul>
<ide> </details>
<ide>
<add><details>
<add> <summary><b>Version History</b></summary>
<add>
<add>| Version | Changes |
<add>| --------- | ------------ |
<add>| `v10.2.0` | `has` added. |
<add>
<add></details>
<add>
<ide> Headers allow you to set custom HTTP headers for an incoming request path.
<ide>
<ide> To set custom HTTP headers you can use the `headers` key in `next.config.js`:
<ide><path>docs/api-reference/next.config.js/redirects.md
<ide> description: Add redirects to your Next.js app.
<ide> </ul>
<ide> </details>
<ide>
<add><details>
<add> <summary><b>Version History</b></summary>
<add>
<add>| Version | Changes |
<add>| --------- | ------------ |
<add>| `v10.2.0` | `has` added. |
<add>
<add></details>
<add>
<ide> Redirects allow you to redirect an incoming request path to a different destination path.
<ide>
<ide> Redirects are only available on the Node.js environment and do not affect client-side routing.
<ide><path>docs/api-reference/next.config.js/rewrites.md
<ide> description: Add rewrites to your Next.js app.
<ide> </ul>
<ide> </details>
<ide>
<add><details>
<add> <summary><b>Version History</b></summary>
<add>
<add>| Version | Changes |
<add>| --------- | ------------ |
<add>| `v10.2.0` | `has` added. |
<add>
<add></details>
<add>
<ide> Rewrites allow you to map an incoming request path to a different destination path.
<ide>
<ide> Rewrites are only available on the Node.js environment and do not affect client-side routing.
| 3 |
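The `has` patch above only adds version notes, so for orientation here is a hedged sketch of what a `has` condition can look like in `next.config.js` on Next.js 10.2.0 or later; the path, header name, and values are made-up placeholders.

```js
// next.config.js (illustrative only; paths and header names are hypothetical)
module.exports = {
  async headers() {
    return [
      {
        source: '/account/:path*',
        // Apply the custom header only when the request carries this header.
        has: [{ type: 'header', key: 'x-beta-user', value: 'true' }],
        headers: [{ key: 'x-feature-flags', value: 'beta' }],
      },
    ];
  },
};
```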
Javascript | Javascript | add -webkit-transition for ngclass example | 8a5daaed42b5b3496fa482a6d7f4a680a4d68680 |
<ide><path>src/ng/directive/ngClass.js
<ide> function classDirective(name, selector) {
<ide> </file>
<ide> <file name="style.css">
<ide> .base-class {
<add> -webkit-transition:all cubic-bezier(0.250, 0.460, 0.450, 0.940) 0.5s;
<ide> transition:all cubic-bezier(0.250, 0.460, 0.450, 0.940) 0.5s;
<ide> }
<ide>
| 1 |
Java | Java | improve access to raw content in webtestclient | 8df0bc88d2f9f662919881ed3081f133656e37c8 |
<ide><path>spring-test/src/main/java/org/springframework/test/web/reactive/server/DefaultWebTestClient.java
<ide> private static class DefaultResponseSpec implements ResponseSpec {
<ide> DefaultResponseSpec(WiretapConnector.Info wiretapInfo, ClientResponse response,
<ide> @Nullable String uriTemplate, Duration timeout) {
<ide>
<del> this.exchangeResult = wiretapInfo.createExchangeResult(uriTemplate);
<add> this.exchangeResult = wiretapInfo.createExchangeResult(timeout, uriTemplate);
<ide> this.response = response;
<ide> this.timeout = timeout;
<ide> }
<ide> public BodyContentSpec expectBody() {
<ide> @Override
<ide> public <T> FluxExchangeResult<T> returnResult(Class<T> elementType) {
<ide> Flux<T> body = this.response.bodyToFlux(elementType);
<del> return new FluxExchangeResult<>(this.exchangeResult, body, this.timeout);
<add> return new FluxExchangeResult<>(this.exchangeResult, body);
<ide> }
<ide>
<ide> @Override
<ide> public <T> FluxExchangeResult<T> returnResult(ParameterizedTypeReference<T> elementType) {
<ide> Flux<T> body = this.response.bodyToFlux(elementType);
<del> return new FluxExchangeResult<>(this.exchangeResult, body, this.timeout);
<add> return new FluxExchangeResult<>(this.exchangeResult, body);
<ide> }
<ide> }
<ide>
<ide><path>spring-test/src/main/java/org/springframework/test/web/reactive/server/ExchangeResult.java
<ide> import java.util.List;
<ide> import java.util.stream.Collectors;
<ide>
<del>import reactor.core.publisher.MonoProcessor;
<add>import reactor.core.publisher.Mono;
<ide>
<ide> import org.springframework.http.HttpHeaders;
<ide> import org.springframework.http.HttpMethod;
<ide> import org.springframework.lang.Nullable;
<ide> import org.springframework.util.Assert;
<ide> import org.springframework.util.MultiValueMap;
<del>import org.springframework.util.ObjectUtils;
<ide>
<ide> /**
<ide> * Container for request and response details for exchanges performed through
<ide> public class ExchangeResult {
<ide>
<ide> private final ClientHttpResponse response;
<ide>
<del> private final MonoProcessor<byte[]> requestBody;
<add> private final Mono<byte[]> requestBody;
<ide>
<del> private final MonoProcessor<byte[]> responseBody;
<add> private final Mono<byte[]> responseBody;
<add>
<add> private final Duration timeout;
<ide>
<ide> @Nullable
<ide> private final String uriTemplate;
<ide> public class ExchangeResult {
<ide> * @param response the HTTP response
<ide> * @param requestBody capture of serialized request body content
<ide> * @param responseBody capture of serialized response body content
<add> * @param timeout how long to wait for content to materialize
<ide> * @param uriTemplate the URI template used to set up the request, if any
<ide> */
<ide> ExchangeResult(ClientHttpRequest request, ClientHttpResponse response,
<del> MonoProcessor<byte[]> requestBody, MonoProcessor<byte[]> responseBody,
<del> @Nullable String uriTemplate) {
<add> Mono<byte[]> requestBody, Mono<byte[]> responseBody, Duration timeout, @Nullable String uriTemplate) {
<ide>
<ide> Assert.notNull(request, "ClientHttpRequest is required");
<ide> Assert.notNull(response, "ClientHttpResponse is required");
<ide> public class ExchangeResult {
<ide> this.response = response;
<ide> this.requestBody = requestBody;
<ide> this.responseBody = responseBody;
<add> this.timeout = timeout;
<ide> this.uriTemplate = uriTemplate;
<ide> }
<ide>
<ide> public class ExchangeResult {
<ide> this.response = other.response;
<ide> this.requestBody = other.requestBody;
<ide> this.responseBody = other.responseBody;
<add> this.timeout = other.timeout;
<ide> this.uriTemplate = other.uriTemplate;
<ide> }
<ide>
<ide> public HttpHeaders getRequestHeaders() {
<ide> }
<ide>
<ide> /**
<del> * Return the raw request body content written as a {@code byte[]}.
<del> * @throws IllegalStateException if the request body is not fully written yet.
<add> * Return the raw request body content written through the request.
<add> * <p><strong>Note:</strong> If the request content has not been consumed
<add> * for any reason yet, use of this method will trigger consumption.
<add> * @throws IllegalStateException if the request body is not been fully written.
<ide> */
<ide> @Nullable
<ide> public byte[] getRequestBodyContent() {
<del> MonoProcessor<byte[]> body = this.requestBody;
<del> Assert.isTrue(body.isTerminated(), "Request body incomplete.");
<del> return body.block(Duration.ZERO);
<add> return this.requestBody.block(this.timeout);
<ide> }
<ide>
<ide>
<ide> public MultiValueMap<String, ResponseCookie> getResponseCookies() {
<ide> }
<ide>
<ide> /**
<del> * Return the raw request body content written as a {@code byte[]}.
<del> * @throws IllegalStateException if the response is not fully read yet.
<add> * Return the raw request body content written to the response.
<add> * <p><strong>Note:</strong> If the response content has not been consumed
<add> * yet, use of this method will trigger consumption.
<add> * @throws IllegalStateException if the response is not been fully read.
<ide> */
<ide> @Nullable
<ide> public byte[] getResponseBodyContent() {
<del> MonoProcessor<byte[]> body = this.responseBody;
<del> Assert.state(body.isTerminated(), "Response body incomplete");
<del> return body.block(Duration.ZERO);
<add> return this.responseBody.block(this.timeout);
<ide> }
<ide>
<ide>
<ide> private String formatHeaders(HttpHeaders headers, String delimiter) {
<ide> .collect(Collectors.joining(delimiter));
<ide> }
<ide>
<del> private String formatBody(@Nullable MediaType contentType, MonoProcessor<byte[]> body) {
<del> if (body.isSuccess()) {
<del> byte[] bytes = body.block(Duration.ZERO);
<del> if (ObjectUtils.isEmpty(bytes)) {
<del> return "No content";
<del> }
<del> if (contentType == null) {
<del> return "Unknown content type (" + bytes.length + " bytes)";
<del> }
<del> Charset charset = contentType.getCharset();
<del> if (charset != null) {
<del> return new String(bytes, charset);
<del> }
<del> if (PRINTABLE_MEDIA_TYPES.stream().anyMatch(contentType::isCompatibleWith)) {
<del> return new String(bytes, StandardCharsets.UTF_8);
<del> }
<del> return "Unknown charset (" + bytes.length + " bytes)";
<del> }
<del> else if (body.isError()) {
<del> return "I/O failure: " + body.getError();
<del> }
<del> else {
<del> return "Content not available yet";
<del> }
<add> @Nullable
<add> private String formatBody(@Nullable MediaType contentType, Mono<byte[]> body) {
<add> return body
<add> .map(bytes -> {
<add> if (contentType == null) {
<add> return "Unknown content type (" + bytes.length + " bytes)";
<add> }
<add> Charset charset = contentType.getCharset();
<add> if (charset != null) {
<add> return new String(bytes, charset);
<add> }
<add> if (PRINTABLE_MEDIA_TYPES.stream().anyMatch(contentType::isCompatibleWith)) {
<add> return new String(bytes, StandardCharsets.UTF_8);
<add> }
<add> return "Unknown charset (" + bytes.length + " bytes)";
<add> })
<add> .defaultIfEmpty("No content")
<add> .onErrorResume(ex -> Mono.just("Failed to obtain content: " + ex.getMessage()))
<add> .block(this.timeout);
<ide> }
<ide>
<ide> }
<ide><path>spring-test/src/main/java/org/springframework/test/web/reactive/server/FluxExchangeResult.java
<ide>
<ide> package org.springframework.test.web.reactive.server;
<ide>
<del>import java.time.Duration;
<ide> import java.util.function.Consumer;
<ide>
<ide> import reactor.core.publisher.Flux;
<del>import reactor.core.publisher.Mono;
<del>
<del>import org.springframework.lang.Nullable;
<ide>
<ide> /**
<ide> * {@code ExchangeResult} variant with the response body decoded as
<ide> */
<ide> public class FluxExchangeResult<T> extends ExchangeResult {
<ide>
<del> private static final IllegalStateException TIMEOUT_ERROR =
<del> new IllegalStateException("Response timeout: for infinite streams " +
<del> "use getResponseBody() first with explicit cancellation, e.g. via take(n).");
<del>
<del>
<ide> private final Flux<T> body;
<ide>
<del> private final Duration timeout;
<del>
<ide>
<del> FluxExchangeResult(ExchangeResult result, Flux<T> body, Duration timeout) {
<add> FluxExchangeResult(ExchangeResult result, Flux<T> body) {
<ide> super(result);
<ide> this.body = body;
<del> this.timeout = timeout;
<ide> }
<ide>
<ide>
<ide> public Flux<T> getResponseBody() {
<ide> return this.body;
<ide> }
<ide>
<del> /**
<del> * {@inheritDoc}
<del> * <p><strong>Note:</strong> this method should typically be called after
<del> * the response has been consumed in full via {@link #getResponseBody()}.
<del> * Calling it first will cause the response {@code Flux<T>} to be consumed
<del> * via {@code getResponseBody.ignoreElements()}.
<del> */
<del> @Override
<del> @Nullable
<del> public byte[] getResponseBodyContent() {
<del> return this.body.ignoreElements()
<del> .timeout(this.timeout, Mono.error(TIMEOUT_ERROR))
<del> .then(Mono.defer(() -> Mono.justOrEmpty(super.getResponseBodyContent())))
<del> .block();
<del> }
<del>
<ide> /**
<ide> * Invoke the given consumer within {@link #assertWithDiagnostics(Runnable)}
<ide> * passing {@code "this"} instance to it. This method allows the following,
<ide><path>spring-test/src/main/java/org/springframework/test/web/reactive/server/WiretapConnector.java
<ide> package org.springframework.test.web.reactive.server;
<ide>
<ide> import java.net.URI;
<add>import java.time.Duration;
<ide> import java.util.Map;
<ide> import java.util.concurrent.ConcurrentHashMap;
<ide> import java.util.concurrent.atomic.AtomicReference;
<ide> import java.util.function.Function;
<ide>
<ide> import org.reactivestreams.Publisher;
<add>import org.reactivestreams.Subscription;
<ide> import reactor.core.publisher.Flux;
<ide> import reactor.core.publisher.Mono;
<ide> import reactor.core.publisher.MonoProcessor;
<ide> public Info(WiretapClientHttpRequest request, WiretapClientHttpResponse response
<ide> }
<ide>
<ide>
<del> public ExchangeResult createExchangeResult(@Nullable String uriTemplate) {
<add> public ExchangeResult createExchangeResult(Duration timeout, @Nullable String uriTemplate) {
<ide> return new ExchangeResult(this.request, this.response,
<del> this.request.getRecorder().getContent(), this.response.getRecorder().getContent(), uriTemplate);
<add> Mono.defer(() -> this.request.getRecorder().getContent()),
<add> Mono.defer(() -> this.response.getRecorder().getContent()),
<add> timeout, uriTemplate);
<ide> }
<ide> }
<ide>
<ide> final static class WiretapRecorder {
<ide>
<ide> private static final DataBufferFactory bufferFactory = new DefaultDataBufferFactory();
<ide>
<del> public static final byte[] EMPTY_CONTENT = new byte[0];
<del>
<ide>
<ide> @Nullable
<del> private final Publisher<? extends DataBuffer> publisher;
<add> private final Flux<? extends DataBuffer> publisher;
<ide>
<ide> @Nullable
<del> private final Publisher<? extends Publisher<? extends DataBuffer>> publisherNested;
<add> private final Flux<? extends Publisher<? extends DataBuffer>> publisherNested;
<ide>
<ide> private final DataBuffer buffer;
<ide>
<ide> private final MonoProcessor<byte[]> content;
<ide>
<add> private volatile boolean subscriberRegistered;
<add>
<ide>
<del> private WiretapRecorder(@Nullable Publisher<? extends DataBuffer> publisher,
<add> public WiretapRecorder(@Nullable Publisher<? extends DataBuffer> publisher,
<ide> @Nullable Publisher<? extends Publisher<? extends DataBuffer>> publisherNested) {
<ide>
<ide> if (publisher != null && publisherNested != null) {
<ide> private WiretapRecorder(@Nullable Publisher<? extends DataBuffer> publisher,
<ide>
<ide> this.publisher = publisher != null ?
<ide> Flux.from(publisher)
<add> .doOnSubscribe(this::handleOnSubscribe)
<ide> .doOnNext(this::handleOnNext)
<ide> .doOnError(this::handleOnError)
<ide> .doOnCancel(this::handleOnComplete)
<ide> .doOnComplete(this::handleOnComplete) : null;
<ide>
<ide> this.publisherNested = publisherNested != null ?
<ide> Flux.from(publisherNested)
<add> .doOnSubscribe(this::handleOnSubscribe)
<ide> .map(p -> Flux.from(p).doOnNext(this::handleOnNext).doOnError(this::handleOnError))
<ide> .doOnError(this::handleOnError)
<ide> .doOnCancel(this::handleOnComplete)
<ide> .doOnComplete(this::handleOnComplete) : null;
<ide>
<ide> this.buffer = bufferFactory.allocateBuffer();
<ide> this.content = MonoProcessor.create();
<del>
<del> if (this.publisher == null && this.publisherNested == null) {
<del> this.content.onNext(EMPTY_CONTENT);
<del> }
<ide> }
<ide>
<ide>
<ide> public Publisher<? extends Publisher<? extends DataBuffer>> getNestedPublisherTo
<ide> return this.publisherNested;
<ide> }
<ide>
<del> public MonoProcessor<byte[]> getContent() {
<del> return this.content;
<add> public Mono<byte[]> getContent() {
<add> // No publisher (e.g. request#setComplete)
<add> if (this.publisher == null && this.publisherNested == null) {
<add> return Mono.empty();
<add> }
<add> if (this.content.isTerminated()) {
<add> return this.content;
<add> }
<add> if (this.subscriberRegistered) {
<add> return Mono.error(new IllegalStateException(
<add> "Subscriber registered but content is not yet fully consumed."));
<add> }
<add> else {
<add> // No subscriber, e.g.:
<add> // - mock server request body never consumed (error before read)
<add> // - FluxExchangeResult#getResponseBodyContent called
<add> (this.publisher != null ? this.publisher : this.publisherNested)
<add> .onErrorMap(ex -> new IllegalStateException(
<add> "Content was not been consumed and " +
<add> "an error was raised on attempt to produce it:", ex))
<add> .subscribe();
<add> return this.content;
<add> }
<ide> }
<ide>
<ide>
<add> private void handleOnSubscribe(Subscription subscription) {
<add> this.subscriberRegistered = true;
<add> }
<add>
<ide> private void handleOnNext(DataBuffer nextBuffer) {
<ide> this.buffer.write(nextBuffer);
<ide> }
<ide><path>spring-test/src/test/java/org/springframework/test/web/reactive/server/HeaderAssertionTests.java
<ide> package org.springframework.test.web.reactive.server;
<ide>
<ide> import java.net.URI;
<add>import java.time.Duration;
<ide> import java.time.ZoneId;
<ide> import java.time.ZonedDateTime;
<ide> import java.util.concurrent.TimeUnit;
<ide> private HeaderAssertions headerAssertions(HttpHeaders responseHeaders) {
<ide> MonoProcessor<byte[]> emptyContent = MonoProcessor.create();
<ide> emptyContent.onComplete();
<ide>
<del> ExchangeResult result = new ExchangeResult(request, response, emptyContent, emptyContent, null);
<add> ExchangeResult result = new ExchangeResult(request, response, emptyContent, emptyContent, Duration.ZERO, null);
<ide> return new HeaderAssertions(result, mock(WebTestClient.ResponseSpec.class));
<ide> }
<ide>
<ide><path>spring-test/src/test/java/org/springframework/test/web/reactive/server/MockServerTests.java
<ide> import java.util.Arrays;
<ide>
<ide> import org.junit.Test;
<add>import reactor.core.publisher.Flux;
<ide> import reactor.core.publisher.Mono;
<ide>
<ide> import org.springframework.core.io.buffer.DataBuffer;
<ide> import org.springframework.core.io.buffer.DefaultDataBufferFactory;
<ide> import org.springframework.http.HttpHeaders;
<ide> import org.springframework.http.HttpStatus;
<add>import org.springframework.http.MediaType;
<ide> import org.springframework.http.ResponseCookie;
<ide> import org.springframework.http.server.reactive.ServerHttpResponse;
<ide>
<del>import static java.nio.charset.StandardCharsets.UTF_8;
<del>import static org.junit.Assert.assertEquals;
<add>import static java.nio.charset.StandardCharsets.*;
<add>import static org.junit.Assert.*;
<ide>
<ide> /**
<ide> * Test scenarios involving a mock server.
<ide> public class MockServerTests {
<ide>
<ide>
<ide> @Test // SPR-15674 (in comments)
<del> public void mutateDoesNotCreateNewSession() throws Exception {
<add> public void mutateDoesNotCreateNewSession() {
<ide>
<ide> WebTestClient client = WebTestClient
<ide> .bindToWebHandler(exchange -> {
<ide> public void mutateDoesNotCreateNewSession() throws Exception {
<ide> return exchange.getSession()
<ide> .map(session -> session.getAttributeOrDefault("foo", "none"))
<ide> .flatMap(value -> {
<del> byte[] bytes = value.getBytes(UTF_8);
<del> DataBuffer buffer = new DefaultDataBufferFactory().wrap(bytes);
<add> DataBuffer buffer = toDataBuffer(value);
<ide> return exchange.getResponse().writeWith(Mono.just(buffer));
<ide> });
<ide> }
<ide> public void mutateDoesNotCreateNewSession() throws Exception {
<ide> }
<ide>
<ide> @Test // SPR-16059
<del> public void mutateDoesCopy() throws Exception {
<add> public void mutateDoesCopy() {
<ide>
<ide> WebTestClient.Builder builder = WebTestClient
<ide> .bindToWebHandler(exchange -> exchange.getResponse().setComplete())
<ide> public void mutateDoesCopy() throws Exception {
<ide> }
<ide>
<ide> @Test // SPR-16124
<del> public void exchangeResultHasCookieHeaders() throws Exception {
<add> public void exchangeResultHasCookieHeaders() {
<ide>
<ide> ExchangeResult result = WebTestClient
<ide> .bindToWebHandler(exchange -> {
<ide> public void exchangeResultHasCookieHeaders() throws Exception {
<ide> result.getRequestHeaders().get(HttpHeaders.COOKIE));
<ide> }
<ide>
<add> @Test
<add> public void responseBodyContentWithFluxExchangeResult() {
<add>
<add> FluxExchangeResult<String> result = WebTestClient
<add> .bindToWebHandler(exchange -> {
<add> ServerHttpResponse response = exchange.getResponse();
<add> response.getHeaders().setContentType(MediaType.TEXT_PLAIN);
<add> return response.writeWith(Flux.just(toDataBuffer("body")));
<add> })
<add> .build()
<add> .get().uri("/")
<add> .exchange()
<add> .expectStatus().isOk()
<add> .returnResult(String.class);
<add>
<add> // Get the raw content without consuming the response body flux..
<add> byte[] bytes = result.getResponseBodyContent();
<add>
<add> assertNotNull(bytes);
<add> assertEquals("body", new String(bytes, UTF_8));
<add> }
<add>
<add>
<add> private DataBuffer toDataBuffer(String value) {
<add> byte[] bytes = value.getBytes(UTF_8);
<add> return new DefaultDataBufferFactory().wrap(bytes);
<add> }
<add>
<ide> }
<ide><path>spring-test/src/test/java/org/springframework/test/web/reactive/server/StatusAssertionTests.java
<ide> package org.springframework.test.web.reactive.server;
<ide>
<ide> import java.net.URI;
<add>import java.time.Duration;
<ide>
<ide> import org.junit.Test;
<ide> import reactor.core.publisher.MonoProcessor;
<ide> private StatusAssertions statusAssertions(HttpStatus status) {
<ide> MonoProcessor<byte[]> emptyContent = MonoProcessor.create();
<ide> emptyContent.onComplete();
<ide>
<del> ExchangeResult result = new ExchangeResult(request, response, emptyContent, emptyContent, null);
<add> ExchangeResult result = new ExchangeResult(request, response, emptyContent, emptyContent, Duration.ZERO, null);
<ide> return new StatusAssertions(result, mock(WebTestClient.ResponseSpec.class));
<ide> }
<ide>
<ide><path>spring-test/src/test/java/org/springframework/test/web/reactive/server/WiretapConnectorTests.java
<ide> package org.springframework.test.web.reactive.server;
<ide>
<ide> import java.net.URI;
<add>import java.time.Duration;
<ide>
<ide> import org.junit.Test;
<ide> import reactor.core.publisher.Mono;
<ide> public void captureAndClaim() {
<ide> function.exchange(clientRequest).block(ofMillis(0));
<ide>
<ide> WiretapConnector.Info actual = wiretapConnector.claimRequest("1");
<del> ExchangeResult result = actual.createExchangeResult(null);
<add> ExchangeResult result = actual.createExchangeResult(Duration.ZERO, null);
<ide> assertEquals(HttpMethod.GET, result.getMethod());
<ide> assertEquals("/test", result.getUrl().toString());
<ide> }
<ide><path>spring-test/src/test/java/org/springframework/test/web/reactive/server/samples/ErrorTests.java
<ide>
<ide> package org.springframework.test.web.reactive.server.samples;
<ide>
<add>import java.nio.charset.StandardCharsets;
<add>
<ide> import org.junit.Test;
<ide>
<ide> import org.springframework.http.HttpStatus;
<add>import org.springframework.http.MediaType;
<add>import org.springframework.test.web.reactive.server.EntityExchangeResult;
<ide> import org.springframework.test.web.reactive.server.WebTestClient;
<ide> import org.springframework.web.bind.annotation.GetMapping;
<add>import org.springframework.web.bind.annotation.PostMapping;
<add>import org.springframework.web.bind.annotation.RequestBody;
<ide> import org.springframework.web.bind.annotation.RestController;
<ide>
<add>import static org.junit.Assert.*;
<add>
<ide> /**
<ide> * Tests with error status codes or error conditions.
<ide> *
<ide> public class ErrorTests {
<ide>
<ide>
<ide> @Test
<del> public void notFound() throws Exception {
<add> public void notFound(){
<ide> this.client.get().uri("/invalid")
<ide> .exchange()
<ide> .expectStatus().isNotFound()
<ide> .expectBody(Void.class);
<ide> }
<ide>
<ide> @Test
<del> public void serverException() throws Exception {
<add> public void serverException() {
<ide> this.client.get().uri("/server-error")
<ide> .exchange()
<ide> .expectStatus().isEqualTo(HttpStatus.INTERNAL_SERVER_ERROR)
<ide> .expectBody(Void.class);
<ide> }
<ide>
<add> @Test // SPR-17363
<add> public void badRequestBeforeRequestBodyConsumed() {
<add> EntityExchangeResult<Void> result = this.client.post()
<add> .uri("/post")
<add> .contentType(MediaType.APPLICATION_JSON_UTF8)
<add> .syncBody(new Person("Dan"))
<add> .exchange()
<add> .expectStatus().isBadRequest()
<add> .expectBody().isEmpty();
<add>
<add> byte[] content = result.getRequestBodyContent();
<add> assertNotNull(content);
<add> assertEquals("{\"name\":\"Dan\"}", new String(content, StandardCharsets.UTF_8));
<add> }
<add>
<ide>
<ide> @RestController
<ide> static class TestController {
<ide> static class TestController {
<ide> void handleAndThrowException() {
<ide> throw new IllegalStateException("server error");
<ide> }
<add>
<add> @PostMapping(path = "/post", params = "p")
<add> void handlePost(@RequestBody Person person) {
<add> }
<ide> }
<ide>
<ide> }
| 9 |
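The WebTestClient patch above makes the raw response bytes readable even when the decoded `Flux` has not been consumed. A short Java sketch mirroring the new `responseBodyContentWithFluxExchangeResult` test; it assumes a handler bound with `bindToWebHandler`, as in the patch.

```java
import java.nio.charset.StandardCharsets;

import reactor.core.publisher.Mono;

import org.springframework.core.io.buffer.DefaultDataBufferFactory;
import org.springframework.test.web.reactive.server.FluxExchangeResult;
import org.springframework.test.web.reactive.server.WebTestClient;

public class RawContentSketch {

	public static void main(String[] args) {
		WebTestClient client = WebTestClient
				.bindToWebHandler(exchange -> exchange.getResponse().writeWith(
						Mono.just(new DefaultDataBufferFactory()
								.wrap("body".getBytes(StandardCharsets.UTF_8)))))
				.build();

		FluxExchangeResult<String> result = client.get().uri("/")
				.exchange()
				.expectStatus().isOk()
				.returnResult(String.class);

		// After the change this works without draining getResponseBody() first.
		byte[] raw = result.getResponseBodyContent();
		System.out.println(new String(raw, StandardCharsets.UTF_8)); // body
	}
}
```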
Python | Python | use set.discard instead of set.remove | e24d6ae3b6d543aba6ab9ed176a550c409dfe04e |
<ide><path>celery/worker/state.py
<ide> def task_accepted(request):
<ide>
<ide> def task_ready(request):
<ide> """Updates global state when a task is ready."""
<del> try:
<del> active_requests.remove(request)
<del> except KeyError:
<del> pass
<add> active_requests.discard(request)
<ide>
<ide>
<ide> class Persistent(object):
| 1 |
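The celery patch above swaps a try/except around `set.remove` for `set.discard`. A tiny Python illustration of the difference, using placeholder request ids.

```python
active_requests = {"req-1", "req-2"}

active_requests.discard("req-2")    # removes the member
active_requests.discard("req-404")  # missing member: silently ignored

try:
    active_requests.remove("req-404")
except KeyError:
    print("remove() raises KeyError for missing members; discard() does not")

print(active_requests)  # {'req-1'}
```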
Text | Text | move changelog entry to the top [ci skip] | f9e0ec544989c2d93ed40d0acdbc627f6a624eb1 |
<ide><path>actionmailer/CHANGELOG.md
<add>* Add `_mailer` suffix to mailers created via generator, following the same
<add> naming convention used in controllers and jobs.
<add>
<add> *Carlos Souza*
<add>
<ide> * Remove deprecate `*_path` helpers in email views.
<ide>
<ide> *Rafael Mendonça França*
<ide>
<ide> *Rafael Mendonça França*
<ide>
<del>* Add `_mailer` suffix to mailers created via generator, following the same
<del>naming convention used in controllers and jobs.
<del>
<del> Closes #18074.
<del>
<del> *Carlos Souza*
<del>
<ide> Please check [4-2-stable](https://github.com/rails/rails/blob/4-2-stable/actionmailer/CHANGELOG.md) for previous changes.
| 1 |
Python | Python | fix fp16_backend field | 51adb97cd644a5840d971868d18c1d436fd6ff5d |
<ide><path>src/transformers/training_args.py
<ide> class TrainingArguments:
<ide> )
<ide> fp16_backend: str = field(
<ide> default="auto",
<del> metadata={"help": "The backend to be used for mixed precision. Should be one of 'auto', 'amp' or 'apex'."},
<add> metadata={"help": "The backend to be used for mixed precision.", "choices": ["auto", "amp", "apex"]},
<ide> )
<ide>
<ide> def __post_init__(self):
| 1 |
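The fp16_backend patch above moves the allowed values into a `choices` list in the dataclass field metadata. Assuming that metadata (including `choices`) is forwarded to `argparse.add_argument` by the library's argument parser, the effect is equivalent to the plain `argparse` sketch below.

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--fp16_backend",
    default="auto",
    choices=["auto", "amp", "apex"],
    help="The backend to be used for mixed precision.",
)

args = parser.parse_args(["--fp16_backend", "amp"])
print(args.fp16_backend)  # amp

# An unsupported value now fails at parse time with a usage error:
# parser.parse_args(["--fp16_backend", "bf16"])  -> SystemExit
```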
Text | Text | replace http to https of link urls | 3975799f260bb1aea77f3e8f4546182b53f2d6b8 |
<ide><path>doc/api/cli.md
<ide> $ node --max-old-space-size=1536 index.js
<ide> [emit_warning]: process.html#process_process_emitwarning_warning_type_code_ctor
<ide> [experimental ECMAScript Module loader]: esm.html#esm_experimental_loaders
<ide> [jitless]: https://v8.dev/blog/jitless
<del>[libuv threadpool documentation]: http://docs.libuv.org/en/latest/threadpool.html
<add>[libuv threadpool documentation]: https://docs.libuv.org/en/latest/threadpool.html
<ide> [remote code execution]: https://www.owasp.org/index.php/Code_Injection
<ide><path>doc/api/dns.md
<ide> The [`dns.setServers()`][] method affects only [`dns.resolve()`][],
<ide> [`dns.lookup()`][]).
<ide>
<ide> This method works much like
<del>[resolve.conf](http://man7.org/linux/man-pages/man5/resolv.conf.5.html).
<add>[resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html).
<ide> That is, if attempting to resolve with the first server provided results in a
<ide> `NOTFOUND` error, the `resolve()` method will *not* attempt to resolve with
<ide> subsequent servers provided. Fallback DNS servers will only be used if the
<ide> The `dnsPromises.setServers()` method must not be called while a DNS query is in
<ide> progress.
<ide>
<ide> This method works much like
<del>[resolve.conf](http://man7.org/linux/man-pages/man5/resolv.conf.5.html).
<add>[resolve.conf](https://man7.org/linux/man-pages/man5/resolv.conf.5.html).
<ide> That is, if attempting to resolve with the first server provided results in a
<ide> `NOTFOUND` error, the `resolve()` method will *not* attempt to resolve with
<ide> subsequent servers provided. Fallback DNS servers will only be used if the
<ide><path>doc/api/errors.md
<ide> closed.
<ide> [`dgram.createSocket()`]: dgram.html#dgram_dgram_createsocket_options_callback
<ide> [`dgram.disconnect()`]: dgram.html#dgram_socket_disconnect
<ide> [`dgram.remoteAddress()`]: dgram.html#dgram_socket_remoteaddress
<del>[`errno`(3) man page]: http://man7.org/linux/man-pages/man3/errno.3.html
<add>[`errno`(3) man page]: https://man7.org/linux/man-pages/man3/errno.3.html
<ide> [`fs.Dir`]: fs.html#fs_class_fs_dir
<ide> [`fs.readFileSync`]: fs.html#fs_fs_readfilesync_path_options
<ide> [`fs.readdir`]: fs.html#fs_fs_readdir_path_options_callback
<ide> closed.
<ide> [`hash.update()`]: crypto.html#crypto_hash_update_data_inputencoding
<ide> [`http`]: http.html
<ide> [`https`]: https.html
<del>[`libuv Error handling`]: http://docs.libuv.org/en/v1.x/errors.html
<add>[`libuv Error handling`]: https://docs.libuv.org/en/v1.x/errors.html
<ide> [`net`]: net.html
<ide> [`new URL(input)`]: url.html#url_new_url_input_base
<ide> [`new URLSearchParams(iterable)`]: url.html#url_new_urlsearchparams_iterable
<ide> closed.
<ide> [policy]: policy.html
<ide> [RFC 7230 Section 3]: https://tools.ietf.org/html/rfc7230#section-3
<ide> [stream-based]: stream.html
<del>[syscall]: http://man7.org/linux/man-pages/man2/syscalls.2.html
<add>[syscall]: https://man7.org/linux/man-pages/man2/syscalls.2.html
<ide> [Subresource Integrity specification]: https://www.w3.org/TR/SRI/#the-integrity-attribute
<ide> [try-catch]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/try...catch
<ide> [vm]: vm.html
<ide><path>doc/api/esm.md
<ide> success!
<ide> [`module.createRequire()`]: modules.html#modules_module_createrequire_filename
<ide> [`module.syncBuiltinESMExports()`]: modules.html#modules_module_syncbuiltinesmexports
<ide> [`transformSource` hook]: #esm_code_transformsource_code_hook
<del>[ArrayBuffer]: http://www.ecma-international.org/ecma-262/6.0/#sec-arraybuffer-constructor
<add>[ArrayBuffer]: https://www.ecma-international.org/ecma-262/6.0/#sec-arraybuffer-constructor
<ide> [SharedArrayBuffer]: https://tc39.es/ecma262/#sec-sharedarraybuffer-constructor
<del>[string]: http://www.ecma-international.org/ecma-262/6.0/#sec-string-constructor
<del>[TypedArray]: http://www.ecma-international.org/ecma-262/6.0/#sec-typedarray-objects
<del>[Uint8Array]: http://www.ecma-international.org/ecma-262/6.0/#sec-uint8array
<add>[string]: https://www.ecma-international.org/ecma-262/6.0/#sec-string-constructor
<add>[TypedArray]: https://www.ecma-international.org/ecma-262/6.0/#sec-typedarray-objects
<add>[Uint8Array]: https://www.ecma-international.org/ecma-262/6.0/#sec-uint8array
<ide> [`util.TextDecoder`]: util.html#util_class_util_textdecoder
<ide> [import an ES or CommonJS module for its side effects only]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import#Import_a_module_for_its_side_effects_only
<ide> [special scheme]: https://url.spec.whatwg.org/#special-scheme
<ide><path>doc/api/fs.md
<ide> the file contents.
<ide> [`fsPromises.open()`]: #fs_fspromises_open_path_flags_mode
<ide> [`fsPromises.opendir()`]: #fs_fspromises_opendir_path_options
<ide> [`fsPromises.utimes()`]: #fs_fspromises_utimes_path_atime_mtime
<del>[`inotify(7)`]: http://man7.org/linux/man-pages/man7/inotify.7.html
<add>[`inotify(7)`]: https://man7.org/linux/man-pages/man7/inotify.7.html
<ide> [`kqueue(2)`]: https://www.freebsd.org/cgi/man.cgi?query=kqueue&sektion=2
<ide> [`net.Socket`]: net.html#net_class_net_socket
<ide> [`stat()`]: fs.html#fs_fs_stat_path_options_callback
<ide><path>doc/api/n-api.md
<ide> This API may only be called from the main thread.
<ide> [`napi_wrap`]: #n_api_napi_wrap
<ide> [`node_api.h`]: https://github.com/nodejs/node/blob/master/src/node_api.h
<ide> [`process.release`]: process.html#process_process_release
<del>[`uv_ref`]: http://docs.libuv.org/en/v1.x/handle.html#c.uv_ref
<del>[`uv_unref`]: http://docs.libuv.org/en/v1.x/handle.html#c.uv_unref
<add>[`uv_ref`]: https://docs.libuv.org/en/v1.x/handle.html#c.uv_ref
<add>[`uv_unref`]: https://docs.libuv.org/en/v1.x/handle.html#c.uv_unref
<ide> [async_hooks `type`]: async_hooks.html#async_hooks_type
<ide> [context-aware addons]: addons.html#addons_context_aware_addons
<ide> [docs]: https://github.com/nodejs/node-addon-api#api-documentation
<ide><path>doc/api/net.md
<ide> Returns `true` if input is a version 6 IP address, otherwise returns `false`.
<ide> [`server.listen(handle)`]: #net_server_listen_handle_backlog_callback
<ide> [`server.listen(options)`]: #net_server_listen_options_callback
<ide> [`server.listen(path)`]: #net_server_listen_path_backlog_callback
<del>[`socket(7)`]: http://man7.org/linux/man-pages/man7/socket.7.html
<add>[`socket(7)`]: https://man7.org/linux/man-pages/man7/socket.7.html
<ide> [`socket.connect()`]: #net_socket_connect
<ide> [`socket.connect(options)`]: #net_socket_connect_options_connectlistener
<ide> [`socket.connect(path)`]: #net_socket_connect_path_connectlistener
<ide><path>doc/api/process.md
<ide> cases:
<ide> [process_warning]: #process_event_warning
<ide> [report documentation]: report.html
<ide> [terminal raw mode]: tty.html#tty_readstream_setrawmode_mode
<del>[uv_rusage_t]: http://docs.libuv.org/en/v1.x/misc.html#c.uv_rusage_t
<add>[uv_rusage_t]: https://docs.libuv.org/en/v1.x/misc.html#c.uv_rusage_t
<ide> [wikipedia_minor_fault]: https://en.wikipedia.org/wiki/Page_fault#Minor
<ide> [wikipedia_major_fault]: https://en.wikipedia.org/wiki/Page_fault#Major
<ide><path>doc/changelogs/CHANGELOG_ARCHIVE.md
<ide> https://github.com/nodejs/node/commit/bb0d1e65e1671aaeb21fac186b066701da0bc33b
<ide>
<ide> * Major API Changes
<ide> * Promises removed. See
<del> http://groups.google.com/group/nodejs/msg/426f3071f3eec16b
<del> http://groups.google.com/group/nodejs/msg/df199d233ff17efa
<add> https://groups.google.com/group/nodejs/msg/426f3071f3eec16b
<add> https://groups.google.com/group/nodejs/msg/df199d233ff17efa
<ide> The API for fs was
<ide> fs.readdir("/usr").addCallback(function (files) {
<ide> puts("/usr files: " + files);
<ide> https://github.com/nodejs/node/commit/77d407df2826b20e9177c26c0d2bb4481e497937
<ide> * Move EventEmitter.prototype.emit() completely into C++.
<ide>
<ide> * Bugfix: Fix memory leak in event emitters.
<del> http://groups.google.com/group/nodejs/browse_thread/thread/a8d1dfc2fd57a6d1
<add> https://groups.google.com/group/nodejs/browse_thread/thread/a8d1dfc2fd57a6d1
<ide>
<ide> * Bugfix: Had problems reading scripts with non-ascii characters.
<ide> * Bugfix: Fix Detach() in node::Server
<ide><path>doc/changelogs/CHANGELOG_IOJS.md
<ide> See https://github.com/nodejs/io.js/labels/confirmed-bug for complete and curren
<ide> * **dgram**: If an error occurs within `socket.send()` and a callback has been provided, the error is only passed as the first argument to the callback and not emitted on the `socket` object; previous behavior was to do both (Matteo Collina & Chris Dickinson) [#1796](https://github.com/nodejs/node/pull/1796)
<ide> * **freelist**: Deprecate the undocumented `freelist` core module (Sakthipriyan Vairamani) [#2176](https://github.com/nodejs/node/pull/2176).
<ide> * **http**:
<del> * Status codes now all use the official [IANA names](http://www.iana.org/assignments/http-status-codes) as per [RFC7231](https://tools.ietf.org/html/rfc7231), e.g. `http.STATUS_CODES[414]` now returns `'URI Too Long'` rather than `'Request-URI Too Large'` (jomo) [#1470](https://github.com/nodejs/node/pull/1470).
<add> * Status codes now all use the official [IANA names](https://www.iana.org/assignments/http-status-codes) as per [RFC7231](https://tools.ietf.org/html/rfc7231), e.g. `http.STATUS_CODES[414]` now returns `'URI Too Long'` rather than `'Request-URI Too Large'` (jomo) [#1470](https://github.com/nodejs/node/pull/1470).
<ide> * Calling .getName() on an HTTP agent no longer returns a trailing colon, HTTPS agents will no longer return an extra colon near the middle of the string (Brendan Ashworth) [#1617](https://github.com/nodejs/node/pull/1617).
<ide> * **node**:
<ide> * `NODE_MODULE_VERSION` has been bumped to `45` to reflect the break in ABI (Rod Vagg) [#2096](https://github.com/nodejs/node/pull/2096).
<ide><path>doc/guides/cpp-style-guide.md
<ide> side effects.
<ide> Node.js is built [without C++ exception handling][], so code using `throw` or
<ide> even `try` and `catch` **will** break.
<ide>
<del>[C++ Core Guidelines]: http://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines
<add>[C++ Core Guidelines]: https://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines
<ide> [Google C++ Style Guide]: https://google.github.io/styleguide/cppguide.html
<ide> [Google’s `cpplint`]: https://github.com/google/styleguide
<ide> [errors]: https://github.com/nodejs/node/blob/master/doc/guides/using-internal-errors.md
<del>[ES.47]: http://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines#Res-nullptr
<del>[ES.48]: http://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines#Res-casts
<del>[ES.49]: http://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines#Res-casts-named
<del>[R.20]: http://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines#Rr-owner
<del>[R.21]: http://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines#Rr-unique
<add>[ES.47]: https://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines#Res-nullptr
<add>[ES.48]: https://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines#Res-casts
<add>[ES.49]: https://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines#Res-casts-named
<add>[R.20]: https://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines#Rr-owner
<add>[R.21]: https://isocpp.github.io/CppCoreGuidelines/CppCoreGuidelines#Rr-unique
<ide> [Run Time Type Information]: https://en.wikipedia.org/wiki/Run-time_type_information
<ide> [cppref_auto_ptr]: https://en.cppreference.com/w/cpp/memory/auto_ptr
<ide> [without C++ exception handling]: https://gcc.gnu.org/onlinedocs/libstdc++/manual/using_exceptions.html#intro.using.exception.no
<ide><path>doc/guides/maintaining-icu.md
<ide> main data files do not need to be upgraded in order to apply time zone data file
<ide> fixes.
<ide>
<ide> The [IANA tzdata](https://www.iana.org/time-zones) project releases new versions
<del>and announces them on the [`tz-announce`](http://mm.icann.org/pipermail/tz-announce/)
<add>and announces them on the [`tz-announce`](https://mm.icann.org/pipermail/tz-announce/)
<ide> mailing list.
<ide>
<ide> The Unicode project takes new releases and publishes
<ide> Node.js is built.
<ide>
<ide> * Make sure your Node.js workspace is clean (`git status`
<ide> should be sufficient).
<del>* Configure Node.js with the specific [ICU version](http://icu-project.org/download)
<add>* Configure Node.js with the specific [ICU version](http://site.icu-project.org/download)
<ide> you want to upgrade to, for example:
<ide>
<ide> ```bash
<ide><path>doc/guides/maintaining-openssl.md
<ide> This document describes how to update `deps/openssl/`.
<ide> ## Requirements
<ide> * Linux environment.
<ide> * `perl` Only Perl version 5 is tested.
<del>* `nasm` (<http://www.nasm.us/>) Version 2.11 or higher is needed.
<add>* `nasm` (<https://www.nasm.us/>) Version 2.11 or higher is needed.
<ide> * GNU `as` in binutils. Version 2.26 or higher is needed.
<ide>
<ide> ## 0. Check Requirements
<ide><path>doc/guides/writing-tests.md
<ide> To generate a test coverage report, see the
<ide> Nightly coverage reports for the Node.js master branch are available at
<ide> <https://coverage.nodejs.org/>.
<ide>
<del>[ASCII]: http://man7.org/linux/man-pages/man7/ascii.7.html
<add>[ASCII]: https://man7.org/linux/man-pages/man7/ascii.7.html
<ide> [Google Test]: https://github.com/google/googletest
<ide> [`common` module]: https://github.com/nodejs/node/blob/master/test/common/README.md
<ide> [all maintained branches]: https://github.com/nodejs/lts
| 14 |
Javascript | Javascript | fix code samples and example | 39915836e0604723c5edb7d915d3f41c0abda65e |
<ide><path>src/ng/sce.js
<ide> function adjustMatchers(matchers) {
<ide> *
<ide> * Here is what a secure configuration for this scenario might look like:
<ide> *
<del> * <pre class="prettyprint">
<del> * angular.module('myApp', []).config(function($sceDelegateProvider) {
<del> * $sceDelegateProvider.resourceUrlWhitelist([
<del> * // Allow same origin resource loads.
<del> * 'self',
<del> * // Allow loading from our assets domain. Notice the difference between * and **.
<del> * 'http://srv*.assets.example.com/**']);
<add> * ```
<add> * angular.module('myApp', []).config(function($sceDelegateProvider) {
<add> * $sceDelegateProvider.resourceUrlWhitelist([
<add> * // Allow same origin resource loads.
<add> * 'self',
<add> * // Allow loading from our assets domain. Notice the difference between * and **.
<add> * 'http://srv*.assets.example.com/**'
<add> * ]);
<ide> *
<del> * // The blacklist overrides the whitelist so the open redirect here is blocked.
<del> * $sceDelegateProvider.resourceUrlBlacklist([
<del> * 'http://myapp.example.com/clickThru**']);
<del> * });
<del> * </pre>
<add> * // The blacklist overrides the whitelist so the open redirect here is blocked.
<add> * $sceDelegateProvider.resourceUrlBlacklist([
<add> * 'http://myapp.example.com/clickThru**'
<add> * ]);
<add> * });
<add> * ```
<ide> */
<ide>
<ide> function $SceDelegateProvider() {
<ide> function $SceDelegateProvider() {
<ide> *
<ide> * Here's an example of a binding in a privileged context:
<ide> *
<del> * <pre class="prettyprint">
<del> * <input ng-model="userHtml">
<del> * <div ng-bind-html="userHtml">
<del> * </pre>
<add> * ```
<add> * <input ng-model="userHtml">
<add> * <div ng-bind-html="userHtml"></div>
<add> * ```
<ide> *
<ide> * Notice that `ng-bind-html` is bound to `userHtml` controlled by the user. With SCE
<ide> * disabled, this application allows the user to render arbitrary HTML into the DIV.
<ide> function $SceDelegateProvider() {
<ide> * ng.$sce#parseAsHtml $sce.parseAsHtml(binding expression)}. Here's the actual code (slightly
<ide> * simplified):
<ide> *
<del> * <pre class="prettyprint">
<del> * var ngBindHtmlDirective = ['$sce', function($sce) {
<del> * return function(scope, element, attr) {
<del> * scope.$watch($sce.parseAsHtml(attr.ngBindHtml), function(value) {
<del> * element.html(value || '');
<del> * });
<del> * };
<del> * }];
<del> * </pre>
<add> * ```
<add> * var ngBindHtmlDirective = ['$sce', function($sce) {
<add> * return function(scope, element, attr) {
<add> * scope.$watch($sce.parseAsHtml(attr.ngBindHtml), function(value) {
<add> * element.html(value || '');
<add> * });
<add> * };
<add> * }];
<add> * ```
<ide> *
<ide> * ## Impact on loading templates
<ide> *
<ide> function $SceDelegateProvider() {
<ide> *
<ide> * ## Show me an example using SCE.
<ide> *
<del> * @example
<del><example module="mySceApp" deps="angular-sanitize.js">
<del><file name="index.html">
<del> <div ng-controller="myAppController as myCtrl">
<del> <i ng-bind-html="myCtrl.explicitlyTrustedHtml" id="explicitlyTrustedHtml"></i><br><br>
<del> <b>User comments</b><br>
<del> By default, HTML that isn't explicitly trusted (e.g. Alice's comment) is sanitized when
<del> $sanitize is available. If $sanitize isn't available, this results in an error instead of an
<del> exploit.
<del> <div class="well">
<del> <div ng-repeat="userComment in myCtrl.userComments">
<del> <b>{{userComment.name}}</b>:
<del> <span ng-bind-html="userComment.htmlComment" class="htmlComment"></span>
<del> <br>
<del> </div>
<del> </div>
<del> </div>
<del></file>
<del>
<del><file name="script.js">
<del> var mySceApp = angular.module('mySceApp', ['ngSanitize']);
<del>
<del> mySceApp.controller("myAppController", function myAppController($http, $templateCache, $sce) {
<del> var self = this;
<del> $http.get("test_data.json", {cache: $templateCache}).success(function(userComments) {
<del> self.userComments = userComments;
<del> });
<del> self.explicitlyTrustedHtml = $sce.trustAsHtml(
<del> '<span onmouseover="this.textContent="Explicitly trusted HTML bypasses ' +
<del> 'sanitization."">Hover over this text.</span>');
<del> });
<del></file>
<del>
<del><file name="test_data.json">
<del>[
<del> { "name": "Alice",
<del> "htmlComment":
<del> "<span onmouseover='this.textContent=\"PWN3D!\"'>Is <i>anyone</i> reading this?</span>"
<del> },
<del> { "name": "Bob",
<del> "htmlComment": "<i>Yes!</i> Am I the only other one?"
<del> }
<del>]
<del></file>
<del>
<del><file name="protractor.js" type="protractor">
<del> describe('SCE doc demo', function() {
<del> it('should sanitize untrusted values', function() {
<del> expect(element(by.css('.htmlComment')).getInnerHtml())
<del> .toBe('<span>Is <i>anyone</i> reading this?</span>');
<del> });
<del>
<del> it('should NOT sanitize explicitly trusted values', function() {
<del> expect(element(by.id('explicitlyTrustedHtml')).getInnerHtml()).toBe(
<del> '<span onmouseover="this.textContent="Explicitly trusted HTML bypasses ' +
<del> 'sanitization."">Hover over this text.</span>');
<del> });
<del> });
<del></file>
<del></example>
<add> * <example module="mySceApp" deps="angular-sanitize.js">
<add> * <file name="index.html">
<add> * <div ng-controller="myAppController as myCtrl">
<add> * <i ng-bind-html="myCtrl.explicitlyTrustedHtml" id="explicitlyTrustedHtml"></i><br><br>
<add> * <b>User comments</b><br>
<add> * By default, HTML that isn't explicitly trusted (e.g. Alice's comment) is sanitized when
<add> * $sanitize is available. If $sanitize isn't available, this results in an error instead of an
<add> * exploit.
<add> * <div class="well">
<add> * <div ng-repeat="userComment in myCtrl.userComments">
<add> * <b>{{userComment.name}}</b>:
<add> * <span ng-bind-html="userComment.htmlComment" class="htmlComment"></span>
<add> * <br>
<add> * </div>
<add> * </div>
<add> * </div>
<add> * </file>
<add> *
<add> * <file name="script.js">
<add> * var mySceApp = angular.module('mySceApp', ['ngSanitize']);
<add> *
<add> * mySceApp.controller("myAppController", function myAppController($http, $templateCache, $sce) {
<add> * var self = this;
<add> * $http.get("test_data.json", {cache: $templateCache}).success(function(userComments) {
<add> * self.userComments = userComments;
<add> * });
<add> * self.explicitlyTrustedHtml = $sce.trustAsHtml(
<add> * '<span onmouseover="this.textContent="Explicitly trusted HTML bypasses ' +
<add> * 'sanitization."">Hover over this text.</span>');
<add> * });
<add> * </file>
<add> *
<add> * <file name="test_data.json">
<add> * [
<add> * { "name": "Alice",
<add> * "htmlComment":
<add> * "<span onmouseover='this.textContent=\"PWN3D!\"'>Is <i>anyone</i> reading this?</span>"
<add> * },
<add> * { "name": "Bob",
<add> * "htmlComment": "<i>Yes!</i> Am I the only other one?"
<add> * }
<add> * ]
<add> * </file>
<add> *
<add> * <file name="protractor.js" type="protractor">
<add> * describe('SCE doc demo', function() {
<add> * it('should sanitize untrusted values', function() {
<add> * expect(element(by.css('.htmlComment')).getInnerHtml())
<add> * .toBe('<span>Is <i>anyone</i> reading this?</span>');
<add> * });
<add> *
<add> * it('should NOT sanitize explicitly trusted values', function() {
<add> * expect(element(by.id('explicitlyTrustedHtml')).getInnerHtml()).toBe(
<add> * '<span onmouseover="this.textContent="Explicitly trusted HTML bypasses ' +
<add> * 'sanitization."">Hover over this text.</span>');
<add> * });
<add> * });
<add> * </file>
<add> * </example>
<ide> *
<ide> *
<ide> *
<ide> function $SceDelegateProvider() {
<ide> *
<ide> * That said, here's how you can completely disable SCE:
<ide> *
<del> * <pre class="prettyprint">
<del> * angular.module('myAppWithSceDisabledmyApp', []).config(function($sceProvider) {
<del> * // Completely disable SCE. For demonstration purposes only!
<del> * // Do not use in new projects.
<del> * $sceProvider.enabled(false);
<del> * });
<del> * </pre>
<add> * ```
<add> * angular.module('myAppWithSceDisabledmyApp', []).config(function($sceProvider) {
<add> * // Completely disable SCE. For demonstration purposes only!
<add> * // Do not use in new projects.
<add> * $sceProvider.enabled(false);
<add> * });
<add> * ```
<ide> *
<ide> */
<ide> /* jshint maxlen: 100 */
| 1 |
Ruby | Ruby | update documentation for button_to | 42eda20cce08c5006e71f0098067cc098fc3970c |
<ide><path>actionpack/lib/action_view/helpers/url_helper.rb
<ide> def link_to(*args, &block)
<ide> #
<ide> # <%= button_to "Create", :action => "create", :remote => true, :form => { "data-type" => "json" } %>
<ide> # # => "<form method="post" action="/images/create" class="button_to" data-remote="true" data-type="json">
<del> # # <div><input value="Create" type="submit" /></div>
<add> # # <div>
<add> # # <input value="Create" type="submit" />
<add> # # <input name="authenticity_token" type="hidden" value="10f2163b45388899ad4d5ae948988266befcb6c3d1b2451cf657a0c293d605a6"/>
<add> # # </div>
<ide> # # </form>"
<ide> #
<ide> #
<ide> def link_to(*args, &block)
<ide> # # <div>
<ide> # # <input type="hidden" name="_method" value="delete" />
<ide> # # <input data-confirm='Are you sure?' value="Delete" type="submit" />
<add> # # <input name="authenticity_token" type="hidden" value="10f2163b45388899ad4d5ae948988266befcb6c3d1b2451cf657a0c293d605a6"/>
<ide> # # </div>
<ide> # # </form>"
<ide> #
<ide> def link_to(*args, &block)
<ide> # # <div>
<ide> # # <input name='_method' value='delete' type='hidden' />
<ide> # # <input value='Destroy' type='submit' disable_with='loading...' data-confirm='Are you sure?' />
<add> # # <input name="authenticity_token" type="hidden" value="10f2163b45388899ad4d5ae948988266befcb6c3d1b2451cf657a0c293d605a6"/>
<ide> # # </div>
<ide> # # </form>"
<ide> # #
| 1 |
Text | Text | mention `largepagedatabytes` in warning docs | a799d25cfaf602c898ba3afa3a8cbd6f6eb7957b |
<ide><path>errors/large-page-data.md
<ide>
<ide> #### Why This Error Occurred
<ide>
<del>One of your pages includes a large amount of page data (>= 128KB). This can negatively impact performance since page data must be parsed by the client before the page is hydrated.
<add>One of your pages includes a large amount of page data (>= 128kB). This can negatively impact performance since page data must be parsed by the client before the page is hydrated.
<ide>
<ide> #### Possible Ways to Fix It
<ide>
<del>Reduce the amount of data returned from `getStaticProps`, `getServerSideProps`, or `getInitialProps` to only the essential data to render the page.
<add>Reduce the amount of data returned from `getStaticProps`, `getServerSideProps`, or `getInitialProps` to only the essential data to render the page. The default threshold of 128kB can be configured in `largePageDataBytes` if absolutely necessary and the performance implications are understood.
<ide>
<ide> ### Useful Links
<ide>
| 1
|
PHP
|
PHP
|
fix coding standards
|
11227f54903c52a43faabd16fe951e8db127d927
|
<ide><path>lib/Cake/Test/Case/Console/Command/SchemaShellTest.php
<ide> public function testGenerateModels() {
<ide> CakePlugin::unload();
<ide> }
<ide>
<del>
<ide> /**
<ide> * Test schema run create with no table args.
<ide> *
<ide><path>lib/Cake/Test/Case/Controller/Component/RequestHandlerComponentTest.php
<ide> public function ajax2_layout() {
<ide> *
<ide> * @package Cake.Test.Case.Controller.Component
<ide> */
<del>class CustomJsonView extends JsonView {}
<add>class CustomJsonView extends JsonView {
<add>
<add>}
<ide>
<ide> /**
<ide> * RequestHandlerComponentTest class
| 2
|
Javascript
|
Javascript
|
add test for d3.values
|
15d7e87fdb1dd18e1202a5f2d15d97eaff609010
|
<ide><path>test/core/keys-test.js
<ide> suite.addBatch({
<ide> return d3.keys;
<ide> },
<ide> "enumerates every defined key": function(keys) {
<del> assert.deepEqual(d3.keys({a: 1, b: 1}), ["a", "b"]);
<add> assert.deepEqual(keys({a: 1, b: 1}), ["a", "b"]);
<ide> },
<ide> "includes keys defined on prototypes": function(keys) {
<ide> function abc() {
<ide> this.a = 1;
<ide> this.b = 2;
<ide> }
<ide> abc.prototype.c = 3;
<del> assert.deepEqual(d3.keys(new abc()), ["a", "b", "c"]);
<add> assert.deepEqual(keys(new abc()), ["a", "b", "c"]);
<ide> },
<ide> "includes keys with null or undefined values": function(keys) {
<del> assert.deepEqual(d3.keys({a: undefined, b: null, c: NaN}), ["a", "b", "c"]);
<add> assert.deepEqual(keys({a: undefined, b: null, c: NaN}), ["a", "b", "c"]);
<ide> }
<ide> }
<ide> });
<ide><path>test/core/values-test.js
<add>require("../env");
<add>require("../../d3");
<add>
<add>var vows = require("vows"),
<add> assert = require("assert");
<add>
<add>var suite = vows.describe("d3.values");
<add>
<add>suite.addBatch({
<add> "values": {
<add> topic: function() {
<add> return d3.values;
<add> },
<add> "enumerates every value": function(values) {
<add> assert.deepEqual(values({a: 1, b: 2}), [1, 2]);
<add> },
<add> "includes values defined on prototypes": function(values) {
<add> function abc() {
<add> this.a = 1;
<add> this.b = 2;
<add> }
<add> abc.prototype.c = 3;
<add> assert.deepEqual(values(new abc()), [1, 2, 3]);
<add> },
<add> "includes null or undefined values": function(values) {
<add> var v = values({a: undefined, b: null, c: NaN});
<add> assert.isUndefined(v[0]);
<add> assert.isNull(v[1]);
<add> assert.isNaN(v[2]);
<add> assert.equal(v.length, 3);
<add> }
<add> }
<add>});
<add>
<add>suite.export(module);
| 2
|
Javascript
|
Javascript
|
increase workers to 8 in cluster-disconnect
|
dbe9f8da6b114873e781791bb31761d3161cad31
|
<ide><path>test/simple/test-cluster-disconnect.js
<ide> if (cluster.isWorker) {
<ide>
<ide> // start two workers and execute callback when both is listening
<ide> var startCluster = function(cb) {
<del> var workers = 2;
<add> var workers = 8;
<ide> var online = 0;
<ide>
<ide> for (var i = 0, l = workers; i < l; i++) {
| 1
|
Text
|
Text
|
use the correct "it's."
|
2b0f88383afba28fe7b0bba989d115c2f5e2cc87
|
<ide><path>docs/sources/use/working_with_volumes.md
<ide> more new volumes to any container created from that image:
<ide> ### Creating and mounting a Data Volume Container
<ide>
<ide> If you have some persistent data that you want to share between
<del>containers, or want to use from non-persistent containers, its best to
<add>containers, or want to use from non-persistent containers, it's best to
<ide> create a named Data Volume Container, and then to mount the data from
<ide> it.
<ide>
| 1
|
PHP
|
PHP
|
add `notifynow` method to notifiables
|
727bd9eb4dacd504ec6c34516485694225971c31
|
<ide><path>src/Illuminate/Notifications/RoutesNotifications.php
<ide> public function notify($instance)
<ide> app(Dispatcher::class)->send($this, $instance);
<ide> }
<ide>
<add> /**
<add> * Send the given notification immediately.
<add> *
<add> * @param mixed $instance
<add> * @param array|null $channels
<add> * @return void
<add> */
<add> public function notifyNow($instance, array $channels = null)
<add> {
<add> app(Dispatcher::class)->sendNow($this, $instance, $channels);
<add> }
<add>
<ide> /**
<ide> * Get the notification routing information for the given driver.
<ide> *
<ide><path>tests/Notifications/NotificationRoutesNotificationsTest.php
<ide> public function testNotificationCanBeDispatched()
<ide> $notifiable->notify($instance);
<ide> }
<ide>
<add> public function testNotificationCanBeSentNow()
<add> {
<add> $container = new Container;
<add> $factory = Mockery::mock(Dispatcher::class);
<add> $container->instance(Dispatcher::class, $factory);
<add> $notifiable = new RoutesNotificationsTestInstance;
<add> $instance = new stdClass;
<add> $factory->shouldReceive('sendNow')->with($notifiable, $instance, null);
<add> Container::setInstance($container);
<add>
<add> $notifiable->notifyNow($instance);
<add> }
<add>
<ide> public function testNotificationOptionRouting()
<ide> {
<ide> $instance = new RoutesNotificationsTestInstance;
| 2
|
Python
|
Python
|
add classifiers in setup.py
|
3c31489046d144aa09b68f611c241d9ed770c07a
|
<ide><path>setup.py
<ide> 'pytest-xdist',
<ide> 'pytest-cov'],
<ide> },
<add> classifiers=[
<add> 'Development Status :: 5 - Production/Stable',
<add> 'Intended Audience :: Developers',
<add> 'Intended Audience :: Education',
<add> 'Intended Audience :: Science/Research',
<add> 'License :: OSI Approved :: MIT License',
<add> 'Programming Language :: Python :: 2',
<add> 'Programming Language :: Python :: 2.7',
<add> 'Programming Language :: Python :: 3',
<add> 'Programming Language :: Python :: 3.6',
<add> 'Topic :: Software Development :: Libraries',
<add> 'Topic :: Software Development :: Libraries :: Python Modules'
<add> ],
<ide> packages=find_packages())
| 1
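For context on the row above: `classifiers` is plain Trove metadata passed to `setuptools.setup()` and consumed by PyPI for indexing and filtering. A minimal, self-contained sketch of the same pattern follows; the package name, version, and the particular classifiers chosen here are illustrative placeholders, not taken from the commit.

```python
# Minimal setup.py sketch using Trove classifiers, mirroring the pattern
# added in the patch above. Name, version, and classifier choices are
# placeholders for illustration only.
from setuptools import setup, find_packages

setup(
    name='example-package',   # placeholder project name
    version='0.1.0',          # placeholder version
    packages=find_packages(),
    classifiers=[
        # Trove classifiers let PyPI index and filter the package
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
    ],
)
```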
|
PHP
|
PHP
|
set the user resolver on form requests
|
79506f3b7d550afbc1c13972b997f06b7f5c75e8
|
<ide><path>src/Illuminate/Foundation/Providers/FormRequestServiceProvider.php
<ide> protected function initializeRequest(FormRequest $form, Request $current)
<ide> $current->query->all(), $current->request->all(), $current->attributes->all(),
<ide> $current->cookies->all(), $files, $current->server->all(), $current->getContent()
<ide> );
<add>
<add> $form->setUserResolver($current->getUserResolver());
<ide> }
<ide>
<ide> }
<ide><path>src/Illuminate/Http/Request.php
<ide> public function session()
<ide> return $this->getSession();
<ide> }
<ide>
<add> /**
<add> * Get the user resolver callback.
<add> *
<add> * @return \Closure
<add> */
<add> public function getUserResolver()
<add> {
<add> return $this->userResolver ?: function() {};
<add> }
<add>
<ide> /**
<ide> * Set the user resolver callback.
<ide> *
<ide> * @param \Closure $callback
<del> * @return void
<add> * @return $this
<ide> */
<ide> public function setUserResolver(Closure $callback)
<ide> {
<ide> $this->userResolver = $callback;
<add>
<add> return $this;
<ide> }
<ide>
<ide> /**
| 2
|
Javascript
|
Javascript
|
update stdout and stderror to be array of lines
|
4dcbf96e7239289eeeff4463488622919584111d
|
<ide><path>test/BinTestCases.test.js
<ide> function getTestSpecificArguments(testDirectory) {
<ide> }
<ide> }
<ide>
<add>function convertToArrayOfLines(outputArray) {
<add> if(outputArray.length === 0) return outputArray;
<add> return outputArray.join('').split('\n');
<add>}
<add>
<ide> const casesPath = path.join(__dirname, "binCases");
<ide> const defaultArgs = loadOptsFile(path.join(casesPath, "test.opts"));
<ide>
<ide> describe("BinTestCases", function() {
<ide> describe(testName, function() {
<ide> before(function(done) {
<ide> this.timeout(20000);
<del> const child = spawn(cmd, args, opts);
<add>
<add> const child = spawn(process.execPath, [cmd].concat(args), opts);
<ide>
<ide> child.on("close", function(code) {
<ide> env.code = code;
<ide> describe("BinTestCases", function() {
<ide> });
<ide>
<ide> it("should run successfully", function() {
<del> testAssertions(env.code, env.stdout, env.stderr);
<add> const stdout = convertToArrayOfLines(env.stdout);
<add> const stderr = convertToArrayOfLines(env.stderr);
<add> testAssertions(env.code, stdout, stderr);
<ide> });
<ide> });
<ide> });
<ide><path>test/binCases/help/help-output/test.js
<ide> module.exports = function testAssertions(code, stdout, stderr) {
<ide> code.should.be.exactly(0);
<ide>
<del> stdout.length.should.be.exactly(1);
<del> const output = stdout[0].toString();
<add> stdout.should.be.ok();
<add> stdout[0].should.startWith("webpack");
<add> stdout.should.containEql("Config options:");
<add> stdout.should.containEql("Basic options:");
<add> stdout.should.containEql("Module options:");
<add> stdout.should.containEql("Output options:");
<add> stdout.should.containEql("Advanced options:");
<add> stdout.should.containEql("Resolving options:");
<add> stdout.should.containEql("Optimizing options:");
<add> stdout.should.containEql("Stats options:");
<add> stdout.should.containEql("Options:");
<ide>
<del> output.should.be.ok();
<del> output.should.startWith("webpack");
<del> output.should.containEql("\nConfig options:");
<del> output.should.containEql("\nBasic options:");
<del> output.should.containEql("\nModule options:");
<del> output.should.containEql("\nOutput options:");
<del> output.should.containEql("\nAdvanced options:");
<del> output.should.containEql("\nResolving options:");
<del> output.should.containEql("\nOptimizing options:");
<del> output.should.containEql("\nStats options:");
<del> output.should.containEql("\nOptions:");
<del>
<del> stderr.map((data) => {
<del> data.toString().should.be.ok();
<del> throw new Error(data.toString());
<del> });
<add> stderr.should.be.empty();
<ide> }
<ide><path>test/binCases/stats/single-config/test.js
<ide> module.exports = function testAssertions(code, stdout, stderr) {
<ide> code.should.be.oneOf(0, 1);
<ide>
<del> stdout.length.should.be.exactly(2);
<del>
<del> const dateOutput = stdout[0].toString();
<del> dateOutput.should.be.ok();
<del> isNaN(new Date(dateOutput)).should.be.false();
<del>
<del> const buildOutput = stdout[1].toString();
<del> buildOutput.should.be.ok();
<del> buildOutput.should.startWith("Hash: ");
<del> buildOutput.should.containEql("\nVersion:");
<del> buildOutput.should.containEql("\nTime:");
<del> buildOutput.should.containEql("null.js");
<del> buildOutput.should.containEql("./index.js");
<del> buildOutput.should.containEql("[built]");
<del>
<del> stderr.map((data) => {
<del> data.toString().should.be.ok();
<del> throw new Error(data.toString());
<del> });
<add> stdout.should.be.ok();
<add> stdout[3].should.containEql("Hash: ");
<add> stdout[4].should.containEql("Version: ");
<add> stdout[5].should.containEql("Time: ");
<add> stdout[7].should.containEql("null.js");
<add> stdout[8].should.containEql("./index.js");
<add> stdout[8].should.containEql("[built]");
<add>
<add> stderr.should.be.empty();
<ide> }
| 3
|
Python
|
Python
|
use keepdims in a couple docstrings
|
09154cfa6dea50a6ac24ae1062095c9e98026bbc
|
<ide><path>numpy/lib/shape_base.py
<ide> def take_along_axis(arr, indices, axis):
<ide> >>> np.sort(a, axis=1)
<ide> array([[10, 20, 30],
<ide> [40, 50, 60]])
<del> >>> ai = np.argsort(a, axis=1); ai
<add> >>> ai = np.argsort(a, axis=1)
<add> >>> ai
<ide> array([[0, 2, 1],
<ide> [1, 2, 0]])
<ide> >>> np.take_along_axis(a, ai, axis=1)
<ide> array([[10, 20, 30],
<ide> [40, 50, 60]])
<ide>
<del> The same works for max and min, if you expand the dimensions:
<add> The same works for max and min, if you maintain the trivial dimension
<add> with ``keepdims``:
<ide>
<del> >>> np.expand_dims(np.max(a, axis=1), axis=1)
<add> >>> np.max(a, axis=1, keepdims=True)
<ide> array([[30],
<ide> [60]])
<del> >>> ai = np.expand_dims(np.argmax(a, axis=1), axis=1)
<add> >>> ai = np.argmax(a, axis=1, keepdims=True)
<ide> >>> ai
<ide> array([[1],
<ide> [0]])
<ide> def take_along_axis(arr, indices, axis):
<ide> If we want to get the max and min at the same time, we can stack the
<ide> indices first
<ide>
<del> >>> ai_min = np.expand_dims(np.argmin(a, axis=1), axis=1)
<del> >>> ai_max = np.expand_dims(np.argmax(a, axis=1), axis=1)
<add> >>> ai_min = np.argmin(a, axis=1, keepdims=True)
<add> >>> ai_max = np.argmax(a, axis=1, keepdims=True)
<ide> >>> ai = np.concatenate([ai_min, ai_max], axis=1)
<ide> >>> ai
<ide> array([[0, 1],
<ide> def put_along_axis(arr, indices, values, axis):
<ide>
<ide> We can replace the maximum values with:
<ide>
<del> >>> ai = np.expand_dims(np.argmax(a, axis=1), axis=1)
<add> >>> ai = np.argmax(a, axis=1, keepdims=True)
<ide> >>> ai
<ide> array([[1],
<ide> [0]])
| 1
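The docstring change in the row above replaces `np.expand_dims(np.argmax(a, axis=1), axis=1)` with `np.argmax(a, axis=1, keepdims=True)`. A short runnable sketch of why the two forms are interchangeable, assuming NumPy >= 1.22 (the first release where `argmax`/`argmin` accept `keepdims`):

```python
# Sketch of the keepdims pattern referenced in the docstring change above.
# Requires NumPy >= 1.22, where argmax/argmin accept keepdims.
import numpy as np

a = np.array([[10, 30, 20],
              [60, 40, 50]])

# Older form shown in the previous docstring text:
ai_old = np.expand_dims(np.argmax(a, axis=1), axis=1)

# Newer form the patch documents:
ai_new = np.argmax(a, axis=1, keepdims=True)

assert np.array_equal(ai_old, ai_new)          # both have shape (2, 1)
print(np.take_along_axis(a, ai_new, axis=1))   # [[30], [60]]
```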
|
Javascript
|
Javascript
|
switch keyboardavoidingview to class syntax
|
c017dcb0f2903b49b2f21cc150226aeb7f5026ee
|
<ide><path>Libraries/Components/Keyboard/Keyboard.js
<ide> type KeyboardEventName =
<ide> | 'keyboardWillChangeFrame'
<ide> | 'keyboardDidChangeFrame';
<ide>
<del>export type KeyboardEvent = {|
<del> +duration?: number,
<del> +easing?: string,
<del> +endCoordinates: {|
<del> +width: number,
<del> +height: number,
<del> +screenX: number,
<del> +screenY: number,
<del> |},
<del>|};
<add>type ScreenRect = $ReadOnly<{|
<add> screenX: number,
<add> screenY: number,
<add> width: number,
<add> height: number,
<add>|}>;
<add>
<add>export type KeyboardEvent = $ReadOnly<{|
<add> duration?: number,
<add> easing?: string,
<add> endCoordinates: ScreenRect,
<add> startCoordinates?: ScreenRect,
<add>|}>;
<ide>
<ide> type KeyboardEventListener = (e: KeyboardEvent) => void;
<ide>
<ide><path>Libraries/Components/Keyboard/KeyboardAvoidingView.js
<ide>
<ide> 'use strict';
<ide>
<del>const createReactClass = require('create-react-class');
<ide> const Keyboard = require('Keyboard');
<ide> const LayoutAnimation = require('LayoutAnimation');
<ide> const Platform = require('Platform');
<del>const PropTypes = require('prop-types');
<ide> const React = require('React');
<del>/* $FlowFixMe(>=0.54.0 site=react_native_oss) This comment suppresses an error
<del> * found when Flow v0.54 was deployed. To see the error delete this comment and
<del> * run Flow. */
<del>const TimerMixin = require('react-timer-mixin');
<add>const StyleSheet = require('StyleSheet');
<ide> const View = require('View');
<del>const ViewPropTypes = require('ViewPropTypes');
<ide>
<ide> import type EmitterSubscription from 'EmitterSubscription';
<ide> import type {ViewStyleProp} from 'StyleSheet';
<ide> import type {ViewProps, ViewLayout, ViewLayoutEvent} from 'ViewPropTypes';
<del>
<del>type ScreenRect = {
<del> screenX: number,
<del> screenY: number,
<del> width: number,
<del> height: number,
<del>};
<del>type KeyboardChangeEvent = {
<del> startCoordinates?: ScreenRect,
<del> endCoordinates: ScreenRect,
<del> duration?: number,
<del> easing?: string,
<del>};
<add>import type {KeyboardEvent} from 'Keyboard';
<ide>
<ide> type Props = $ReadOnly<{|
<ide> ...ViewProps,
<add>
<add> /**
<add> * Specify how to react to the presence of the keyboard.
<add> */
<ide> behavior?: ?('height' | 'position' | 'padding'),
<add>
<add> /**
<add> * Style of the content container when `behavior` is 'position'.
<add> */
<ide> contentContainerStyle?: ?ViewStyleProp,
<del> enabled?: ?boolean,
<del> keyboardVerticalOffset?: ?number,
<add>
<add> /**
<add> * Controls whether this `KeyboardAvoidingView` instance should take effect.
<add> * This is useful when more than one is on the screen. Defaults to true.
<add> */
<add> enabled: ?boolean,
<add>
<add> /**
<add> * Distance between the top of the user screen and the React Native view. This
<add> * may be non-zero in some cases. Defaults to 0.
<add> */
<add> keyboardVerticalOffset: number,
<ide> |}>;
<ide>
<add>type State = {|
<add> bottom: number,
<add>|};
<add>
<ide> const viewRef = 'VIEW';
<ide>
<ide> /**
<del> * This is a component to solve the common problem of views that need to move out of the way of the virtual keyboard.
<del> * It can automatically adjust either its height, position or bottom padding based on the position of the keyboard.
<add> * View that moves out of the way when the keyboard appears by automatically
<add> * adjusting its height, position, or bottom padding.
<ide> */
<del>const KeyboardAvoidingView = ((createReactClass({
<del> displayName: 'KeyboardAvoidingView',
<del> mixins: [TimerMixin],
<del>
<del> propTypes: {
<del> ...ViewPropTypes,
<del> /**
<del> * Specify how the `KeyboardAvoidingView` will react to the presence of
<del> * the keyboard. It can adjust the height, position or bottom padding of the view
<del> */
<del> behavior: PropTypes.oneOf(['height', 'position', 'padding']),
<del>
<del> /**
<del> * The style of the content container(View) when behavior is 'position'.
<del> */
<del> contentContainerStyle: ViewPropTypes.style,
<del>
<del> /**
<del> * This is the distance between the top of the user screen and the react native view,
<del> * may be non-zero in some use cases. The default value is 0.
<del> */
<del> keyboardVerticalOffset: PropTypes.number.isRequired,
<del> /**
<del> * This is to allow us to manually control which KAV shuld take effect when
<del> * having more than one KAV at the same screen
<del> */
<del> enabled: PropTypes.bool.isRequired,
<del> },
<del>
<del> getDefaultProps() {
<del> return {
<del> enabled: true,
<del> keyboardVerticalOffset: 0,
<del> };
<del> },
<del>
<del> getInitialState() {
<del> return {
<del> bottom: 0,
<del> };
<del> },
<del>
<del> subscriptions: ([]: Array<EmitterSubscription>),
<del> frame: (null: ?ViewLayout),
<del>
<del> _relativeKeyboardHeight(keyboardFrame: ScreenRect): number {
<del> const frame = this.frame;
<add>class KeyboardAvoidingView extends React.Component<Props, State> {
<add> static defaultProps = {
<add> enabled: true,
<add> keyboardVerticalOffset: 0,
<add> };
<add>
<add> _frame: ?ViewLayout = null;
<add> _subscriptions: Array<EmitterSubscription> = [];
<add>
<add> state = {
<add> bottom: 0,
<add> };
<add>
<add> _relativeKeyboardHeight(keyboardFrame): number {
<add> const frame = this._frame;
<ide> if (!frame || !keyboardFrame) {
<ide> return 0;
<ide> }
<ide> const KeyboardAvoidingView = ((createReactClass({
<ide> // Calculate the displacement needed for the view such that it
<ide> // no longer overlaps with the keyboard
<ide> return Math.max(frame.y + frame.height - keyboardY, 0);
<del> },
<add> }
<ide>
<del> _onKeyboardChange(event: ?KeyboardChangeEvent) {
<del> if (!event) {
<add> _onKeyboardChange = (event: ?KeyboardEvent) => {
<add> if (event == null) {
<ide> this.setState({bottom: 0});
<ide> return;
<ide> }
<ide> const KeyboardAvoidingView = ((createReactClass({
<ide> });
<ide> }
<ide> this.setState({bottom: height});
<del> },
<add> };
<ide>
<del> _onLayout(event: ViewLayoutEvent) {
<del> this.frame = event.nativeEvent.layout;
<del> },
<add> _onLayout = (event: ViewLayoutEvent) => {
<add> this._frame = event.nativeEvent.layout;
<add> };
<ide>
<del> UNSAFE_componentWillUpdate(
<del> nextProps: Object,
<del> nextState: Object,
<del> nextContext?: Object,
<del> ): void {
<add> UNSAFE_componentWillUpdate(nextProps: Props, nextState: State): void {
<ide> if (
<ide> nextState.bottom === this.state.bottom &&
<ide> this.props.behavior === 'height' &&
<ide> const KeyboardAvoidingView = ((createReactClass({
<ide> // triggered by parent component re-rendering, no need for bottom to change.
<ide> nextState.bottom = 0;
<ide> }
<del> },
<add> }
<ide>
<del> UNSAFE_componentWillMount() {
<add> componentDidMount(): void {
<ide> if (Platform.OS === 'ios') {
<del> this.subscriptions = [
<add> this._subscriptions = [
<ide> Keyboard.addListener('keyboardWillChangeFrame', this._onKeyboardChange),
<ide> ];
<ide> } else {
<del> this.subscriptions = [
<add> this._subscriptions = [
<ide> Keyboard.addListener('keyboardDidHide', this._onKeyboardChange),
<ide> Keyboard.addListener('keyboardDidShow', this._onKeyboardChange),
<ide> ];
<ide> }
<del> },
<del>
<del> componentWillUnmount() {
<del> this.subscriptions.forEach(sub => sub.remove());
<del> },
<del>
<del> render(): React.Element<any> {
<del> // $FlowFixMe(>=0.41.0)
<del> const {behavior, children, style, ...props} = this.props;
<del> const bottomHeight = this.props.enabled ? this.state.bottom : 0;
<add> }
<add>
<add> componentWillUnmount(): void {
<add> this._subscriptions.forEach(subscription => {
<add> subscription.remove();
<add> });
<add> }
<add>
<add> render(): React.Node {
<add> const {
<add> behavior,
<add> children,
<add> contentContainerStyle,
<add> enabled,
<add> keyboardVerticalOffset, // eslint-disable-line no-unused-vars
<add> style,
<add> ...props
<add> } = this.props;
<add> const bottomHeight = enabled ? this.state.bottom : 0;
<ide> switch (behavior) {
<ide> case 'height':
<ide> let heightStyle;
<del> if (this.frame) {
<add> if (this._frame != null) {
<ide> // Note that we only apply a height change when there is keyboard present,
<ide> // i.e. this.state.bottom is greater than 0. If we remove that condition,
<ide> // this.frame.height will never go back to its original value.
<ide> // When height changes, we need to disable flex.
<del> heightStyle = {height: this.frame.height - bottomHeight, flex: 0};
<add> heightStyle = {
<add> height: this._frame.height - bottomHeight,
<add> flex: 0,
<add> };
<ide> }
<ide> return (
<del> // $FlowFixMe - Typing ReactNativeComponent revealed errors
<ide> <View
<ide> ref={viewRef}
<del> style={[style, heightStyle]}
<add> style={StyleSheet.compose(style, heightStyle)}
<ide> onLayout={this._onLayout}
<ide> {...props}>
<ide> {children}
<ide> </View>
<ide> );
<ide>
<ide> case 'position':
<del> const positionStyle = {bottom: bottomHeight};
<del> const {contentContainerStyle} = this.props;
<del>
<ide> return (
<del> // $FlowFixMe - Typing ReactNativeComponent revealed errors
<ide> <View
<ide> ref={viewRef}
<ide> style={style}
<ide> onLayout={this._onLayout}
<ide> {...props}>
<del> <View style={[contentContainerStyle, positionStyle]}>
<add> <View
<add> style={StyleSheet.compose(contentContainerStyle, {
<add> bottom: bottomHeight,
<add> })}>
<ide> {children}
<ide> </View>
<ide> </View>
<ide> );
<ide>
<ide> case 'padding':
<del> const paddingStyle = {paddingBottom: bottomHeight};
<ide> return (
<del> // $FlowFixMe - Typing ReactNativeComponent revealed errors
<ide> <View
<ide> ref={viewRef}
<del> style={[style, paddingStyle]}
<add> style={StyleSheet.compose(style, {paddingBottom: bottomHeight})}
<ide> onLayout={this._onLayout}
<ide> {...props}>
<ide> {children}
<ide> const KeyboardAvoidingView = ((createReactClass({
<ide>
<ide> default:
<ide> return (
<del> // $FlowFixMe - Typing ReactNativeComponent revealed errors
<ide> <View
<ide> ref={viewRef}
<ide> onLayout={this._onLayout}
<ide> const KeyboardAvoidingView = ((createReactClass({
<ide> </View>
<ide> );
<ide> }
<del> },
<del>}): any): React.ComponentType<Props>);
<add> }
<add>}
<ide>
<ide> module.exports = KeyboardAvoidingView;
| 2
|
Javascript
|
Javascript
|
add array annotation to all injectable parameters
|
6874cca1589a2a4c28f3caa036897c70e57763ef
|
<ide><path>src/auto/injector.js
<ide> function annotate(fn, strictDi, name) {
<ide> * @description
<ide> * Invoke the method and supply the method arguments from the `$injector`.
<ide> *
<del> * @param {!Function} fn The function to invoke. Function parameters are injected according to the
<del> * {@link guide/di $inject Annotation} rules.
<add> * @param {Function|Array.<string|Function>} fn The injectable function to invoke. Function parameters are
<add> * injected according to the {@link guide/di $inject Annotation} rules.
<ide> * @param {Object=} self The `this` for the invoked method.
<ide> * @param {Object=} locals Optional object. If preset then any argument names are read from this
<ide> * object first, before the `$injector` is consulted.
<ide> function annotate(fn, strictDi, name) {
<ide> * configure your service in a provider.
<ide> *
<ide> * @param {string} name The name of the instance.
<del> * @param {function()} $getFn The $getFn for the instance creation. Internally this is a short hand
<del> * for `$provide.provider(name, {$get: $getFn})`.
<add> * @param {Function|Array.<string|Function>} $getFn The injectable $getFn for the instance creation.
<add> * Internally this is a short hand for `$provide.provider(name, {$get: $getFn})`.
<ide> * @returns {Object} registered provider instance
<ide> *
<ide> * @example
<ide> function annotate(fn, strictDi, name) {
<ide> * as a type/class.
<ide> *
<ide> * @param {string} name The name of the instance.
<del> * @param {Function} constructor A class (constructor function) that will be instantiated.
<add> * @param {Function|Array.<string|Function>} constructor An injectable class (constructor function)
<add> * that will be instantiated.
<ide> * @returns {Object} registered provider instance
<ide> *
<ide> * @example
<ide> function annotate(fn, strictDi, name) {
<ide> * object which replaces or wraps and delegates to the original service.
<ide> *
<ide> * @param {string} name The name of the service to decorate.
<del> * @param {Function|Array} decorator This function will be invoked when the service needs to be
<add> * @param {Function|Array.<string|Function>} decorator This function will be invoked when the service needs to be
<ide> * instantiated and should return the decorated service instance. The function is called using
<ide> * the {@link auto.$injector#invoke injector.invoke} method and is therefore fully injectable.
<ide> * Local injection arguments:
| 1
|
Go
|
Go
|
amend run help for --user to include guid
|
46d4e491c8a1394596e824ac65b360f6d7211c6f
|
<ide><path>runconfig/parse.go
<ide> func Parse(cmd *flag.FlagSet, args []string) (*Config, *HostConfig, *flag.FlagSe
<ide> flHostname = cmd.String([]string{"h", "-hostname"}, "", "Container host name")
<ide> flMemoryString = cmd.String([]string{"m", "-memory"}, "", "Memory limit")
<ide> flMemorySwap = cmd.String([]string{"-memory-swap"}, "", "Total memory (memory + swap), '-1' to disable swap")
<del> flUser = cmd.String([]string{"u", "-user"}, "", "Username or UID")
<add> flUser = cmd.String([]string{"u", "-user"}, "", "Username or UID (format: <name|uid>[:<group|gid>])")
<ide> flWorkingDir = cmd.String([]string{"w", "-workdir"}, "", "Working directory inside the container")
<ide> flCpuShares = cmd.Int64([]string{"c", "-cpu-shares"}, 0, "CPU shares (relative weight)")
<ide> flCpuset = cmd.String([]string{"-cpuset"}, "", "CPUs in which to allow execution (0-3, 0,1)")
| 1
|
Mixed
|
Ruby
|
add config option for cookies digest
|
cfbedd3479d5021b9fb862ecfa49fc6bc8602994
|
<ide><path>actionpack/CHANGELOG.md
<add>* Add `config.action_dispatch.cookies_digest` option for setting custom
<add> digest. The default remains the same - 'SHA1'.
<add>
<add> *Łukasz Strzałkowski*
<add>
<ide> * Extract source code for the entire exception stack trace for
<ide> better debugging and diagnosis.
<ide>
<ide><path>actionpack/lib/action_dispatch/middleware/cookies.rb
<ide> class Cookies
<ide> SECRET_TOKEN = "action_dispatch.secret_token".freeze
<ide> SECRET_KEY_BASE = "action_dispatch.secret_key_base".freeze
<ide> COOKIES_SERIALIZER = "action_dispatch.cookies_serializer".freeze
<add> COOKIES_DIGEST = "action_dispatch.cookies_digest".freeze
<ide>
<ide> # Cookies can typically store 4096 bytes.
<ide> MAX_COOKIE_SIZE = 4096
<ide> def self.options_for_env(env) #:nodoc:
<ide> secret_token: env[SECRET_TOKEN],
<ide> secret_key_base: env[SECRET_KEY_BASE],
<ide> upgrade_legacy_signed_cookies: env[SECRET_TOKEN].present? && env[SECRET_KEY_BASE].present?,
<del> serializer: env[COOKIES_SERIALIZER]
<add> serializer: env[COOKIES_SERIALIZER],
<add> digest: env[COOKIES_DIGEST]
<ide> }
<ide> end
<ide>
<ide> def serializer
<ide> serializer
<ide> end
<ide> end
<add>
<add> def digest
<add> @options[:digest] || 'SHA1'
<add> end
<ide> end
<ide>
<ide> class SignedCookieJar #:nodoc:
<ide> def initialize(parent_jar, key_generator, options = {})
<ide> @parent_jar = parent_jar
<ide> @options = options
<ide> secret = key_generator.generate_key(@options[:signed_cookie_salt])
<del> @verifier = ActiveSupport::MessageVerifier.new(secret, serializer: NullSerializer)
<add> @verifier = ActiveSupport::MessageVerifier.new(secret, digest: digest, serializer: NullSerializer)
<ide> end
<ide>
<ide> def [](name)
<ide><path>actionpack/test/dispatch/cookies_test.rb
<ide> def test_read_permanent_cookie
<ide> assert_equal 'Jamie', @controller.send(:cookies).permanent[:user_name]
<ide> end
<ide>
<add> def test_signed_cookie_using_default_digest
<add> get :set_signed_cookie
<add> cookies = @controller.send :cookies
<add> assert_not_equal 45, cookies[:user_id]
<add> assert_equal 45, cookies.signed[:user_id]
<add> assert_equal 'SHA1', cookies.signed.instance_variable_get(:"@verifier").instance_variable_get(:"@digest")
<add> end
<add>
<add> def test_signed_cookie_using_custom_digest
<add> @request.env["action_dispatch.cookies_digest"] = 'SHA256'
<add> get :set_signed_cookie
<add> cookies = @controller.send :cookies
<add> assert_not_equal 45, cookies[:user_id]
<add> assert_equal 45, cookies.signed[:user_id]
<add> assert_equal 'SHA256', cookies.signed.instance_variable_get(:"@verifier").instance_variable_get(:"@digest")
<add> end
<add>
<ide> def test_signed_cookie_using_default_serializer
<ide> get :set_signed_cookie
<ide> cookies = @controller.send :cookies
<ide><path>railties/lib/rails/application.rb
<ide> def env_config
<ide> "action_dispatch.encrypted_cookie_salt" => config.action_dispatch.encrypted_cookie_salt,
<ide> "action_dispatch.encrypted_signed_cookie_salt" => config.action_dispatch.encrypted_signed_cookie_salt,
<ide> "action_dispatch.cookies_serializer" => config.action_dispatch.cookies_serializer
<add> "action_dispatch.cookies_digest" => config.action_dispatch.cookies_digest
<ide> })
<ide> end
<ide> end
| 4
|
Ruby
|
Ruby
|
remove duplication from date_time calculations
|
89060b8ef9b56e69e5f28754b5b633f6c8b8c909
|
<ide><path>activesupport/lib/active_support/core_ext/date_time/calculations.rb
<ide> def current
<ide> end
<ide> end
<ide>
<del> # Tells whether the DateTime object's datetime lies in the past.
<del> def past?
<del> self < ::DateTime.current
<del> end
<del>
<del> # Tells whether the DateTime object's datetime lies in the future.
<del> def future?
<del> self > ::DateTime.current
<del> end
<del>
<ide> # Seconds since midnight: DateTime.now.seconds_since_midnight.
<ide> def seconds_since_midnight
<ide> sec + (min * 60) + (hour * 3600)
| 1
|
Javascript
|
Javascript
|
add more documentation
|
96840c6b861603a312e28fa1ab5944328fcb6372
|
<ide><path>packages/ember-views/lib/views/view.js
<ide> var invokeForState = {
<ide> isEnabled: true
<ide> });
<ide>
<add> Will result in view instances with an HTML representation of:
<add>
<add> <div id="ember1" class="ember-view enabled"></div>
<add>
<add> When isEnabled is `false`, the resulting HTML reprensentation looks like this:
<add>
<add> <div id="ember1" class="ember-view disabled"></div>
<add>
<ide>
<ide> Updates to the the value of a class name binding will result in automatic update
<ide> of the HTML `class` attribute in the view's rendered HTML representation.
| 1
|
Text
|
Text
|
note curl usage on <=10.7
|
f18e4e330241017d3e2503eeecba7925c799eefd
|
<ide><path>docs/Installation.md
<ide> it does it too. And you have to confirm everything it will do before it starts.
<ide>
<ide> ## Alternative Installs
<ide>
<add>### OS X Lion 10.7 and below
<add>
<add>Using the instructions on https://brew.sh or below whenever you call `curl` you must pass `--insecure` as an argument. This is because your system `curl` is too old to speak to GitHub using HTTPS. Don't worry, on the first `brew update` Homebrew will install a newer, more secure `curl` for your machine.
<add>
<ide> ### Untar anywhere
<ide> Just extract (or `git clone`) Homebrew wherever you want. Just
<ide> avoid:
| 1
|
Python
|
Python
|
fix mypy docker provider
|
b20e6d3f060bc385e350433070d5707ae6d6d0b0
|
<ide><path>airflow/decorators/base.py
<ide> import inspect
<ide> import re
<ide> from inspect import signature
<del>from typing import Any, Callable, Dict, Optional, Tuple, TypeVar, cast
<add>from typing import Any, Callable, Dict, Iterable, Optional, Tuple, Type, TypeVar, cast
<ide>
<ide> from airflow.exceptions import AirflowException
<ide> from airflow.models import BaseOperator
<ide> class DecoratedOperator(BaseOperator):
<ide> :type kwargs_to_upstream: dict
<ide> """
<ide>
<del> template_fields = ('op_args', 'op_kwargs')
<add> template_fields: Iterable[str] = ('op_args', 'op_kwargs')
<ide> template_fields_renderers = {"op_args": "py", "op_kwargs": "py"}
<ide>
<ide> # since we won't mutate the arguments, we should just do the shallow copy
<ide> def _hook_apply_defaults(self, *args, **kwargs):
<ide> def task_decorator_factory(
<ide> python_callable: Optional[Callable] = None,
<ide> multiple_outputs: Optional[bool] = None,
<del> decorated_operator_class: BaseOperator = None,
<add> decorated_operator_class: Type[BaseOperator] = None,
<ide> **kwargs,
<ide> ) -> Callable[[T], T]:
<ide> """
<ide><path>airflow/providers/docker/decorators/docker.py
<ide> class _DockerDecoratedOperator(DecoratedOperator, DockerOperator):
<ide> :type multiple_outputs: bool
<ide> """
<ide>
<del> template_fields = ('op_args', 'op_kwargs')
<add> template_fields: Iterable[str] = ('op_args', 'op_kwargs')
<ide>
<ide> # since we won't mutate the arguments, we should just do the shallow copy
<ide> # there are some cases we can't deepcopy the objects (e.g protobuf).
<ide><path>airflow/providers/docker/hooks/docker.py
<ide> def get_ui_field_behaviour() -> Dict:
<ide>
<ide> def __init__(
<ide> self,
<del> docker_conn_id: str = default_conn_name,
<add> docker_conn_id: Optional[str] = default_conn_name,
<ide> base_url: Optional[str] = None,
<ide> version: Optional[str] = None,
<ide> tls: Optional[str] = None,
<ide> def __init__(
<ide> if not version:
<ide> raise AirflowException('No Docker API version provided')
<ide>
<add> if not docker_conn_id:
<add> raise AirflowException('No Docker connection id provided')
<add>
<ide> conn = self.get_connection(docker_conn_id)
<add>
<ide> if not conn.host:
<ide> raise AirflowException('No Docker URL provided')
<ide> if not conn.login:
<ide> def get_conn(self) -> APIClient:
<ide> self.__login(client)
<ide> return client
<ide>
<del> def __login(self, client) -> int:
<add> def __login(self, client) -> None:
<ide> self.log.debug('Logging into Docker')
<ide> try:
<ide> client.login(
<ide><path>airflow/providers/docker/operators/docker.py
<ide> class DockerOperator(BaseOperator):
<ide> :type retrieve_output_path: Optional[str]
<ide> """
<ide>
<del> template_fields = ('image', 'command', 'environment', 'container_name')
<add> template_fields: Iterable[str] = ('image', 'command', 'environment', 'container_name')
<ide> template_ext = (
<ide> '.sh',
<ide> '.bash',
<ide> def _run_image_with_mounts(self, target_mounts, add_tmp_variable: bool) -> Optio
<ide> self.environment['AIRFLOW_TMP_DIR'] = self.tmp_dir
<ide> else:
<ide> self.environment.pop('AIRFLOW_TMP_DIR', None)
<add> if not self.cli:
<add> raise Exception("The 'cli' should be initialized before!")
<ide> self.container = self.cli.create_container(
<ide> command=self.format_command(self.command),
<ide> name=self.container_name,
| 4
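One of the fixes in the row above annotates `template_fields` as `Iterable[str]` so that subclasses can override it with tuples of different shapes without tripping mypy. A standalone sketch of that typing pattern, using placeholder class names rather than the real Airflow classes:

```python
# Standalone sketch of the typing pattern from the patch above: annotate an
# overridable class attribute as Iterable[str] so subclasses may assign
# tuples of any length and still type-check. Class names are placeholders.
from typing import Iterable


class BaseOperatorSketch:
    template_fields: Iterable[str] = ('op_args', 'op_kwargs')


class DockerishOperatorSketch(BaseOperatorSketch):
    # A differently-shaped tuple is fine because the attribute is typed as
    # Iterable[str] rather than a fixed-length tuple type.
    template_fields: Iterable[str] = ('image', 'command', 'environment', 'container_name')


print(tuple(DockerishOperatorSketch.template_fields))
```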
|
Javascript
|
Javascript
|
bind this in onreadystatechange call
|
b8ff6212f84efebcf3ba2de3e1999d3e68e0c574
|
<ide><path>Libraries/Network/XMLHttpRequestBase.js
<ide> class XMLHttpRequestBase {
<ide> if (onreadystatechange) {
<ide> // We should send an event to handler, but since we don't process that
<ide> // event anywhere, let's leave it empty
<del> onreadystatechange(null);
<add> onreadystatechange.call(this, null);
<ide> }
<ide> if (newState === this.DONE && !this._aborted) {
<ide> this._sendLoad();
| 1
|
Javascript
|
Javascript
|
remove redundant initialization
|
ccdaa434b8118a3f5d7dd6d2fc20affacbf98368
|
<ide><path>lib/repl.js
<ide> function complete(line, callback) {
<ide>
<ide> // REPL commands (e.g. ".break").
<ide> var filter;
<del> var match = null;
<del> match = line.match(/^\s*\.(\w*)$/);
<add> let match = line.match(/^\s*\.(\w*)$/);
<ide> if (match) {
<ide> completionGroups.push(Object.keys(this.commands));
<ide> completeOn = match[1];
| 1
|
Javascript
|
Javascript
|
fix flaky test-repl
|
9b23777442f9af01b25b8752e3d1767036b62f14
|
<ide><path>test/parallel/test-repl.js
<ide> function event(ee, expected) {
<ide> const data = inspect(expected, { compact: false });
<ide> const msg = `The REPL did not reply as expected for:\n\n${data}`;
<ide> reject(new Error(msg));
<del> }, common.platformTimeout(1000));
<add> }, common.platformTimeout(9999));
<ide> ee.once('data', common.mustCall((...args) => {
<ide> clearTimeout(timeout);
<ide> resolve(...args);
| 1
|
Javascript
|
Javascript
|
update the players source cache on sourceset
|
ba2ae7868b5fa42b9eee65146aa84ba58f749315
|
<ide><path>src/js/player.js
<ide> import Tech from './tech/tech.js';
<ide> import * as middleware from './tech/middleware.js';
<ide> import {ALL as TRACK_TYPES} from './tracks/track-types';
<ide> import filterSource from './utils/filter-source';
<add>import {findMimetype} from './utils/mimetypes';
<ide>
<ide> // The following imports are used only to ensure that the corresponding modules
<ide> // are always included in the video.js package. Importing the modules will
<ide> class Player extends Component {
<ide> }
<ide> }
<ide>
<add> /**
<add> * Update the internal source caches so that we return the correct source from
<add> * `src()`, `currentSource()`, and `currentSources()`.
<add> *
<add> * > Note: `currentSources` will not be updated if the source that is passed in exists
<add> * in the current `currentSources` cache.
<add> *
<add> *
<add> * @param {Tech~SourceObject} srcObj
<add> * A string or object source to update our caches to.
<add> */
<add> updateSourceCaches_(srcObj = '') {
<add>
<add> let src = srcObj;
<add> let type = '';
<add>
<add> if (typeof src !== 'string') {
<add> src = srcObj.src;
<add> type = srcObj.type;
<add> }
<add> // make sure all the caches are set to default values
<add> // to prevent null checking
<add> this.cache_.source = this.cache_.source || {};
<add> this.cache_.sources = this.cache_.sources || [];
<add>
<add> // try to get the type of the src that was passed in
<add> if (src && !type) {
<add> type = findMimetype(this, src);
<add> }
<add>
<add> // update `currentSource` cache always
<add> this.cache_.source = {src, type};
<add>
<add> const matchingSources = this.cache_.sources.filter((s) => s.src && s.src === src);
<add> const sourceElSources = [];
<add> const sourceEls = this.$$('source');
<add> const matchingSourceEls = [];
<add>
<add> for (let i = 0; i < sourceEls.length; i++) {
<add> const sourceObj = Dom.getAttributes(sourceEls[i]);
<add>
<add> sourceElSources.push(sourceObj);
<add>
<add> if (sourceObj.src && sourceObj.src === src) {
<add> matchingSourceEls.push(sourceObj.src);
<add> }
<add> }
<add>
<add> // if we have matching source els but not matching sources
<add> // the current source cache is not up to date
<add> if (matchingSourceEls.length && !matchingSources.length) {
<add> this.cache_.sources = sourceElSources;
<add> // if we don't have matching source or source els set the
<add> // sources cache to the `currentSource` cache
<add> } else if (!matchingSources.length) {
<add> this.cache_.sources = [this.cache_.source];
<add> }
<add>
<add> // update the tech `src` cache
<add> this.cache_.src = src;
<add> }
<add>
<ide> /**
<ide> * *EXPERIMENTAL* Fired when the source is set or changed on the {@link Tech}
<ide> * causing the media element to reload.
<ide> class Player extends Component {
<ide> * @private
<ide> */
<ide> handleTechSourceset_(event) {
<add> // only update the source cache when the source
<add> // was not updated using the player api
<add> if (!this.changingSrc_) {
<add> // update the source to the intial source right away
<add> // in some cases this will be empty string
<add> this.updateSourceCaches_(event.src);
<add>
<add> // if the `sourceset` `src` was an empty string
<add> // wait for a `loadstart` to update the cache to `currentSrc`.
<add> // If a sourceset happens before a `loadstart`, we reset the state
<add> // as this function will be called again.
<add> if (!event.src) {
<add> const updateCache = (e) => {
<add> if (e.type !== 'sourceset') {
<add> this.updateSourceCaches_(this.techGet_('currentSrc'));
<add> }
<add>
<add> this.tech_.off(['sourceset', 'loadstart'], updateCache);
<add> };
<add>
<add> this.tech_.one(['sourceset', 'loadstart'], updateCache);
<add> }
<add> }
<add>
<ide> this.trigger({
<ide> src: event.src,
<ide> type: 'sourceset'
<ide> class Player extends Component {
<ide> }
<ide>
<ide> // intial sources
<del> this.cache_.sources = sources;
<ide> this.changingSrc_ = true;
<ide>
<del> // intial source
<del> this.cache_.source = sources[0];
<add> this.cache_.sources = sources;
<add> this.updateSourceCaches_(sources[0]);
<ide>
<ide> // middlewareSource is the source after it has been changed by middleware
<ide> middleware.setSource(this, sources[0], (middlewareSource, mws) => {
<ide> this.middleware_ = mws;
<ide>
<add> // since sourceSet is async we have to update the cache again after we select a source since
<add> // the source that is selected could be out of order from the cache update above this callback.
<add> this.cache_.sources = sources;
<add> this.updateSourceCaches_(middlewareSource);
<add>
<ide> const err = this.src_(middlewareSource);
<ide>
<ide> if (err) {
<ide> if (sources.length > 1) {
<ide> return this.src(sources.slice(1));
<ide> }
<ide>
<add> this.changingSrc_ = false;
<add>
<ide> // We need to wrap this in a timeout to give folks a chance to add error event handlers
<ide> this.setTimeout(function() {
<ide> this.error({ code: 4, message: this.localize(this.options_.notSupportedMessage) });
<ide> class Player extends Component {
<ide> return;
<ide> }
<ide>
<del> this.changingSrc_ = false;
<del> // video element listed source
<del> this.cache_.src = middlewareSource.src;
<del>
<ide> middleware.setTech(mws, this.tech_);
<ide> });
<ide> }
<ide> class Player extends Component {
<ide> this.techCall_('src', source.src);
<ide> }
<ide>
<add> this.changingSrc_ = false;
<ide> }, true);
<ide>
<ide> return false;
<ide><path>src/js/utils/filter-source.js
<ide> * @module filter-source
<ide> */
<ide> import {isObject} from './obj';
<del>import {MimetypesKind} from './mimetypes';
<del>import * as Url from '../utils/url.js';
<add>import {getMimetype} from './mimetypes';
<ide>
<ide> /**
<ide> * Filter out single bad source objects or multiple source objects in an
<ide> const filterSource = function(src) {
<ide> * src Object with known type
<ide> */
<ide> function checkMimetype(src) {
<del> const ext = Url.getFileExtension(src.src);
<del> const mimetype = MimetypesKind[ext.toLowerCase()];
<add> const mimetype = getMimetype(src.src);
<ide>
<ide> if (!src.type && mimetype) {
<ide> src.type = mimetype;
<ide><path>src/js/utils/mimetypes.js
<add>import * as Url from '../utils/url.js';
<add>
<ide> /**
<ide> * Mimetypes
<ide> *
<ide> export const MimetypesKind = {
<ide> oga: 'audio/ogg',
<ide> m3u8: 'application/x-mpegURL'
<ide> };
<add>
<add>/**
<add> * Get the mimetype of a given src url if possible
<add> *
<add> * @param {string} src
<add> * The url to the src
<add> *
<add> * @return {string}
<add> * return the mimetype if it was known or empty string otherwise
<add> */
<add>export const getMimetype = function(src = '') {
<add> const ext = Url.getFileExtension(src);
<add> const mimetype = MimetypesKind[ext.toLowerCase()];
<add>
<add> return mimetype || '';
<add>};
<add>
<add>/**
<add> * Find the mime type of a given source string if possible. Uses the player
<add> * source cache.
<add> *
<add> * @param {Player} player
<add> * The player object
<add> *
<add> * @param {string} src
<add> * The source string
<add> *
<add> * @return {string}
<add> * The type that was found
<add> */
<add>export const findMimetype = (player, src) => {
<add> if (!src) {
<add> return '';
<add> }
<add>
<add> // 1. check for the type in the `source` cache
<add> if (player.cache_.source.src === src && player.cache_.source.type) {
<add> return player.cache_.source.type;
<add> }
<add>
<add> // 2. see if we have this source in our `currentSources` cache
<add> const matchingSources = player.cache_.sources.filter((s) => s.src === src);
<add>
<add> if (matchingSources.length) {
<add> return matchingSources[0].type;
<add> }
<add>
<add> // 3. look for the src url in source elements and use the type there
<add> const sources = player.$$('source');
<add>
<add> for (let i = 0; i < sources.length; i++) {
<add> const s = sources[i];
<add>
<add> if (s.type && s.src && s.src === src) {
<add> return s.type;
<add> }
<add> }
<add>
<add> // 4. finally fallback to our list of mime types based on src url extension
<add> return getMimetype(src);
<add>};
<ide><path>test/unit/sourceset.test.js
<ide> import document from 'global/document';
<ide> import window from 'global/window';
<ide> import log from '../../src/js/utils/log.js';
<ide> import sinon from 'sinon';
<add>import {getAbsoluteURL} from '../../src/js/utils/url.js';
<ide>
<ide> const Html5 = videojs.getTech('Html5');
<ide> const wait = 1;
<ide> const testSrc = {
<ide> };
<ide> const sourceOne = {src: 'http://example.com/one.mp4', type: 'video/mp4'};
<ide> const sourceTwo = {src: 'http://example.com/two.mp4', type: 'video/mp4'};
<add>const sourceThree = {src: 'http://example.com/three.mp4', type: 'video/mp4'};
<ide>
<ide> if (!Html5.canOverrideAttributes()) {
<ide> qunitFn = 'skip';
<ide> }
<ide>
<ide> const oldMovingMedia = Html5.prototype.movingMediaElementInDOM;
<del>const validateSource = function(assert, player, sources, checkMediaElSource = true) {
<del> const tech = player.tech_;
<del> const mediaEl = tech.el();
<del>
<del> if (checkMediaElSource) {
<del> assert.equal(mediaEl.src, sources[0].src, 'mediaEl.src is correct');
<del> assert.equal(mediaEl.getAttribute('src'), sources[0].src, 'mediaEl attribute is correct');
<del> assert.equal(tech.src(), sources[0].src, 'tech is correct');
<del> }
<add>const validateSource = function(player, expectedSources, event, srcOverrides = {}) {
<add> expectedSources = Array.isArray(expectedSources) ? expectedSources : [expectedSources];
<add> const mediaEl = player.tech_.el();
<add> const assert = QUnit.assert;
<add> const expected = {
<add> // player cache checks
<add> currentSources: expectedSources, currentSource: expectedSources[0], src: expectedSources[0].src,
<add> // tech checks
<add> event: expectedSources[0].src, attr: expectedSources[0].src, prop: expectedSources[0].src
<add> };
<add>
<add> Object.keys(srcOverrides).forEach((k) => {
<add> // only override known properties
<add> if (!expected.hasOwnProperty(k)) {
<add> return;
<add> }
<add>
<add> expected[k] = srcOverrides[k];
<add> });
<add>
<add> assert.deepEqual(player.currentSource(), expected.currentSource, 'player.currentSource() is correct');
<add> assert.deepEqual(player.currentSources(), expected.currentSources, 'player.currentSources() is correct');
<add> assert.equal(player.src(), expected.src, 'player.src() is correct');
<add>
<add> assert.equal(event.src, expected.event, 'event src is correct');
<add>
<add> // if we expect a blank attr it will be null instead
<add> assert.equal(mediaEl.getAttribute('src'), expected.attr || null, 'mediaEl attribute is correct');
<add>
<add> // mediaEl.src source is always absolute, but can be empty string
<add> // getAbsoluteURL would return the current url of the page for empty string
<add> // so we have to check
<add> expected.prop = expected.prop ? getAbsoluteURL(expected.prop) : expected.prop;
<add> assert.equal(mediaEl.src, expected.prop, 'mediaEl src property is correct');
<add>
<ide> };
<ide>
<ide> const setupEnv = function(env, testName) {
<add> sinon.stub(log, 'error');
<ide> env.fixture = document.getElementById('qunit-fixture');
<ide>
<ide> if (testName === 'change video el' || testName === 'change audio el') {
<ide> const setupEnv = function(env, testName) {
<ide> } else {
<ide> env.mediaEl = document.createElement('video');
<ide> }
<del> env.testSrc = testSrc;
<del> env.sourceOne = sourceOne;
<del> env.sourceTwo = sourceTwo;
<del>
<ide> env.mediaEl.className = 'video-js';
<ide> env.fixture.appendChild(env.mediaEl);
<ide> };
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> QUnit.module('source before player', (subhooks) => testTypes.forEach((testName) => {
<ide> QUnit.module(testName, {
<ide> beforeEach() {
<del> sinon.stub(log, 'error');
<ide>
<ide> setupEnv(this, testName);
<ide> },
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> QUnit.test('data-setup one source', function(assert) {
<ide> const done = assert.async();
<ide>
<del> this.mediaEl.setAttribute('data-setup', JSON.stringify({sources: [this.testSrc]}));
<add> this.mediaEl.setAttribute('data-setup', JSON.stringify({sources: [testSrc]}));
<ide> this.player = videojs(this.mediaEl, {
<ide> enableSourceset: true
<ide> });
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [testSrc], e);
<ide> done();
<ide> });
<ide> });
<ide>
<ide> QUnit.test('data-setup preload auto', function(assert) {
<ide> const done = assert.async();
<ide>
<del> this.mediaEl.setAttribute('data-setup', JSON.stringify({sources: [this.testSrc]}));
<add> this.mediaEl.setAttribute('data-setup', JSON.stringify({sources: [testSrc]}));
<ide> this.mediaEl.setAttribute('preload', 'auto');
<ide> this.player = videojs(this.mediaEl, {
<ide> enableSourceset: true
<ide> });
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [testSrc], e);
<ide> done();
<ide> });
<ide> });
<ide>
<ide> QUnit.test('data-setup two sources', function(assert) {
<ide> const done = assert.async();
<ide>
<del> this.mediaEl.setAttribute('data-setup', JSON.stringify({sources: [this.sourceOne, this.sourceTwo]}));
<add> this.mediaEl.setAttribute('data-setup', JSON.stringify({sources: [sourceOne, sourceTwo]}));
<ide> this.player = videojs(this.mediaEl, {
<ide> enableSourceset: true
<ide> });
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceOne, this.sourceTwo]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [sourceOne, sourceTwo], e);
<ide> done();
<ide> });
<ide> });
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide>
<ide> this.player = videojs(this.mediaEl, {
<ide> enableSourceset: true,
<del> sources: [this.testSrc]
<add> sources: [testSrc]
<ide> });
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [testSrc], e);
<ide> done();
<ide> });
<ide> });
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide>
<ide> this.player = videojs(this.mediaEl, {
<ide> enableSourceset: true,
<del> sources: [this.sourceOne, this.sourceTwo]
<add> sources: [sourceOne, sourceTwo]
<ide> });
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceOne, this.sourceTwo]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [sourceOne, sourceTwo], e);
<ide> done();
<ide> });
<ide> });
<ide>
<ide> QUnit.test('mediaEl.src = ...;', function(assert) {
<ide> const done = assert.async();
<ide>
<del> this.mediaEl.src = this.testSrc.src;
<add> this.mediaEl.src = testSrc.src;
<ide> this.player = videojs(this.mediaEl, {
<ide> enableSourceset: true
<ide> });
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [testSrc], e);
<ide> done();
<ide> });
<ide> });
<ide>
<ide> QUnit.test('mediaEl.setAttribute("src", ...)"', function(assert) {
<ide> const done = assert.async();
<ide>
<del> this.mediaEl.setAttribute('src', this.testSrc.src);
<add> this.mediaEl.setAttribute('src', testSrc.src);
<ide> this.player = videojs(this.mediaEl, {
<ide> enableSourceset: true
<ide> });
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [testSrc], e);
<ide> done();
<ide> });
<ide> });
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> const done = assert.async();
<ide>
<ide> this.source = document.createElement('source');
<del> this.source.src = this.testSrc.src;
<del> this.source.type = this.testSrc.type;
<add> this.source.src = testSrc.src;
<add> this.source.type = testSrc.type;
<ide>
<ide> this.mediaEl.appendChild(this.source);
<ide>
<ide> this.player = videojs(this.mediaEl, {
<ide> enableSourceset: true
<ide> });
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [testSrc], e);
<ide> done();
<ide> });
<ide> });
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> const done = assert.async();
<ide>
<ide> this.source = document.createElement('source');
<del> this.source.src = this.sourceOne.src;
<del> this.source.type = this.sourceOne.type;
<add> this.source.src = sourceOne.src;
<add> this.source.type = sourceOne.type;
<ide>
<ide> this.source2 = document.createElement('source');
<del> this.source2.src = this.sourceTwo.src;
<del> this.source2.type = this.sourceTwo.type;
<add> this.source2.src = sourceTwo.src;
<add> this.source2.type = sourceTwo.type;
<ide>
<ide> this.mediaEl.appendChild(this.source);
<ide> this.mediaEl.appendChild(this.source2);
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> enableSourceset: true
<ide> });
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceOne, this.sourceTwo]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [sourceOne, sourceTwo], e);
<ide> done();
<ide> });
<ide> });
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> QUnit.module('source after player', (subhooks) => testTypes.forEach((testName) => {
<ide> QUnit.module(testName, {
<ide> beforeEach() {
<del> sinon.stub(log, 'error');
<ide>
<ide> setupEnv(this, testName);
<ide> },
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> this.player = videojs(this.mediaEl, {
<ide> enableSourceset: true
<ide> });
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [testSrc], e);
<ide> done();
<ide> });
<ide>
<del> this.player.src(this.testSrc);
<add> this.player.src(testSrc);
<ide> });
<ide>
<ide> QUnit.test('player.src({...}) preload auto', function(assert) {
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> enableSourceset: true
<ide> });
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [testSrc], e);
<ide> done();
<ide> });
<ide>
<del> this.player.src(this.testSrc);
<add> this.player.src(testSrc);
<ide> });
<ide>
<ide> QUnit.test('player.src({...}) two sources', function(assert) {
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> enableSourceset: true
<ide> });
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceOne, this.sourceTwo]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [sourceOne, sourceTwo], e);
<ide> done();
<ide> });
<ide>
<del> this.player.src([this.sourceOne, this.sourceTwo]);
<add> this.player.src([sourceOne, sourceTwo]);
<ide> });
<ide>
<ide> QUnit.test('mediaEl.src = ...;', function(assert) {
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> this.player = videojs(this.mediaEl, {enableSourceset: true});
<ide>
<ide> this.player.one('sourceset', (e) => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> validateSource(this.player, [testSrc], e);
<ide> done();
<ide> });
<ide>
<del> this.player.tech_.el_.src = this.testSrc.src;
<add> this.player.tech_.el_.src = testSrc.src;
<ide> });
<ide>
<ide> QUnit.test('mediaEl.setAttribute("src", ...)"', function(assert) {
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> this.player = videojs(this.mediaEl, {enableSourceset: true});
<ide>
<ide> this.player.one('sourceset', (e) => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> validateSource(this.player, [testSrc], e);
<ide> done();
<ide> });
<ide>
<del> this.player.tech_.el_.setAttribute('src', this.testSrc.src);
<add> this.player.tech_.el_.setAttribute('src', testSrc.src);
<ide> });
<ide>
<ide> const appendTypes = [
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> const done = assert.async();
<ide>
<ide> this.source = document.createElement('source');
<del> this.source.src = this.testSrc.src;
<del> this.source.type = this.testSrc.type;
<add> this.source.src = testSrc.src;
<add> this.source.type = testSrc.type;
<ide>
<ide> this.player = videojs(this.mediaEl, {enableSourceset: true});
<ide>
<ide> this.player.one('sourceset', (e) => {
<del> assert.equal(e.src, this.testSrc.src, 'source is as expected');
<add> validateSource(this.player, testSrc, e, {prop: '', attr: ''});
<ide> done();
<ide> });
<ide>
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide>
<ide> this.totalSourcesets = 2;
<ide> this.source = document.createElement('source');
<del> this.source.src = this.testSrc.src;
<del> this.source.type = this.testSrc.type;
<add> this.source.src = testSrc.src;
<add> this.source.type = testSrc.type;
<ide>
<ide> this.player = videojs(this.mediaEl, {enableSourceset: true});
<ide>
<ide> this.player.one('sourceset', (e1) => {
<del> assert.equal(e1.src, this.testSrc.src, 'event has expected source');
<add> validateSource(this.player, testSrc, e1, {prop: '', attr: ''});
<ide>
<ide> this.player.one('sourceset', (e2) => {
<del> assert.equal(e2.src, this.testSrc.src, 'second event has expected source');
<add> validateSource(this.player, testSrc, e2, {prop: '', attr: ''});
<ide> done();
<ide> });
<ide> });
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide>
<ide> this.totalSourcesets = 2;
<ide> this.source = document.createElement('source');
<del> this.source.src = this.testSrc.src;
<del> this.source.type = this.testSrc.type;
<add> this.source.src = testSrc.src;
<add> this.source.type = testSrc.type;
<ide>
<ide> this.player = videojs(this.mediaEl, {enableSourceset: true});
<ide>
<del> this.player.one('sourceset', (e) => {
<del> assert.equal(e.src, this.testSrc.src, 'source is as expected');
<add> this.player.one('sourceset', (e1) => {
<add> validateSource(this.player, testSrc, e1, {prop: '', attr: ''});
<ide>
<ide> this.player.one('sourceset', (e2) => {
<del> validateSource(assert, this.player, [this.sourceOne]);
<add> validateSource(this.player, [sourceOne], e2);
<ide>
<ide> done();
<ide> });
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> appendObj.fn(this.player.tech_.el_, this.source);
<ide>
<ide> // should fire an additional sourceset
<del> this.player.tech_.el_.src = this.sourceOne.src;
<add> this.player.tech_.el_.src = sourceOne.src;
<ide> });
<ide>
<ide> QUnit.test(`one <source> through ${appendObj.name} and then mediaEl.setAttribute`, function(assert) {
<ide> const done = assert.async();
<ide>
<ide> this.totalSourcesets = 2;
<ide> this.source = document.createElement('source');
<del> this.source.src = this.testSrc.src;
<del> this.source.type = this.testSrc.type;
<add> this.source.src = testSrc.src;
<add> this.source.type = testSrc.type;
<ide>
<ide> this.player = videojs(this.mediaEl, {enableSourceset: true});
<ide>
<del> this.player.one('sourceset', (e) => {
<del> assert.equal(e.src, this.testSrc.src, 'source is as expected');
<add> this.player.one('sourceset', (e1) => {
<add> validateSource(this.player, testSrc, e1, {prop: '', attr: ''});
<ide>
<ide> this.player.one('sourceset', (e2) => {
<del> validateSource(assert, this.player, [this.sourceOne]);
<add> validateSource(this.player, [sourceOne], e2);
<ide>
<ide> done();
<ide> });
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> appendObj.fn(this.player.tech_.el_, this.source);
<ide>
<ide> // should fire an additional sourceset
<del> this.player.tech_.el_.setAttribute('src', this.sourceOne.src);
<add> this.player.tech_.el_.setAttribute('src', sourceOne.src);
<ide> });
<ide>
<ide> QUnit.test(`mediaEl.src and then <source> through ${appendObj.name}`, function(assert) {
<ide> const done = assert.async();
<ide>
<ide> this.source = document.createElement('source');
<del> this.source.src = this.testSrc.src;
<del> this.source.type = this.testSrc.type;
<add> this.source.src = testSrc.src;
<add> this.source.type = testSrc.type;
<ide>
<ide> this.player = videojs(this.mediaEl, {enableSourceset: true});
<ide>
<ide> this.player.one('sourceset', (e) => {
<del> validateSource(assert, this.player, [this.sourceOne]);
<add> validateSource(this.player, [sourceOne], e);
<ide>
<ide> done();
<ide> });
<ide>
<del> this.player.tech_.el_.src = this.sourceOne.src;
<add> this.player.tech_.el_.src = sourceOne.src;
<ide>
<ide> // should not fire sourceset
<ide> appendObj.fn(this.player.tech_.el_, this.source);
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> const done = assert.async();
<ide>
<ide> this.source = document.createElement('source');
<del> this.source.src = this.testSrc.src;
<del> this.source.type = this.testSrc.type;
<add> this.source.src = testSrc.src;
<add> this.source.type = testSrc.type;
<ide>
<ide> this.player = videojs(this.mediaEl, {enableSourceset: true});
<ide>
<ide> this.player.one('sourceset', (e) => {
<del> validateSource(assert, this.player, [this.sourceOne]);
<add> validateSource(this.player, [sourceOne], e);
<ide>
<ide> done();
<ide> });
<ide>
<del> this.player.tech_.el_.setAttribute('src', this.sourceOne.src);
<add> this.player.tech_.el_.setAttribute('src', sourceOne.src);
<ide>
<ide> // should not fire sourceset
<ide> appendObj.fn(this.player.tech_.el_, this.source);
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> const done = assert.async();
<ide>
<ide> this.source = document.createElement('source');
<del> this.source.src = this.sourceOne.src;
<del> this.source.type = this.sourceOne.type;
<add> this.source.src = sourceOne.src;
<add> this.source.type = sourceOne.type;
<ide>
<ide> this.source2 = document.createElement('source');
<del> this.source2.src = this.sourceTwo.src;
<del> this.source2.type = this.sourceTwo.type;
<add> this.source2.src = sourceTwo.src;
<add> this.source2.type = sourceTwo.type;
<ide>
<ide> this.player = videojs(this.mediaEl, {enableSourceset: true});
<ide>
<ide> this.player.one('sourceset', (e) => {
<del> assert.equal(e.src, this.sourceOne.src, 'source is as expected');
<add> validateSource(this.player, sourceOne, e, {prop: '', attr: ''});
<ide> done();
<ide> });
<ide>
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide>
<ide> this.totalSourcesets = 2;
<ide> this.source = document.createElement('source');
<del> this.source.src = this.sourceTwo.src;
<del> this.source.type = this.sourceTwo.type;
<add> this.source.src = sourceTwo.src;
<add> this.source.type = sourceTwo.type;
<ide>
<ide> this.player = videojs(this.mediaEl, {enableSourceset: true});
<ide>
<ide> this.player.one('sourceset', (e1) => {
<del> validateSource(assert, this.player, [this.sourceOne]);
<add> validateSource(this.player, [sourceOne], e1);
<ide>
<ide> this.player.one('sourceset', (e2) => {
<del> validateSource(assert, this.player, [this.sourceTwo], false);
<add> validateSource(this.player, sourceTwo, e2, {prop: '', attr: ''});
<ide> done();
<ide> });
<ide>
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide>
<ide> });
<ide>
<del> this.player.tech_.el_.setAttribute('src', this.sourceOne.src);
<add> this.player.tech_.el_.setAttribute('src', sourceOne.src);
<ide> });
<ide> });
<ide>
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> QUnit.module('source change', (subhooks) => testTypes.forEach((testName) => {
<ide> QUnit.module(testName, {
<ide> beforeEach(assert) {
<del> sinon.stub(log, 'error');
<ide> const done = assert.async();
<ide>
<ide> setupEnv(this, testName);
<ide>
<del> this.mediaEl.src = this.testSrc.src;
<add> this.mediaEl.src = testSrc.src;
<ide> this.player = videojs(this.mediaEl, {
<ide> enableSourceset: true
<ide> });
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> });
<ide>
<ide> // initial sourceset should happen on player.ready
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> this.player.one('sourceset', (e) => {
<add> validateSource(this.player, [testSrc], e);
<ide> done();
<ide> });
<ide> },
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> QUnit.test('player.src({...})', function(assert) {
<ide> const done = assert.async();
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> this.player.one('sourceset', (e1) => {
<add> validateSource(this.player, [testSrc], e1);
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceOne]);
<add> this.player.one('sourceset', (e2) => {
<add> validateSource(this.player, [sourceOne], e2);
<ide> done();
<ide> });
<ide>
<del> this.player.src(this.sourceOne);
<add> this.player.src(sourceOne);
<ide> });
<ide>
<del> this.player.src(this.testSrc);
<add> this.player.src(testSrc);
<ide> });
<ide>
<ide> QUnit.test('player.src({...}) x2 at the same time', function(assert) {
<ide> const done = assert.async();
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceOne]);
<add> this.player.one('sourceset', (e1) => {
<add> validateSource(this.player, [sourceOne], e1);
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceTwo]);
<add> this.player.one('sourceset', (e2) => {
<add> validateSource(this.player, [sourceTwo], e2);
<ide> done();
<ide> });
<ide> });
<ide>
<del> this.player.src(this.sourceOne);
<del> this.player.src(this.sourceTwo);
<add> this.player.src(sourceOne);
<add> this.player.src(sourceTwo);
<add> });
<add>
<add> QUnit.test('player.src({...}) x3 at the same time', function(assert) {
<add> const done = assert.async();
<add>
<add> // we have one more sourceset than the other tests
<add> this.totalSourcesets = 4;
<add>
<add> this.player.one('sourceset', (e1) => {
<add> validateSource(this.player, sourceOne, e1);
<add>
<add> this.player.one('sourceset', (e2) => {
<add> validateSource(this.player, sourceTwo, e2);
<add>
<add> this.player.one('sourceset', (e3) => {
<add> validateSource(this.player, sourceThree, e3);
<add> done();
<add> });
<add> });
<add> });
<add>
<add> this.player.src(sourceOne);
<add> this.player.src(sourceTwo);
<add> this.player.src(sourceThree);
<ide> });
<ide>
<ide> QUnit.test('mediaEl.src = ...', function(assert) {
<ide> const done = assert.async();
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> this.player.one('sourceset', (e1) => {
<add> validateSource(this.player, [testSrc], e1);
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceOne]);
<add> this.player.one('sourceset', (e2) => {
<add> validateSource(this.player, [sourceOne], e2);
<ide> done();
<ide> });
<ide>
<del> this.mediaEl.src = this.sourceOne.src;
<add> this.mediaEl.src = sourceOne.src;
<ide> });
<ide>
<del> this.mediaEl.src = this.testSrc.src;
<add> this.mediaEl.src = testSrc.src;
<ide> });
<ide>
<ide> QUnit.test('mediaEl.src = ... x2 at the same time', function(assert) {
<ide> const done = assert.async();
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceOne]);
<add> this.player.one('sourceset', (e1) => {
<add> validateSource(this.player, [sourceOne], e1);
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceTwo]);
<add> this.player.one('sourceset', (e2) => {
<add> validateSource(this.player, [sourceTwo], e2);
<ide> done();
<ide> });
<ide> });
<ide>
<del> this.mediaEl.src = this.sourceOne.src;
<del> this.mediaEl.src = this.sourceTwo.src;
<add> this.mediaEl.src = sourceOne.src;
<add> this.mediaEl.src = sourceTwo.src;
<ide> });
<ide>
<ide> QUnit.test('mediaEl.setAttribute("src", ...)', function(assert) {
<ide> const done = assert.async();
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.testSrc]);
<add> this.player.one('sourceset', (e1) => {
<add> validateSource(this.player, [testSrc], e1);
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceOne]);
<add> this.player.one('sourceset', (e2) => {
<add> validateSource(this.player, [sourceOne], e2);
<ide> done();
<ide> });
<ide>
<del> this.mediaEl.setAttribute('src', this.sourceOne.src);
<add> this.mediaEl.setAttribute('src', sourceOne.src);
<ide> });
<ide>
<del> this.mediaEl.setAttribute('src', this.testSrc.src);
<add> this.mediaEl.setAttribute('src', testSrc.src);
<ide> });
<ide>
<ide> QUnit.test('mediaEl.setAttribute("src", ...) x2 at the same time', function(assert) {
<ide> const done = assert.async();
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceOne]);
<add> this.player.one('sourceset', (e1) => {
<add> validateSource(this.player, [sourceOne], e1);
<ide>
<del> this.player.one('sourceset', () => {
<del> validateSource(assert, this.player, [this.sourceTwo]);
<add> this.player.one('sourceset', (e2) => {
<add> validateSource(this.player, [sourceTwo], e2);
<ide> done();
<ide> });
<ide> });
<ide>
<del> this.mediaEl.setAttribute('src', this.sourceOne.src);
<del> this.mediaEl.setAttribute('src', this.sourceTwo.src);
<add> this.mediaEl.setAttribute('src', sourceOne.src);
<add> this.mediaEl.setAttribute('src', sourceTwo.src);
<ide> });
<ide>
<del> QUnit.test('load() with a src attribute', function(assert) {
<add> QUnit.test('mediaEl.load() with a src attribute', function(assert) {
<ide> const done = assert.async();
<ide>
<del> this.player = videojs(this.mediaEl, {
<del> enableSourceset: true
<del> });
<del>
<ide> this.totalSourcesets = 1;
<ide>
<ide> window.setTimeout(() => {
<ide> this.sourcesets = 0;
<ide> this.totalSourcesets = 1;
<ide>
<ide> this.player.one('sourceset', (e) => {
<del> assert.equal(e.src, this.mediaEl.src, "the sourceset event's src matches the src attribute");
<del>
<add> validateSource(this.player, [testSrc], e);
<ide> done();
<ide> });
<ide>
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> QUnit.test('mediaEl.load()', function(assert) {
<ide> const source = document.createElement('source');
<ide>
<del> source.src = this.testSrc.src;
<del> source.type = this.testSrc.type;
<add> source.src = testSrc.src;
<add> source.type = testSrc.type;
<ide>
<ide> // the only way to unset a source, so that we use the source
<ide> // elements instead
<ide> this.mediaEl.removeAttribute('src');
<ide>
<ide> this.player.one('sourceset', (e1) => {
<del> assert.equal(e1.src, this.testSrc.src, 'we got a sourceset with the expected src');
<add> validateSource(this.player, [testSrc], e1, {attr: '', prop: ''});
<ide>
<ide> this.player.one('sourceset', (e2) => {
<del> assert.equal(e2.src, this.sourceOne.src, 'we got a sourceset with the expected src');
<add> validateSource(this.player, [sourceOne], e2, {attr: '', prop: ''});
<ide> });
<ide>
<del> source.src = this.sourceOne.src;
<del> source.type = this.sourceOne.type;
<add> source.src = sourceOne.src;
<add> source.type = sourceOne.type;
<ide>
<ide> this.mediaEl.load();
<ide> });
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> QUnit.test('mediaEl.load() x2 at the same time', function(assert) {
<ide> const source = document.createElement('source');
<ide>
<del> source.src = this.sourceOne.src;
<del> source.type = this.sourceOne.type;
<add> source.src = sourceOne.src;
<add> source.type = sourceOne.type;
<ide>
<ide> this.player.one('sourceset', (e1) => {
<del> assert.equal(e1.src, this.sourceOne.src, 'we got a sourceset with the expected src');
<add> validateSource(this.player, [sourceOne], e1, {attr: '', prop: ''});
<ide>
<ide> this.player.one('sourceset', (e2) => {
<del> assert.equal(e2.src, this.sourceTwo.src, 'we got a sourceset with the expected src');
<add> validateSource(this.player, [sourceTwo], e2, {attr: '', prop: ''});
<ide> });
<ide> });
<ide>
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> this.mediaEl.appendChild(source);
<ide> this.mediaEl.load();
<ide>
<del> source.src = this.sourceTwo.src;
<del> source.type = this.sourceTwo.type;
<add> source.src = sourceTwo.src;
<add> source.type = sourceTwo.type;
<ide> this.mediaEl.load();
<ide> });
<ide>
<ide> QUnit.test('adding a <source> without load()', function(assert) {
<ide> const done = assert.async();
<ide> const source = document.createElement('source');
<ide>
<del> source.src = this.testSrc.src;
<del> source.type = this.testSrc.type;
<add> source.src = testSrc.src;
<add> source.type = testSrc.type;
<ide>
<ide> this.mediaEl.appendChild(source);
<ide>
<ide> QUnit[qunitFn]('sourceset', function(hooks) {
<ide> const done = assert.async();
<ide> const source = document.createElement('source');
<ide>
<del> source.src = this.testSrc.src;
<del> source.type = this.testSrc.type;
<add> source.src = testSrc.src;
<add> source.type = testSrc.type;
<ide>
<ide> this.mediaEl.appendChild(source);
<ide>
<del> source.src = this.testSrc.src;
<add> source.src = testSrc.src;
<ide>
<ide> this.totalSourcesets = 1;
<ide>
| 4
|
Python
|
Python
|
simplify application tests
|
f65a562d8266b769e16cc2cd14f0049720e8d81c
|
<ide><path>tests/keras/applications/applications_test.py
<ide> import pytest
<del>import numpy as np
<del>import time
<ide> import random
<ide> import os
<ide> from multiprocessing import Process, Queue
<ide>
<ide>
<ide> pytestmark = pytest.mark.skipif(
<del> os.environ['CORE_CHANGED'] == 'False' and os.environ['APP_CHANGED'] == 'False',
<del> reason='runs only when the relevant files have been modified')
<add> os.environ.get('CORE_CHANGED', 'True') == 'False' and
<add> os.environ.get('APP_CHANGED', 'True') == 'False',
<add> reason='Runs only when the relevant files have been modified.')
<ide>
<ide>
<ide> DENSENET_LIST = [(applications.DenseNet121, 1024),
<ide> (applications.NASNetLarge, 4032)]
<ide>
<ide>
<del>def clean_run(model_fn):
<add>def _get_output_shape(model_fn):
<ide> if K.backend() == 'cntk':
<del> # Create model in a subprocess so that the memory consumed by InceptionResNetV2 will be
<del> # released back to the system after this test (to deal with OOM error on CNTK backend)
<del> # TODO: remove the use of multiprocessing from these tests once a memory clearing mechanism
<del> # is implemented in the CNTK backend
<add> # Create model in a subprocess so that
<add> # the memory consumed by InceptionResNetV2 will be
<add> # released back to the system after this test
<add> # (to deal with OOM error on CNTK backend).
<add> # TODO: remove the use of multiprocessing from these tests
<add> # once a memory clearing mechanism
<add> # is implemented in the CNTK backend.
<ide> def target(queue):
<ide> model = model_fn()
<ide> queue.put(model.output_shape)
<ide> queue = Queue()
<ide> p = Process(target=target, args=(queue,))
<ide> p.start()
<ide> p.join()
<del> # The error in a subprocess won't propagate to the main process, so we check if the model
<del> # is successfully created by checking if the output shape has been put into the queue
<add> # The error in a subprocess won't propagate
<add> # to the main process, so we check if the model
<add> # is successfully created by checking if the output shape
<add> # has been put into the queue
<ide> assert not queue.empty(), 'Model creation failed.'
<ide> return queue.get_nowait()
<ide> else:
<ide> def target(queue):
<ide>
<ide>
<ide> @keras_test
<del>def test_resnet50():
<del> model = applications.ResNet50(weights=None)
<del> assert model.output_shape == (None, 1000)
<del>
<del>
<del>@keras_test
<del>def test_resnet50_notop():
<del> model = applications.ResNet50(weights=None, include_top=False)
<del> assert model.output_shape == (None, None, None, 2048)
<del>
<del>
<del>@keras_test
<del>def test_resnet50_variable_input_channels():
<del> input_shape = (1, None, None) if K.image_data_format() == 'channels_first' else (None, None, 1)
<del> model = applications.ResNet50(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, 2048)
<del>
<del> input_shape = (4, None, None) if K.image_data_format() == 'channels_first' else (None, None, 4)
<del> model = applications.ResNet50(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, 2048)
<del>
<del>
<del>@keras_test
<del>def test_resnet50_pooling():
<del> model = applications.ResNet50(weights=None,
<del> include_top=False,
<del> pooling='avg')
<del> assert model.output_shape == (None, 2048)
<del>
<del>
<del>@keras_test
<del>def test_vgg16():
<del> model = applications.VGG16(weights=None)
<del> assert model.output_shape == (None, 1000)
<del>
<del>
<del>@keras_test
<del>def test_vgg16_notop():
<del> model = applications.VGG16(weights=None, include_top=False)
<del> assert model.output_shape == (None, None, None, 512)
<del>
<del>
<del>@keras_test
<del>def test_vgg16_variable_input_channels():
<del> input_shape = (1, None, None) if K.image_data_format() == 'channels_first' else (None, None, 1)
<del> model = applications.VGG16(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, 512)
<del>
<del> input_shape = (4, None, None) if K.image_data_format() == 'channels_first' else (None, None, 4)
<del> model = applications.VGG16(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, 512)
<add>def _test_application_basic(app, last_dim=1000):
<add> output_shape = _get_output_shape(lambda: app(weights=None))
<add> assert output_shape == (None, last_dim)
<ide>
<ide>
<ide> @keras_test
<del>def test_vgg16_pooling():
<del> model = applications.VGG16(weights=None, include_top=False, pooling='avg')
<del> assert model.output_shape == (None, 512)
<add>def _test_application_notop(app, last_dim):
<add> output_shape = _get_output_shape(
<add> lambda: app(weights=None, include_top=False))
<add> assert output_shape == (None, None, None, last_dim)
<ide>
<ide>
<ide> @keras_test
<del>def test_vgg19():
<del> model = applications.VGG19(weights=None)
<del> assert model.output_shape == (None, 1000)
<del>
<add>def _test_application_variable_input_channels(app, last_dim):
<add> if K.image_data_format() == 'channels_first':
<add> input_shape = (1, None, None)
<add> else:
<add> input_shape = (None, None, 1)
<add> output_shape = _get_output_shape(
<add> lambda: app(weights=None, include_top=False, input_shape=input_shape))
<add> assert output_shape == (None, None, None, last_dim)
<ide>
<del>@keras_test
<del>def test_vgg19_notop():
<del> model = applications.VGG19(weights=None, include_top=False)
<del> assert model.output_shape == (None, None, None, 512)
<add> if K.image_data_format() == 'channels_first':
<add> input_shape = (4, None, None)
<add> else:
<add> input_shape = (None, None, 4)
<add> output_shape = _get_output_shape(
<add> lambda: app(weights=None, include_top=False, input_shape=input_shape))
<add> assert output_shape == (None, None, None, last_dim)
<ide>
<ide>
<ide> @keras_test
<del>def test_vgg19_variable_input_channels():
<del> input_shape = (1, None, None) if K.image_data_format() == 'channels_first' else (None, None, 1)
<del> model = applications.VGG19(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, 512)
<del>
<del> input_shape = (4, None, None) if K.image_data_format() == 'channels_first' else (None, None, 4)
<del> model = applications.VGG19(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, 512)
<add>def _test_app_pooling(app, last_dim):
<add> output_shape = _get_output_shape(
<add> lambda: app(weights=None,
<add> include_top=False,
<add> pooling=random.choice(['avg', 'max'])))
<add> assert output_shape == (None, last_dim)
<ide>
<ide>
<del>@keras_test
<del>def test_vgg19_notop_specified_input_shape():
<del> input_shape = (3, 300, 300) if K.image_data_format() == 'channels_first' else (300, 300, 3)
<del> model = applications.VGG19(weights=None, include_top=False, input_shape=input_shape)
<del> output_shape = (None, 512, 9, 9) if K.image_data_format() == 'channels_first' else (None, 9, 9, 512)
<del> assert model.output_shape == output_shape
<add>def test_resnet50():
<add> app = applications.ResNet50
<add> last_dim = 2048
<add> _test_application_basic(app)
<add> _test_application_notop(app, last_dim)
<add> _test_application_variable_input_channels(app, last_dim)
<add> _test_app_pooling(app, last_dim)
<ide>
<ide>
<del>@keras_test
<del>def test_vgg19_pooling():
<del> model = applications.VGG16(weights=None, include_top=False, pooling='avg')
<del> assert model.output_shape == (None, 512)
<add>def test_vgg():
<add> app = random.choice([applications.VGG16, applications.VGG19])
<add> last_dim = 512
<add> _test_application_basic(app)
<add> _test_application_notop(app, last_dim)
<add> _test_application_variable_input_channels(app, last_dim)
<add> _test_app_pooling(app, last_dim)
<ide>
<ide>
<del>@keras_test
<ide> @pytest.mark.skipif((K.backend() != 'tensorflow'),
<ide> reason='Requires TensorFlow backend')
<ide> def test_xception():
<del> model = applications.Xception(weights=None)
<del> assert model.output_shape == (None, 1000)
<add> app = applications.Xception
<add> last_dim = 2048
<add> _test_application_basic(app)
<add> _test_application_notop(app, last_dim)
<add> _test_application_variable_input_channels(app, last_dim)
<add> _test_app_pooling(app, last_dim)
<ide>
<ide>
<del>@keras_test
<del>@pytest.mark.skipif((K.backend() != 'tensorflow'),
<del> reason='Requires TensorFlow backend')
<del>def test_xception_notop():
<del> model = applications.Xception(weights=None, include_top=False)
<del> assert model.output_shape == (None, None, None, 2048)
<del>
<del>
<del>@keras_test
<del>@pytest.mark.skipif((K.backend() != 'tensorflow'),
<del> reason='Requires TensorFlow backend')
<del>def test_xception_pooling():
<del> model = applications.Xception(weights=None, include_top=False, pooling='avg')
<del> assert model.output_shape == (None, 2048)
<del>
<del>
<del>@keras_test
<del>@pytest.mark.skipif((K.backend() != 'tensorflow'),
<del> reason='Requires TensorFlow backend')
<del>def test_xception_variable_input_channels():
<del> input_shape = (1, None, None) if K.image_data_format() == 'channels_first' else (None, None, 1)
<del> model = applications.Xception(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, 2048)
<del>
<del> input_shape = (4, None, None) if K.image_data_format() == 'channels_first' else (None, None, 4)
<del> model = applications.Xception(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, 2048)
<del>
<del>
<del>@keras_test
<ide> def test_inceptionv3():
<del> model = applications.InceptionV3(weights=None)
<del> assert model.output_shape == (None, 1000)
<del>
<del>
<del>@keras_test
<del>def test_inceptionv3_notop():
<del> model = applications.InceptionV3(weights=None, include_top=False)
<del> assert model.output_shape == (None, None, None, 2048)
<add> app = applications.InceptionV3
<add> last_dim = 2048
<add> _test_application_basic(app)
<add> _test_application_notop(app, last_dim)
<add> _test_app_pooling(app, last_dim)
<ide>
<add> if K.backend() != 'cntk':
<add> # CNTK does not support dynamic padding.
<add> _test_application_variable_input_channels(app, last_dim)
<ide>
<del>@keras_test
<del>def test_inceptionv3_pooling():
<del> model = applications.InceptionV3(weights=None, include_top=False, pooling='avg')
<del> assert model.output_shape == (None, 2048)
<del>
<del>
<del>@keras_test
<del>@pytest.mark.skipif((K.backend() == 'cntk'),
<del> reason='cntk does not support padding with non-concrete dimension')
<del>def test_inceptionv3_variable_input_channels():
<del> input_shape = (1, None, None) if K.image_data_format() == 'channels_first' else (None, None, 1)
<del> model = applications.InceptionV3(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, 2048)
<ide>
<del> input_shape = (4, None, None) if K.image_data_format() == 'channels_first' else (None, None, 4)
<del> model = applications.InceptionV3(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, 2048)
<del>
<del>
<del>@keras_test
<ide> def test_inceptionresnetv2():
<del> def model_fn():
<del> return applications.InceptionResNetV2(weights=None)
<del> output_shape = clean_run(model_fn)
<del> assert output_shape == (None, 1000)
<del>
<del>
<del>@keras_test
<del>def test_inceptionresnetv2_notop():
<del> def model_fn():
<del> return applications.InceptionResNetV2(weights=None, include_top=False)
<del> output_shape = clean_run(model_fn)
<del> if K.image_data_format() == 'channels_first':
<del> assert output_shape == (None, 1536, None, None)
<del> else:
<del> assert output_shape == (None, None, None, 1536)
<del>
<del>
<del>@keras_test
<del>def test_inceptionresnetv2_pooling():
<del> def model_fn():
<del> return applications.InceptionResNetV2(weights=None, include_top=False, pooling='avg')
<del> output_shape = clean_run(model_fn)
<del> assert output_shape == (None, 1536)
<del>
<del>
<del>@keras_test
<del>def test_inceptionresnetv2_variable_input_channels():
<del> def model_fn(input_shape):
<del> return applications.InceptionResNetV2(weights=None, include_top=False, input_shape=input_shape)
<del> output_shape = clean_run(lambda: model_fn((None, None, 1)))
<del> assert output_shape == (None, None, None, 1536)
<del>
<del> output_shape = clean_run(lambda: model_fn((None, None, 4)))
<del> assert output_shape == (None, None, None, 1536)
<add> app = applications.InceptionResNetV2
<add> last_dim = 1536
<add> _test_application_basic(app)
<add> _test_application_notop(app, last_dim)
<add> _test_application_variable_input_channels(app, last_dim)
<add> _test_app_pooling(app, last_dim)
<ide>
<ide>
<del>@keras_test
<ide> @pytest.mark.skipif((K.backend() != 'tensorflow'),
<ide> reason='MobileNets are supported only on TensorFlow')
<ide> def test_mobilenet():
<del> model = applications.MobileNet(weights=None)
<del> assert model.output_shape == (None, 1000)
<del>
<del>
<del>@keras_test
<del>@pytest.mark.skipif((K.backend() != 'tensorflow'),
<del> reason='MobileNets are supported only on TensorFlow')
<del>def test_mobilenet_no_top():
<del> model = applications.MobileNet(weights=None, include_top=False)
<del> assert model.output_shape == (None, None, None, 1024)
<del>
<del>
<del>@keras_test
<del>@pytest.mark.skipif((K.backend() != 'tensorflow'),
<del> reason='MobileNets are supported only on TensorFlow')
<del>def test_mobilenet_pooling():
<del> model = applications.MobileNet(weights=None, include_top=False, pooling='avg')
<del> assert model.output_shape == (None, 1024)
<del>
<del>
<del>@keras_test
<del>@pytest.mark.skipif((K.backend() != 'tensorflow'),
<del> reason='MobileNets are supported only on TensorFlow')
<del>def test_mobilenet_variable_input_channels():
<del> input_shape = (1, None, None) if K.image_data_format() == 'channels_first' else (None, None, 1)
<del> model = applications.MobileNet(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, 1024)
<add> app = applications.MobileNet
<add> last_dim = 1024
<add> _test_application_basic(app)
<add> _test_application_notop(app, last_dim)
<add> _test_application_variable_input_channels(app, last_dim)
<add> _test_app_pooling(app, last_dim)
<ide>
<del> input_shape = (4, None, None) if K.image_data_format() == 'channels_first' else (None, None, 4)
<del> model = applications.MobileNet(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, 1024)
<ide>
<del>
<del>@keras_test
<del>@pytest.mark.skipif((K.backend() != 'tensorflow'),
<del> reason='MobileNets are supported only on TensorFlow')
<del>def test_mobilenet_image_size():
<del> valid_image_sizes = [128, 160, 192, 224]
<del> for size in valid_image_sizes:
<del> input_shape = (size, size, 3) if K.image_data_format() == 'channels_last' else (3, size, size)
<del> model = applications.MobileNet(input_shape=input_shape, weights='imagenet', include_top=True)
<del> assert model.input_shape == (None,) + input_shape
<del>
<del> invalid_image_shape = (112, 112, 3) if K.image_data_format() == 'channels_last' else (3, 112, 112)
<del> with pytest.raises(ValueError):
<del> model = applications.MobileNet(input_shape=invalid_image_shape, weights='imagenet', include_top=True)
<del>
<del>
<del>@keras_test
<ide> def test_densenet():
<del> random.seed(time.time())
<del> fun, _ = random.choice(DENSENET_LIST)
<add> app, last_dim = random.choice(DENSENET_LIST)
<add> _test_application_basic(app)
<add> _test_application_notop(app, last_dim)
<add> _test_application_variable_input_channels(app, last_dim)
<add> _test_app_pooling(app, last_dim)
<ide>
<del> def model_fn():
<del> return fun(weights=None)
<del> output_shape = clean_run(model_fn)
<del> assert output_shape == (None, 1000)
<ide>
<del>
<del>@keras_test
<del>def test_densenet_no_top():
<del> random.seed(time.time())
<del> fun, dim = random.choice(DENSENET_LIST)
<del>
<del> def model_fn():
<del> return fun(weights=None, include_top=False)
<del> output_shape = clean_run(model_fn)
<del> assert output_shape == (None, None, None, dim)
<del>
<del>
<del>@keras_test
<del>def test_densenet_pooling():
<del> random.seed(time.time())
<del> fun, dim = random.choice(DENSENET_LIST)
<del>
<del> def model_fn():
<del> return fun(weights=None, include_top=False, pooling='avg')
<del> output_shape = clean_run(model_fn)
<del> assert output_shape == (None, None, None, dim)
<del>
<del>
<del>@keras_test
<del>def test_densenet_variable_input_channels():
<del> random.seed(time.time())
<del> fun, dim = random.choice(DENSENET_LIST)
<del>
<del> def model_fn(input_shape):
<del> return fun(weights=None, include_top=False, input_shape=input_shape)
<del> output_shape = clean_run(lambda: model_fn((None, None, 1)))
<del> assert output_shape == (None, None, None, dim)
<del>
<del> output_shape = clean_run(lambda: model_fn((None, None, 4)))
<del> assert output_shape == (None, None, None, dim)
<del>
<del>
<del>@keras_test
<ide> @pytest.mark.skipif((K.backend() != 'tensorflow'),
<ide> reason='NASNets are supported only on TensorFlow')
<ide> def test_nasnet():
<del> random.seed(time.time())
<del> fun, _ = random.choice(NASNET_LIST)
<del> model = fun(weights=None)
<del> assert model.output_shape == (None, 1000)
<del>
<del>
<del>@keras_test
<del>@pytest.mark.skipif((K.backend() != 'tensorflow'),
<del> reason='NASNets are supported only on TensorFlow')
<del>def test_nasnet_no_top():
<del> random.seed(time.time())
<del> fun, dim = random.choice(NASNET_LIST)
<del> model = fun(weights=None, include_top=False)
<del> assert model.output_shape == (None, None, None, dim)
<del>
<del>
<del>@keras_test
<del>@pytest.mark.skipif((K.backend() != 'tensorflow'),
<del> reason='NASNets are supported only on TensorFlow')
<del>def test_nasnet_pooling():
<del> random.seed(time.time())
<del> fun, dim = random.choice(NASNET_LIST)
<del> model = fun(weights=None, include_top=False, pooling='avg')
<del> assert model.output_shape == (None, dim)
<del>
<del>
<del>@keras_test
<del>@pytest.mark.skipif((K.backend() != 'tensorflow'),
<del> reason='NASNets are supported only on TensorFlow')
<del>def test_nasnet_variable_input_channels():
<del> random.seed(time.time())
<del> fun, dim = random.choice(NASNET_LIST)
<del> input_shape = (1, None, None) if K.image_data_format() == 'channels_first' else (None, None, 1)
<del> model = fun(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, dim)
<del>
<del> input_shape = (4, None, None) if K.image_data_format() == 'channels_first' else (None, None, 4)
<del> model = fun(weights=None, include_top=False, input_shape=input_shape)
<del> assert model.output_shape == (None, None, None, dim)
<add> app, last_dim = random.choice(NASNET_LIST)
<add> _test_application_basic(app)
<add> _test_application_notop(app, last_dim)
<add> _test_application_variable_input_channels(app, last_dim)
<add> _test_app_pooling(app, last_dim)
<ide>
<ide>
<ide> @pytest.mark.skipif(K.backend() != 'tensorflow', reason='Requires TF backend')
<ide> def test_depthwise_conv_2d():
<ide> num_row = 7
<ide> num_col = 6
<ide>
<del> with CustomObjectScope({'relu6': applications.mobilenet.relu6,
<del> 'DepthwiseConv2D': applications.mobilenet.DepthwiseConv2D}):
<add> with CustomObjectScope(
<add> {'relu6': applications.mobilenet.relu6,
<add> 'DepthwiseConv2D': applications.mobilenet.DepthwiseConv2D}):
<ide> for padding in _convolution_paddings:
<ide> for strides in [(1, 1), (2, 2)]:
<ide> for multiplier in [1, 2]:
<ide> def test_depthwise_conv_2d():
<ide> 'padding': padding,
<ide> 'strides': strides,
<ide> 'depth_multiplier': multiplier},
<del> input_shape=(num_samples, num_row, num_col, stack_size))
<add> input_shape=(num_samples,
<add> num_row,
<add> num_col,
<add> stack_size))
<ide>
<ide> layer_test(applications.mobilenet.DepthwiseConv2D,
<ide> kwargs={'kernel_size': 3,
<ide> def test_depthwise_conv_2d():
<ide>
<ide> # Test invalid use case
<ide> with pytest.raises(ValueError):
<del> model = Sequential([applications.mobilenet.DepthwiseConv2D(kernel_size=3,
<del> padding=padding,
<del> batch_input_shape=(None, None, 5, None))])
<add> Sequential([applications.mobilenet.DepthwiseConv2D(
<add> kernel_size=3,
<add> padding=padding,
<add> batch_input_shape=(None, None, 5, None))])
<ide>
<ide>
<ide> if __name__ == '__main__':
| 1
|
Javascript
|
Javascript
|
fix tostring for large positions
|
563a28dd0f952ff845649ca743dfadf60d2d4994
|
<ide><path>lib/serialization/BinaryMiddleware.js
<ide> class BinaryMiddleware extends SerializerMiddleware {
<ide> return () => result.push("");
<ide> case SHORT_STRING_HEADER | 1:
<ide> return () => {
<del> if (currentIsBuffer) {
<add> if (currentIsBuffer && currentPosition < 0x7ffffffe) {
<ide> result.push(
<ide> currentBuffer.toString(
<ide> "latin1",
| 1
|
Go
|
Go
|
fix incorrect comment
|
d626f77e36e16911358248af678cc00900a6f5b5
|
<ide><path>api/server/copy.go
<ide> import (
<ide> "github.com/docker/docker/pkg/version"
<ide> )
<ide>
<del>// postContainersCopy is deprecated in favor of getContainersArchivePath.
<add>// postContainersCopy is deprecated in favor of getContainersArchive.
<ide> func (s *Server) postContainersCopy(version version.Version, w http.ResponseWriter, r *http.Request, vars map[string]string) error {
<ide> if vars == nil {
<ide> return fmt.Errorf("Missing parameter")
| 1
|
Javascript
|
Javascript
|
test process.setuid for bad argument types
|
83444b7c66a5d3ba89e1d40790db26735094f5ea
|
<ide><path>test/parallel/test-process-setuid-setgid.js
<ide> if (common.isWindows) {
<ide> return;
<ide> }
<ide>
<add>assert.throws(() => {
<add> process.setuid({});
<add>}, /^TypeError: setuid argument must be a number or a string$/);
<add>
<ide> assert.throws(() => {
<ide> process.setuid('fhqwhgadshgnsdhjsdbkhsdabkfabkveybvf');
<ide> }, /^Error: setuid user id does not exist$/);
| 1
|
Python
|
Python
|
remove duplicated batch_set_value in cntk_backend
|
1b1e09a3665d71061bb5b62ca475d304634a85bf
|
<ide><path>keras/backend/cntk_backend.py
<ide> def get_variable_shape(x):
<ide> return x.shape
<ide>
<ide>
<del>def batch_set_value(tuples):
<del> for p, v in tuples:
<del> p.value = v.astype(np.float32)
<del>
<del>
<ide> def update(x, new_x):
<ide> return C.assign(x, new_x)
<ide>
| 1
|
Java
|
Java
|
fix javadoc for placeholderconfigurersupport
|
a78c12b8d3f5304665fa73a76cb01b8a7c744ca8
|
<ide><path>spring-beans/src/main/java/org/springframework/beans/factory/config/PlaceholderConfigurerSupport.java
<ide> /*
<del> * Copyright 2002-2018 the original author or authors.
<add> * Copyright 2002-2021 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> * Example XML bean definition:
<ide> *
<ide> * <pre class="code">
<del> * <bean id="dataSource" class="org.springframework.jdbc.datasource.DriverManagerDataSource"/>
<del> * <property name="driverClassName" value="${driver}"/>
<del> * <property name="url" value="jdbc:${dbname}"/>
<add> * <bean id="dataSource" class="org.springframework.jdbc.datasource.DriverManagerDataSource">
<add> * <property name="driverClassName" value="${driver}" />
<add> * <property name="url" value="jdbc:${dbname}" />
<ide> * </bean>
<ide> * </pre>
<ide> *
<ide> * Example properties file:
<ide> *
<del> * <pre class="code">driver=com.mysql.jdbc.Driver
<add> * <pre class="code">
<add> * driver=com.mysql.jdbc.Driver
<ide> * dbname=mysql:mydb</pre>
<ide> *
<ide> * Annotated bean definitions may take advantage of property replacement using
<ide> * in bean references. Furthermore, placeholder values can also cross-reference
<ide> * other placeholders, like:
<ide> *
<del> * <pre class="code">rootPath=myrootdir
<add> * <pre class="code">
<add> * rootPath=myrootdir
<ide> * subPath=${rootPath}/subdir</pre>
<ide> *
<ide> * In contrast to {@link PropertyOverrideConfigurer}, subclasses of this type allow
<ide> *
<ide> * <p>Default property values can be defined globally for each configurer instance
<ide> * via the {@link #setProperties properties} property, or on a property-by-property basis
<del> * using the default value separator which is {@code ":"} by default and
<del> * customizable via {@link #setValueSeparator(String)}.
<add> * using the value separator which is {@code ":"} by default and customizable via
<add> * {@link #setValueSeparator(String)}.
<ide> *
<ide> * <p>Example XML property with default value:
<ide> *
<ide> * <pre class="code">
<del> * <property name="url" value="jdbc:${dbname:defaultdb}"/>
<add> * <property name="url" value="jdbc:${dbname:defaultdb}" />
<ide> * </pre>
<ide> *
<ide> * @author Chris Beams
| 1
|
Javascript
|
Javascript
|
fix uiexplorer tests (image loading failure)
|
7f0071ae580f0b8d1fe4684d2ef60dde4a3f69fb
|
<ide><path>Examples/UIExplorer/js/ImageExample.js
<ide> var {
<ide> var base64Icon = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAEsAAABLCAQAAACSR7JhAAADtUlEQVR4Ac3YA2Bj6QLH0XPT1Fzbtm29tW3btm3bfLZtv7e2ObZnms7d8Uw098tuetPzrxv8wiISrtVudrG2JXQZ4VOv+qUfmqCGGl1mqLhoA52oZlb0mrjsnhKpgeUNEs91Z0pd1kvihA3ULGVHiQO2narKSHKkEMulm9VgUyE60s1aWoMQUbpZOWE+kaqs4eLEjdIlZTcFZB0ndc1+lhB1lZrIuk5P2aib1NBpZaL+JaOGIt0ls47SKzLC7CqrlGF6RZ09HGoNy1lYl2aRSWL5GuzqWU1KafRdoRp0iOQEiDzgZPnG6DbldcomadViflnl/cL93tOoVbsOLVM2jylvdWjXolWX1hmfZbGR/wjypDjFLSZIRov09BgYmtUqPQPlQrPapecLgTIy0jMgPKtTeob2zWtrGH3xvjUkPCtNg/tm1rjwrMa+mdUkPd3hWbH0jArPGiU9ufCsNNWFZ40wpwn+62/66R2RUtoso1OB34tnLOcy7YB1fUdc9e0q3yru8PGM773vXsuZ5YIZX+5xmHwHGVvlrGPN6ZSiP1smOsMMde40wKv2VmwPPVXNut4sVpUreZiLBHi0qln/VQeI/LTMYXpsJtFiclUN+5HVZazim+Ky+7sAvxWnvjXrJFneVtLWLyPJu9K3cXLWeOlbMTlrIelbMDlrLenrjEQOtIF+fuI9xRp9ZBFp6+b6WT8RrxEpdK64BuvHgDk+vUy+b5hYk6zfyfs051gRoNO1usU12WWRWL73/MMEy9pMi9qIrR4ZpV16Rrvduxazmy1FSvuFXRkqTnE7m2kdb5U8xGjLw/spRr1uTov4uOgQE+0N/DvFrG/Jt7i/FzwxbA9kDanhf2w+t4V97G8lrT7wc08aA2QNUkuTfW/KimT01wdlfK4yEw030VfT0RtZbzjeMprNq8m8tnSTASrTLti64oBNdpmMQm0eEwvfPwRbUBywG5TzjPCsdwk3IeAXjQblLCoXnDVeoAz6SfJNk5TTzytCNZk/POtTSV40NwOFWzw86wNJRpubpXsn60NJFlHeqlYRbslqZm2jnEZ3qcSKgm0kTli3zZVS7y/iivZTweYXJ26Y+RTbV1zh3hYkgyFGSTKPfRVbRqWWVReaxYeSLarYv1Qqsmh1s95S7G+eEWK0f3jYKTbV6bOwepjfhtafsvUsqrQvrGC8YhmnO9cSCk3yuY984F1vesdHYhWJ5FvASlacshUsajFt2mUM9pqzvKGcyNJW0arTKN1GGGzQlH0tXwLDgQTurS8eIQAAAABJRU5ErkJggg==';
<ide>
<ide> var ImageCapInsetsExample = require('./ImageCapInsetsExample');
<del>const IMAGE_PREFETCH_URL = 'https://facebook.github.io/origami/public/images/blog-hero.jpg?r=1&t=' + Date.now();
<add>const IMAGE_PREFETCH_URL = 'http://origami.design/public/images/bird-logo.png?r=1&t=' + Date.now();
<ide> var prefetchTask = Image.prefetch(IMAGE_PREFETCH_URL);
<ide>
<ide> var NetworkImageCallbackExample = React.createClass({
<ide> exports.examples = [
<ide> title: 'Image Loading Events',
<ide> render: function() {
<ide> return (
<del> <NetworkImageCallbackExample source={{uri: 'https://facebook.github.io/origami/public/images/blog-hero.jpg?r=1&t=' + Date.now()}}
<add> <NetworkImageCallbackExample source={{uri: 'http://origami.design/public/images/bird-logo.png?r=1&t=' + Date.now()}}
<ide> prefetchedSource={{uri: IMAGE_PREFETCH_URL}}/>
<ide> );
<ide> },
<ide> exports.examples = [
<ide> title: 'Image Download Progress',
<ide> render: function() {
<ide> return (
<del> <NetworkImageExample source={{uri: 'https://facebook.github.io/origami/public/images/blog-hero.jpg?r=1'}}/>
<add> <NetworkImageExample source={{uri: 'http://origami.design/public/images/bird-logo.png?r=1'}}/>
<ide> );
<ide> },
<ide> platform: 'ios',
| 1
|
Mixed
|
Ruby
|
add a hidden_field on the file_field
|
00b26532f05283d2b160308522d1bd2146d6ac18
|
<ide><path>actionview/CHANGELOG.md
<ide>
<ide> *Angelo Capilleri*
<ide>
<add>* Add a `hidden_field` on the `file_field` to avoid raising an error when the only
<add>  input on the form is the `file_field`.
<add>
<add> *Mauro George*
<add>
<add>
<ide> Please check [4-2-stable](https://github.com/rails/rails/blob/4-2-stable/actionview/CHANGELOG.md) for previous changes.
<ide><path>actionview/lib/action_view/helpers/form_helper.rb
<ide> def hidden_field(object_name, method, options = {})
<ide> #
<ide> # file_field(:attachment, :file, class: 'file_input')
<ide> # # => <input type="file" id="attachment_file" name="attachment[file]" class="file_input" />
<add> #
<add> # ==== Gotcha
<add> #
<add> # The HTML specification says that when nothing is selected on a file field, web browsers do not send any value to the server.
<add> # Unfortunately, this introduces a gotcha:
<add> # if a +User+ model has an +avatar+ field, and no file is selected in the form, no +avatar+ parameter is sent. So,
<add> # any mass-assignment idiom like
<add> #
<add> # @user.update(params[:user])
<add> #
<add> # wouldn't update the avatar.
<add> #
<add> # To prevent this, the helper generates an auxiliary hidden field before
<add> # every file field. The hidden field has the same name as the file field and a blank value.
<add> #
<add> # In case you don't want the helper to generate this hidden field, you can specify
<add> # the <tt>include_hidden: false</tt> option.
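<add> #
<add> # ==== Example
<add> #
<add> # An illustrative call; the generated markup shown below mirrors the expectations in the accompanying tests:
<add> #
<add> #   file_field(:user, :avatar)
<add> #   # => <input name="user[avatar]" type="hidden" value="" /><input id="user_avatar" name="user[avatar]" type="file" />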
<ide> def file_field(object_name, method, options = {})
<ide> Tags::FileField.new(object_name, method, self, options).render
<ide> end
<ide><path>actionview/lib/action_view/helpers/tags/file_field.rb
<ide> module ActionView
<ide> module Helpers
<ide> module Tags # :nodoc:
<ide> class FileField < TextField # :nodoc:
<add>
<add> def render
<add> options = @options.stringify_keys
<add> if options.fetch("include_hidden", true)
<add> add_default_name_and_id(options)
<add> options[:type] = "file"
<add> tag("input", :name => options["name"], :type => "hidden", :value => "") + tag("input", options)
<add> else
<add> options.delete("include_hidden")
<add> @options = options
<add> super
<add> end
<add> end
<ide> end
<ide> end
<ide> end
<ide><path>actionview/test/template/form_helper_test.rb
<ide> def test_text_field_doesnt_change_param_values
<ide> assert_dom_equal expected, text_field(object_name, "title")
<ide> end
<ide>
<del> def test_file_field_has_no_size
<add> def test_file_field_does_generate_a_hidden_field
<add> expected = '<input name="user[avatar]" type="hidden" value="" /><input id="user_avatar" name="user[avatar]" type="file" />'
<add> assert_dom_equal expected, file_field("user", "avatar")
<add> end
<add>
<add> def test_file_field_does_not_generate_a_hidden_field_if_included_hidden_option_is_false
<add> expected = '<input id="user_avatar" name="user[avatar]" type="file" />'
<add> assert_dom_equal expected, file_field("user", "avatar", :include_hidden => false)
<add> end
<add>
<add> def test_file_field_does_not_generate_a_hidden_field_if_included_hidden_option_is_false_with_key_as_string
<ide> expected = '<input id="user_avatar" name="user[avatar]" type="file" />'
<add> assert_dom_equal expected, file_field("user", "avatar", 'include_hidden' => false)
<add> end
<add>
<add> def test_file_field_has_no_size
<add> expected = '<input name="user[avatar]" type="hidden" value="" /><input id="user_avatar" name="user[avatar]" type="file" />'
<ide> assert_dom_equal expected, file_field("user", "avatar")
<ide> end
<ide>
<ide> def test_file_field_with_multiple_behavior
<del> expected = '<input id="import_file" multiple="multiple" name="import[file][]" type="file" />'
<add> expected = '<input name="import[file][]" type="hidden" value="" /><input id="import_file" multiple="multiple" name="import[file][]" type="file" />'
<ide> assert_dom_equal expected, file_field("import", "file", :multiple => true)
<ide> end
<ide>
<ide> def test_file_field_with_multiple_behavior_and_explicit_name
<del> expected = '<input id="import_file" multiple="multiple" name="custom" type="file" />'
<add> expected = '<input name="custom" type="hidden" value="" /><input id="import_file" multiple="multiple" name="custom" type="file" />'
<ide> assert_dom_equal expected, file_field("import", "file", :multiple => true, :name => "custom")
<ide> end
<ide>
<ide> def test_form_for_with_file_field_generate_multipart
<ide> end
<ide>
<ide> expected = whole_form("/posts/123", "create-post", "edit_post", method: "patch", multipart: true) do
<del> "<input name='post[file]' type='file' id='post_file' />"
<add> "<input name='post[file]' type='hidden' value='' /><input name='post[file]' type='file' id='post_file' />"
<ide> end
<ide>
<ide> assert_dom_equal expected, output_buffer
<ide> def test_fields_for_with_file_field_generate_multipart
<ide> end
<ide>
<ide> expected = whole_form("/posts/123", "edit_post_123", "edit_post", method: "patch", multipart: true) do
<del> "<input name='post[comment][file]' type='file' id='post_comment_file' />"
<add> "<input name='post[comment][file]' type='hidden' value='' /><input name='post[comment][file]' type='file' id='post_comment_file' />"
<ide> end
<ide>
<ide> assert_dom_equal expected, output_buffer
| 4
|
Go
|
Go
|
prevent tests from creating users on prod index
|
c914abaf15634d8039927176dfc5ff3c765f30d0
|
<ide><path>integration/auth_test.go
<ide> package docker
<ide> import (
<ide> "crypto/rand"
<ide> "encoding/hex"
<add> "fmt"
<ide> "github.com/dotcloud/docker/auth"
<ide> "os"
<ide> "strings"
<ide> import (
<ide> func TestLogin(t *testing.T) {
<ide> os.Setenv("DOCKER_INDEX_URL", "https://indexstaging-docker.dotcloud.com")
<ide> defer os.Setenv("DOCKER_INDEX_URL", "")
<del> authConfig := &auth.AuthConfig{Username: "unittester", Password: "surlautrerivejetattendrai", Email: "noise+unittester@dotcloud.com"}
<add> authConfig := &auth.AuthConfig{
<add> Username: "unittester",
<add> Password: "surlautrerivejetattendrai",
<add> Email: "noise+unittester@docker.com",
<add> ServerAddress: "https://indexstaging-docker.dotcloud.com/v1/",
<add> }
<ide> status, err := auth.Login(authConfig, nil)
<ide> if err != nil {
<ide> t.Fatal(err)
<ide> func TestLogin(t *testing.T) {
<ide> }
<ide>
<ide> func TestCreateAccount(t *testing.T) {
<del> os.Setenv("DOCKER_INDEX_URL", "https://indexstaging-docker.dotcloud.com")
<del> defer os.Setenv("DOCKER_INDEX_URL", "")
<ide> tokenBuffer := make([]byte, 16)
<ide> _, err := rand.Read(tokenBuffer)
<ide> if err != nil {
<ide> t.Fatal(err)
<ide> }
<ide> token := hex.EncodeToString(tokenBuffer)[:12]
<ide> username := "ut" + token
<del> authConfig := &auth.AuthConfig{Username: username, Password: "test42", Email: "docker-ut+" + token + "@example.com"}
<add> authConfig := &auth.AuthConfig{
<add> Username: username,
<add> Password: "test42",
<add> Email: fmt.Sprintf("docker-ut+%s@example.com", token),
<add> ServerAddress: "https://indexstaging-docker.dotcloud.com/v1/",
<add> }
<ide> status, err := auth.Login(authConfig, nil)
<ide> if err != nil {
<ide> t.Fatal(err)
<ide> }
<del> expectedStatus := "Account created. Please use the confirmation link we sent" +
<del> " to your e-mail to activate it."
<add> expectedStatus := fmt.Sprintf(
<add> "Account created. Please see the documentation of the registry %s for instructions how to activate it.",
<add> authConfig.ServerAddress,
<add> )
<ide> if status != expectedStatus {
<ide> t.Fatalf("Expected status: \"%s\", found \"%s\" instead.", expectedStatus, status)
<ide> }
| 1
|
PHP
|
PHP
|
use array_key_exists in validator
|
28a880b5b53023ab930b8e86fd2d52da6292b58e
|
<ide><path>laravel/validator.php
<ide> protected function validate_same($attribute, $value, $parameters)
<ide> {
<ide> $other = $parameters[0];
<ide>
<del> return isset($this->attributes[$other]) and $value == $this->attributes[$other];
<add> return array_key_exists($other, $this->attributes) and $value == $this->attributes[$other];
<ide> }
<ide>
<ide> /**
<ide> protected function validate_different($attribute, $value, $parameters)
<ide> {
<ide> $other = $parameters[0];
<ide>
<del> return isset($this->attributes[$other]) and $value != $this->attributes[$other];
<add> return array_key_exists($other, $this->attributes) and $value != $this->attributes[$other];
<ide> }
<ide>
<ide> /**
| 1
|
Python
|
Python
|
fix mypy issues with ``test_snowflake.py``
|
3a82b90af28c78e603c48dc8b0920703755d4642
|
<ide><path>tests/providers/snowflake/hooks/test_snowflake.py
<ide> import unittest
<ide> from copy import deepcopy
<ide> from pathlib import Path
<del>from typing import Dict, Union
<add>from typing import Dict
<ide> from unittest import mock
<ide>
<ide> import pytest
<ide>
<ide> _PASSWORD = 'snowflake42'
<ide>
<del>BASE_CONNECTION_KWARGS: Dict[str, Union[str, Dict[str, Union[str, Dict[str, str]]]]] = {
<add>BASE_CONNECTION_KWARGS: Dict = {
<ide> 'login': 'user',
<ide> 'password': 'pw',
<ide> 'schema': 'public',
| 1
|
Javascript
|
Javascript
|
remove string literal from deepstrictequal
|
33eb509df623d83f398176915b70c2bd6801a9f8
|
<ide><path>test/parallel/test-zlib-from-concatenated-gzip.js
<ide> fs.createReadStream(pmmFileGz)
<ide> })
<ide> .on('data', (data) => pmmResultBuffers.push(data))
<ide> .on('finish', common.mustCall(() => {
<del> assert.deepStrictEqual(Buffer.concat(pmmResultBuffers), pmmExpected,
<del> 'result should match original random garbage');
<add> // Result should match original random garbage
<add> assert.deepStrictEqual(Buffer.concat(pmmResultBuffers), pmmExpected);
<ide> }));
<ide>
<ide> // test that the next gzip member can wrap around the input buffer boundary
| 1
|
Javascript
|
Javascript
|
add extra expectations and comments for clarity
|
7fdd26e6f3cb265e0e05ef70e1a5796629a14605
|
<ide><path>test/ng/directive/selectSpec.js
<ide> describe('select', function() {
<ide>
<ide> it('should not update selected property of an option element on digest with no change event',
<ide> function() {
<add> // ng-options="value.name for value in values"
<add> // ng-model="selected"
<ide> createSingleSelect();
<ide>
<ide> scope.$apply(function() {
<ide> describe('select', function() {
<ide> });
<ide>
<ide> var options = element.find('option');
<add>
<add> expect(scope.selected).toEqual({ name: 'A' });
<add> expect(options.eq(0).prop('selected')).toBe(true);
<add> expect(options.eq(1).prop('selected')).toBe(false);
<add>
<ide> var optionToSelect = options.eq(1);
<ide>
<ide> expect(optionToSelect.text()).toBe('B');
| 1
|
Python
|
Python
|
add sample_weight parameter to kerasclassifier
|
c6f81d393cc34f75583007379831a33d30b6d2e9
|
<ide><path>keras/wrappers/scikit_learn.py
<ide> class KerasClassifier(BaseWrapper):
<ide> """Implementation of the scikit-learn classifier API for Keras.
<ide> """
<ide>
<del> def fit(self, x, y, **kwargs):
<add> def fit(self, x, y, sample_weight=None, **kwargs):
<ide> """Constructs a new model with `build_fn` & fit the model to `(x, y)`.
<ide>
<ide> # Arguments
<ide> def fit(self, x, y, **kwargs):
<ide> else:
<ide> raise ValueError('Invalid shape for y: ' + str(y.shape))
<ide> self.n_classes_ = len(self.classes_)
<add> if sample_weight is not None:
<add> kwargs['sample_weight'] = sample_weight
<ide> return super(KerasClassifier, self).fit(x, y, **kwargs)
<ide>
<ide> def predict(self, x, **kwargs):
<ide><path>tests/keras/wrappers/scikit_learn_test.py
<ide> def __call__(self, hidden_dims):
<ide>
<ide>
<ide> def assert_classification_works(clf):
<del> clf.fit(X_train, y_train, batch_size=batch_size, epochs=epochs)
<add> clf.fit(X_train, y_train, sample_weight=np.ones(X_train.shape[0]), batch_size=batch_size, epochs=epochs)
<ide>
<ide> score = clf.score(X_train, y_train, batch_size=batch_size)
<ide> assert np.isscalar(score) and np.isfinite(score)
| 2
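A usage sketch of the new parameter: the scikit-learn wrapper now forwards `sample_weight` to the underlying Keras `fit()`, as the updated test exercises. The tiny model and random data below are illustrative only, not the test suite's fixtures.

```python
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier

def build_fn():
    # Minimal two-class model; the shapes are arbitrary for the example.
    model = Sequential([Dense(2, input_dim=4, activation="softmax")])
    model.compile(loss="sparse_categorical_crossentropy", optimizer="adam")
    return model

X = np.random.rand(32, 4)
y = np.random.randint(0, 2, size=32)

clf = KerasClassifier(build_fn=build_fn, epochs=1, batch_size=8)
# Per-sample weights are passed through to model.fit(); here every sample
# counts equally, but class-imbalance weights could be supplied instead.
clf.fit(X, y, sample_weight=np.ones(X.shape[0]))
```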
|
Ruby
|
Ruby
|
fix failing tests caused by
|
4f39d897b3f3437e3f6982b2a8c28a6f9c4036d2
|
<ide><path>activerecord/test/cases/persistence_test.rb
<ide> require 'models/owner'
<ide> require 'models/person'
<ide> require 'models/pet'
<add>require 'models/ship'
<ide> require 'models/toy'
<ide> require 'rexml/document'
<ide>
| 1
|
Mixed
|
Javascript
|
drop the json to jsonp auto-promotion logic
|
e7b3bc488d01d584262e12a7c5c25f935d0d034b
|
<ide><path>src/ajax/jsonp.js
<ide> jQuery.ajaxSetup( {
<ide> } );
<ide>
<ide> // Detect, normalize options and install callbacks for jsonp requests
<del>jQuery.ajaxPrefilter( "json jsonp", function( s, originalSettings, jqXHR ) {
<add>jQuery.ajaxPrefilter( "jsonp", function( s, originalSettings, jqXHR ) {
<ide>
<ide> var callbackName, overwritten, responseContainer,
<ide> jsonProp = s.jsonp !== false && ( rjsonp.test( s.url ) ?
<ide> jQuery.ajaxPrefilter( "json jsonp", function( s, originalSettings, jqXHR ) {
<ide> rjsonp.test( s.data ) && "data"
<ide> );
<ide>
<del> // Handle iff the expected data type is "jsonp" or we have a parameter to set
<del> if ( jsonProp || s.dataTypes[ 0 ] === "jsonp" ) {
<add> // Get callback name, remembering preexisting value associated with it
<add> callbackName = s.jsonpCallback = typeof s.jsonpCallback === "function" ?
<add> s.jsonpCallback() :
<add> s.jsonpCallback;
<ide>
<del> // Get callback name, remembering preexisting value associated with it
<del> callbackName = s.jsonpCallback = typeof s.jsonpCallback === "function" ?
<del> s.jsonpCallback() :
<del> s.jsonpCallback;
<add> // Insert callback into url or form data
<add> if ( jsonProp ) {
<add> s[ jsonProp ] = s[ jsonProp ].replace( rjsonp, "$1" + callbackName );
<add> } else if ( s.jsonp !== false ) {
<add> s.url += ( rquery.test( s.url ) ? "&" : "?" ) + s.jsonp + "=" + callbackName;
<add> }
<ide>
<del> // Insert callback into url or form data
<del> if ( jsonProp ) {
<del> s[ jsonProp ] = s[ jsonProp ].replace( rjsonp, "$1" + callbackName );
<del> } else if ( s.jsonp !== false ) {
<del> s.url += ( rquery.test( s.url ) ? "&" : "?" ) + s.jsonp + "=" + callbackName;
<add> // Use data converter to retrieve json after script execution
<add> s.converters[ "script json" ] = function() {
<add> if ( !responseContainer ) {
<add> jQuery.error( callbackName + " was not called" );
<ide> }
<add> return responseContainer[ 0 ];
<add> };
<ide>
<del> // Use data converter to retrieve json after script execution
<del> s.converters[ "script json" ] = function() {
<del> if ( !responseContainer ) {
<del> jQuery.error( callbackName + " was not called" );
<del> }
<del> return responseContainer[ 0 ];
<del> };
<del>
<del> // Force json dataType
<del> s.dataTypes[ 0 ] = "json";
<add> // Force json dataType
<add> s.dataTypes[ 0 ] = "json";
<ide>
<del> // Install callback
<del> overwritten = window[ callbackName ];
<del> window[ callbackName ] = function() {
<del> responseContainer = arguments;
<del> };
<add> // Install callback
<add> overwritten = window[ callbackName ];
<add> window[ callbackName ] = function() {
<add> responseContainer = arguments;
<add> };
<ide>
<del> // Clean-up function (fires after converters)
<del> jqXHR.always( function() {
<add> // Clean-up function (fires after converters)
<add> jqXHR.always( function() {
<ide>
<del> // If previous value didn't exist - remove it
<del> if ( overwritten === undefined ) {
<del> jQuery( window ).removeProp( callbackName );
<add> // If previous value didn't exist - remove it
<add> if ( overwritten === undefined ) {
<add> jQuery( window ).removeProp( callbackName );
<ide>
<del> // Otherwise restore preexisting value
<del> } else {
<del> window[ callbackName ] = overwritten;
<del> }
<add> // Otherwise restore preexisting value
<add> } else {
<add> window[ callbackName ] = overwritten;
<add> }
<ide>
<del> // Save back as free
<del> if ( s[ callbackName ] ) {
<add> // Save back as free
<add> if ( s[ callbackName ] ) {
<ide>
<del> // Make sure that re-using the options doesn't screw things around
<del> s.jsonpCallback = originalSettings.jsonpCallback;
<add> // Make sure that re-using the options doesn't screw things around
<add> s.jsonpCallback = originalSettings.jsonpCallback;
<ide>
<del> // Save the callback name for future use
<del> oldCallbacks.push( callbackName );
<del> }
<add> // Save the callback name for future use
<add> oldCallbacks.push( callbackName );
<add> }
<ide>
<del> // Call if it was a function and we have a response
<del> if ( responseContainer && typeof overwritten === "function" ) {
<del> overwritten( responseContainer[ 0 ] );
<del> }
<add> // Call if it was a function and we have a response
<add> if ( responseContainer && typeof overwritten === "function" ) {
<add> overwritten( responseContainer[ 0 ] );
<add> }
<ide>
<del> responseContainer = overwritten = undefined;
<del> } );
<add> responseContainer = overwritten = undefined;
<add> } );
<ide>
<del> // Delegate to script
<del> return "script";
<del> }
<add> // Delegate to script
<add> return "script";
<ide> } );
<ide><path>test/data/mock.php
<ide> protected function json( $req ) {
<ide> header( 'Content-type: application/json' );
<ide> }
<ide>
<add> if ( isset( $req->query['cors'] ) ) {
<add> header( 'Access-Control-Allow-Origin: *' );
<add> }
<add>
<ide> if ( isset( $req->query['array'] ) ) {
<ide> echo '[ {"name": "John", "age": 21}, {"name": "Peter", "age": 25 } ]';
<ide> } else {
<ide><path>test/middleware-mockserver.js
<ide> var mocks = {
<ide> if ( req.query.header ) {
<ide> resp.writeHead( 200, { "content-type": "application/json" } );
<ide> }
<add> if ( req.query.cors ) {
<add> resp.writeHead( 200, { "access-control-allow-origin": "*" } );
<add> }
<ide> if ( req.query.array ) {
<ide> resp.end( JSON.stringify(
<ide> [ { name: "John", age: 21 }, { name: "Peter", age: 25 } ]
<ide><path>test/unit/ajax.js
<ide> QUnit.module( "ajax", {
<ide> ];
<ide> } );
<ide>
<add> ajaxTest( "jQuery.ajax() - no JSONP auto-promotion" + label, 4, function( assert ) {
<add> return [
<add> {
<add> url: baseURL + "mock.php?action=jsonp",
<add> dataType: "json",
<add> crossDomain: crossDomain,
<add> success: function() {
<add> assert.ok( false, "JSON parsing should have failed (no callback)" );
<add> },
<add> fail: function() {
<add> assert.ok( true, "JSON parsing failed, JSONP not used (no callback)" );
<add> }
<add> },
<add> {
<add> url: baseURL + "mock.php?action=jsonp&callback=?",
<add> dataType: "json",
<add> crossDomain: crossDomain,
<add> success: function() {
<add> assert.ok( false, "JSON parsing should have failed (ULR callback)" );
<add> },
<add> fail: function() {
<add> assert.ok( true, "JSON parsing failed, JSONP not used (URL callback)" );
<add> }
<add> },
<add> {
<add> url: baseURL + "mock.php?action=jsonp",
<add> dataType: "json",
<add> crossDomain: crossDomain,
<add> data: "callback=?",
<add> success: function() {
<add> assert.ok( false, "JSON parsing should have failed (data callback=?)" );
<add> },
<add> fail: function() {
<add> assert.ok( true, "JSON parsing failed, JSONP not used (data callback=?)" );
<add> }
<add> },
<add> {
<add> url: baseURL + "mock.php?action=jsonp",
<add> dataType: "json",
<add> crossDomain: crossDomain,
<add> data: "callback=??",
<add> success: function() {
<add> assert.ok( false, "JSON parsing should have failed (data callback=??)" );
<add> },
<add> fail: function() {
<add> assert.ok( true, "JSON parsing failed, JSONP not used (data callback=??)" );
<add> }
<add> }
<add> ];
<add> } );
<add>
<add> ajaxTest( "jQuery.ajax() - JSON - no ? replacement" + label, 9, function( assert ) {
<add> return [
<add> {
<add> url: baseURL + "mock.php?action=json&callback=?",
<add> dataType: "json",
<add> crossDomain: crossDomain,
<add> beforeSend: function( _jqXhr, settings ) {
<add> var queryString = settings.url.replace( /^[^?]*\?/, "" );
<add> assert.ok(
<add> queryString.indexOf( "jQuery" ) === -1,
<add> "jQuery callback not inserted into the URL (URL callback)"
<add> );
<add> assert.ok(
<add> queryString.indexOf( "callback=?" ) > -1,
<add> "\"callback=?\" present in the URL unchanged (URL callback)"
<add> );
<add> },
<add> success: function( data ) {
<add> assert.ok( data.data, "JSON results returned (URL callback)" );
<add> }
<add> },
<add> {
<add> url: baseURL + "mock.php?action=json",
<add> dataType: "json",
<add> crossDomain: crossDomain,
<add> data: "callback=?",
<add> beforeSend: function( _jqXhr, settings ) {
<add> var queryString = settings.url.replace( /^[^?]*\?/, "" );
<add> assert.ok(
<add> queryString.indexOf( "jQuery" ) === -1,
<add> "jQuery callback not inserted into the URL (data callback=?)"
<add> );
<add> assert.ok(
<add> queryString.indexOf( "callback=?" ) > -1,
<add> "\"callback=?\" present in the URL unchanged (data callback=?)"
<add> );
<add> },
<add> success: function( data ) {
<add> assert.ok( data.data, "JSON results returned (data callback=?)" );
<add> }
<add> },
<add> {
<add> url: baseURL + "mock.php?action=json",
<add> dataType: "json",
<add> crossDomain: crossDomain,
<add> data: "callback=??",
<add> beforeSend: function( _jqXhr, settings ) {
<add> var queryString = settings.url.replace( /^[^?]*\?/, "" );
<add> assert.ok(
<add> queryString.indexOf( "jQuery" ) === -1,
<add> "jQuery callback not inserted into the URL (data callback=??)"
<add> );
<add> assert.ok(
<add> queryString.indexOf( "callback=??" ) > -1,
<add> "\"callback=?\" present in the URL unchanged (data callback=??)"
<add> );
<add> },
<add> success: function( data ) {
<add> assert.ok( data.data, "JSON results returned (data callback=??)" );
<add> }
<add> }
<add> ];
<add> } );
<add>
<ide> } );
<ide>
<ide> ajaxTest( "jQuery.ajax() - script, Remote", 2, function( assert ) {
| 4
|
PHP
|
PHP
|
update mailer to hold mailer profiles
|
ee00209d6b45bf6890dc164ba633d92e9d17386d
|
<ide><path>src/Mailer/Mailer.php
<ide> */
<ide> namespace Cake\Mailer;
<ide>
<add>use Cake\Core\Exception\Exception;
<add>use Cake\Core\StaticConfigTrait;
<ide> use Cake\Datasource\ModelAwareTrait;
<ide> use Cake\Event\EventListenerInterface;
<ide> use Cake\Mailer\Exception\MissingActionException;
<ide> use Cake\View\ViewBuilder;
<add>use InvalidArgumentException;
<ide>
<ide> /**
<ide> * Mailer base class.
<ide> abstract class Mailer implements EventListenerInterface
<ide> {
<ide> use ModelAwareTrait;
<add> use StaticConfigTrait;
<ide>
<ide> /**
<ide> * Mailer's name.
<ide> abstract class Mailer implements EventListenerInterface
<ide> protected $_clonedEmail;
<ide>
<ide> /**
<del> * Constructor.
<add> * The transport instance to use for sending mail.
<ide> *
<del> * @param \Cake\Mailer\Email|null $email Email instance.
<add> * @var \Cake\Mailer\AbstractTransport|null
<ide> */
<del> public function __construct(?Email $email = null)
<add> protected $transport;
<add>
<add> /**
<add> * Message class name.
<add> *
<add> * @var string
<add> */
<add> protected $messageClass = Message::class;
<add>
<add> /**
<add> * Message instance.
<add> *
<add> * @var \Cake\Mailer\Message
<add> */
<add> protected $message;
<add>
<add> /**
<add> * Constructor
<add> *
<add> * @param array|string|null $config Array of configs, or string to load configs from app.php
<add> */
<add> public function __construct($config = null)
<ide> {
<del> if ($email === null) {
<del> $email = new Email();
<del> }
<add> $email = new Email();
<ide>
<ide> $this->_email = $email;
<ide> $this->_clonedEmail = clone $email;
<add>
<add> $this->message = new $this->messageClass();
<add>
<add> if ($config === null) {
<add> $config = static::getConfig('default');
<add> }
<add>
<add> $this->viewBuilder()
<add> ->setClassName(View::class)
<add> ->setTemplate('')
<add> ->setLayout('default')
<add> ->setHelpers(['Html']);
<add>
<add> if ($config) {
<add> $this->setProfile($config);
<add> }
<ide> }
<ide>
<ide> /**
<ide> public function viewBuilder(): ViewBuilder
<ide> }
<ide>
<ide> /**
<del> * Magic method to forward method class to Email instance.
<add> * Magic method to forward method class to Message instance.
<ide> *
<ide> * @param string $method Method name.
<ide> * @param array $args Method arguments
<ide> * @return $this|mixed
<ide> */
<ide> public function __call(string $method, array $args)
<ide> {
<del> $result = $this->_email->$method(...$args);
<add> $result = $this->message->$method(...$args);
<ide> if (strpos($method, 'get') === 0) {
<ide> return $result;
<ide> }
<ide> public function send(string $action, array $args = [], array $headers = []): arr
<ide> return $result;
<ide> }
<ide>
<add> /**
<add> * Sets the configuration profile to use for this instance.
<add> *
<add> * @param string|array $config String with configuration name, or
<add> * an array with config.
<add> * @return $this
<add> */
<add> public function setProfile($config)
<add> {
<add> if (is_string($config)) {
<add> $name = $config;
<add> $config = static::getConfig($name);
<add> if (empty($config)) {
<add> throw new InvalidArgumentException(sprintf('Unknown email configuration "%s".', $name));
<add> }
<add> unset($name);
<add> }
<add>
<add> $simpleMethods = [
<add> 'transport',
<add> ];
<add> foreach ($simpleMethods as $method) {
<add> if (isset($config[$method])) {
<add> $this->{'set' . ucfirst($method)}($config[$method]);
<add> unset($config[$method]);
<add> }
<add> }
<add>
<add> $viewBuilderMethods = [
<add> 'template', 'layout', 'theme',
<add> ];
<add> foreach ($viewBuilderMethods as $method) {
<add> if (array_key_exists($method, $config)) {
<add> $this->viewBuilder()->{'set' . ucfirst($method)}($config[$method]);
<add> unset($config[$method]);
<add> }
<add> }
<add>
<add> if (array_key_exists('helpers', $config)) {
<add> $this->viewBuilder()->setHelpers($config['helpers'], false);
<add> unset($config['helpers']);
<add> }
<add> if (array_key_exists('viewRenderer', $config)) {
<add> $this->viewBuilder()->setClassName($config['viewRenderer']);
<add> unset($config['viewRenderer']);
<add> }
<add> if (array_key_exists('viewVars', $config)) {
<add> $this->viewBuilder()->setVars($config['viewVars']);
<add> unset($config['viewVars']);
<add> }
<add>
<add> $this->message->setConfig($config);
<add>
<add> return $this;
<add> }
<add>
<add> /**
<add> * Sets the transport.
<add> *
<add> * When setting the transport you can either use the name
<add> * of a configured transport or supply a constructed transport.
<add> *
<add> * @param string|\Cake\Mailer\AbstractTransport $name Either the name of a configured
<add> * transport, or a transport instance.
<add> * @return $this
<add> * @throws \LogicException When the chosen transport lacks a send method.
<add> * @throws \InvalidArgumentException When $name is neither a string nor an object.
<add> */
<add> public function setTransport($name)
<add> {
<add> if (is_string($name)) {
<add> $transport = TransportFactory::get($name);
<add> } elseif (is_object($name)) {
<add> $transport = $name;
<add> if (!$transport instanceof AbstractTransport) {
<add> throw new Exception('Transport class must extend Cake\Mailer\AbstractTransport');
<add> }
<add> } else {
<add> throw new InvalidArgumentException(sprintf(
<add> 'The value passed for the "$name" argument must be either a string, or an object, %s given.',
<add> gettype($name)
<add> ));
<add> }
<add>
<add> $this->transport = $transport;
<add>
<add> return $this;
<add> }
<add>
<add> /**
<add> * Gets the transport.
<add> *
<add> * @return \Cake\Mailer\AbstractTransport
<add> */
<add> public function getTransport(): AbstractTransport
<add> {
<add> if ($this->transport === null) {
<add> throw new BadMethodCallException(
<add> 'Transport was not defined. You must set on using setTransport() or set `transport` option in profile.'
<add> );
<add> }
<add>
<add> return $this->transport;
<add> }
<add>
<ide> /**
<ide> * Reset email instance.
<ide> *
<ide><path>tests/TestCase/Mailer/MailerTest.php
<ide> */
<ide> namespace Cake\Test\TestCase\Mailer;
<ide>
<add>use Cake\Core\Configure;
<add>use Cake\Core\Exception\Exception;
<add>use Cake\Mailer\AbstractTransport;
<add>use Cake\Mailer\Mailer;
<add>use Cake\Mailer\Transport\DebugTransport;
<add>use Cake\Mailer\TransportFactory;
<ide> use Cake\TestSuite\TestCase;
<ide> use RuntimeException;
<ide> use TestApp\Mailer\TestMailer;
<ide>
<ide> class MailerTest extends TestCase
<ide> {
<add> /**
<add> * @var array
<add> */
<add> protected $transports = [];
<add>
<add> /**
<add> * @var \Cake\Mailer\Mailer
<add> */
<add> protected $mailer;
<add>
<add> /**
<add> * setUp
<add> *
<add> * @return void
<add> */
<add> public function setUp(): void
<add> {
<add> parent::setUp();
<add>
<add> $this->transports = [
<add> 'debug' => [
<add> 'className' => 'Debug',
<add> ],
<add> 'badClassName' => [
<add> 'className' => 'TestFalse',
<add> ],
<add> ];
<add>
<add> TransportFactory::setConfig($this->transports);
<add>
<add> $this->mailer = new TestMailer();
<add> }
<add>
<add> /**
<add> * tearDown method
<add> *
<add> * @return void
<add> */
<add> public function tearDown(): void
<add> {
<add> parent::tearDown();
<add>
<add> TransportFactory::drop('debug');
<add> TransportFactory::drop('badClassName');
<add> }
<add>
<ide> /**
<ide> * @param array $methods
<ide> * @param array $args
<ide> public function testConstructor()
<ide> $this->assertInstanceOf('Cake\Mailer\Email', $mailer->getEmailForAssertion());
<ide> }
<ide>
<add> /**
<add> * testTransport method
<add> *
<add> * @return void
<add> */
<add> public function testTransport()
<add> {
<add> $result = $this->mailer->setTransport('debug');
<add> $this->assertSame($this->mailer, $result);
<add>
<add> $result = $this->mailer->getTransport();
<add> $this->assertInstanceOf(DebugTransport::class, $result);
<add>
<add> $instance = $this->getMockBuilder(DebugTransport::class)->getMock();
<add> $this->mailer->setTransport($instance);
<add> $this->assertSame($instance, $this->mailer->getTransport());
<add> }
<add>
<add> /**
<add> * Test that using unknown transports fails.
<add> *
<add> */
<add> public function testTransportInvalid()
<add> {
<add> $this->expectException(\InvalidArgumentException::class);
<add> $this->expectExceptionMessage('The "Invalid" transport configuration does not exist');
<add> $this->mailer->setTransport('Invalid');
<add> }
<add>
<add> /**
<add> * Test that using classes with no send method fails.
<add> *
<add> */
<add> public function testTransportInstanceInvalid()
<add> {
<add> $this->expectException(Exception::class);
<add> $this->mailer->setTransport(new \StdClass());
<add> }
<add>
<add> /**
<add> * Test that using unknown transports fails.
<add> *
<add> */
<add> public function testTransportTypeInvalid()
<add> {
<add> $this->expectException(\InvalidArgumentException::class);
<add> $this->expectExceptionMessage('The value passed for the "$name" argument must be either a string, or an object, integer given.');
<add> $this->mailer->setTransport(123);
<add> }
<add>
<add> /**
<add> * Test reading/writing configuration profiles.
<add> *
<add> * @return void
<add> */
<add> public function testConfig()
<add> {
<add> $settings = [
<add> 'to' => 'mark@example.com',
<add> 'from' => 'noreply@example.com',
<add> ];
<add> Mailer::setConfig('test', $settings);
<add> $this->assertEquals($settings, Mailer::getConfig('test'), 'Should be the same.');
<add>
<add> $mailer = new TestMailer('test');
<add> $this->assertContains($settings['to'], $mailer->getTo());
<add> }
<add>
<add> /**
<add> * Test that exceptions are raised on duplicate config set.
<add> *
<add> * @return void
<add> */
<add> public function testConfigErrorOnDuplicate()
<add> {
<add> $this->expectException(\BadMethodCallException::class);
<add> $settings = [
<add> 'to' => 'mark@example.com',
<add> 'from' => 'noreply@example.com',
<add> ];
<add> Mailer::setConfig('test', $settings);
<add> Mailer::setConfig('test', $settings);
<add> }
<add>
<add> /**
<add> * test profile method
<add> *
<add> * @return void
<add> */
<add> public function testSetProfile()
<add> {
<add> $config = ['to' => 'foo@bar.com'];
<add> $this->mailer->setProfile($config);
<add> $this->assertSame(['foo@bar.com' => 'foo@bar.com'], $this->mailer->getTo());
<add> }
<add>
<add> /**
<add> * test that default profile is used by constructor if available.
<add> *
<add> * @return void
<add> */
<add> public function testDefaultProfile()
<add> {
<add> $config = ['to' => 'foo@bar.com', 'from' => 'from@bar.com'];
<add>
<add> Configure::write('Mailer.default', $config);
<add> Mailer::setConfig(Configure::consume('Mailer'));
<add>
<add> $mailer = new TestMailer();
<add> $this->assertSame(['foo@bar.com' => 'foo@bar.com'], $mailer->getTo());
<add> $this->assertSame(['from@bar.com' => 'from@bar.com'], $mailer->getFrom());
<add>
<add> Configure::delete('Mailer');
<add> Mailer::drop('default');
<add> }
<add>
<add> /**
<add> * Test that using an invalid profile fails.
<add> *
<add> */
<add> public function testProfileInvalid()
<add> {
<add> $this->expectException(\InvalidArgumentException::class);
<add> $this->expectExceptionMessage('Unknown email configuration "derp".');
<add> $mailer = new TestMailer();
<add> $mailer->setProfile('derp');
<add> }
<add>
<add> /**
<add> * testConfigString method
<add> *
<add> * @return void
<add> */
<add> public function testUseConfigString()
<add> {
<add> $config = [
<add> 'from' => ['some@example.com' => 'My website'],
<add> 'to' => ['test@example.com' => 'Testname'],
<add> 'subject' => 'Test mail subject',
<add> 'transport' => 'debug',
<add> 'theme' => 'TestTheme',
<add> 'helpers' => ['Html', 'Form'],
<add> ];
<add> Mailer::setConfig('test', $config);
<add> $this->mailer->setProfile('test');
<add>
<add> $result = $this->mailer->getTo();
<add> $this->assertEquals($config['to'], $result);
<add>
<add> $result = $this->mailer->getFrom();
<add> $this->assertEquals($config['from'], $result);
<add>
<add> $result = $this->mailer->getSubject();
<add> $this->assertEquals($config['subject'], $result);
<add>
<add> $result = $this->mailer->viewBuilder()->getTheme();
<add> $this->assertEquals($config['theme'], $result);
<add>
<add> $result = $this->mailer->getTransport();
<add> $this->assertInstanceOf(DebugTransport::class, $result);
<add>
<add> $result = $this->mailer->viewBuilder()->getHelpers();
<add> $this->assertEquals($config['helpers'], $result);
<add> }
<add>
<add> /**
<add> * CakeEmailTest::testMockTransport()
<add> */
<add> public function testMockTransport()
<add> {
<add> TransportFactory::drop('default');
<add>
<add> $mock = $this->getMockBuilder(AbstractTransport::class)->getMock();
<add> $config = ['from' => 'tester@example.org', 'transport' => 'default'];
<add>
<add> Mailer::setConfig('default', $config);
<add> TransportFactory::setConfig('default', $mock);
<add>
<add> $em = new TestMailer('default');
<add>
<add> $this->assertSame($mock, $em->getTransport());
<add> }
<add>
<ide> /**
<ide> * @return void
<ide> */
| 2
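Not CakePHP code — a language-neutral Python sketch of the named-profile pattern the Mailer adopts above: settings are registered once under a name, resolved in the constructor, and "default" acts as the fallback profile. All names here are illustrative.

```python
class Mailer:
    _config = {}  # class-level registry of named profiles

    @classmethod
    def set_config(cls, name, settings):
        if name in cls._config:
            raise ValueError(f'Configuration "{name}" is already set.')
        cls._config[name] = settings

    def __init__(self, profile=None):
        self.settings = {}
        if profile is None:
            profile = self._config.get("default", {})
        if profile:
            self.set_profile(profile)

    def set_profile(self, profile):
        # Accept either a profile name or an inline settings dict.
        if isinstance(profile, str):
            if profile not in self._config:
                raise ValueError(f'Unknown email configuration "{profile}".')
            profile = self._config[profile]
        self.settings.update(profile)


Mailer.set_config("test", {"to": "mark@example.com", "from": "noreply@example.com"})
mailer = Mailer("test")  # resolves the named profile at construction time
assert mailer.settings["to"] == "mark@example.com"
```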
|
PHP
|
PHP
|
update param name in docblock
|
af1bf7fd1fb408ef25004dcd04402d4cc219c9d3
|
<ide><path>src/Illuminate/Database/Query/Builder.php
<ide> public function whereIntegerNotInRaw($column, $values, $boolean = 'and')
<ide> /**
<ide> * Add a "where null" clause to the query.
<ide> *
<del> * @param string|array $column
<add> * @param string|array $columns
<ide> * @param string $boolean
<ide> * @param bool $not
<ide> * @return $this
| 1
|
Mixed
|
Javascript
|
trim trailing whitespace from js files and readme
|
aa2de8cbd5fbcbc9314b143f9b6625fe26427b97
|
<ide><path>README.md
<ide> that you'd normally have to do by hand. There are tasks that are common
<ide> to every web app; Ember.js does those things for you, so you can focus
<ide> on building killer features and UI.
<ide>
<del>- [Website](http://emberjs.com)
<del>- [Guides](http://emberjs.com/guides)
<del>- [API](http://emberjs.com/api)
<del>- [Community](http://emberjs.com/community)
<del>- [Blog](http://emberjs.com/blog)
<add>- [Website](http://emberjs.com)
<add>- [Guides](http://emberjs.com/guides)
<add>- [API](http://emberjs.com/api)
<add>- [Community](http://emberjs.com/community)
<add>- [Blog](http://emberjs.com/blog)
<ide> - [Builds](http://emberjs.com/builds)
<ide>
<ide> # Building Ember.js
<ide><path>lib/packages.js
<ide> module.exports = {
<ide> 'container': {trees: null, requirements: []},
<ide> 'ember-metal': {trees: null, vendorRequirements: ['backburner']},
<ide> 'ember-debug': {trees: null, requirements: ['ember-metal'], skipTests: true},
<del> 'ember-runtime': {trees: null, vendorRequirements: ['rsvp'], requirements: ['container', 'ember-metal']},
<add> 'ember-runtime': {trees: null, vendorRequirements: ['rsvp'], requirements: ['container', 'ember-metal']},
<ide> 'ember-views': {trees: null, requirements: ['ember-runtime']},
<ide> 'ember-extension-support': {trees: null, requirements: ['ember-application']},
<ide> 'ember-testing': {trees: null, requirements: ['ember-application', 'ember-routing']},
<ide><path>packages/ember-handlebars/lib/controls.js
<ide> export function inputHelper(options) {
<ide> Internally, `{{textarea}}` creates an instance of `Ember.TextArea`, passing
<ide> arguments from the helper to `Ember.TextArea`'s `create` method. You can
<ide> extend the capabilities of text areas in your application by reopening this
<del> class. For example, if you are building a Bootstrap project where `data-*`
<add> class. For example, if you are building a Bootstrap project where `data-*`
<ide> attributes are used, you can globally add support for a `data-*` attribute
<ide> on all `{{textarea}}`s' in your app by reopening `Ember.TextArea` or
<ide> `Ember.TextSupport` and adding it to the `attributeBindings` concatenated
<ide><path>packages/ember-handlebars/lib/helpers/loc.js
<ide> import { loc } from "ember-runtime/system/string";
<ide> Take note that `"welcome"` is a string and not an object
<ide> reference.
<ide>
<del> See [Ember.String.loc](/api/classes/Ember.String.html#method_loc) for how to
<add> See [Ember.String.loc](/api/classes/Ember.String.html#method_loc) for how to
<ide> set up localized string references.
<ide>
<ide> @method loc
<ide><path>packages/ember-metal/lib/mixin.js
<ide> Alias.prototype = new Descriptor();
<ide> });
<ide>
<ide> var goodGuy = App.Person.create();
<del>
<add>
<ide> goodGuy.name(); // 'Tomhuda Katzdale'
<ide> goodGuy.moniker(); // 'Tomhuda Katzdale'
<ide> ```
<ide><path>packages/ember-routing-handlebars/lib/helpers/link_to.js
<ide> var LinkView = Ember.LinkView = EmberComponent.extend({
<ide> or the application's current route is the route the `LinkView` would trigger
<ide> transitions into.
<ide>
<del> The `currentWhen` property can match against multiple routes by separating
<add> The `currentWhen` property can match against multiple routes by separating
<ide> route names using the `|` character.
<ide>
<ide> @property active
<ide> var LinkView = Ember.LinkView = EmberComponent.extend({
<ide> if (Ember.FEATURES.isEnabled("ember-routing-multi-current-when")) {
<ide> currentWhen = currentWhen.split('|');
<ide> for (var i = 0, len = currentWhen.length; i < len; i++) {
<del> if (isActiveForRoute(currentWhen[i])) {
<del> return get(this, 'activeClass');
<add> if (isActiveForRoute(currentWhen[i])) {
<add> return get(this, 'activeClass');
<ide> }
<ide> }
<ide> } else {
<del> if (isActiveForRoute(currentWhen)) {
<del> return get(this, 'activeClass');
<add> if (isActiveForRoute(currentWhen)) {
<add> return get(this, 'activeClass');
<ide> }
<ide> }
<ide> }),
<ide><path>packages/ember-routing/tests/location/auto_location_test.js
<ide> if (Ember.FEATURES.isEnabled('ember-routing-auto-location-uses-replace-state-for
<ide> createLocation();
<ide>
<ide> equal(get(location, 'implementation'), 'history');
<del> });
<add> });
<ide> } else {
<ide> test("AutoLocation.create() should transform the URL for pushState-supported browsers viewing a HashLocation-formatted url", function() {
<ide> expect(4);
<ide><path>packages/ember-runtime/lib/computed/reduce_computed_macros.js
<ide> export var mapProperty = mapBy;
<ide> })
<ide> });
<ide>
<del> var hamster = Hamster.create({
<add> var hamster = Hamster.create({
<ide> chores: [
<ide> { name: 'cook', done: true },
<ide> { name: 'clean', done: true },
<ide> { name: 'write more unit tests', done: false }
<del> ]
<add> ]
<ide> });
<ide>
<ide> hamster.get('remainingChores'); // [{name: 'write more unit tests', done: false}]
<ide><path>packages/ember-runtime/lib/copy.js
<ide> function _copy(obj, deep, seen, copies) {
<ide> return copies[loc];
<ide> }
<ide>
<del> Ember.assert('Cannot clone an Ember.Object that does not implement Ember.Copyable',
<add> Ember.assert('Cannot clone an Ember.Object that does not implement Ember.Copyable',
<ide> !(obj instanceof EmberObject) || (Copyable && Copyable.detect(obj)));
<ide>
<ide> // IMPORTANT: this specific test will detect a native array only. Any other
<ide><path>packages/ember/tests/helpers/link_to_test.js
<ide> if (Ember.FEATURES.isEnabled("ember-routing-multi-current-when")) {
<ide> Ember.TEMPLATES['index/about'] = Ember.Handlebars.compile("{{#link-to 'item' id='link1' currentWhen='item|index'}}ITEM{{/link-to}}");
<ide> Ember.TEMPLATES['item'] = Ember.Handlebars.compile("{{#link-to 'item' id='link2' currentWhen='item|index'}}ITEM{{/link-to}}");
<ide> Ember.TEMPLATES['foo'] = Ember.Handlebars.compile("{{#link-to 'item' id='link3' currentWhen='item|index'}}ITEM{{/link-to}}");
<del>
<add>
<ide> bootApplication();
<ide>
<ide> Ember.run(function() {
| 10
|
Python
|
Python
|
add missing import
|
b46bdce8d28a2773284cdab02956b7d494e2c1e1
|
<ide><path>spacy/cli/vocab.py
<ide> # coding: utf8
<ide> from __future__ import unicode_literals
<ide>
<add>from pathlib import Path
<ide> import plac
<ide> import json
<ide> import spacy
| 1
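For context: without the import, any reference to `Path` in the module fails at call time with `NameError: name 'Path' is not defined`. A trivial sketch of the kind of use that needs it (the directory name is made up, not taken from the spaCy CLI):

```python
from pathlib import Path

output_dir = Path("vocab_model")      # hypothetical output location
output_dir.mkdir(parents=True, exist_ok=True)
print(output_dir.resolve())
```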
|
Java
|
Java
|
improve support for @conditional on @configuration
|
2e2e9b8dd0ecb113c86ec3a5d0cf87d4f6f72fd6
|
<ide><path>spring-context/src/main/java/org/springframework/context/annotation/AnnotatedBeanDefinitionReader.java
<ide> public class AnnotatedBeanDefinitionReader {
<ide>
<ide> private final BeanDefinitionRegistry registry;
<ide>
<del> private Environment environment;
<del>
<ide> private BeanNameGenerator beanNameGenerator = new AnnotationBeanNameGenerator();
<ide>
<ide> private ScopeMetadataResolver scopeMetadataResolver = new AnnotationScopeMetadataResolver();
<ide>
<add> private ConditionEvaluator conditionEvaluator;
<add>
<ide>
<ide> /**
<ide> * Create a new {@code AnnotatedBeanDefinitionReader} for the given registry.
<ide> public AnnotatedBeanDefinitionReader(BeanDefinitionRegistry registry, Environmen
<ide> Assert.notNull(registry, "BeanDefinitionRegistry must not be null");
<ide> Assert.notNull(environment, "Environment must not be null");
<ide> this.registry = registry;
<del> this.environment = environment;
<add> this.conditionEvaluator = new ConditionEvaluator(registry, environment,
<add> null, null, null);
<ide> AnnotationConfigUtils.registerAnnotationConfigProcessors(this.registry);
<ide> }
<ide>
<ide> public final BeanDefinitionRegistry getRegistry() {
<ide> * @see #registerBean(Class, String, Class...)
<ide> */
<ide> public void setEnvironment(Environment environment) {
<del> this.environment = environment;
<add> this.conditionEvaluator = new ConditionEvaluator(this.registry, environment,
<add> null, null, null);
<ide> }
<ide>
<ide> /**
<ide> public void registerBean(Class<?> annotatedClass, Class<? extends Annotation>...
<ide>
<ide> public void registerBean(Class<?> annotatedClass, String name, Class<? extends Annotation>... qualifiers) {
<ide> AnnotatedGenericBeanDefinition abd = new AnnotatedGenericBeanDefinition(annotatedClass);
<del> if (ConditionEvaluator.get(abd.getMetadata(), true).shouldSkip(this.registry, this.environment)) {
<add> if (conditionEvaluator.shouldSkip(abd.getMetadata())) {
<ide> return;
<ide> }
<ide> ScopeMetadata scopeMetadata = this.scopeMetadataResolver.resolveScopeMetadata(abd);
<ide><path>spring-context/src/main/java/org/springframework/context/annotation/ClassPathBeanDefinitionScanner.java
<ide> protected Set<BeanDefinitionHolder> doScan(String... basePackages) {
<ide> return beanDefinitions;
<ide> }
<ide>
<del> @Override
<del> protected boolean isConditionMatch(MetadataReader metadataReader) {
<del> return !ConditionEvaluator.get(metadataReader.getAnnotationMetadata(), true).shouldSkip(
<del> getRegistry(), getEnvironment());
<del> }
<del>
<ide> /**
<ide> * Apply further settings to the given bean definition,
<ide> * beyond the contents retrieved from scanning the component class.
<ide><path>spring-context/src/main/java/org/springframework/context/annotation/ClassPathScanningCandidateComponentProvider.java
<ide> import org.springframework.beans.factory.BeanDefinitionStoreException;
<ide> import org.springframework.beans.factory.annotation.AnnotatedBeanDefinition;
<ide> import org.springframework.beans.factory.config.BeanDefinition;
<add>import org.springframework.beans.factory.support.BeanDefinitionRegistry;
<ide> import org.springframework.context.ResourceLoaderAware;
<ide> import org.springframework.core.env.Environment;
<ide> import org.springframework.core.env.EnvironmentCapable;
<ide> public class ClassPathScanningCandidateComponentProvider implements EnvironmentC
<ide>
<ide> private final List<TypeFilter> excludeFilters = new LinkedList<TypeFilter>();
<ide>
<add> private ConditionEvaluator conditionEvaluator;
<add>
<ide>
<ide> /**
<ide> * Create a ClassPathScanningCandidateComponentProvider with a {@link StandardEnvironment}.
<ide> public final MetadataReaderFactory getMetadataReaderFactory() {
<ide> */
<ide> public void setEnvironment(Environment environment) {
<ide> this.environment = environment;
<add> this.conditionEvaluator = null;
<ide> }
<ide>
<ide> @Override
<ide> public final Environment getEnvironment() {
<ide> return this.environment;
<ide> }
<ide>
<add> /**
<add> * Returns the {@link BeanDefinitionRegistry} used by this scanner or {@code null}.
<add> */
<add> protected BeanDefinitionRegistry getRegistry() {
<add> return null;
<add> }
<add>
<ide> /**
<ide> * Set the resource pattern to use when scanning the classpath.
<ide> * This value will be appended to each base package name.
<ide> protected boolean isCandidateComponent(MetadataReader metadataReader) throws IOE
<ide> * @param metadataReader the ASM ClassReader for the class
<ide> * @return whether the class qualifies as a candidate component
<ide> */
<del> protected boolean isConditionMatch(MetadataReader metadataReader) {
<del> return !ConditionEvaluator.get(metadataReader.getAnnotationMetadata(), true).shouldSkip(
<del> null, getEnvironment());
<add> private boolean isConditionMatch(MetadataReader metadataReader) {
<add> if (this.conditionEvaluator == null) {
<add> this.conditionEvaluator = new ConditionEvaluator(getRegistry(),
<add> getEnvironment(), null, null, getResourceLoader());
<add> }
<add> return !conditionEvaluator.shouldSkip(metadataReader.getAnnotationMetadata());
<ide> }
<ide>
<ide> /**
<ide><path>spring-context/src/main/java/org/springframework/context/annotation/Condition.java
<ide> * A single {@code condition} that must be {@linkplain #matches matched} in order
<ide> * for a component to be registered.
<ide> *
<del> * <p>Conditions are checked immediately before a component bean-definition is due to be
<del> * registered and are free to veto registration based on any criteria that can be
<del> * determined at that point.
<add> * <p>Conditions are checked immediately before the bean-definition is due to be
<add> * registered and are free to veto registration based on any criteria that can
<add> * be determined at that point.
<ide> *
<ide> * <p>Conditions must follow the same restrictions as {@link BeanFactoryPostProcessor}
<del> * and take care to never interact with bean instances.
<add> * and take care to never interact with bean instances. For more fine-grained control
<add> * of conditions that interact with {@code @Configuration} beans consider the
<add> * {@link ConfigurationCondition} interface.
<ide> *
<ide> * @author Phillip Webb
<ide> * @since 4.0
<add> * @see ConfigurationCondition
<ide> * @see Conditional
<ide> * @see ConditionContext
<ide> */
<ide><path>spring-context/src/main/java/org/springframework/context/annotation/ConditionContext.java
<ide>
<ide> import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
<ide> import org.springframework.beans.factory.support.BeanDefinitionRegistry;
<add>import org.springframework.context.ApplicationContext;
<ide> import org.springframework.core.env.Environment;
<ide> import org.springframework.core.io.ResourceLoader;
<ide>
<ide> public interface ConditionContext {
<ide> */
<ide> ClassLoader getClassLoader();
<ide>
<add> ApplicationContext getApplicationContext();
<add>
<ide> }
<ide><path>spring-context/src/main/java/org/springframework/context/annotation/ConditionEvaluator.java
<ide> import org.springframework.beans.BeanUtils;
<ide> import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
<ide> import org.springframework.beans.factory.support.BeanDefinitionRegistry;
<add>import org.springframework.context.ApplicationContext;
<ide> import org.springframework.context.ConfigurableApplicationContext;
<add>import org.springframework.context.annotation.ConfigurationCondition.ConfigurationPhase;
<ide> import org.springframework.core.env.Environment;
<ide> import org.springframework.core.env.EnvironmentCapable;
<ide> import org.springframework.core.io.ResourceLoader;
<ide> import org.springframework.util.MultiValueMap;
<ide>
<ide> /**
<del> * Utility class used to evaluate {@link Conditional} annotations.
<add> * Internal class used to evaluate {@link Conditional} annotations.
<ide> *
<ide> * @author Phillip Webb
<ide> * @since 4.0
<ide> */
<del>abstract class ConditionEvaluator {
<add>class ConditionEvaluator {
<ide>
<ide> private static final String CONDITIONAL_ANNOTATION = Conditional.class.getName();
<ide>
<del> private static final ConditionEvaluator NONE = new ConditionEvaluator() {
<ide>
<del> @Override
<del> public boolean shouldSkip(BeanDefinitionRegistry registry, Environment environment) {
<del> return false;
<del> }
<del>
<del> };
<add> private final ConditionContextImpl context;
<ide>
<ide>
<ide> /**
<del> * Evaluate if any condition does not match and hence registration should be skipped.
<del> * @param registry the registry or {@code null}
<del> * @param environment the environment or {@code null}
<del> * @return if the registration should be skipped
<add> * Create a new {@link ConditionEvaluator} instance.
<ide> */
<del> public abstract boolean shouldSkip(BeanDefinitionRegistry registry,
<del> Environment environment);
<add> public ConditionEvaluator(BeanDefinitionRegistry registry, Environment environment,
<add> ApplicationContext applicationContext, ClassLoader classLoader,
<add> ResourceLoader resourceLoader) {
<add> this.context = new ConditionContextImpl(registry, environment,
<add> applicationContext, classLoader, resourceLoader);
<add> }
<ide>
<ide>
<ide> /**
<del> * Returns a {@link ConditionEvaluator} instance of the specified metadata.
<del> * @param metadata the metadata to test
<del> * @param deferIfConfigurationCandidate if the evaluator should be deferred when the
<del> * metadata is from a {@code @Configuration} candidate.
<del> * @return the evaluator instance
<add> * Determine if an item should be skipped based on {@code @Conditional} annotations.
<add> * The {@link ConfigurationPhase} will be deduced from the type of item (i.e. a
<add> * {@code @Configuration} class will be {@link ConfigurationPhase#PARSE_CONFIGURATION})
<add> * @param metadata the meta data
<add> * @return if the item should be skipped
<ide> */
<del> public static ConditionEvaluator get(AnnotatedTypeMetadata metadata,
<del> boolean deferIfConfigurationCandidate) {
<del> if (metadata == null || !metadata.isAnnotated(CONDITIONAL_ANNOTATION)) {
<del> // Shortcut to save always creating a ConditionEvaluator
<del> return NONE;
<del> }
<del>
<del> // Defer @Conditional @Configuration classes until later when the
<del> // ConfigurationClassPostProcessor will evaluate them. Allows @Conditional
<del> // implementations that inspect beans created by @Configuration to work
<del> if (deferIfConfigurationCandidate && metadata instanceof AnnotationMetadata
<del> && ConfigurationClassUtils.isConfigurationCandidate((AnnotationMetadata) metadata)) {
<del> return NONE;
<del> }
<del>
<del> return new ConditionEvaluatorImpl(metadata);
<add> public boolean shouldSkip(AnnotatedTypeMetadata metadata) {
<add> return shouldSkip(metadata, null);
<ide> }
<ide>
<del>
<ide> /**
<del> * Implementation of {@link ConditionEvaluator}.
<add> * Determine if an item should be skipped based on {@code @Conditional} annotations.
<add> * @param metadata the meta data
<add> * @param phase the phase of the call
<add> * @return if the item should be skipped
<ide> */
<del> private static class ConditionEvaluatorImpl extends ConditionEvaluator {
<del>
<del> private AnnotatedTypeMetadata metadata;
<del>
<del>
<del> public ConditionEvaluatorImpl(AnnotatedTypeMetadata metadata) {
<del> this.metadata = metadata;
<add> public boolean shouldSkip(AnnotatedTypeMetadata metadata, ConfigurationPhase phase) {
<add> if (metadata == null || !metadata.isAnnotated(CONDITIONAL_ANNOTATION)) {
<add> return false;
<ide> }
<ide>
<add> if (phase == null) {
<add> if (metadata instanceof AnnotationMetadata &&
<add> ConfigurationClassUtils.isConfigurationCandidate((AnnotationMetadata) metadata)) {
<add> return shouldSkip(metadata, ConfigurationPhase.PARSE_CONFIGURATION);
<add> }
<add> return shouldSkip(metadata, ConfigurationPhase.REGISTER_BEAN);
<add> }
<ide>
<del> @Override
<del> public boolean shouldSkip(BeanDefinitionRegistry registry, Environment environment) {
<del> ConditionContext context = new ConditionContextImpl(registry, environment);
<del> if (this.metadata != null) {
<del> for (String[] conditionClasses : getConditionClasses(metadata)) {
<del> for (String conditionClass : conditionClasses) {
<del> if (!getCondition(conditionClass, context.getClassLoader()).matches(
<del> context, metadata)) {
<del> return true;
<del> }
<add> for (String[] conditionClasses : getConditionClasses(metadata)) {
<add> for (String conditionClass : conditionClasses) {
<add> Condition condition = getCondition(conditionClass, context.getClassLoader());
<add> ConfigurationPhase requiredPhase = null;
<add> if (condition instanceof ConfigurationCondition) {
<add> requiredPhase = ((ConfigurationCondition) condition).getConfigurationPhase();
<add> }
<add> if (requiredPhase == null || requiredPhase == phase) {
<add> if (!condition.matches(context, metadata)) {
<add> return true;
<ide> }
<ide> }
<ide> }
<del> return false;
<ide> }
<add> return false;
<add> }
<ide>
<del> @SuppressWarnings("unchecked")
<del> private static List<String[]> getConditionClasses(AnnotatedTypeMetadata metadata) {
<del> MultiValueMap<String, Object> attributes = metadata.getAllAnnotationAttributes(
<del> CONDITIONAL_ANNOTATION, true);
<del> Object values = attributes == null ? null : attributes.get("value");
<del> return (List<String[]>) (values == null ? Collections.emptyList() : values);
<del> }
<add> @SuppressWarnings("unchecked")
<add> private List<String[]> getConditionClasses(AnnotatedTypeMetadata metadata) {
<add> MultiValueMap<String, Object> attributes = metadata.getAllAnnotationAttributes(
<add> CONDITIONAL_ANNOTATION, true);
<add> Object values = attributes == null ? null : attributes.get("value");
<add> return (List<String[]>) (values == null ? Collections.emptyList() : values);
<add> }
<ide>
<del> private static Condition getCondition(String conditionClassName,
<del> ClassLoader classloader) {
<del> Class<?> conditionClass = ClassUtils.resolveClassName(conditionClassName,
<del> classloader);
<del> return (Condition) BeanUtils.instantiateClass(conditionClass);
<del> }
<add> private Condition getCondition(String conditionClassName,
<add> ClassLoader classloader) {
<add> Class<?> conditionClass = ClassUtils.resolveClassName(conditionClassName,
<add> classloader);
<add> return (Condition) BeanUtils.instantiateClass(conditionClass);
<ide> }
<ide>
<add>
<ide> /**
<ide> * Implementation of a {@link ConditionContext}.
<ide> */
<ide> private static class ConditionContextImpl implements ConditionContext {
<ide>
<ide> private Environment environment;
<ide>
<add> private ApplicationContext applicationContext;
<add>
<add> private ClassLoader classLoader;
<add>
<add> private ResourceLoader resourceLoader;
<add>
<ide>
<ide> public ConditionContextImpl(BeanDefinitionRegistry registry,
<del> Environment environment) {
<add> Environment environment, ApplicationContext applicationContext,
<add> ClassLoader classLoader, ResourceLoader resourceLoader) {
<ide> this.registry = registry;
<ide> this.beanFactory = deduceBeanFactory(registry);
<ide> this.environment = environment;
<del> if (this.environment == null) {
<del> this.environment = deduceEnvironment(registry);
<del> }
<add> this.applicationContext = applicationContext;
<add> this.classLoader = classLoader;
<add> this.resourceLoader = resourceLoader;
<ide> }
<ide>
<del>
<ide> private ConfigurableListableBeanFactory deduceBeanFactory(Object source) {
<ide> if (source == null) {
<ide> return null;
<ide> else if (source instanceof ConfigurableApplicationContext) {
<ide> return null;
<ide> }
<ide>
<del> private Environment deduceEnvironment(BeanDefinitionRegistry registry) {
<del> if (registry == null) {
<del> return null;
<add> @Override
<add> public BeanDefinitionRegistry getRegistry() {
<add> if (this.registry != null) {
<add> return this.registry;
<ide> }
<del> if (registry instanceof EnvironmentCapable) {
<del> return ((EnvironmentCapable) registry).getEnvironment();
<add> if(getBeanFactory() != null && getBeanFactory() instanceof BeanDefinitionRegistry) {
<add> return (BeanDefinitionRegistry) getBeanFactory();
<ide> }
<ide> return null;
<ide> }
<ide>
<del> @Override
<del> public BeanDefinitionRegistry getRegistry() {
<del> return this.registry;
<del> }
<del>
<ide> @Override
<ide> public Environment getEnvironment() {
<del> return this.environment;
<add> if (this.environment != null) {
<add> return this.environment;
<add> }
<add> if (getRegistry() != null && getRegistry() instanceof EnvironmentCapable) {
<add> return ((EnvironmentCapable) getRegistry()).getEnvironment();
<add> }
<add> return null;
<ide> }
<ide>
<ide> @Override
<ide> public ConfigurableListableBeanFactory getBeanFactory() {
<ide>
<ide> @Override
<ide> public ResourceLoader getResourceLoader() {
<add> if (this.resourceLoader != null) {
<add> return this.resourceLoader;
<add> }
<ide> if (registry instanceof ResourceLoader) {
<ide> return (ResourceLoader) registry;
<ide> }
<ide> public ResourceLoader getResourceLoader() {
<ide>
<ide> @Override
<ide> public ClassLoader getClassLoader() {
<del> ResourceLoader resourceLoader = getResourceLoader();
<del> return (resourceLoader == null ? null : resourceLoader.getClassLoader());
<add> if (this.classLoader != null) {
<add> return this.classLoader;
<add> }
<add> if (getResourceLoader() != null) {
<add> return getResourceLoader().getClassLoader();
<add> }
<add> return null;
<add> }
<add>
<add> @Override
<add> public ApplicationContext getApplicationContext() {
<add> if (this.applicationContext != null) {
<add> return this.applicationContext;
<add> }
<add> if (getRegistry() != null && getRegistry() instanceof ApplicationContext) {
<add> return (ApplicationContext) getRegistry();
<add> }
<add> return null;
<ide> }
<ide> }
<ide>
<ide><path>spring-context/src/main/java/org/springframework/context/annotation/Conditional.java
<ide> * {@linkplain #value() specified conditions} match.
<ide> *
<ide> * <p>A <em>condition</em> is any state that can be determined programmatically
<del> * immediately before the bean is due to be created (see {@link Condition} for details).
<add> * before the bean definition is due to be registered (see {@link Condition} for details).
<ide> *
<ide> * <p>The {@code @Conditional} annotation may be used in any of the following ways:
<ide> * <ul>
<ide><path>spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClass.java
<ide> public void validate(ProblemReporter problemReporter) {
<ide> }
<ide> }
<ide>
<del>
<ide> @Override
<ide> public boolean equals(Object other) {
<ide> return (this == other || (other instanceof ConfigurationClass &&
<ide><path>spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClassBeanDefinitionReader.java
<ide> import org.springframework.beans.factory.support.BeanDefinitionRegistry;
<ide> import org.springframework.beans.factory.support.BeanNameGenerator;
<ide> import org.springframework.beans.factory.support.RootBeanDefinition;
<add>import org.springframework.context.ApplicationContext;
<add>import org.springframework.context.annotation.ConfigurationCondition.ConfigurationPhase;
<ide> import org.springframework.core.annotation.AnnotationAttributes;
<ide> import org.springframework.core.env.Environment;
<ide> import org.springframework.core.io.Resource;
<ide> class ConfigurationClassBeanDefinitionReader {
<ide>
<ide> private final BeanNameGenerator importBeanNameGenerator;
<ide>
<add> private final ConditionEvaluator conditionEvaluator;
<add>
<ide> /**
<ide> * Create a new {@link ConfigurationClassBeanDefinitionReader} instance that will be used
<ide> * to populate the given {@link BeanDefinitionRegistry}.
<ide> */
<ide> public ConfigurationClassBeanDefinitionReader(
<del> BeanDefinitionRegistry registry, SourceExtractor sourceExtractor,
<del> ProblemReporter problemReporter, MetadataReaderFactory metadataReaderFactory,
<del> ResourceLoader resourceLoader, Environment environment, BeanNameGenerator importBeanNameGenerator) {
<add> BeanDefinitionRegistry registry, ApplicationContext applicationContext,
<add> SourceExtractor sourceExtractor, ProblemReporter problemReporter,
<add> MetadataReaderFactory metadataReaderFactory, ResourceLoader resourceLoader,
<add> Environment environment, BeanNameGenerator importBeanNameGenerator) {
<ide>
<ide> this.registry = registry;
<ide> this.sourceExtractor = sourceExtractor;
<ide> public ConfigurationClassBeanDefinitionReader(
<ide> this.resourceLoader = resourceLoader;
<ide> this.environment = environment;
<ide> this.importBeanNameGenerator = importBeanNameGenerator;
<add> this.conditionEvaluator = new ConditionEvaluator(registry, environment,
<add> applicationContext, null, resourceLoader);
<ide> }
<ide>
<ide>
<ide> public ConfigurationClassBeanDefinitionReader(
<ide> * based on its contents.
<ide> */
<ide> public void loadBeanDefinitions(Set<ConfigurationClass> configurationModel) {
<del> TrackedConditionEvaluator conditionEvaluator = new TrackedConditionEvaluator();
<add> TrackedConditionEvaluator trackedConditionEvaluator = new TrackedConditionEvaluator();
<ide> for (ConfigurationClass configClass : configurationModel) {
<del> loadBeanDefinitionsForConfigurationClass(configClass, conditionEvaluator);
<add> loadBeanDefinitionsForConfigurationClass(configClass, trackedConditionEvaluator);
<ide> }
<ide> }
<ide>
<ide> public void loadBeanDefinitions(Set<ConfigurationClass> configurationModel) {
<ide> * class itself, all its {@link Bean} methods
<ide> */
<ide> private void loadBeanDefinitionsForConfigurationClass(ConfigurationClass configClass,
<del> TrackedConditionEvaluator conditionEvaluator) {
<del>
<del> if(conditionEvaluator.shouldSkip(configClass)) {
<add> TrackedConditionEvaluator trackedConditionEvaluator) {
<add> if (trackedConditionEvaluator.shouldSkip(configClass)) {
<ide> removeBeanDefinition(configClass);
<ide> return;
<ide> }
<ide> private void registerBeanDefinitionForImportedConfigurationClass(ConfigurationCl
<ide> * with the BeanDefinitionRegistry based on its contents.
<ide> */
<ide> private void loadBeanDefinitionsForBeanMethod(BeanMethod beanMethod) {
<del> if (ConditionEvaluator.get(beanMethod.getMetadata(), false).shouldSkip(
<del> this.registry, this.environment)) {
<add> if (conditionEvaluator.shouldSkip(beanMethod.getMetadata(),
<add> ConfigurationPhase.REGISTER_BEAN)) {
<ide> return;
<ide> }
<ide> ConfigurationClass configClass = beanMethod.getConfigurationClass();
<ide> public InvalidConfigurationImportProblem(String className, Resource resource, An
<ide> }
<ide> }
<ide>
<add>
<ide> /**
<ide> * Evaluate {@Code @Conditional} annotations, tracking results and taking into
<ide> * account 'imported by'.
<ide> public boolean shouldSkip(ConfigurationClass configClass) {
<ide> }
<ide> }
<ide> if (skip == null) {
<del> skip = ConditionEvaluator.get(configClass.getMetadata(), false).shouldSkip(
<del> registry, environment);
<add> skip = conditionEvaluator.shouldSkip(configClass.getMetadata(),
<add> ConfigurationPhase.REGISTER_BEAN);
<ide> }
<ide> this.skipped.put(configClass, skip);
<ide> }
<ide> return skip;
<ide> }
<del>
<ide> }
<ide>
<ide> }
<ide><path>spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClassParser.java
<ide> import org.springframework.beans.factory.support.BeanDefinitionReader;
<ide> import org.springframework.beans.factory.support.BeanDefinitionRegistry;
<ide> import org.springframework.beans.factory.support.BeanNameGenerator;
<add>import org.springframework.context.ApplicationContext;
<ide> import org.springframework.context.EnvironmentAware;
<ide> import org.springframework.context.ResourceLoaderAware;
<add>import org.springframework.context.annotation.ConfigurationCondition.ConfigurationPhase;
<ide> import org.springframework.core.NestedIOException;
<ide> import org.springframework.core.annotation.AnnotationAttributes;
<ide> import org.springframework.core.annotation.AnnotationAwareOrderComparator;
<ide> public int compare(DeferredImportSelectorHolder o1, DeferredImportSelectorHolder
<ide>
<ide> private final List<DeferredImportSelectorHolder> deferredImportSelectors = new LinkedList<DeferredImportSelectorHolder>();
<ide>
<add> private final ConditionEvaluator conditionEvaluator;
<add>
<ide>
<ide> /**
<ide> * Create a new {@link ConfigurationClassParser} instance that will be used
<ide> * to populate the set of configuration classes.
<ide> */
<ide> public ConfigurationClassParser(MetadataReaderFactory metadataReaderFactory,
<ide> ProblemReporter problemReporter, Environment environment, ResourceLoader resourceLoader,
<del> BeanNameGenerator componentScanBeanNameGenerator, BeanDefinitionRegistry registry) {
<add> BeanNameGenerator componentScanBeanNameGenerator, BeanDefinitionRegistry registry,
<add> ApplicationContext applicationContext) {
<ide>
<ide> this.metadataReaderFactory = metadataReaderFactory;
<ide> this.problemReporter = problemReporter;
<ide> public ConfigurationClassParser(MetadataReaderFactory metadataReaderFactory,
<ide> this.registry = registry;
<ide> this.componentScanParser = new ComponentScanAnnotationParser(
<ide> resourceLoader, environment, componentScanBeanNameGenerator, registry);
<add> this.conditionEvaluator = new ConditionEvaluator(registry, environment,
<add> applicationContext, null, resourceLoader);
<ide> }
<ide>
<ide>
<ide> public void parse(Set<BeanDefinitionHolder> configCandidates) {
<ide> * @param beanName may be null, but if populated represents the bean id
<ide> * (assumes that this configuration class was configured via XML)
<ide> */
<del> public void parse(String className, String beanName) throws IOException {
<add> protected final void parse(String className, String beanName) throws IOException {
<ide> MetadataReader reader = this.metadataReaderFactory.getMetadataReader(className);
<ide> processConfigurationClass(new ConfigurationClass(reader, beanName));
<ide> }
<ide> public void parse(String className, String beanName) throws IOException {
<ide> * @param clazz the Class to parse
<ide> * @param beanName must not be null (as of Spring 3.1.1)
<ide> */
<del> public void parse(Class<?> clazz, String beanName) throws IOException {
<add> protected final void parse(Class<?> clazz, String beanName) throws IOException {
<ide> processConfigurationClass(new ConfigurationClass(clazz, beanName));
<ide> }
<ide>
<ide>
<ide> protected void processConfigurationClass(ConfigurationClass configClass) throws IOException {
<ide>
<add> if (conditionEvaluator.shouldSkip(configClass.getMetadata(), ConfigurationPhase.PARSE_CONFIGURATION)) {
<add> return;
<add> }
<add>
<ide> if (this.configurationClasses.contains(configClass) && configClass.getBeanName() != null) {
<ide> // Explicit bean definition found, probably replacing an import.
<ide> // Let's remove the old one and go with the new one.
<ide> protected final SourceClass doProcessConfigurationClass(
<ide> AnnotationAttributes componentScan = attributesFor(sourceClass.getMetadata(), ComponentScan.class);
<ide> if (componentScan != null) {
<ide> // the config class is annotated with @ComponentScan -> perform the scan immediately
<del> if (!ConditionEvaluator.get(configClass.getMetadata(), false).shouldSkip(
<del> this.registry, this.environment)) {
<add> if (!conditionEvaluator.shouldSkip(sourceClass.getMetadata(), ConfigurationPhase.REGISTER_BEAN)) {
<ide> Set<BeanDefinitionHolder> scannedBeanDefinitions =
<ide> this.componentScanParser.parse(componentScan, sourceClass.getMetadata().getClassName());
<ide>
<ide> // check the set of scanned definitions for any further config classes and parse recursively if necessary
<ide> for (BeanDefinitionHolder holder : scannedBeanDefinitions) {
<ide> if (ConfigurationClassUtils.checkConfigurationClassCandidate(holder.getBeanDefinition(), this.metadataReaderFactory)) {
<del> this.parse(holder.getBeanDefinition().getBeanClassName(), holder.getBeanName());
<add> parse(holder.getBeanDefinition().getBeanClassName(), holder.getBeanName());
<ide> }
<ide> }
<ide> }
<ide><path>spring-context/src/main/java/org/springframework/context/annotation/ConfigurationClassPostProcessor.java
<ide> import org.springframework.beans.factory.support.BeanDefinitionRegistryPostProcessor;
<ide> import org.springframework.beans.factory.support.BeanNameGenerator;
<ide> import org.springframework.beans.factory.support.RootBeanDefinition;
<add>import org.springframework.context.ApplicationContext;
<add>import org.springframework.context.ApplicationContextAware;
<ide> import org.springframework.context.EnvironmentAware;
<ide> import org.springframework.context.ResourceLoaderAware;
<ide> import org.springframework.context.annotation.ConfigurationClassParser.ImportRegistry;
<ide> * @since 3.0
<ide> */
<ide> public class ConfigurationClassPostProcessor implements BeanDefinitionRegistryPostProcessor,
<del> ResourceLoaderAware, BeanClassLoaderAware, EnvironmentAware {
<add> ResourceLoaderAware, BeanClassLoaderAware, EnvironmentAware, ApplicationContextAware {
<ide>
<ide> private static final String IMPORT_AWARE_PROCESSOR_BEAN_NAME =
<ide> ConfigurationClassPostProcessor.class.getName() + ".importAwareProcessor";
<ide> public class ConfigurationClassPostProcessor implements BeanDefinitionRegistryPo
<ide>
<ide> private Environment environment;
<ide>
<add> private ApplicationContext applicationContext;
<add>
<ide> private ResourceLoader resourceLoader = new DefaultResourceLoader();
<ide>
<ide> private ClassLoader beanClassLoader = ClassUtils.getDefaultClassLoader();
<ide> protected String buildDefaultBeanName(BeanDefinition definition) {
<ide> };
<ide>
<ide>
<add>
<ide> /**
<ide> * Set the {@link SourceExtractor} to use for generated bean definitions
<ide> * that correspond to {@link Bean} factory methods.
<ide> public void setEnvironment(Environment environment) {
<ide> this.environment = environment;
<ide> }
<ide>
<add> @Override
<add> public void setApplicationContext(ApplicationContext applicationContext)
<add> throws BeansException {
<add> this.applicationContext = applicationContext;
<add> }
<add>
<ide> @Override
<ide> public void setResourceLoader(ResourceLoader resourceLoader) {
<ide> Assert.notNull(resourceLoader, "ResourceLoader must not be null");
<ide> public void processConfigBeanDefinitions(BeanDefinitionRegistry registry) {
<ide> // Parse each @Configuration class
<ide> ConfigurationClassParser parser = new ConfigurationClassParser(
<ide> this.metadataReaderFactory, this.problemReporter, this.environment,
<del> this.resourceLoader, this.componentScanBeanNameGenerator, registry);
<add> this.resourceLoader, this.componentScanBeanNameGenerator, registry,
<add> this.applicationContext);
<ide> parser.parse(configCandidates);
<ide> parser.validate();
<ide>
<ide> public void processConfigBeanDefinitions(BeanDefinitionRegistry registry) {
<ide> // Read the model and create bean definitions based on its content
<ide> if (this.reader == null) {
<ide> this.reader = new ConfigurationClassBeanDefinitionReader(
<del> registry, this.sourceExtractor, this.problemReporter, this.metadataReaderFactory,
<add> registry, this.applicationContext, this.sourceExtractor,
<add> this.problemReporter, this.metadataReaderFactory,
<ide> this.resourceLoader, this.environment, this.importBeanNameGenerator);
<ide> }
<ide>
<ide><path>spring-context/src/main/java/org/springframework/context/annotation/ConfigurationCondition.java
<add>/*
<add> * Copyright 2002-2013 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.context.annotation;
<add>
<add>/**
<add> * A {@link Condition} that offers more fine-grained control when used with
<add> * {@code @Configuration}. Allows certain {@link Condition}s to adapt when they match
<add> * based on the configuration phase. For example, a condition that checks if a bean has
<add> * already been registered might choose to only be evaluated on the
<add> * {@link ConfigurationPhase#REGISTER_BEAN REGISTER_BEAN} {@link ConfigurationPhase}.
<add> *
<add> * @author Phillip Webb
<add> */
<add>public interface ConfigurationCondition extends Condition {
<add>
<add> /**
<add> * Returns the {@link ConfigurationPhase} in which the condition should be evaluated.
<add> */
<add> ConfigurationPhase getConfigurationPhase();
<add>
<add> /**
<add> * The various configuration phases where the condition could be evaluated.
<add> */
<add> public static enum ConfigurationPhase {
<add>
<add> /**
<add> * The {@link Condition} should be evaluated as a {@code @Configuration} class is
<add> * being parsed.
<add> *
<add> * <p>If the condition does not match at this point the {@code @Configuration}
<add> * class will not be added.
<add> */
<add> PARSE_CONFIGURATION,
<add>
<add> /**
<add> * The {@link Condition} should be evaluated when adding a regular (non
<add> * {@code @Configuration}) bean. The condition will not prevent
<add> * {@code @Configuration} classes from being added.
<add> *
<add> * <p>At the time that the condition is evaluated all {@code @Configuration}s
<add> * will have been parsed.
<add> */
<add> REGISTER_BEAN
<add> }
<add>
<add>}
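For orientation only (not part of the commit above): a minimal sketch of how a user-defined condition might opt into the REGISTER_BEAN phase that this interface introduces. The class name and the bean name "exampleService" are assumptions made purely for illustration.

// Hypothetical example of a ConfigurationCondition that is only evaluated
// when regular beans are registered, i.e. after all @Configuration classes
// have been parsed, so bean definitions contributed by other configuration
// classes are visible to the check.
import org.springframework.context.annotation.ConditionContext;
import org.springframework.context.annotation.ConfigurationCondition;
import org.springframework.core.type.AnnotatedTypeMetadata;

public class OnMissingExampleServiceCondition implements ConfigurationCondition {

	@Override
	public ConfigurationPhase getConfigurationPhase() {
		// Defer evaluation until bean registration time.
		return ConfigurationPhase.REGISTER_BEAN;
	}

	@Override
	public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
		// Match only while no bean definition named "exampleService" exists
		// (the name is hypothetical).
		return !context.getRegistry().containsBeanDefinition("exampleService");
	}
}

Such a condition would then be attached via @Conditional(OnMissingExampleServiceCondition.class) on a @Bean method or @Configuration class, in the same way HasBeanOneCondition is used in the tests below.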
<ide><path>spring-context/src/test/java/org/springframework/context/annotation/AsmCircularImportDetectionTests.java
<ide> /*
<del> * Copyright 2002-2012 the original author or authors.
<add> * Copyright 2002-2013 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> protected ConfigurationClassParser newParser() {
<ide> new StandardEnvironment(),
<ide> new DefaultResourceLoader(),
<ide> new AnnotationBeanNameGenerator(),
<del> new DefaultListableBeanFactory());
<add> new DefaultListableBeanFactory(),
<add> null);
<ide> }
<ide>
<ide> @Override
<ide><path>spring-context/src/test/java/org/springframework/context/annotation/ConfigurationClassWithConditionTests.java
<ide>
<ide> package org.springframework.context.annotation;
<ide>
<del>import static org.hamcrest.Matchers.equalTo;
<del>import static org.junit.Assert.assertFalse;
<del>import static org.junit.Assert.assertNull;
<del>import static org.junit.Assert.assertThat;
<del>import static org.junit.Assert.assertTrue;
<del>
<ide> import java.lang.annotation.ElementType;
<ide> import java.lang.annotation.Retention;
<ide> import java.lang.annotation.RetentionPolicy;
<ide> import org.junit.Test;
<ide> import org.junit.rules.ExpectedException;
<ide> import org.springframework.beans.factory.NoSuchBeanDefinitionException;
<add>import org.springframework.beans.factory.support.BeanDefinitionRegistry;
<ide> import org.springframework.core.annotation.AnnotationAttributes;
<add>import org.springframework.core.io.DefaultResourceLoader;
<ide> import org.springframework.core.type.AnnotatedTypeMetadata;
<add>import org.springframework.core.type.AnnotationMetadata;
<ide> import org.springframework.stereotype.Component;
<ide>
<add>import static org.hamcrest.Matchers.*;
<add>import static org.junit.Assert.*;
<add>
<ide> /**
<ide> * Tests for {@link Conditional} beans.
<del> *
<add> *
<ide> * @author Phillip Webb
<ide> */
<add>@SuppressWarnings("resource")
<ide> public class ConfigurationClassWithConditionTests {
<ide>
<ide> private final AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
<ide> public void methodConditional() throws Exception {
<ide> assertNull(ctx.getBean(ExampleBean.class));
<ide> }
<ide>
<add> @Test
<add> public void importsNotCreated() throws Exception {
<add> AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
<add> ctx.register(ImportsNotCreated.class);
<add> ctx.refresh();
<add> }
<add>
<add> @Test
<add> public void importsNotLoaded() throws Exception {
<add> AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
<add> ctx.register(ImportsNotLoaded.class);
<add> ctx.refresh();
<add> assertThat(ctx.containsBeanDefinition("a"), equalTo(false));
<add> assertThat(ctx.containsBeanDefinition("b"), equalTo(false));
<add> }
<add>
<add> @Test
<add> public void sensibleConditionContext() throws Exception {
<add> AnnotationConfigApplicationContext ctx = new AnnotationConfigApplicationContext();
<add> ctx.setResourceLoader(new DefaultResourceLoader());
<add> ctx.setClassLoader(getClass().getClassLoader());
<add> ctx.register(SensibleConditionContext.class);
<add> ctx.refresh();
<add> assertThat(ctx.getBean(ExampleBean.class), instanceOf(ExampleBean.class));
<add> }
<add>
<ide> @Configuration
<ide> static class BeanOneConfiguration {
<ide> @Bean
<ide> public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata)
<ide> }
<ide> }
<ide>
<del> static class HasBeanOneCondition implements Condition {
<add> static class HasBeanOneCondition implements ConfigurationCondition {
<add>
<add> @Override
<add> public ConfigurationPhase getConfigurationPhase() {
<add> return ConfigurationPhase.REGISTER_BEAN;
<add> }
<ide>
<ide> @Override
<ide> public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
<ide> public ExampleBean bean1() {
<ide> }
<ide> }
<ide>
<add> @Configuration
<add> @Never
<add> @Import({ ConfigurationNotCreated.class, RegistrarNotCreated.class, ImportSelectorNotCreated.class })
<add> static class ImportsNotCreated {
<add> static {
<add> if (true) throw new RuntimeException();
<add> }
<add> }
<add>
<add> @Configuration
<add> static class ConfigurationNotCreated {
<add> static {
<add> if (true) throw new RuntimeException();
<add> }
<add> }
<add>
<add> static class RegistrarNotCreated implements ImportBeanDefinitionRegistrar {
<add> static {
<add> if (true) throw new RuntimeException();
<add> }
<add>
<add> @Override
<add> public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata,
<add> BeanDefinitionRegistry registry) {
<add> }
<add> }
<add>
<add> static class ImportSelectorNotCreated implements ImportSelector {
<add>
<add> static {
<add> if (true) throw new RuntimeException();
<add> }
<add>
<add> @Override
<add> public String[] selectImports(AnnotationMetadata importingClassMetadata) {
<add> return new String[] {};
<add> }
<add>
<add> }
<add>
<add> @Configuration
<add> @Never
<add> @Import({ ConfigurationNotLoaded.class, RegistrarNotLoaded.class, ImportSelectorNotLoaded.class })
<add> static class ImportsNotLoaded {
<add> static {
<add> if (true) throw new RuntimeException();
<add> }
<add> }
<add>
<add> @Configuration
<add> static class ConfigurationNotLoaded {
<add> @Bean
<add> public String a() {
<add> return "a";
<add> }
<add> }
<add>
<add> static class RegistrarNotLoaded implements ImportBeanDefinitionRegistrar {
<add> @Override
<add> public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata,
<add> BeanDefinitionRegistry registry) {
<add> throw new RuntimeException();
<add> }
<add> }
<add>
<add> static class ImportSelectorNotLoaded implements ImportSelector {
<add>
<add> @Override
<add> public String[] selectImports(AnnotationMetadata importingClassMetadata) {
<add> return new String[] { SelectedConfigurationNotLoaded.class.getName() };
<add> }
<add>
<add> }
<add>
<add> @Configuration
<add> static class SelectedConfigurationNotLoaded {
<add> @Bean
<add> public String b() {
<add> return "b";
<add> }
<add> }
<add>
<add> @Configuration
<add> @Conditional(SensibleConditionContextCondition.class)
<add> static class SensibleConditionContext {
<add> @Bean
<add> ExampleBean exampleBean() {
<add> return new ExampleBean();
<add> }
<add> }
<add>
<add> static class SensibleConditionContextCondition implements Condition {
<add>
<add> @Override
<add> public boolean matches(ConditionContext context, AnnotatedTypeMetadata metadata) {
<add> assertThat(context.getApplicationContext(), notNullValue());
<add> assertThat(context.getBeanFactory(), notNullValue());
<add> assertThat(context.getClassLoader(), notNullValue());
<add> assertThat(context.getEnvironment(), notNullValue());
<add> assertThat(context.getRegistry(), notNullValue());
<add> assertThat(context.getResourceLoader(), notNullValue());
<add> return true;
<add> }
<add>
<add> }
<add>
<ide> static class ExampleBean {
<ide> }
<ide>
| 14
|
Mixed
|
Go
|
add pull flag to force image pulling
|
054e57a622e6a065c343806e7334920d17a03c5b
|
<ide><path>api/client/commands.go
<ide> func (cli *DockerCli) CmdBuild(args ...string) error {
<ide> noCache := cmd.Bool([]string{"#no-cache", "-no-cache"}, false, "Do not use cache when building the image")
<ide> rm := cmd.Bool([]string{"#rm", "-rm"}, true, "Remove intermediate containers after a successful build")
<ide> forceRm := cmd.Bool([]string{"-force-rm"}, false, "Always remove intermediate containers, even after unsuccessful builds")
<add> pull := cmd.Bool([]string{"-pull"}, false, "Always attempt to pull a newer version of the image")
<ide> if err := cmd.Parse(args); err != nil {
<ide> return nil
<ide> }
<ide> func (cli *DockerCli) CmdBuild(args ...string) error {
<ide> v.Set("forcerm", "1")
<ide> }
<ide>
<add> if *pull {
<add> v.Set("pull", "1")
<add> }
<ide> cli.LoadConfigFile()
<ide>
<ide> headers := http.Header(make(map[string][]string))
<ide><path>api/server/server.go
<ide> func postBuild(eng *engine.Engine, version version.Version, w http.ResponseWrite
<ide> } else {
<ide> job.Setenv("rm", r.FormValue("rm"))
<ide> }
<add> if r.FormValue("pull") == "1" && version.GreaterThanOrEqualTo("1.16") {
<add> job.Setenv("pull", "1")
<add> }
<ide> job.Stdin.Add(r.Body)
<ide> job.Setenv("remote", r.FormValue("remote"))
<ide> job.Setenv("t", r.FormValue("t"))
<ide><path>builder/dispatchers.go
<ide> func from(b *Builder, args []string, attributes map[string]bool, original string
<ide> name := args[0]
<ide>
<ide> image, err := b.Daemon.Repositories().LookupImage(name)
<add> if b.Pull {
<add> image, err = b.pullImage(name)
<add> if err != nil {
<add> return err
<add> }
<add> }
<ide> if err != nil {
<ide> if b.Daemon.Graph().IsNotExist(err) {
<ide> image, err = b.pullImage(name)
<ide><path>builder/evaluator.go
<ide> type Builder struct {
<ide> // controls how images and containers are handled between steps.
<ide> Remove bool
<ide> ForceRemove bool
<add> Pull bool
<ide>
<ide> AuthConfig *registry.AuthConfig
<ide> AuthConfigFile *registry.ConfigFile
<ide><path>builder/job.go
<ide> func (b *BuilderJob) CmdBuild(job *engine.Job) engine.Status {
<ide> noCache = job.GetenvBool("nocache")
<ide> rm = job.GetenvBool("rm")
<ide> forceRm = job.GetenvBool("forcerm")
<add> pull = job.GetenvBool("pull")
<ide> authConfig = &registry.AuthConfig{}
<ide> configFile = &registry.ConfigFile{}
<ide> tag string
<ide> func (b *BuilderJob) CmdBuild(job *engine.Job) engine.Status {
<ide> UtilizeCache: !noCache,
<ide> Remove: rm,
<ide> ForceRemove: forceRm,
<add> Pull: pull,
<ide> OutOld: job.Stdout,
<ide> StreamFormatter: sf,
<ide> AuthConfig: authConfig,
<ide><path>docs/sources/reference/api/docker_remote_api_v1.16.md
<ide> Query Parameters:
<ide> the resulting image in case of success
<ide> - **q** – suppress verbose build output
<ide> - **nocache** – do not use the cache when building the image
<add>- **pull** - attempt to pull the image even if an older image exists locally
<ide> - **rm** - remove intermediate containers after a successful build (default behavior)
<ide> - **forcerm** - always remove intermediate containers (includes rm)
<ide>
<ide><path>docs/sources/reference/commandline/cli.md
<ide> To kill the container, use `docker kill`.
<ide>
<ide> --force-rm=false Always remove intermediate containers, even after unsuccessful builds
<ide> --no-cache=false Do not use cache when building the image
<add> --pull=false Always attempt to pull a newer version of the image
<ide> -q, --quiet=false Suppress the verbose output generated by the containers
<ide> --rm=true Remove intermediate containers after a successful build
<ide> -t, --tag="" Repository name (and optionally a tag) to be applied to the resulting image in case of success
| 7
|
Python
|
Python
|
remove outdated localization to random.shuffle
|
2707314760923f9f5996ed9f36aa3bf64ecc2bbf
|
<ide><path>celery/concurrency/asynpool.py
<ide>
<ide> import errno
<ide> import os
<del>import random
<ide> import select
<ide> import socket
<ide> import struct
<ide> def on_inqueue_close(fd, proc):
<ide> pass
<ide> self.on_inqueue_close = on_inqueue_close
<ide>
<del> def schedule_writes(ready_fds, shuffle=random.shuffle, curindex=[0]):
<add> def schedule_writes(ready_fds, curindex=[0]):
<ide> # Schedule write operation to ready file descriptor.
<ide> # The file descriptor is writeable, but that does not
<ide> # mean the process is currently reading from the socket.
| 1
|
Text
|
Text
|
remove confusing reference in governance doc
|
15b46422645d1ed4e40bfa78930690f2b4f4010d
|
<ide><path>GOVERNANCE.md
<ide> Collaborators. All pull requests must be reviewed and accepted by a
<ide> Collaborator with sufficient expertise who is able to take full
<ide> responsibility for the change. In the case of pull requests proposed
<ide> by an existing Collaborator, an additional Collaborator is required
<del>for sign-off. Consensus should be sought if additional Collaborators
<del>participate and there is disagreement around a particular
<del>modification. See [Consensus Seeking Process](#consensus-seeking-process) below
<del>for further detail on the consensus model used for governance.
<add>for sign-off.
<add>
<add>If one or more Collaborators oppose a proposed change, then the change can not
<add>be accepted unless:
<add>
<add>* Discussions and/or additional changes result in no Collaborators objecting to
<add> the change. Previously-objecting Collaborators do not necessarily have to
<add> sign-off on the change, but they should not be opposed to it.
<add>* The change is escalated to the CTC and the CTC votes to approve the change.
<add> This should be used only after other options (especially discussion among
<add> the disagreeing Collaborators) have been exhausted.
<ide>
<ide> Collaborators may opt to elevate significant or controversial modifications to
<ide> the CTC by assigning the `ctc-review` label to a pull request or issue. The
| 1
|
Text
|
Text
|
simplify node versions to "0.10.x or above"
|
e0ab4b33f5c4116b423a46103922eb02b2184dc1
|
<ide><path>docs/build-instructions/linux.md
<ide> Ubuntu LTS 12.04 64-bit is the recommended platform.
<ide> * OS with 64-bit or 32-bit architecture
<ide> * C++ toolchain
<ide> * [Git](http://git-scm.com/)
<del> * [node.js](http://nodejs.org/download/) (0.10.x or 0.12.x) or [io.js](https://iojs.org) (1.x or 2.x)
<del> * [npm](https://www.npmjs.com/) v1.4.x (bundled with Node.js)
<add> * [node.js](http://nodejs.org/download/) (0.10.x or above)
<add> * [npm](https://www.npmjs.com/) v1.4.x or above (automatically bundled with Node.js)
<ide> * `npm -v` to check the version.
<ide> * `npm config set python /usr/bin/python2 -g` to ensure that gyp uses python2.
<ide> * You might need to run this command as `sudo`, depending on how you have set up [npm](https://github.com/joyent/node/wiki/Installing-Node.js-via-package-manager#ubuntu-mint-elementary-os).
<ide> Ubuntu LTS 12.04 64-bit is the recommended platform.
<ide> ### Ubuntu / Debian
<ide>
<ide> * `sudo apt-get install build-essential git libgnome-keyring-dev fakeroot`
<del>* Instructions for [Node.js](https://github.com/nodejs/node-v0.x-archive/wiki/Installing-Node.js-via-package-manager#debian-and-ubuntu-based-linux-distributions).
<add>* Instructions for [node.js](https://github.com/nodejs/node-v0.x-archive/wiki/Installing-Node.js-via-package-manager#debian-and-ubuntu-based-linux-distributions).
<ide> * Make sure the command `node` is available after Node.js installation (some systems install it as `nodejs`).
<ide> * Use `which node` to check if it is available.
<ide> * Use `sudo update-alternatives --install /usr/bin/node node /usr/bin/nodejs 10` to update it.
<ide>
<ide> ### Fedora / CentOS / RHEL
<ide>
<ide> * `sudo dnf --assumeyes install make gcc gcc-c++ glibc-devel git-core libgnome-keyring-devel rpmdevtools`
<del>* Instructions for [Node.js](https://github.com/nodejs/node-v0.x-archive/wiki/Installing-Node.js-via-package-manager#enterprise-linux-and-fedora).
<add>* Instructions for [node.js](https://github.com/nodejs/node-v0.x-archive/wiki/Installing-Node.js-via-package-manager#enterprise-linux-and-fedora).
<ide>
<ide> ### Arch
<ide>
<ide><path>docs/build-instructions/os-x.md
<ide> ## Requirements
<ide>
<ide> * OS X 10.8 or later
<del> * [node.js](http://nodejs.org/download/) (0.10.x or 0.12.x) or [io.js](https://iojs.org) (1.x or 2.x)
<add> * [node.js](http://nodejs.org/download/) (0.10.x or above)
<ide> * Command Line Tools for [Xcode](https://developer.apple.com/xcode/downloads/) (run `xcode-select --install` to install)
<ide>
<ide> ## Instructions
<ide><path>docs/build-instructions/windows.md
<ide> ### On Windows 7
<ide> * [Visual C++ 2010 Express](http://www.visualstudio.com/en-us/downloads/download-visual-studio-vs#DownloadFamilies_4)
<ide> * [Visual Studio 2010 Service Pack 1](http://www.microsoft.com/en-us/download/details.aspx?id=23691)
<del> * [node.js](http://nodejs.org/download/) (0.10.x or 0.12.x) or [io.js](https://iojs.org) (1.x or 2.x)
<add> * [node.js](http://nodejs.org/download/) (0.10.x or above)
<ide> * For 64-bit builds of node and native modules you **must** have the
<ide> [Windows 7 64-bit SDK](http://www.microsoft.com/en-us/download/details.aspx?id=8279).
<ide> You may also need the [compiler update for the Windows SDK 7.1](http://www.microsoft.com/en-us/download/details.aspx?id=4422)
<ide> * [Visual Studio Express 2013 or 2015 for Windows Desktop](http://www.visualstudio.com/en-us/downloads/download-visual-studio-vs#DownloadFamilies_2)
<ide> * For VS 2015, be sure to customize the installation to include Visual C++. It's not installed by default.
<ide> * Some have experienced issues with Node locating C++ on VS 2015. If so, try VS 2013.
<del> * [node.js](http://nodejs.org/download/) (0.10.x, 0.12.x or 4.x) or [io.js](https://iojs.org) (1.x or 2.x)
<add> * [node.js](http://nodejs.org/download/) (0.10.x or above)
<ide> * [Python](https://www.python.org/downloads/) v2.7.x (required by [node-gyp](https://github.com/TooTallNate/node-gyp))
<ide> * [GitHub Desktop](http://desktop.github.com/)
<ide>
| 3
|
Ruby
|
Ruby
|
allow writing to certain var directories
|
76a5ba6ae598ea0c6c204f3a30bc36dd3a423d67
|
<ide><path>Library/Homebrew/cmd/test.rb
<ide> def test
<ide> sandbox.allow_write_temp_and_cache
<ide> sandbox.allow_write_log(f)
<ide> sandbox.allow_write_xcode
<add> sandbox.allow_write_path(HOMEBREW_PREFIX/"var/cache")
<add> sandbox.allow_write_path(HOMEBREW_PREFIX/"var/log")
<add> sandbox.allow_write_path(HOMEBREW_PREFIX/"var/run")
<ide> sandbox.exec(*args)
<ide> else
<ide> exec(*args)
| 1
|
Python
|
Python
|
fix tflongformer int dtype
|
c126a239bcea9c68453cf86045a5177afbe2be6c
|
<ide><path>src/transformers/models/led/modeling_tf_led.py
<ide> def _sliding_chunks_query_key_matmul(self, query, key, window_overlap):
<ide> )
<ide> first_chunk_mask = (
<ide> tf.tile(
<del> tf.range(chunks_count + 1)[None, :, None, None],
<add> tf.range(chunks_count + 1, dtype=tf.int64)[None, :, None, None],
<ide> (batch_size * num_heads, 1, window_overlap, window_overlap),
<ide> )
<ide> < 1
<ide> class TFLEDPreTrainedModel(TFPreTrainedModel):
<ide>
<ide> @property
<ide> def dummy_inputs(self):
<del> input_ids = tf.convert_to_tensor([[7, 6, 0, 0, 1], [1, 2, 3, 0, 0]])
<add> input_ids = tf.convert_to_tensor([[7, 6, 0, 0, 1], [1, 2, 3, 0, 0]], dtype=tf.int64)
<ide> # make sure global layers are initialized
<del> attention_mask = tf.convert_to_tensor([[1, 1, 0, 0, 1], [1, 1, 1, 0, 0]])
<del> global_attention_mask = tf.convert_to_tensor([[0, 0, 0, 0, 1], [0, 0, 1, 0, 0]])
<add> attention_mask = tf.convert_to_tensor([[1, 1, 0, 0, 1], [1, 1, 1, 0, 0]], dtype=tf.int64)
<add> global_attention_mask = tf.convert_to_tensor([[0, 0, 0, 0, 1], [0, 0, 1, 0, 0]], dtype=tf.int64)
<ide> dummy_inputs = {
<ide> "input_ids": input_ids,
<ide> "attention_mask": attention_mask,
<ide> def dummy_inputs(self):
<ide> @tf.function(
<ide> input_signature=[
<ide> {
<del> "input_ids": tf.TensorSpec((None, None), tf.int32, name="input_ids"),
<del> "attention_mask": tf.TensorSpec((None, None), tf.int32, name="attention_mask"),
<del> "decoder_input_ids": tf.TensorSpec((None, None), tf.int32, name="decoder_input_ids"),
<del> "decoder_attention_mask": tf.TensorSpec((None, None), tf.int32, name="decoder_attention_mask"),
<add> "input_ids": tf.TensorSpec((None, None), tf.int64, name="input_ids"),
<add> "attention_mask": tf.TensorSpec((None, None), tf.int64, name="attention_mask"),
<add> "decoder_input_ids": tf.TensorSpec((None, None), tf.int64, name="decoder_input_ids"),
<add> "decoder_attention_mask": tf.TensorSpec((None, None), tf.int64, name="decoder_attention_mask"),
<ide> }
<ide> ]
<ide> )
<ide><path>src/transformers/models/longformer/modeling_tf_longformer.py
<ide> def _compute_global_attention_mask(input_ids_shape, sep_token_indices, before_se
<ide> Computes global attention mask by putting attention on all tokens before `sep_token_id` if `before_sep_token is
<ide> True` else after `sep_token_id`.
<ide> """
<del>
<ide> assert shape_list(sep_token_indices)[1] == 2, "`input_ids` should have two dimensions"
<ide> question_end_index = tf.reshape(sep_token_indices, (input_ids_shape[0], 3, 2))[:, 0, 1][:, None]
<ide> # bool attention mask with True in locations of global attention
<del> attention_mask = tf.expand_dims(tf.range(input_ids_shape[1]), axis=0)
<add> attention_mask = tf.expand_dims(tf.range(input_ids_shape[1], dtype=tf.int64), axis=0)
<ide> attention_mask = tf.tile(attention_mask, (input_ids_shape[0], 1))
<ide> if before_sep_token is True:
<ide> question_end_index = tf.tile(question_end_index, (1, input_ids_shape[1]))
<ide> def call(self, hidden_states):
<ide> return hidden_states
<ide>
<ide>
<del># Copied from transformers.models.roberta.modeling_tf_roberta.TFRobertaEmbeddings with Roberta->Longformer
<ide> class TFLongformerEmbeddings(tf.keras.layers.Layer):
<ide> """
<del> Same as BertEmbeddings with a tiny tweak for positional embeddings indexing.
<add> Same as BertEmbeddings with a tiny tweak for positional embeddings indexing and some extra casting.
<ide> """
<ide>
<ide> def __init__(self, config, **kwargs):
<ide> def call(
<ide> input_shape = shape_list(inputs_embeds)[:-1]
<ide>
<ide> if token_type_ids is None:
<del> token_type_ids = tf.fill(dims=input_shape, value=0)
<add> token_type_ids = tf.cast(tf.fill(dims=input_shape, value=0), tf.int64)
<ide>
<ide> if position_ids is None:
<ide> if input_ids is not None:
<ide> def call(
<ide> )
<ide> else:
<ide> position_ids = tf.expand_dims(
<del> tf.range(start=self.padding_idx + 1, limit=input_shape[-1] + self.padding_idx + 1), axis=0
<add> tf.range(start=self.padding_idx + 1, limit=input_shape[-1] + self.padding_idx + 1, dtype=tf.int64),
<add> axis=0,
<ide> )
<ide>
<ide> position_embeds = tf.gather(params=self.position_embeddings, indices=position_ids)
<ide> def _sliding_chunks_query_key_matmul(self, query, key, window_overlap):
<ide> )
<ide> first_chunk_mask = (
<ide> tf.tile(
<del> tf.range(chunks_count + 1)[None, :, None, None],
<add> tf.range(chunks_count + 1, dtype=tf.int64)[None, :, None, None],
<ide> (batch_size * num_heads, 1, window_overlap, window_overlap),
<ide> )
<ide> < 1
<ide> def call(
<ide> training=False,
<ide> ):
<ide>
<add> if input_ids is not None and not isinstance(input_ids, tf.Tensor):
<add> input_ids = tf.convert_to_tensor(input_ids, dtype=tf.int64)
<add> elif input_ids is not None:
<add> input_ids = tf.cast(input_ids, tf.int64)
<add>
<add> if attention_mask is not None and not isinstance(attention_mask, tf.Tensor):
<add> attention_mask = tf.convert_to_tensor(attention_mask, dtype=tf.int64)
<add> elif attention_mask is not None:
<add> attention_mask = tf.cast(attention_mask, tf.int64)
<add>
<add> if global_attention_mask is not None and not isinstance(global_attention_mask, tf.Tensor):
<add> global_attention_mask = tf.convert_to_tensor(global_attention_mask, dtype=tf.int64)
<add> elif global_attention_mask is not None:
<add> global_attention_mask = tf.cast(global_attention_mask, tf.int64)
<add>
<ide> if input_ids is not None and inputs_embeds is not None:
<ide> raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
<ide> elif input_ids is not None:
<ide> def call(
<ide> raise ValueError("You have to specify either input_ids or inputs_embeds")
<ide>
<ide> if attention_mask is None:
<del> attention_mask = tf.fill(input_shape, 1)
<add> attention_mask = tf.cast(tf.fill(input_shape, 1), tf.int64)
<ide>
<ide> if token_type_ids is None:
<del> token_type_ids = tf.fill(input_shape, 0)
<add> token_type_ids = tf.cast(tf.fill(input_shape, 0), tf.int64)
<ide>
<ide> # merge `global_attention_mask` and `attention_mask`
<ide> if global_attention_mask is not None:
<ide> def _pad_to_window_size(
<ide> if inputs_embeds is not None:
<ide>
<ide> def pad_embeddings():
<del> input_ids_padding = tf.fill((batch_size, padding_len), self.pad_token_id)
<add> input_ids_padding = tf.cast(tf.fill((batch_size, padding_len), self.pad_token_id), tf.int64)
<ide> inputs_embeds_padding = self.embeddings(input_ids_padding)
<ide> return tf.concat([inputs_embeds, inputs_embeds_padding], axis=-2)
<ide>
<ide> class TFLongformerPreTrainedModel(TFPreTrainedModel):
<ide>
<ide> @property
<ide> def dummy_inputs(self):
<del> input_ids = tf.convert_to_tensor([[7, 6, 0, 0, 1], [1, 2, 3, 0, 0], [0, 0, 0, 4, 5]])
<add> input_ids = tf.convert_to_tensor([[7, 6, 0, 0, 1], [1, 2, 3, 0, 0], [0, 0, 0, 4, 5]], dtype=tf.int64)
<ide> # make sure global layers are initialized
<del> attention_mask = tf.convert_to_tensor([[1, 1, 0, 0, 1], [1, 1, 1, 0, 0], [1, 0, 0, 1, 1]])
<del> global_attention_mask = tf.convert_to_tensor([[0, 0, 0, 0, 1], [0, 0, 1, 0, 0], [0, 0, 0, 0, 1]])
<add> attention_mask = tf.convert_to_tensor([[1, 1, 0, 0, 1], [1, 1, 1, 0, 0], [1, 0, 0, 1, 1]], dtype=tf.int64)
<add> global_attention_mask = tf.convert_to_tensor(
<add> [[0, 0, 0, 0, 1], [0, 0, 1, 0, 0], [0, 0, 0, 0, 1]], dtype=tf.int64
<add> )
<add> global_attention_mask = tf.convert_to_tensor(
<add> [[0, 0, 0, 0, 1], [0, 0, 1, 0, 0], [0, 0, 0, 0, 1]], dtype=tf.int64
<add> )
<ide> return {
<ide> "input_ids": input_ids,
<ide> "attention_mask": attention_mask,
<ide> def dummy_inputs(self):
<ide> @tf.function(
<ide> input_signature=[
<ide> {
<del> "input_ids": tf.TensorSpec((None, None), tf.int32, name="input_ids"),
<del> "attention_mask": tf.TensorSpec((None, None), tf.int32, name="attention_mask"),
<add> "input_ids": tf.TensorSpec((None, None), tf.int64, name="input_ids"),
<add> "attention_mask": tf.TensorSpec((None, None), tf.int64, name="attention_mask"),
<ide> }
<ide> ]
<ide> )
<ide> def call(
<ide> are not taken into account for computing the loss.
<ide> """
<ide>
<add> if input_ids is not None and not isinstance(input_ids, tf.Tensor):
<add> input_ids = tf.convert_to_tensor(input_ids, dtype=tf.int64)
<add> elif input_ids is not None:
<add> input_ids = tf.cast(input_ids, tf.int64)
<add>
<add> if attention_mask is not None and not isinstance(attention_mask, tf.Tensor):
<add> attention_mask = tf.convert_to_tensor(attention_mask, dtype=tf.int64)
<add> elif attention_mask is not None:
<add> attention_mask = tf.cast(attention_mask, tf.int64)
<add>
<add> if global_attention_mask is not None and not isinstance(global_attention_mask, tf.Tensor):
<add> global_attention_mask = tf.convert_to_tensor(global_attention_mask, dtype=tf.int64)
<add> elif global_attention_mask is not None:
<add> global_attention_mask = tf.cast(global_attention_mask, tf.int64)
<add>
<ide> # set global attention on question tokens
<ide> if global_attention_mask is None and input_ids is not None:
<ide> if shape_list(tf.where(input_ids == self.config.sep_token_id))[0] != 3 * shape_list(input_ids)[0]:
<ide> def call(
<ide> " forward function to avoid this. This is most likely an error. The global attention is disabled"
<ide> " for this forward pass."
<ide> )
<del> global_attention_mask = tf.fill(shape_list(input_ids), value=0)
<add> global_attention_mask = tf.cast(tf.fill(shape_list(input_ids), value=0), tf.int64)
<ide> else:
<ide> logger.info("Initializing global attention on question tokens...")
<ide> # put global attention on all tokens until `config.sep_token_id` is reached
<ide> sep_token_indices = tf.where(input_ids == self.config.sep_token_id)
<del> sep_token_indices = tf.cast(sep_token_indices, dtype=input_ids.dtype)
<add> sep_token_indices = tf.cast(sep_token_indices, dtype=tf.int64)
<ide> global_attention_mask = _compute_global_attention_mask(shape_list(input_ids), sep_token_indices)
<ide>
<ide> outputs = self.longformer(
<ide> def call(
<ide> training: Optional[bool] = False,
<ide> ) -> Union[TFLongformerSequenceClassifierOutput, Tuple[tf.Tensor]]:
<ide>
<add> if input_ids is not None and not isinstance(input_ids, tf.Tensor):
<add> input_ids = tf.convert_to_tensor(input_ids, dtype=tf.int64)
<add> elif input_ids is not None:
<add> input_ids = tf.cast(input_ids, tf.int64)
<add>
<add> if attention_mask is not None and not isinstance(attention_mask, tf.Tensor):
<add> attention_mask = tf.convert_to_tensor(attention_mask, dtype=tf.int64)
<add> elif attention_mask is not None:
<add> attention_mask = tf.cast(attention_mask, tf.int64)
<add>
<add> if global_attention_mask is not None and not isinstance(global_attention_mask, tf.Tensor):
<add> global_attention_mask = tf.convert_to_tensor(global_attention_mask, dtype=tf.int64)
<add> elif global_attention_mask is not None:
<add> global_attention_mask = tf.cast(global_attention_mask, tf.int64)
<add>
<ide> if global_attention_mask is None and input_ids is not None:
<ide> logger.info("Initializing global attention on CLS token...")
<ide> # global attention on cls token
<ide> global_attention_mask = tf.zeros_like(input_ids)
<del> updates = tf.ones(shape_list(input_ids)[0], dtype=tf.int32)
<add> updates = tf.ones(shape_list(input_ids)[0], dtype=tf.int64)
<ide> indices = tf.pad(
<del> tensor=tf.expand_dims(tf.range(shape_list(input_ids)[0]), axis=1),
<add> tensor=tf.expand_dims(tf.range(shape_list(input_ids)[0], dtype=tf.int64), axis=1),
<ide> paddings=[[0, 0], [0, 1]],
<ide> constant_values=0,
<ide> )
<ide> def __init__(self, config, *inputs, **kwargs):
<ide>
<ide> @property
<ide> def dummy_inputs(self):
<del> input_ids = tf.convert_to_tensor(MULTIPLE_CHOICE_DUMMY_INPUTS)
<add> input_ids = tf.convert_to_tensor(MULTIPLE_CHOICE_DUMMY_INPUTS, dtype=tf.int64)
<ide> # make sure global layers are initialized
<del> global_attention_mask = tf.convert_to_tensor([[[0, 0, 0, 1], [0, 0, 0, 1]]] * 2)
<add> global_attention_mask = tf.convert_to_tensor([[[0, 0, 0, 1], [0, 0, 0, 1]]] * 2, dtype=tf.int64)
<ide> return {"input_ids": input_ids, "global_attention_mask": global_attention_mask}
<ide>
<ide> @unpack_inputs
<ide> def call(
<ide> @tf.function(
<ide> input_signature=[
<ide> {
<del> "input_ids": tf.TensorSpec((None, None, None), tf.int32, name="input_ids"),
<del> "attention_mask": tf.TensorSpec((None, None, None), tf.int32, name="attention_mask"),
<add> "input_ids": tf.TensorSpec((None, None, None), tf.int64, name="input_ids"),
<add> "attention_mask": tf.TensorSpec((None, None, None), tf.int64, name="attention_mask"),
<ide> }
<ide> ]
<ide> )
<ide><path>tests/models/led/test_modeling_tf_led.py
<ide> def test_inference_no_head(self):
<ide> expected_slice = tf.convert_to_tensor(
<ide> [[2.3050, 2.8279, 0.6531], [-1.8457, -0.1455, -3.5661], [-1.0186, 0.4586, -2.2043]],
<ide> )
<del> tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=TOLERANCE)
<add> tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=1e-3)
<ide>
<ide> def test_inference_with_head(self):
<ide> model = TFLEDForConditionalGeneration.from_pretrained("allenai/led-base-16384")
<ide> def test_inference_with_head(self):
<ide> expected_slice = tf.convert_to_tensor(
<ide> [[33.6507, 6.4572, 16.8089], [5.8739, -2.4238, 11.2902], [-3.2139, -4.3149, 4.2783]],
<ide> )
<del> tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=TOLERANCE)
<add> tf.debugging.assert_near(output[:, :3, :3], expected_slice, atol=1e-3, rtol=1e-3)
<ide><path>tests/models/longformer/test_modeling_tf_longformer.py
<ide> def create_and_check_attention_mask_determinism(
<ide> ):
<ide> model = TFLongformerModel(config=config)
<ide>
<del> attention_mask = tf.ones(input_ids.shape, dtype=tf.dtypes.int32)
<add> attention_mask = tf.ones(input_ids.shape, dtype=tf.int64)
<ide> output_with_mask = model(input_ids, attention_mask=attention_mask)[0]
<ide> output_without_mask = model(input_ids)[0]
<ide> tf.debugging.assert_near(output_with_mask[0, 0, :5], output_without_mask[0, 0, :5], rtol=1e-4)
<ide> def test_diagonalize(self):
<ide>
<ide> # first row => [0.4983, 2.6918, -0.0071, 1.0492, 0.0000, 0.0000, 0.0000]
<ide> tf.debugging.assert_near(padded_hidden_states[0, 0, 0, :4], chunked_hidden_states[0, 0, 0], rtol=1e-3)
<del> tf.debugging.assert_near(padded_hidden_states[0, 0, 0, 4:], tf.zeros((3,), dtype=tf.dtypes.float32), rtol=1e-3)
<add> tf.debugging.assert_near(padded_hidden_states[0, 0, 0, 4:], tf.zeros((3,), dtype=tf.float32), rtol=1e-3)
<ide>
<ide> # last row => [0.0000, 0.0000, 0.0000, 2.0514, -1.1600, 0.5372, 0.2629]
<ide> tf.debugging.assert_near(padded_hidden_states[0, 0, -1, 3:], chunked_hidden_states[0, 0, -1], rtol=1e-3)
<del> tf.debugging.assert_near(
<del> padded_hidden_states[0, 0, -1, :3], tf.zeros((3,), dtype=tf.dtypes.float32), rtol=1e-3
<del> )
<add> tf.debugging.assert_near(padded_hidden_states[0, 0, -1, :3], tf.zeros((3,), dtype=tf.float32), rtol=1e-3)
<ide>
<ide> def test_pad_and_transpose_last_two_dims(self):
<ide> hidden_states = self._get_hidden_states()
<ide> self.assertEqual(shape_list(hidden_states), [1, 4, 8])
<ide>
<ide> # pad along seq length dim
<del> paddings = tf.constant([[0, 0], [0, 0], [0, 1], [0, 0]], dtype=tf.dtypes.int32)
<add> paddings = tf.constant([[0, 0], [0, 0], [0, 1], [0, 0]], dtype=tf.int64)
<ide>
<ide> hidden_states = TFLongformerSelfAttention._chunk(hidden_states, window_overlap=2)
<ide> padded_hidden_states = TFLongformerSelfAttention._pad_and_transpose_last_two_dims(hidden_states, paddings)
<ide> self.assertTrue(shape_list(padded_hidden_states) == [1, 1, 8, 5])
<ide>
<del> expected_added_dim = tf.zeros((5,), dtype=tf.dtypes.float32)
<add> expected_added_dim = tf.zeros((5,), dtype=tf.float32)
<ide> tf.debugging.assert_near(expected_added_dim, padded_hidden_states[0, 0, -1, :], rtol=1e-6)
<ide> tf.debugging.assert_near(
<ide> hidden_states[0, 0, -1, :], tf.reshape(padded_hidden_states, (1, -1))[0, 24:32], rtol=1e-6
<ide> def test_mask_invalid_locations(self):
<ide> hid_states_3 = TFLongformerSelfAttention._mask_invalid_locations(hidden_states[:, :, :, :3], 2)
<ide> hid_states_4 = TFLongformerSelfAttention._mask_invalid_locations(hidden_states[:, :, 2:, :], 2)
<ide>
<del> self.assertTrue(tf.math.reduce_sum(tf.cast(tf.math.is_inf(hid_states_1), tf.dtypes.int32)) == 8)
<del> self.assertTrue(tf.math.reduce_sum(tf.cast(tf.math.is_inf(hid_states_2), tf.dtypes.int32)) == 24)
<del> self.assertTrue(tf.math.reduce_sum(tf.cast(tf.math.is_inf(hid_states_3), tf.dtypes.int32)) == 24)
<del> self.assertTrue(tf.math.reduce_sum(tf.cast(tf.math.is_inf(hid_states_4), tf.dtypes.int32)) == 12)
<add> self.assertTrue(tf.math.reduce_sum(tf.cast(tf.math.is_inf(hid_states_1), tf.int64)) == 8)
<add> self.assertTrue(tf.math.reduce_sum(tf.cast(tf.math.is_inf(hid_states_2), tf.int64)) == 24)
<add> self.assertTrue(tf.math.reduce_sum(tf.cast(tf.math.is_inf(hid_states_3), tf.int64)) == 24)
<add> self.assertTrue(tf.math.reduce_sum(tf.cast(tf.math.is_inf(hid_states_4), tf.int64)) == 12)
<ide>
<ide> def test_chunk(self):
<ide> hidden_states = self._get_hidden_states()
<ide> def test_chunk(self):
<ide> chunked_hidden_states = TFLongformerSelfAttention._chunk(hidden_states, window_overlap=2)
<ide>
<ide> # expected slices across chunk and seq length dim
<del> expected_slice_along_seq_length = tf.convert_to_tensor([0.4983, -0.7584, -1.6944], dtype=tf.dtypes.float32)
<del> expected_slice_along_chunk = tf.convert_to_tensor([0.4983, -1.8348, -0.7584, 2.0514], dtype=tf.dtypes.float32)
<add> expected_slice_along_seq_length = tf.convert_to_tensor([0.4983, -0.7584, -1.6944], dtype=tf.float32)
<add> expected_slice_along_chunk = tf.convert_to_tensor([0.4983, -1.8348, -0.7584, 2.0514], dtype=tf.float32)
<ide>
<ide> self.assertTrue(shape_list(chunked_hidden_states) == [1, 3, 4, 4])
<del> tf.debugging.assert_near(chunked_hidden_states[0, :, 0, 0], expected_slice_along_seq_length, rtol=1e-3)
<del> tf.debugging.assert_near(chunked_hidden_states[0, 0, :, 0], expected_slice_along_chunk, rtol=1e-3)
<add> tf.debugging.assert_near(
<add> chunked_hidden_states[0, :, 0, 0], expected_slice_along_seq_length, rtol=1e-3, atol=1e-4
<add> )
<add> tf.debugging.assert_near(chunked_hidden_states[0, 0, :, 0], expected_slice_along_chunk, rtol=1e-3, atol=1e-4)
<ide>
<ide> def test_layer_local_attn(self):
<ide> model = TFLongformerModel.from_pretrained("patrickvonplaten/longformer-random-tiny")
<ide> layer = model.longformer.encoder.layer[0].attention.self_attention
<ide> hidden_states = self._get_hidden_states()
<ide> batch_size, seq_length, hidden_size = hidden_states.shape
<ide>
<del> attention_mask = tf.zeros((batch_size, seq_length), dtype=tf.dtypes.float32)
<add> attention_mask = tf.zeros((batch_size, seq_length), dtype=tf.float32)
<ide> is_index_global_attn = tf.math.greater(attention_mask, 1)
<ide> is_global_attn = tf.math.reduce_any(is_index_global_attn)
<ide>
<ide> def test_layer_local_attn(self):
<ide> )[0]
<ide>
<ide> expected_slice = tf.convert_to_tensor(
<del> [0.00188, 0.012196, -0.017051, -0.025571, -0.02996, 0.017297, -0.011521, 0.004848], dtype=tf.dtypes.float32
<add> [0.00188, 0.012196, -0.017051, -0.025571, -0.02996, 0.017297, -0.011521, 0.004848], dtype=tf.float32
<ide> )
<ide>
<ide> self.assertEqual(output_hidden_states.shape, (1, 4, 8))
<del> tf.debugging.assert_near(output_hidden_states[0, 1], expected_slice, rtol=1e-3)
<add> tf.debugging.assert_near(output_hidden_states[0, 1], expected_slice, rtol=1e-3, atol=1e-4)
<ide>
<ide> def test_layer_global_attn(self):
<ide> model = TFLongformerModel.from_pretrained("patrickvonplaten/longformer-random-tiny")
<ide> def test_layer_global_attn(self):
<ide> batch_size, seq_length, hidden_size = hidden_states.shape
<ide>
<ide> # create attn mask
<del> attention_mask_1 = tf.zeros((1, 1, 1, seq_length), dtype=tf.dtypes.float32)
<del> attention_mask_2 = tf.zeros((1, 1, 1, seq_length), dtype=tf.dtypes.float32)
<add> attention_mask_1 = tf.zeros((1, 1, 1, seq_length), dtype=tf.float32)
<add> attention_mask_2 = tf.zeros((1, 1, 1, seq_length), dtype=tf.float32)
<ide>
<ide> attention_mask_1 = tf.where(tf.range(4)[None, :, None, None] > 1, 10000.0, attention_mask_1)
<ide> attention_mask_1 = tf.where(tf.range(4)[None, :, None, None] > 2, -10000.0, attention_mask_1)
<ide> def test_layer_global_attn(self):
<ide>
<ide> self.assertEqual(output_hidden_states.shape, (2, 4, 8))
<ide> expected_slice_0 = tf.convert_to_tensor(
<del> [-0.06508, -0.039306, 0.030934, -0.03417, -0.00656, -0.01553, -0.02088, -0.04938], dtype=tf.dtypes.float32
<add> [-0.06508, -0.039306, 0.030934, -0.03417, -0.00656, -0.01553, -0.02088, -0.04938], dtype=tf.float32
<ide> )
<ide>
<ide> expected_slice_1 = tf.convert_to_tensor(
<del> [-0.04055, -0.038399, 0.0396, -0.03735, -0.03415, 0.01357, 0.00145, -0.05709], dtype=tf.dtypes.float32
<add> [-0.04055, -0.038399, 0.0396, -0.03735, -0.03415, 0.01357, 0.00145, -0.05709], dtype=tf.float32
<ide> )
<ide>
<del> tf.debugging.assert_near(output_hidden_states[0, 2], expected_slice_0, rtol=1e-3)
<del> tf.debugging.assert_near(output_hidden_states[1, -2], expected_slice_1, rtol=1e-3)
<add> tf.debugging.assert_near(output_hidden_states[0, 2], expected_slice_0, rtol=1e-3, atol=1e-4)
<add> tf.debugging.assert_near(output_hidden_states[1, -2], expected_slice_1, rtol=1e-3, atol=1e-4)
<ide>
<ide> def test_layer_attn_probs(self):
<ide> model = TFLongformerModel.from_pretrained("patrickvonplaten/longformer-random-tiny")
<ide> def test_layer_attn_probs(self):
<ide> batch_size, seq_length, hidden_size = hidden_states.shape
<ide>
<ide> # create attn mask
<del> attention_mask_1 = tf.zeros((1, 1, 1, seq_length), dtype=tf.dtypes.float32)
<del> attention_mask_2 = tf.zeros((1, 1, 1, seq_length), dtype=tf.dtypes.float32)
<add> attention_mask_1 = tf.zeros((1, 1, 1, seq_length), dtype=tf.float32)
<add> attention_mask_2 = tf.zeros((1, 1, 1, seq_length), dtype=tf.float32)
<ide>
<ide> attention_mask_1 = tf.where(tf.range(4)[None, :, None, None] > 1, 10000.0, attention_mask_1)
<ide> attention_mask_1 = tf.where(tf.range(4)[None, :, None, None] > 2, -10000.0, attention_mask_1)
<ide> def test_layer_attn_probs(self):
<ide>
<ide> tf.debugging.assert_near(
<ide> local_attentions[0, 0, 0, :],
<del> tf.convert_to_tensor(
<del> [0.3328, 0.0000, 0.0000, 0.0000, 0.0000, 0.3355, 0.3318, 0.0000], dtype=tf.dtypes.float32
<del> ),
<add> tf.convert_to_tensor([0.3328, 0.0000, 0.0000, 0.0000, 0.0000, 0.3355, 0.3318, 0.0000], dtype=tf.float32),
<ide> rtol=1e-3,
<add> atol=1e-4,
<ide> )
<ide>
<ide> tf.debugging.assert_near(
<ide> local_attentions[1, 0, 0, :],
<del> tf.convert_to_tensor(
<del> [0.2492, 0.2502, 0.2502, 0.0000, 0.0000, 0.2505, 0.0000, 0.0000], dtype=tf.dtypes.float32
<del> ),
<add> tf.convert_to_tensor([0.2492, 0.2502, 0.2502, 0.0000, 0.0000, 0.2505, 0.0000, 0.0000], dtype=tf.float32),
<ide> rtol=1e-3,
<add> atol=1e-4,
<ide> )
<ide>
<ide> # All the global attention weights must sum to 1.
<ide> self.assertTrue((tf.math.abs(tf.math.reduce_sum(global_attentions, axis=-1) - 1) < 1e-6).numpy().tolist())
<ide>
<ide> tf.debugging.assert_near(
<ide> global_attentions[0, 0, 1, :],
<del> tf.convert_to_tensor([0.2500, 0.2500, 0.2500, 0.2500], dtype=tf.dtypes.float32),
<add> tf.convert_to_tensor([0.2500, 0.2500, 0.2500, 0.2500], dtype=tf.float32),
<ide> rtol=1e-3,
<add> atol=1e-4,
<ide> )
<ide> tf.debugging.assert_near(
<ide> global_attentions[1, 0, 0, :],
<del> tf.convert_to_tensor([0.2497, 0.2500, 0.2499, 0.2504], dtype=tf.dtypes.float32),
<add> tf.convert_to_tensor([0.2497, 0.2500, 0.2499, 0.2504], dtype=tf.float32),
<ide> rtol=1e-3,
<add> atol=1e-4,
<ide> )
<ide>
<ide> @slow
<ide> def test_inference_no_head(self):
<ide> model = TFLongformerModel.from_pretrained("allenai/longformer-base-4096")
<ide>
<ide> # 'Hello world!'
<del> input_ids = tf.convert_to_tensor([[0, 20920, 232, 328, 1437, 2]], dtype=tf.dtypes.int32)
<del> attention_mask = tf.ones(shape_list(input_ids), dtype=tf.dtypes.int32)
<add> input_ids = tf.convert_to_tensor([[0, 20920, 232, 328, 1437, 2]], dtype=tf.int64)
<add> attention_mask = tf.ones(shape_list(input_ids), dtype=tf.int64)
<ide>
<ide> output = model(input_ids, attention_mask=attention_mask)[0]
<ide> output_without_mask = model(input_ids)[0]
<ide>
<del> expected_output_slice = tf.convert_to_tensor(
<del> [0.0549, 0.1087, -0.1119, -0.0368, 0.0250], dtype=tf.dtypes.float32
<del> )
<add> expected_output_slice = tf.convert_to_tensor([0.0549, 0.1087, -0.1119, -0.0368, 0.0250], dtype=tf.float32)
<ide>
<del> tf.debugging.assert_near(output[0, 0, -5:], expected_output_slice, rtol=1e-3)
<del> tf.debugging.assert_near(output_without_mask[0, 0, -5:], expected_output_slice, rtol=1e-3)
<add> tf.debugging.assert_near(output[0, 0, -5:], expected_output_slice, rtol=1e-3, atol=1e-4)
<add> tf.debugging.assert_near(output_without_mask[0, 0, -5:], expected_output_slice, rtol=1e-3, atol=1e-4)
<ide>
<ide> @slow
<ide> def test_inference_no_head_long(self):
<ide> model = TFLongformerModel.from_pretrained("allenai/longformer-base-4096")
<ide>
<ide> # 'Hello world! ' repeated 1000 times
<del> input_ids = tf.convert_to_tensor([[0] + [20920, 232, 328, 1437] * 1000 + [2]], dtype=tf.dtypes.int32)
<add> input_ids = tf.convert_to_tensor([[0] + [20920, 232, 328, 1437] * 1000 + [2]], dtype=tf.int64)
<ide>
<del> attention_mask = tf.ones(shape_list(input_ids), dtype=tf.dtypes.int32)
<del> global_attention_mask = tf.zeros(shape_list(input_ids), dtype=tf.dtypes.int32)
<add> attention_mask = tf.ones(shape_list(input_ids), dtype=tf.int64)
<add> global_attention_mask = tf.zeros(shape_list(input_ids), dtype=tf.int64)
<ide> # Set global attention on a few random positions
<ide> global_attention_mask = tf.tensor_scatter_nd_update(
<del> global_attention_mask, tf.constant([[0, 1], [0, 4], [0, 21]]), tf.constant([1, 1, 1])
<add> global_attention_mask,
<add> tf.constant([[0, 1], [0, 4], [0, 21]], dtype=tf.int64),
<add> tf.constant([1, 1, 1], dtype=tf.int64),
<ide> )
<ide>
<ide> output = model(input_ids, attention_mask=attention_mask, global_attention_mask=global_attention_mask)[0]
<ide> def test_inference_no_head_long(self):
<ide> expected_output_mean = tf.constant(0.024267)
<ide>
<ide> # assert close
<del> tf.debugging.assert_near(tf.reduce_sum(output), expected_output_sum, rtol=1e-4)
<del> tf.debugging.assert_near(tf.reduce_mean(output), expected_output_mean, rtol=1e-4)
<add> tf.debugging.assert_near(tf.reduce_sum(output), expected_output_sum, rtol=1e-4, atol=1e-4)
<add> tf.debugging.assert_near(tf.reduce_mean(output), expected_output_mean, rtol=1e-4, atol=1e-4)
<ide>
<ide> @slow
<ide> def test_inference_masked_lm_long(self):
<ide> model = TFLongformerForMaskedLM.from_pretrained("allenai/longformer-base-4096")
<ide>
<ide> # 'Hello world! ' repeated 1000 times
<del> input_ids = tf.convert_to_tensor([[0] + [20920, 232, 328, 1437] * 1000 + [2]], dtype=tf.dtypes.int32)
<add> input_ids = tf.convert_to_tensor([[0] + [20920, 232, 328, 1437] * 1000 + [2]], dtype=tf.int64)
<ide>
<ide> output = model(input_ids, labels=input_ids)
<ide> loss = output.loss
<ide> def test_inference_masked_lm_long(self):
<ide> expected_prediction_scores_mean = tf.constant(-3.03477)
<ide>
<ide> # assert close
<del> tf.debugging.assert_near(tf.reduce_mean(loss), expected_loss, rtol=1e-4)
<del> tf.debugging.assert_near(tf.reduce_sum(prediction_scores), expected_prediction_scores_sum, rtol=1e-4)
<del> tf.debugging.assert_near(tf.reduce_mean(prediction_scores), expected_prediction_scores_mean, rtol=1e-4)
<add> tf.debugging.assert_near(tf.reduce_mean(loss), expected_loss, rtol=1e-4, atol=1e-4)
<add> tf.debugging.assert_near(
<add> tf.reduce_sum(prediction_scores), expected_prediction_scores_sum, rtol=1e-4, atol=1e-4
<add> )
<add> tf.debugging.assert_near(
<add> tf.reduce_mean(prediction_scores), expected_prediction_scores_mean, rtol=1e-4, atol=1e-4
<add> )
<ide>
<ide> @slow
<ide> def test_inference_masked_lm(self):
| 4
|