| content_type (string, 8 classes) | main_lang (string, 7 classes) | message (string, 1-50 chars) | sha (string, 40 chars) | patch (string, 52-962k chars) | file_count (int64, 1-300) |
|---|---|---|---|---|---|
Javascript | Javascript | fix missing restore of resourceresolvedata | 877cb13806cdf0eec0bd8d668f1afd238b790047 |
<ide><path>lib/NormalModule.js
<ide> class NormalModule extends Module {
<ide> this.generator = m.generator;
<ide> this.generatorOptions = m.generatorOptions;
<ide> this.resource = m.resource;
<add> this.resourceResolveData = m.resourceResolveData;
<ide> this.context = m.context;
<ide> this.matchResource = m.matchResource;
<ide> this.loaders = m.loaders;
<ide><path>test/configCases/asset-modules/data-url/index.js
<ide> import png from "../_images/file.png";
<ide> import svg from "../_images/file.svg";
<ide> import jpg from "../_images/file.jpg";
<add>import dataSvg from "data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCA2MDAgNjAwIj48dGl0bGU+aWNvbi1zcXVhcmUtc21hbGw8L3RpdGxlPjxwYXRoIGZpbGw9IiNGRkYiIGQ9Ik0zMDAgLjFMNTY1IDE1MHYyOTkuOUwzMDAgNTk5LjggMzUgNDQ5LjlWMTUweiIvPjxwYXRoIGZpbGw9IiM4RUQ2RkIiIGQ9Ik01MTcuNyA0MzkuNUwzMDguOCA1NTcuOHYtOTJMNDM5IDM5NC4xbDc4LjcgNDUuNHptMTQuMy0xMi45VjE3OS40bC03Ni40IDQ0LjF2MTU5bDc2LjQgNDQuMXpNODEuNSA0MzkuNWwyMDguOSAxMTguMnYtOTJsLTEzMC4yLTcxLjYtNzguNyA0NS40em0tMTQuMy0xMi45VjE3OS40bDc2LjQgNDQuMXYxNTlsLTc2LjQgNDQuMXptOC45LTI2My4yTDI5MC40IDQyLjJ2ODlsLTEzNy4zIDc1LjUtMS4xLjYtNzUuOS00My45em00NDYuOSAwTDMwOC44IDQyLjJ2ODlMNDQ2IDIwNi44bDEuMS42IDc1LjktNDR6Ii8+PHBhdGggZmlsbD0iIzFDNzhDMCIgZD0iTTI5MC40IDQ0NC44TDE2MiAzNzQuMVYyMzQuMmwxMjguNCA3NC4xdjEzNi41em0xOC40IDBsMTI4LjQtNzAuNnYtMTQwbC0xMjguNCA3NC4xdjEzNi41ek0yOTkuNiAzMDN6bS0xMjktODVsMTI5LTcwLjlMNDI4LjUgMjE4bC0xMjguOSA3NC40LTEyOS03NC40eiIvPjwvc3ZnPgo=";
<add>const urlSvg = new URL(
<add> "data:image/svg;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHZpZXdCb3g9IjAgMCA2MDAgNjAwIj48dGl0bGU+aWNvbi1zcXVhcmUtc21hbGw8L3RpdGxlPjxwYXRoIGZpbGw9IiNGRkYiIGQ9Ik0zMDAgLjFMNTY1IDE1MHYyOTkuOUwzMDAgNTk5LjggMzUgNDQ5LjlWMTUweiIvPjxwYXRoIGZpbGw9IiM4RUQ2RkIiIGQ9Ik01MTcuNyA0MzkuNUwzMDguOCA1NTcuOHYtOTJMNDM5IDM5NC4xbDc4LjcgNDUuNHptMTQuMy0xMi45VjE3OS40bC03Ni40IDQ0LjF2MTU5bDc2LjQgNDQuMXpNODEuNSA0MzkuNWwyMDguOSAxMTguMnYtOTJsLTEzMC4yLTcxLjYtNzguNyA0NS40em0tMTQuMy0xMi45VjE3OS40bDc2LjQgNDQuMXYxNTlsLTc2LjQgNDQuMXptOC45LTI2My4yTDI5MC40IDQyLjJ2ODlsLTEzNy4zIDc1LjUtMS4xLjYtNzUuOS00My45em00NDYuOSAwTDMwOC44IDQyLjJ2ODlMNDQ2IDIwNi44bDEuMS42IDc1LjktNDR6Ii8+PHBhdGggZmlsbD0iIzFDNzhDMCIgZD0iTTI5MC40IDQ0NC44TDE2MiAzNzQuMVYyMzQuMmwxMjguNCA3NC4xdjEzNi41em0xOC40IDBsMTI4LjQtNzAuNnYtMTQwbC0xMjguNCA3NC4xdjEzNi41ek0yOTkuNiAzMDN6bS0xMjktODVsMTI5LTcwLjlMNDI4LjUgMjE4bC0xMjguOSA3NC40LTEyOS03NC40eiIvPjwvc3ZnPgo="
<add>);
<ide>
<ide> it("should generate various data-url types", () => {
<ide> expect(png).toContain("data:image/png;base64,");
<ide> expect(svg).toContain("data:image/svg+xml;base64");
<ide> expect(jpg).toContain("data:image/jpeg;base64,");
<add> expect(dataSvg).toContain("data:image/svg+xml;base64,");
<add> expect(urlSvg.href).toContain("data:image/svg;base64,");
<ide> });
<ide><path>test/configCases/asset-modules/data-url/webpack.config.js
<ide> module.exports = {
<ide> test: /\.(png|svg)$/,
<ide> type: "asset/inline"
<ide> },
<add> {
<add> mimetype: "image/svg+xml",
<add> type: "asset/inline"
<add> },
<ide> {
<ide> test: /\.jpg$/,
<ide> type: "asset",
| 3
|
Javascript | Javascript | use window.execscript instead of window.eval on ie | e54909f5efa37cd4013d7f21a77817ea459cde64 |
<ide><path>docs/src/templates/doc_widgets.js
<ide> element.append(tabs);
<ide>
<ide> var script = (exampleSrc.match(/<script[^\>]*>([\s\S]*)<\/script>/) || [])[1] || '';
<add>
<ide> try {
<del> window.eval(script);
<add> if (window.execScript) { // IE
<add> window.execScript(script || '"stupid IE!"'); // IE complains when evaling empty string
<add> } else {
<add> window.eval(script);
<add> }
<ide> } catch (e) {
<ide> alert(e);
<ide> }
| 1
|
Java | Java | fix javadoc links | 20d0becbc62bee7e9459be1186012abd3781d4f5 |
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/client/standard/AnnotatedEndpointConnectionManager.java
<ide> /**
<ide> * WebSocket {@link ConnectionManagerSupport connection manager} that connects
<ide> * to the server via {@link WebSocketContainer} and handles the session with an
<del> * {@link javax.websocket.ClientEndpoint @ClientEndpoint} endpoint.
<add> * {@link jakarta.websocket.ClientEndpoint @ClientEndpoint} endpoint.
<ide> *
<ide> * @author Rossen Stoyanchev
<ide> * @since 4.0
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/config/annotation/SockJsServiceRegistration.java
<ide> public SockJsServiceRegistration setTaskScheduler(TaskScheduler scheduler) {
<ide> * server. Since the iframe needs to load the SockJS javascript client library,
<ide> * this property allows specifying where to load it from.
<ide> * <p>By default this is set to point to
<del> * "https://cdn.jsdelivr.net/sockjs/0.3.4/sockjs.min.js". However it can
<add> * "<a href="https://cdn.jsdelivr.net/sockjs/1.0.0/sockjs.min.js">sockjs.min.js</a>". However, it can
<ide> * also be set to point to a URL served by the application.
<ide> * <p>Note that it's possible to specify a relative URL in which case the URL
<ide> * must be relative to the iframe URL. For example assuming a SockJS endpoint
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/sockjs/frame/AbstractSockJsMessageCodec.java
<ide> public String encode(String... messages) {
<ide> }
<ide>
<ide> /**
<del> * Apply standard JSON string quoting (see https://www.json.org/).
<add> * Apply standard JSON string quoting (see <a href="https://www.json.org/">json.org</a>).
<ide> */
<ide> protected abstract char[] applyJsonQuoting(String content);
<ide>
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/sockjs/support/AbstractSockJsService.java
<ide> public String getName() {
<ide> * server. Since the iframe needs to load the SockJS javascript client library,
<ide> * this property allows specifying where to load it from.
<ide> * <p>By default this is set to point to
<del> * "https://cdn.jsdelivr.net/sockjs/1.0.0/sockjs.min.js".
<add> * "<a href="https://cdn.jsdelivr.net/sockjs/1.0.0/sockjs.min.js">sockjs.min.js</a>".
<ide> * However, it can also be set to point to a URL served by the application.
<ide> * <p>Note that it's possible to specify a relative URL in which case the URL
<ide> * must be relative to the iframe URL. For example assuming a SockJS endpoint
| 4
|
PHP | PHP | apply fixes from styleci | c1a33b865d009b2aec4d8b174db30d4bb68e95a0 |
<ide><path>src/Illuminate/Foundation/Exceptions/Handler.php
<ide> protected function prepareJsonResponse($request, Exception $e)
<ide> 'line' => $e->getLine(),
<ide> 'trace' => $e->getTrace(),
<ide> ], JSON_PRETTY_PRINT | JSON_UNESCAPED_SLASHES), $status, array_merge($headers, [
<del> 'Content-Type' => 'application/json'
<add> 'Content-Type' => 'application/json',
<ide> ]));
<ide> }
<ide>
| 1
|
Mixed | Javascript | expose offset parameters for toastandroid | 546a43bda04bd5f24d24ebefb86cf790f8a17e57 |
<ide><path>Libraries/Components/ToastAndroid/ToastAndroid.android.js
<ide> * of patent rights can be found in the PATENTS file in the same directory.
<ide> *
<ide> * @providesModule ToastAndroid
<add> * @flow
<ide> */
<ide>
<ide> 'use strict';
<ide> var RCTToastAndroid = require('NativeModules').ToastAndroid;
<ide> * There is also a function `showWithGravity` to specify the layout gravity. May be
<ide> * ToastAndroid.TOP, ToastAndroid.BOTTOM, ToastAndroid.CENTER.
<ide> *
<add> * The 'showWithGravityWithOffset' function adds on the ability to specify offset
<add> * These offset values will translate to pixels.
<add> *
<ide> * Basic usage:
<ide> * ```javascript
<ide> * ToastAndroid.show('A pikachu appeared nearby !', ToastAndroid.SHORT);
<ide> * ToastAndroid.showWithGravity('All Your Base Are Belong To Us', ToastAndroid.SHORT, ToastAndroid.CENTER);
<add> * ToastAndroid.showWithGravityAndOffset('A wild toast appeared!', ToastAndroid.LONG, ToastAndroid.BOTTOM, 25, 50);
<ide> * ```
<ide> */
<ide>
<ide> var ToastAndroid = {
<ide> ): void {
<ide> RCTToastAndroid.showWithGravity(message, duration, gravity);
<ide> },
<add>
<add> showWithGravityAndOffset: function (
<add> message: string,
<add> duration: number,
<add> gravity: number,
<add> xOffset: number,
<add> yOffset: number,
<add> ): void {
<add> RCTToastAndroid.showWithGravityAndOffset(message, duration, gravity, xOffset, yOffset);
<add> },
<ide> };
<ide>
<ide> module.exports = ToastAndroid;
<ide><path>RNTester/js/ToastAndroidExample.android.js
<ide> class ToastExample extends React.Component {
<ide> <Text style={styles.text}>Click me.</Text>
<ide> </TouchableWithoutFeedback>
<ide> </RNTesterBlock>
<add> <RNTesterBlock title="Toast with x offset">
<add> <TouchableWithoutFeedback
<add> onPress={() =>
<add> ToastAndroid.showWithGravityAndOffset(
<add> 'This is a toast with x offset',
<add> ToastAndroid.SHORT,
<add> ToastAndroid.CENTER,
<add> 50,
<add> 0,
<add> )
<add> }>
<add> <Text style={styles.text}>Click me.</Text>
<add> </TouchableWithoutFeedback>
<add> </RNTesterBlock>
<add> <RNTesterBlock title="Toast with y offset">
<add> <TouchableWithoutFeedback
<add> onPress={() =>
<add> ToastAndroid.showWithGravityAndOffset(
<add> 'This is a toast with y offset',
<add> ToastAndroid.SHORT,
<add> ToastAndroid.BOTTOM,
<add> 0,
<add> 50,
<add> )
<add> }>
<add> <Text style={styles.text}>Click me.</Text>
<add> </TouchableWithoutFeedback>
<add> </RNTesterBlock>
<ide> </RNTesterPage>
<ide> );
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/react/modules/toast/ToastModule.java
<ide>
<ide> import android.view.Gravity;
<ide> import android.widget.Toast;
<del>
<ide> import com.facebook.react.bridge.NativeModule;
<ide> import com.facebook.react.bridge.ReactApplicationContext;
<ide> import com.facebook.react.bridge.ReactContextBaseJavaModule;
<ide> import com.facebook.react.bridge.ReactMethod;
<del>import com.facebook.react.common.MapBuilder;
<ide> import com.facebook.react.bridge.UiThreadUtil;
<add>import com.facebook.react.common.MapBuilder;
<ide> import com.facebook.react.module.annotations.ReactModule;
<del>
<ide> import java.util.Map;
<ide>
<ide> /**
<ide> public void run() {
<ide> }
<ide> });
<ide> }
<add>
<add> @ReactMethod
<add> public void showWithGravityAndOffset(
<add> final String message,
<add> final int duration,
<add> final int gravity,
<add> final int xOffset,
<add> final int yOffset) {
<add> UiThreadUtil.runOnUiThread(
<add> new Runnable() {
<add> @Override
<add> public void run() {
<add> Toast toast = Toast.makeText(getReactApplicationContext(), message, duration);
<add> toast.setGravity(gravity, xOffset, yOffset);
<add> toast.show();
<add> }
<add> });
<add> }
<ide> }
| 3
|
Javascript | Javascript | extract batchprocessor module | f8b37b7d241c610fbea75b8cd632eff254a3f77e |
<ide><path>packager/react-packager/src/lib/BatchProcessor.js
<add>/**
<add> * Copyright (c) 2016-present, Facebook, Inc.
<add> * All rights reserved.
<add> *
<add> * This source code is licensed under the BSD-style license found in the
<add> * LICENSE file in the root directory of this source tree. An additional grant
<add> * of patent rights can be found in the PATENTS file in the same directory.
<add> *
<add> * @flow
<add> */
<add>
<add>'use strict';
<add>
<add>const invariant = require('fbjs/lib/invariant');
<add>
<add>type ProcessBatch<TItem, TResult> = (
<add> batch: Array<TItem>,
<add> callback: (error?: Error, orderedResults?: Array<TResult>) => mixed,
<add>) => mixed;
<add>
<add>type BatchProcessorOptions = {
<add> maximumDelayMs: number,
<add> maximumItems: number,
<add> concurrency: number,
<add>};
<add>
<add>/**
<add> * We batch items together trying to minimize their processing, for example as
<add> * network queries. For that we wait a small moment before processing a batch.
<add> * We limit also the number of items we try to process in a single batch so that
<add> * if we have many items pending in a short amount of time, we can start
<add> * processing right away.
<add> */
<add>class BatchProcessor<TItem, TResult> {
<add>
<add> _options: BatchProcessorOptions;
<add> _processBatch: ProcessBatch<TItem, TResult>;
<add> _queue: Array<{
<add> item: TItem,
<add> callback: (error?: Error, result?: TResult) => mixed,
<add> }>;
<add> _timeoutHandle: ?number;
<add> _currentProcessCount: number;
<add>
<add> constructor(
<add> options: BatchProcessorOptions,
<add> processBatch: ProcessBatch<TItem, TResult>,
<add> ) {
<add> this._options = options;
<add> this._processBatch = processBatch;
<add> this._queue = [];
<add> this._timeoutHandle = null;
<add> this._currentProcessCount = 0;
<add> (this: any)._processQueue = this._processQueue.bind(this);
<add> }
<add>
<add> _processQueue() {
<add> this._timeoutHandle = null;
<add> while (
<add> this._queue.length > 0 &&
<add> this._currentProcessCount < this._options.concurrency
<add> ) {
<add> this._currentProcessCount++;
<add> const jobs = this._queue.splice(0, this._options.maximumItems);
<add> const items = jobs.map(job => job.item);
<add> this._processBatch(items, (error, results) => {
<add> invariant(
<add> results == null || results.length === items.length,
<add> 'Not enough results returned.',
<add> );
<add> for (let i = 0; i < items.length; ++i) {
<add> jobs[i].callback(error, results && results[i]);
<add> }
<add> this._currentProcessCount--;
<add> this._processQueueOnceReady();
<add> });
<add> }
<add> }
<add>
<add> _processQueueOnceReady() {
<add> if (this._queue.length >= this._options.maximumItems) {
<add> clearTimeout(this._timeoutHandle);
<add> process.nextTick(this._processQueue);
<add> return;
<add> }
<add> if (this._timeoutHandle == null) {
<add> this._timeoutHandle = setTimeout(
<add> this._processQueue,
<add> this._options.maximumDelayMs,
<add> );
<add> }
<add> }
<add>
<add> queue(
<add> item: TItem,
<add> callback: (error?: Error, result?: TResult) => mixed,
<add> ) {
<add> this._queue.push({item, callback});
<add> this._processQueueOnceReady();
<add> }
<add>
<add>}
<add>
<add>module.exports = BatchProcessor;
<ide><path>packager/react-packager/src/lib/GlobalTransformCache.js
<ide>
<ide> 'use strict';
<ide>
<add>const BatchProcessor = require('./BatchProcessor');
<add>
<ide> const crypto = require('crypto');
<ide> const imurmurhash = require('imurmurhash');
<del>const invariant = require('fbjs/lib/invariant');
<ide> const jsonStableStringify = require('json-stable-stringify');
<ide> const path = require('path');
<ide> const request = require('request');
<ide> type FetchProps = {
<ide> type FetchCallback = (error?: Error, result?: ?CachedResult) => mixed;
<ide> type FetchURICallback = (error?: Error, resultURI?: ?string) => mixed;
<ide>
<del>type ProcessBatch<TItem, TResult> = (
<del> batch: Array<TItem>,
<del> callback: (error?: Error, orderedResults?: Array<TResult>) => mixed,
<del>) => mixed;
<del>type BatchProcessorOptions = {
<del> maximumDelayMs: number,
<del> maximumItems: number,
<del> concurrency: number,
<del>};
<del>
<del>/**
<del> * We batch keys together trying to make a smaller amount of queries. For that
<del> * we wait a small moment before starting to fetch. We limit also the number of
<del> * keys we try to fetch at once, so if we already have that many keys pending,
<del> * we can start fetching right away.
<del> */
<del>class BatchProcessor<TItem, TResult> {
<del>
<del> _options: BatchProcessorOptions;
<del> _processBatch: ProcessBatch<TItem, TResult>;
<del> _queue: Array<{
<del> item: TItem,
<del> callback: (error?: Error, result?: TResult) => mixed,
<del> }>;
<del> _timeoutHandle: ?number;
<del> _currentProcessCount: number;
<del>
<del> constructor(
<del> options: BatchProcessorOptions,
<del> processBatch: ProcessBatch<TItem, TResult>,
<del> ) {
<del> this._options = options;
<del> this._processBatch = processBatch;
<del> this._queue = [];
<del> this._timeoutHandle = null;
<del> this._currentProcessCount = 0;
<del> (this: any)._processQueue = this._processQueue.bind(this);
<del> }
<del>
<del> _processQueue() {
<del> this._timeoutHandle = null;
<del> while (
<del> this._queue.length > 0 &&
<del> this._currentProcessCount < this._options.concurrency
<del> ) {
<del> this._currentProcessCount++;
<del> const jobs = this._queue.splice(0, this._options.maximumItems);
<del> const items = jobs.map(job => job.item);
<del> this._processBatch(items, (error, results) => {
<del> invariant(
<del> results == null || results.length === items.length,
<del> 'Not enough results returned.',
<del> );
<del> for (let i = 0; i < items.length; ++i) {
<del> jobs[i].callback(error, results && results[i]);
<del> }
<del> this._currentProcessCount--;
<del> this._processQueueOnceReady();
<del> });
<del> }
<del> }
<del>
<del> _processQueueOnceReady() {
<del> if (this._queue.length >= this._options.maximumItems) {
<del> clearTimeout(this._timeoutHandle);
<del> process.nextTick(this._processQueue);
<del> return;
<del> }
<del> if (this._timeoutHandle == null) {
<del> this._timeoutHandle = setTimeout(
<del> this._processQueue,
<del> this._options.maximumDelayMs,
<del> );
<del> }
<del> }
<del>
<del> queue(
<del> item: TItem,
<del> callback: (error?: Error, result?: TResult) => mixed,
<del> ) {
<del> this._queue.push({item, callback});
<del> this._processQueueOnceReady();
<del> }
<del>
<del>}
<del>
<ide> type URI = string;
<ide>
<ide> /**
| 2
|
Javascript | Javascript | extract root element class into variable | 505031063e8c4ba096c359506dc979fa2ccfc96a |
<ide><path>packages/ember-views/lib/system/event_dispatcher.js
<ide> import ActionManager from 'ember-views/system/action_manager';
<ide> import View from 'ember-views/views/view';
<ide> import assign from 'ember-metal/assign';
<ide>
<add>let ROOT_ELEMENT_CLASS = 'ember-application';
<add>let ROOT_ELEMENT_SELECTOR = '.' + ROOT_ELEMENT_CLASS;
<add>
<ide> /**
<ide> `Ember.EventDispatcher` handles delegating browser events to their
<ide> corresponding `Ember.Views.` For example, when you click on a view,
<ide> export default EmberObject.extend({
<ide>
<ide> rootElement = jQuery(get(this, 'rootElement'));
<ide>
<del> Ember.assert(`You cannot use the same root element (${rootElement.selector || rootElement[0].tagName}) multiple times in an Ember.Application`, !rootElement.is('.ember-application'));
<del> Ember.assert('You cannot make a new Ember.Application using a root element that is a descendent of an existing Ember.Application', !rootElement.closest('.ember-application').length);
<del> Ember.assert('You cannot make a new Ember.Application using a root element that is an ancestor of an existing Ember.Application', !rootElement.find('.ember-application').length);
<add> Ember.assert(`You cannot use the same root element (${rootElement.selector || rootElement[0].tagName}) multiple times in an Ember.Application`, !rootElement.is(ROOT_ELEMENT_SELECTOR));
<add> Ember.assert('You cannot make a new Ember.Application using a root element that is a descendent of an existing Ember.Application', !rootElement.closest(ROOT_ELEMENT_SELECTOR).length);
<add> Ember.assert('You cannot make a new Ember.Application using a root element that is an ancestor of an existing Ember.Application', !rootElement.find(ROOT_ELEMENT_SELECTOR).length);
<ide>
<del> rootElement.addClass('ember-application');
<add> rootElement.addClass(ROOT_ELEMENT_CLASS);
<ide>
<del> Ember.assert('Unable to add "ember-application" class to rootElement. Make sure you set rootElement to the body or an element in the body.', rootElement.is('.ember-application'));
<add> Ember.assert(`Unable to add '${ROOT_ELEMENT_CLASS}' class to rootElement. Make sure you set rootElement to the body or an element in the body.`, rootElement.is(ROOT_ELEMENT_SELECTOR));
<ide>
<ide> for (event in events) {
<ide> if (events.hasOwnProperty(event)) {
<ide> export default EmberObject.extend({
<ide>
<ide> destroy() {
<ide> var rootElement = get(this, 'rootElement');
<del> jQuery(rootElement).off('.ember', '**').removeClass('ember-application');
<add> jQuery(rootElement).off('.ember', '**').removeClass(ROOT_ELEMENT_CLASS);
<ide> return this._super(...arguments);
<ide> },
<ide>
| 1
|
Python | Python | return none if an xcomarg fails to resolve | a4e38978194ef46565bc1e5ba53ecc65308d09aa |
<ide><path>airflow/exceptions.py
<ide> class AirflowOptionalProviderFeatureException(AirflowException):
<ide> """Raise by providers when imports are missing for optional provider features."""
<ide>
<ide>
<add>class XComNotFound(AirflowException):
<add> """Raise when an XCom reference is being resolved against a non-existent XCom."""
<add>
<add> def __init__(self, dag_id: str, task_id: str, key: str) -> None:
<add> super().__init__()
<add> self.dag_id = dag_id
<add> self.task_id = task_id
<add> self.key = key
<add>
<add> def __str__(self) -> str:
<add> return f'XComArg result from {self.task_id} at {self.dag_id} with key="{self.key}" is not found!'
<add>
<add>
<ide> class UnmappableOperator(AirflowException):
<ide> """Raise when an operator is not implemented to be mappable."""
<ide>
<ide><path>airflow/models/xcom_arg.py
<ide> from sqlalchemy import func
<ide> from sqlalchemy.orm import Session
<ide>
<del>from airflow.exceptions import AirflowException
<add>from airflow.exceptions import XComNotFound
<ide> from airflow.models.abstractoperator import AbstractOperator
<ide> from airflow.models.taskmixin import DAGNode, DependencyMixin
<ide> from airflow.models.xcom import XCOM_RETURN_KEY
<ide> def get_task_map_length(self, run_id: str, *, session: "Session") -> Optional[in
<ide> raise NotImplementedError()
<ide>
<ide> def resolve(self, context: Context, session: "Session" = NEW_SESSION) -> Any:
<add> """Pull XCom value.
<add>
<add> This should only be called during ``op.execute()`` in respectable context.
<add>
<add> :meta private:
<add> """
<ide> raise NotImplementedError()
<ide>
<ide>
<ide> def get_task_map_length(self, run_id: str, *, session: "Session") -> Optional[in
<ide>
<ide> @provide_session
<ide> def resolve(self, context: Context, session: "Session" = NEW_SESSION) -> Any:
<del> """
<del> Pull XCom value for the existing arg. This method is run during ``op.execute()``
<del> in respectable context.
<del> """
<del> result = context["ti"].xcom_pull(
<del> task_ids=self.operator.task_id, key=str(self.key), default=NOTSET, session=session
<del> )
<del> if result is NOTSET:
<del> raise AirflowException(
<del> f'XComArg result from {self.operator.task_id} at {context["ti"].dag_id} '
<del> f'with key="{self.key}" is not found!'
<del> )
<del> return result
<add> task_id = self.operator.task_id
<add> result = context["ti"].xcom_pull(task_ids=task_id, key=str(self.key), default=NOTSET, session=session)
<add> if result is not NOTSET:
<add> return result
<add> if self.key == XCOM_RETURN_KEY:
<add> return None
<add> raise XComNotFound(context["ti"].dag_id, task_id, self.key)
<ide>
<ide>
<ide> class _MapResult(Sequence):
<ide><path>tests/decorators/test_python.py
<ide> def org_test_func():
<ide> decorated_test_func, '__wrapped__'
<ide> ), "decorated function does not have __wrapped__ attribute"
<ide> assert decorated_test_func.__wrapped__ is org_test_func, "__wrapped__ attr is not the original function"
<add>
<add>
<add>def test_upstream_exception_produces_none_xcom(dag_maker, session):
<add> from airflow.exceptions import AirflowSkipException
<add> from airflow.models.dagrun import DagRun
<add> from airflow.utils.trigger_rule import TriggerRule
<add>
<add> result = None
<add>
<add> with dag_maker(session=session) as dag:
<add>
<add> @dag.task()
<add> def up1() -> str:
<add> return "example"
<add>
<add> @dag.task()
<add> def up2() -> None:
<add> raise AirflowSkipException()
<add>
<add> @dag.task(trigger_rule=TriggerRule.NONE_FAILED_MIN_ONE_SUCCESS)
<add> def down(a, b):
<add> nonlocal result
<add> result = f"{a!r} {b!r}"
<add>
<add> down(up1(), up2())
<add>
<add> dr: DagRun = dag_maker.create_dagrun()
<add>
<add> decision = dr.task_instance_scheduling_decisions(session=session)
<add> assert len(decision.schedulable_tis) == 2 # "up1" and "up2"
<add> for ti in decision.schedulable_tis:
<add> ti.run(session=session)
<add>
<add> decision = dr.task_instance_scheduling_decisions(session=session)
<add> assert len(decision.schedulable_tis) == 1 # "down"
<add> decision.schedulable_tis[0].run(session=session)
<add> assert result == "'example' None"
| 3
|
Ruby | Ruby | fix duplicate formulae in search results | da347ca1aa03c11087f2963d7cc1f160f68812d4 |
<ide><path>Library/Homebrew/search.rb
<ide> def search_formulae(string_or_regex)
<ide> .search(string_or_regex)
<ide> .sort
<ide>
<del> results += Formula.fuzzy_search(string_or_regex)
<add> results |= Formula.fuzzy_search(string_or_regex)
<ide>
<ide> results.map do |name|
<ide> formula, canonical_full_name = begin
| 1
|
Javascript | Javascript | add reloadwithreason to fast refresh | 4cea628e0c215f2a13489022068f6e38cab5aaa4 |
<ide><path>Libraries/Core/setUpReactRefresh.js
<ide> 'use strict';
<ide>
<ide> if (__DEV__) {
<del> const NativeDevSettings = require('../NativeModules/specs/NativeDevSettings')
<del> .default;
<add> const DevSettings = require('../Utilities/DevSettings');
<ide>
<del> if (typeof NativeDevSettings.reload !== 'function') {
<add> if (typeof DevSettings.reload !== 'function') {
<ide> throw new Error('Could not find the reload() implementation.');
<ide> }
<ide>
<ide> if (__DEV__) {
<ide> ReactRefreshRuntime.injectIntoGlobalHook(global);
<ide>
<ide> const Refresh = {
<del> performFullRefresh() {
<del> NativeDevSettings.reload();
<add> performFullRefresh(reason: string) {
<add> DevSettings.reload(reason);
<ide> },
<ide>
<ide> createSignatureFunctionForTransform:
<ide> if (__DEV__) {
<ide>
<ide> performReactRefresh() {
<ide> if (ReactRefreshRuntime.hasUnrecoverableErrors()) {
<del> NativeDevSettings.reload();
<add> DevSettings.reload('Fast Refresh - Unrecoverable');
<ide> return;
<ide> }
<ide> ReactRefreshRuntime.performReactRefresh();
| 1
|
Python | Python | add detailed comment to _block_check_depths_match | 7eb104463f192f50d184c3217a0c0febe3a88c94 |
<ide><path>numpy/core/shape_base.py
<ide> def stack(arrays, axis=0, out=None):
<ide>
<ide>
<ide> def _block_check_depths_match(arrays, index=[]):
<add> # Recursive function checking that the depths of nested lists in `arrays`
<add> # all match. Mismatch raises a ValueError as described in the block
<add> # docstring below.
<add> # The entire index (rather than just the depth) is calculated for each
<add> # innermost list, in case an error needs to be raised, so that the index
<add> # of the offending list can be printed as part of the error.
<ide> def format_index(index):
<ide> idx_str = ''.join('[{}]'.format(i) for i in index if i is not None)
<ide> return 'arrays' + idx_str
| 1
|
Text | Text | fix some broken contents links | bac792c1634724def05c5ebc93891ecd5b353a53 |
<ide><path>docs/reference/run.md
<ide> Only the operator (the person executing `docker run`) can set the
<ide> following options.
<ide>
<ide> - [Detached vs foreground](#detached-vs-foreground)
<del> - [Detached (-d)](#detached-d)
<add> - [Detached (-d)](#detached--d)
<ide> - [Foreground](#foreground)
<ide> - [Container identification](#container-identification)
<del> - [Name (--name)](#name-name)
<add> - [Name (--name)](#name---name)
<ide> - [PID equivalent](#pid-equivalent)
<del> - [IPC settings (--ipc)](#ipc-settings-ipc)
<add> - [IPC settings (--ipc)](#ipc-settings---ipc)
<ide> - [Network settings](#network-settings)
<del> - [Restart policies (--restart)](#restart-policies-restart)
<del> - [Clean up (--rm)](#clean-up-rm)
<add> - [Restart policies (--restart)](#restart-policies---restart)
<add> - [Clean up (--rm)](#clean-up---rm)
<ide> - [Runtime constraints on resources](#runtime-constraints-on-resources)
<ide> - [Runtime privilege and Linux capabilities](#runtime-privilege-and-linux-capabilities)
<ide>
| 1
|
Javascript | Javascript | avoid global resolver in tests/node app setup | ceeeb5a6c22bae016df07c44470eb74831879b1b |
<ide><path>tests/node/helpers/setup-app.js
<ide> function createApplication() {
<ide>
<ide> let app = this.Ember.Application.extend().create({
<ide> autoboot: false,
<add> Resolver: {
<add> create: (specifier) => {
<add> return this.registry[specifier];
<add> },
<add> },
<ide> });
<ide>
<del> app.Router = this.Ember.Router.extend({
<add> let Router = this.Ember.Router.extend({
<ide> location: 'none',
<ide> });
<ide>
<ide> if (this.routesCallback) {
<del> app.Router.map(this.routesCallback);
<add> Router.map(this.routesCallback);
<ide> }
<ide>
<add> this.register('router:main', Router);
<add>
<ide> registerApplicationClasses(app, this.registry);
<ide>
<ide> // Run application initializers
| 1
|
PHP | PHP | add two tests to the collection | 4c75d5d801f264c3557dbd2ac001236294e6a3bc |
<ide><path>tests/Support/SupportCollectionTest.php
<ide> public function testIterable()
<ide> }
<ide>
<ide>
<add> public function testFilter()
<add> {
<add> $c = new Collection(array(array('id' => 1, 'name' => 'Hello'), array('id' => 2, 'name' => 'World')));
<add> $this->assertEquals(array(1 => array('id' => 2, 'name' => 'World')), $c->filter(function($item)
<add> {
<add> return $item['id'] == 2;
<add> })->all());
<add> }
<add>
<add>
<add> public function testValues()
<add> {
<add> $c = new Collection(array(array('id' => 1, 'name' => 'Hello'), array('id' => 2, 'name' => 'World')));
<add> $this->assertEquals(array(array('id' => 2, 'name' => 'World')), $c->filter(function($item)
<add> {
<add> return $item['id'] == 2;
<add> })->values()->all());
<add> }
<add>
<add>
<ide> public function testFlatten()
<ide> {
<ide> $c = new Collection(array(array('#foo', '#bar'), array('#baz')));
<ide> public function testMerge()
<ide> $this->assertEquals(array($object1, $object2), $data->merge()->all());
<ide> }
<ide>
<del>}
<ide>\ No newline at end of file
<add>}
| 1
|
Javascript | Javascript | prevent infinite loop when ssr-rendering a portal | c3f1b6cd91ffa3e05465c3ed2f48225876c1bd3b |
<ide><path>packages/react-dom/src/__tests__/ReactServerRendering-test.js
<ide> 'use strict';
<ide>
<ide> var React;
<add>var ReactCallReturn;
<ide> var ReactDOM;
<ide> var ReactDOMServer;
<ide> var ReactTestUtils;
<ide> describe('ReactDOMServer', () => {
<ide> beforeEach(() => {
<ide> jest.resetModules();
<ide> React = require('react');
<add> ReactCallReturn = require('react-call-return');
<ide> ReactDOM = require('react-dom');
<ide> ReactTestUtils = require('react-dom/test-utils');
<ide> PropTypes = require('prop-types');
<ide> describe('ReactDOMServer', () => {
<ide> );
<ide> }
<ide> });
<add>
<add> it('should throw rendering portals on the server', () => {
<add> var div = document.createElement('div');
<add> expect(() => {
<add> ReactDOMServer.renderToString(
<add> <div>{ReactDOM.createPortal(<div />, div)}</div>,
<add> );
<add> }).toThrow(
<add> 'Portals are not currently supported by the server renderer. ' +
<add> 'Render them conditionally so that they only appear on the client render.',
<add> );
<add> });
<add>
<add> it('should throw rendering call/return on the server', () => {
<add> var div = document.createElement('div');
<add> expect(() => {
<add> ReactDOMServer.renderToString(
<add> <div>{ReactCallReturn.unstable_createReturn(42)}</div>,
<add> );
<add> }).toThrow(
<add> 'The experimental Call and Return types are not currently supported by the server renderer.',
<add> );
<add> expect(() => {
<add> ReactDOMServer.renderToString(
<add> <div>
<add> {ReactCallReturn.unstable_createCall(null, function() {}, {})}
<add> </div>,
<add> );
<add> }).toThrow(
<add> 'The experimental Call and Return types are not currently supported by the server renderer.',
<add> );
<add> });
<ide> });
<ide><path>packages/react-dom/src/server/ReactPartialRenderer.js
<ide> import warning from 'fbjs/lib/warning';
<ide> import checkPropTypes from 'prop-types/checkPropTypes';
<ide> import describeComponentFrame from 'shared/describeComponentFrame';
<ide> import {ReactDebugCurrentFrame} from 'shared/ReactGlobalSharedState';
<del>import {REACT_FRAGMENT_TYPE} from 'shared/ReactSymbols';
<add>import {
<add> REACT_FRAGMENT_TYPE,
<add> REACT_CALL_TYPE,
<add> REACT_RETURN_TYPE,
<add> REACT_PORTAL_TYPE,
<add>} from 'shared/ReactSymbols';
<ide>
<ide> import {
<ide> createMarkupForCustomAttribute,
<ide> class ReactDOMServerRenderer {
<ide> if (nextChild === null || nextChild === false) {
<ide> return '';
<ide> } else if (!React.isValidElement(nextChild)) {
<add> if (nextChild != null && nextChild.$$typeof != null) {
<add> // Catch unexpected special types early.
<add> const $$typeof = nextChild.$$typeof;
<add> invariant(
<add> $$typeof !== REACT_PORTAL_TYPE,
<add> 'Portals are not currently supported by the server renderer. ' +
<add> 'Render them conditionally so that they only appear on the client render.',
<add> );
<add> invariant(
<add> $$typeof !== REACT_CALL_TYPE && $$typeof !== REACT_RETURN_TYPE,
<add> 'The experimental Call and Return types are not currently ' +
<add> 'supported by the server renderer.',
<add> );
<add> // Catch-all to prevent an infinite loop if React.Children.toArray() supports some new type.
<add> invariant(
<add> false,
<add> 'Unknown element-like object type: %s. This is likely a bug in React. ' +
<add> 'Please file an issue.',
<add> ($$typeof: any).toString(),
<add> );
<add> }
<ide> const nextChildren = toArray(nextChild);
<ide> const frame: Frame = {
<ide> domNamespace: parentNamespace,
| 2
|
PHP | PHP | fix cs errors | de218605cb484a41bcadb71a8b228d6c9f57eaca |
<ide><path>src/Http/ActionDispatcher.php
<ide> namespace Cake\Http;
<ide>
<ide> use Cake\Controller\Controller;
<del>use Cake\Http\Response;
<ide> use Cake\Routing\Router;
<ide> use LogicException;
<ide> use Psr\Http\Message\ResponseInterface;
<ide><path>tests/TestCase/Http/ActionDispatcherTest.php
<ide> */
<ide> namespace Cake\Test\TestCase\Http;
<ide>
<del>use Cake\Event\EventInterface;
<ide> use Cake\Http\ActionDispatcher;
<ide> use Cake\Http\Response;
<ide> use Cake\Http\ServerRequest;
<del>use Cake\Http\Session;
<ide> use Cake\Routing\Router;
<ide> use Cake\TestSuite\TestCase;
<ide>
| 2
|
Text | Text | fix api of example code | d5f2a707c564614a4cae6e0324229c4fbb7361ba |
<ide><path>README.md
<ide> method call.
<ide> For example, the following does not perform any work, because the resulting
<ide> Seq is never used:
<ide>
<del> var oddSquares = Immutable.Sequence.of(1,2,3,4,5,6,7,8)
<add> var oddSquares = Immutable.Seq.of(1,2,3,4,5,6,7,8)
<ide> .filter(x => x % 2).map(x => x * x);
<ide>
<ide> Once the Seq is used, it performs only the work necessary. In this
| 1
|
Text | Text | fix some doc typos and spacings | 3b2ee9a704f1e3974dbb9ce857886d5e377580ab |
<ide><path>docs/understanding-docker.md
<ide> Docker Engine is a client-server application with these major components:
<ide>
<ide> 
<ide>
<del>The CLI imakes use of the Docker REST API to control or interact with the Docker daemon through scripting or direct CLI commands. Many other Docker applications make use of the underlying API and CLI.
<add>The CLI makes use of the Docker REST API to control or interact with the Docker daemon through scripting or direct CLI commands. Many other Docker applications make use of the underlying API and CLI.
<ide>
<ide> The daemon creates and manages Docker objects. Docker objects include images, containers, networks, data volumes, and so forth.
<ide>
<ide> isolated workspace we call the *container*. When you run a container, Docker
<ide> creates a set of *namespaces* for that container.
<ide>
<ide> This provides a layer of isolation: each aspect of a container runs in its own
<del>namespace and does not have access outside it.
<add>namespace and does not have access outside of it.
<ide>
<ide> Some of the namespaces that Docker Engine uses on Linux are:
<ide>
<ide><path>docs/userguide/networking/default_network/container-communication.md
<ide> Many using Docker will want `ip_forward` to be on, to at least make
<ide> communication _possible_ between containers and the wider world. May also be
<ide> needed for inter-container communication if you are in a multiple bridge setup.
<ide>
<del>Docker will never make changes to your system `iptables` rules if you set
<add>Docker will never make changes to your system `iptables` rules if you set
<ide> `--iptables=false` when the daemon starts. Otherwise the Docker server will
<ide> append forwarding rules to the `DOCKER` filter chain.
<ide>
<ide><path>docs/userguide/networking/dockernetworks.md
<ide> NETWORK ID NAME DRIVER
<ide> cf03ee007fb4 host host
<ide> ```
<ide>
<del>Historically, these three networks are part of Docker's implementation. When
<add>Historically, these three networks are part of Docker's implementation. When
<ide> you run a container you can use the `--net` flag to specify which network you
<ide> want to run a container on. These three networks are still available to you.
<ide>
| 3
|
Text | Text | fix typo in with-material-ui readme | b52b91a840f585594678d9746087e3bdb1a668f0 |
<ide><path>examples/with-material-ui/README.md
<ide> Deploy it to the cloud with [now](https://zeit.co/now) ([download](https://zeit.
<ide>
<ide> ```bash
<ide> now
<add>```
<ide>
<ide> ## The idea behind the example
<ide>
| 1
|
Javascript | Javascript | remove bind() and self | 45367a2a8fca611279fe8e18b2af244564da7616 |
<ide><path>lib/cluster.js
<ide> function RoundRobinHandle(key, address, port, addressType, backlog, fd) {
<ide> else
<ide> this.server.listen(address); // UNIX socket path.
<ide>
<del> var self = this;
<del> this.server.once('listening', function() {
<del> self.handle = self.server._handle;
<del> self.handle.onconnection = self.distribute.bind(self);
<del> self.server._handle = null;
<del> self.server = null;
<add> this.server.once('listening', () => {
<add> this.handle = this.server._handle;
<add> this.handle.onconnection = (err, handle) => this.distribute(err, handle);
<add> this.server._handle = null;
<add> this.server = null;
<ide> });
<ide> }
<ide>
<ide> RoundRobinHandle.prototype.add = function(worker, send) {
<ide> assert(worker.id in this.all === false);
<ide> this.all[worker.id] = worker;
<ide>
<del> var self = this;
<del> function done() {
<del> if (self.handle.getsockname) {
<add> const done = () => {
<add> if (this.handle.getsockname) {
<ide> var out = {};
<del> self.handle.getsockname(out);
<add> this.handle.getsockname(out);
<ide> // TODO(bnoordhuis) Check err.
<ide> send(null, { sockname: out }, null);
<ide> } else {
<ide> send(null, null, null); // UNIX socket.
<ide> }
<del> self.handoff(worker); // In case there are connections pending.
<del> }
<add> this.handoff(worker); // In case there are connections pending.
<add> };
<ide>
<ide> if (this.server === null) return done();
<ide> // Still busy binding.
<ide> RoundRobinHandle.prototype.handoff = function(worker) {
<ide> return;
<ide> }
<ide> var message = { act: 'newconn', key: this.key };
<del> var self = this;
<del> sendHelper(worker.process, message, handle, function(reply) {
<add>
<add> sendHelper(worker.process, message, handle, (reply) => {
<ide> if (reply.accepted)
<ide> handle.close();
<ide> else
<del> self.distribute(0, handle); // Worker is shutting down. Send to another.
<del> self.handoff(worker);
<add> this.distribute(0, handle); // Worker is shutting down. Send to another.
<add> this.handoff(worker);
<ide> });
<ide> };
<ide>
<ide> function masterInit() {
<ide> cluster.disconnect = function(cb) {
<ide> var workers = Object.keys(cluster.workers);
<ide> if (workers.length === 0) {
<del> process.nextTick(intercom.emit.bind(intercom, 'disconnect'));
<add> process.nextTick(() => intercom.emit('disconnect'));
<ide> } else {
<ide> for (var key in workers) {
<ide> key = workers[key];
<ide> function masterInit() {
<ide> signo = signo || 'SIGTERM';
<ide> var proc = this.process;
<ide> if (this.isConnected()) {
<del> this.once('disconnect', proc.kill.bind(proc, signo));
<add> this.once('disconnect', () => proc.kill(signo));
<ide> this.disconnect();
<ide> return;
<ide> }
| 1
|
PHP | PHP | fix style in orderby | 6bbb0bc1f41cd35bb82dae5e28ca24853311f9e3 |
<ide><path>src/Illuminate/Database/Query/Builder.php
<ide> public function orHavingRaw($sql, array $bindings = [])
<ide> */
<ide> public function orderBy($column, $direction = 'asc')
<ide> {
<del> if (!in_array(strtolower($direction), ['asc', 'desc'], true)) {
<add> if (! in_array(strtolower($direction), ['asc', 'desc'], true)) {
<ide> throw new InvalidArgumentException('Invalid value of direction.');
<ide> }
<ide>
| 1
|
Python | Python | fix dropout in rnns | e341e73c6ae89cb2fedfa5051ea7ed6c1651c6dc |
<ide><path>keras/layers/recurrent.py
<ide> def get_constants(self, x):
<ide> if 0 < self.dropout_U < 1:
<ide> ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
<ide> ones = K.concatenate([ones] * self.output_dim, 1)
<del> B_U = [K.dropout(ones, self.dropout_U) for _ in range(3)]
<add> B_U = [K.in_train_phase(K.dropout(ones, self.dropout_U), ones) for _ in range(3)]
<ide> constants.append(B_U)
<ide> else:
<ide> constants.append([K.cast_to_floatx(1.) for _ in range(4)])
<ide>
<del> if self.consume_less == 'cpu' and 0 < self.dropout_W < 1:
<add> if 0 < self.dropout_W < 1:
<ide> input_shape = self.input_spec[0].shape
<ide> input_dim = input_shape[-1]
<ide> ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
<ide> ones = K.concatenate([ones] * input_dim, 1)
<del> B_W = [K.dropout(ones, self.dropout_W) for _ in range(3)]
<add> B_W = [K.in_train_phase(K.dropout(ones, self.dropout_W), ones) for _ in range(3)]
<ide> constants.append(B_W)
<ide> else:
<ide> constants.append([K.cast_to_floatx(1.) for _ in range(4)])
<ide> def reset_states(self):
<ide> self.states = [K.zeros((input_shape[0], self.output_dim)),
<ide> K.zeros((input_shape[0], self.output_dim))]
<ide>
<del> def preprocess_input(self, x, train=False):
<add> def preprocess_input(self, x):
<ide> if self.consume_less == 'cpu':
<del> if train and (0 < self.dropout_W < 1):
<add> if 0 < self.dropout_W < 1:
<ide> dropout = self.dropout_W
<ide> else:
<ide> dropout = 0
<ide> def get_constants(self, x):
<ide> if 0 < self.dropout_U < 1:
<ide> ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
<ide> ones = K.concatenate([ones] * self.output_dim, 1)
<del> B_U = [K.dropout(ones, self.dropout_U) for _ in range(4)]
<add> B_U = [K.in_train_phase(K.dropout(ones, self.dropout_U), ones) for _ in range(4)]
<ide> constants.append(B_U)
<ide> else:
<ide> constants.append([K.cast_to_floatx(1.) for _ in range(4)])
<ide>
<del> if self.consume_less == 'cpu' and 0 < self.dropout_W < 1:
<add> if 0 < self.dropout_W < 1:
<ide> input_shape = self.input_spec[0].shape
<ide> input_dim = input_shape[-1]
<ide> ones = K.ones_like(K.reshape(x[:, 0, 0], (-1, 1)))
<ide> ones = K.concatenate([ones] * input_dim, 1)
<del> B_W = [K.dropout(ones, self.dropout_W) for _ in range(4)]
<add> B_W = [K.in_train_phase(K.dropout(ones, self.dropout_W), ones) for _ in range(4)]
<ide> constants.append(B_W)
<ide> else:
<ide> constants.append([K.cast_to_floatx(1.) for _ in range(4)])
| 1
|
Text | Text | correct some typos for docs | 4d66f18be07b13cf41c48190d813fc045bb46678 |
<ide><path>docs/userguide/networking/dockernetworks.md
<ide> networks.
<ide>
<ide> Within a user-defined bridge network, linking is not supported. You can
<ide> expose and publish container ports on containers in this network. This is useful
<del>if you want make a portion of the `bridge` network available to an outside
<add>if you want to make a portion of the `bridge` network available to an outside
<ide> network.
<ide>
<ide> 
<ide><path>docs/userguide/networking/work-with-networks.md
<ide> $ docker run -itd --name=container2 busybox
<ide> 498eaaaf328e1018042c04b2de04036fc04719a6e39a097a4f4866043a2c2152
<ide> ```
<ide>
<del>Then create a isolated, `bridge` network to test with.
<add>Then create an isolated, `bridge` network to test with.
<ide>
<ide> ```bash
<ide> $ docker network create -d bridge isolated_nw
<ide><path>docs/userguide/networkingcontainers.md
<ide> You can also use `docker inspect` with the container's name.
<ide>
<ide> Container names must be unique. That means you can only call one container
<ide> `web`. If you want to re-use a container name you must delete the old container
<del>(with `docker rm`) before you can reuse the name with a new container. Go ahead and stop and them remove your `web` container.
<add>(with `docker rm`) before you can reuse the name with a new container. Go ahead and stop and remove your old `web` container.
<ide>
<ide> $ docker stop web
<ide> web
<ide> Container names must be unique. That means you can only call one container
<ide>
<ide> Docker includes support for networking containers through the use of **network
<ide> drivers**. By default, Docker provides two network drivers for you, the
<del>`bridge` and the `overlay` driver. You can also write a network driver plugin so
<add>`bridge` and the `overlay` drivers. You can also write a network driver plugin so
<ide> that you can create your own drivers but that is an advanced task.
<ide>
<ide> Every installation of the Docker Engine automatically includes three default networks. You can list them:
| 3
|
Go | Go | add missing test for full coverage | c2699d8d896d592dc02276d76dfdf544fd26d0dd |
<ide><path>api/server/server_unit_test.go
<ide> func TestGetImagesHistory(t *testing.T) {
<ide> var called bool
<ide> eng.Register("history", func(job *engine.Job) engine.Status {
<ide> called = true
<add> if len(job.Args) == 0 {
<add> t.Fatal("Job arguments is empty")
<add> }
<ide> if job.Args[0] != imageName {
<ide> t.Fatalf("name != '%s': %#v", imageName, job.Args[0])
<ide> }
| 1
|
Python | Python | recommend polynomial.polynomial over np.polyfit | 4a0c307b1af162c4e3363a8b4c90fa1a138adebf |
<ide><path>numpy/lib/polynomial.py
<ide> def polyfit(x, y, deg, rcond=None, full=False, w=None, cov=False):
<ide>
<ide> Fit a polynomial ``p(x) = p[0] * x**deg + ... + p[deg]`` of degree `deg`
<ide> to points `(x, y)`. Returns a vector of coefficients `p` that minimises
<del> the squared error.
<add> the squared error in the order `deg`, `deg-1`, ... `0`.
<add>
<add> The `Polynomial.fit <numpy.polynomial.polynomial.Polynomial.fit>` class
<add> method is recommended for new code as it is more stable numerically. See
<add> the documentation for the method for more information, or the convenience
<add> function `polynomial.polyfit <numpy.polynomial.polynomial.polyfit>`.
<ide>
<ide> Parameters
<ide> ----------
<ide><path>numpy/polynomial/_polybase.py
<ide> class domain in NumPy 1.4 and ``None`` in later versions.
<ide> -------
<ide> new_series : series
<ide> A series that represents the least squares fit to the data and
<del> has the domain specified in the call.
<add> has the domain and window specified in the call. If the
<add> coefficients for the unscaled and unshifted basis polynomials are
<add> of interest, do ``new_series.convert().coef``
<ide>
<ide> [resid, rank, sv, rcond] : list
<ide> These values are only returned if `full` = True
| 2
|
Javascript | Javascript | validate the input data before opening file | 3e9302b2b34ab0309de8eda45c17efc8fd9cc2f5 |
<ide><path>lib/internal/fs/promises.js
<ide> function validateFileHandle(handle) {
<ide> }
<ide>
<ide> async function writeFileHandle(filehandle, data, options) {
<del> if (!isArrayBufferView(data)) {
<del> validateStringAfterArrayBufferView(data, 'data');
<del> data = Buffer.from(data, options.encoding || 'utf8');
<del> }
<ide> let remaining = data.length;
<ide> if (remaining === 0) return;
<ide> do {
<ide> async function writeFile(path, data, options) {
<ide> options = getOptions(options, { encoding: 'utf8', mode: 0o666, flag: 'w' });
<ide> const flag = options.flag || 'w';
<ide>
<add> if (!isArrayBufferView(data)) {
<add> validateStringAfterArrayBufferView(data, 'data');
<add> data = Buffer.from(data, options.encoding || 'utf8');
<add> }
<add>
<ide> if (path instanceof FileHandle)
<ide> return writeFileHandle(path, data, options);
<ide>
<ide><path>test/parallel/test-fs-append-file.js
<ide> const throwNextTick = (e) => { process.nextTick(() => { throw e; }); };
<ide> .catch(throwNextTick);
<ide> }
<ide>
<del>// Test that appendFile does not accept numbers (callback API).
<del>[false, 5, {}, [], null, undefined].forEach((data) => {
<add>// Test that appendFile does not accept invalid data type (callback API).
<add>[false, 5, {}, [], null, undefined].forEach(async (data) => {
<ide> const errObj = {
<ide> code: 'ERR_INVALID_ARG_TYPE',
<ide> message: /"data"|"buffer"/
<ide> };
<add> const filename = join(tmpdir.path, 'append-invalid-data.txt');
<add>
<add> assert.throws(
<add> () => fs.appendFile(filename, data, common.mustNotCall()),
<add> errObj
<add> );
<add>
<ide> assert.throws(
<del> () => fs.appendFile('foobar', data, common.mustNotCall()),
<add> () => fs.appendFileSync(filename, data),
<ide> errObj
<ide> );
<del> assert.throws(() => fs.appendFileSync('foobar', data), errObj);
<del> assert.rejects(fs.promises.appendFile('foobar', data), errObj);
<add>
<add> await assert.rejects(
<add> fs.promises.appendFile(filename, data),
<add> errObj
<add> );
<add> // The filename shouldn't exist if throwing error.
<add> assert.throws(
<add> () => fs.statSync(filename),
<add> {
<add> code: 'ENOENT',
<add> message: /no such file or directory/
<add> }
<add> );
<ide> });
<ide>
<ide> // Test that appendFile accepts file descriptors (callback API).
| 2
|
Java | Java | improve decodetomono | f4d8c7cc2b6e3add1b1b06dc487cce82dceb4a49 |
<ide><path>spring-core/src/main/java/org/springframework/core/codec/AbstractDataBufferDecoder.java
<add>/*
<add> * Copyright 2002-2017 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.core.codec;
<add>
<add>import java.util.Map;
<add>
<add>import org.reactivestreams.Publisher;
<add>import reactor.core.publisher.Flux;
<add>import reactor.core.publisher.Mono;
<add>
<add>import org.springframework.core.ResolvableType;
<add>import org.springframework.core.io.buffer.DataBuffer;
<add>import org.springframework.lang.Nullable;
<add>import org.springframework.util.MimeType;
<add>
<add>/**
<add> * Abstract base class for {@code Decoder} implementations that can decode
<add> * a {@code DataBuffer} directly to the target element type.
<add> *
<add> * <p>Sub-classes must implement {@link #decodeDataBuffer} to provide a way to
<add> * transform a {@code DataBuffer} to the target data type. The default
<add> * {@link #decode} implementation transforms each individual data buffer while
<add> * {@link #decodeToMono} applies "reduce" and transforms the aggregated buffer.
<add> *
<add> * <p>Sub-classes can override {@link #decode} in order to split the input stream
<add> * along different boundaries (e.g. on new line characters for {@code String})
<add> * or always reduce to a single data buffer (e.g. {@code Resource}).
<add> *
<add> * @author Rossen Stoyanchev
<add> * @since 5.0
<add> */
<add>public abstract class AbstractDataBufferDecoder<T> extends AbstractDecoder<T> {
<add>
<add>
<add> protected AbstractDataBufferDecoder(MimeType... supportedMimeTypes) {
<add> super(supportedMimeTypes);
<add> }
<add>
<add>
<add> @Override
<add> public Flux<T> decode(Publisher<DataBuffer> inputStream, ResolvableType elementType,
<add> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
<add>
<add> return Flux.from(inputStream).map(buffer -> decodeDataBuffer(buffer, elementType, mimeType, hints));
<add> }
<add>
<add> @Override
<add> public Mono<T> decodeToMono(Publisher<DataBuffer> inputStream, ResolvableType elementType,
<add> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
<add>
<add> return Flux.from(inputStream)
<add> .reduce(DataBuffer::write)
<add> .map(buffer -> decodeDataBuffer(buffer, elementType, mimeType, hints));
<add> }
<add>
<add> /**
<add> * How to decode a {@code DataBuffer} to the target element type.
<add> */
<add> protected abstract T decodeDataBuffer(DataBuffer buffer, ResolvableType elementType,
<add> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints);
<add>
<add>}
<ide><path>spring-core/src/main/java/org/springframework/core/codec/ByteArrayDecoder.java
<ide>
<ide> import java.util.Map;
<ide>
<del>import org.reactivestreams.Publisher;
<del>import reactor.core.publisher.Flux;
<del>
<ide> import org.springframework.core.ResolvableType;
<ide> import org.springframework.core.io.buffer.DataBuffer;
<ide> import org.springframework.core.io.buffer.DataBufferUtils;
<ide> * Decoder for {@code byte} arrays.
<ide> *
<ide> * @author Arjen Poutsma
<add> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide> */
<del>public class ByteArrayDecoder extends AbstractDecoder<byte[]> {
<add>public class ByteArrayDecoder extends AbstractDataBufferDecoder<byte[]> {
<add>
<ide>
<ide> public ByteArrayDecoder() {
<ide> super(MimeTypeUtils.ALL);
<ide> public boolean canDecode(ResolvableType elementType, @Nullable MimeType mimeType
<ide> }
<ide>
<ide> @Override
<del> public Flux<byte[]> decode(Publisher<DataBuffer> inputStream, ResolvableType elementType,
<add> protected byte[] decodeDataBuffer(DataBuffer dataBuffer, ResolvableType elementType,
<ide> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
<ide>
<del> return Flux.from(inputStream).map((dataBuffer) -> {
<del> byte[] result = new byte[dataBuffer.readableByteCount()];
<del> dataBuffer.read(result);
<del> DataBufferUtils.release(dataBuffer);
<del> return result ;
<del> });
<add> byte[] result = new byte[dataBuffer.readableByteCount()];
<add> dataBuffer.read(result);
<add> DataBufferUtils.release(dataBuffer);
<add> return result;
<ide> }
<ide>
<del>
<ide> }
<ide><path>spring-core/src/main/java/org/springframework/core/codec/ByteBufferDecoder.java
<ide> import java.nio.ByteBuffer;
<ide> import java.util.Map;
<ide>
<del>import org.reactivestreams.Publisher;
<del>import reactor.core.publisher.Flux;
<del>
<ide> import org.springframework.core.ResolvableType;
<ide> import org.springframework.core.io.buffer.DataBuffer;
<ide> import org.springframework.core.io.buffer.DataBufferUtils;
<ide> *
<ide> * @author Sebastien Deleuze
<ide> * @author Arjen Poutsma
<add> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide> */
<del>public class ByteBufferDecoder extends AbstractDecoder<ByteBuffer> {
<add>public class ByteBufferDecoder extends AbstractDataBufferDecoder<ByteBuffer> {
<add>
<ide>
<ide> public ByteBufferDecoder() {
<ide> super(MimeTypeUtils.ALL);
<ide> public boolean canDecode(ResolvableType elementType, @Nullable MimeType mimeType
<ide> }
<ide>
<ide> @Override
<del> public Flux<ByteBuffer> decode(Publisher<DataBuffer> inputStream, ResolvableType elementType,
<add> protected ByteBuffer decodeDataBuffer(DataBuffer dataBuffer, ResolvableType elementType,
<ide> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
<ide>
<del> return Flux.from(inputStream).map((dataBuffer) -> {
<del> ByteBuffer copy = ByteBuffer.allocate(dataBuffer.readableByteCount());
<del> copy.put(dataBuffer.asByteBuffer());
<del> copy.flip();
<del> DataBufferUtils.release(dataBuffer);
<del> return copy;
<del> });
<add> ByteBuffer copy = ByteBuffer.allocate(dataBuffer.readableByteCount());
<add> copy.put(dataBuffer.asByteBuffer());
<add> copy.flip();
<add> DataBufferUtils.release(dataBuffer);
<add> return copy;
<ide> }
<ide>
<ide> }
<ide><path>spring-core/src/main/java/org/springframework/core/codec/DataBufferDecoder.java
<ide> * {@link org.springframework.core.io.buffer.DataBufferUtils#release(DataBuffer)}.
<ide> *
<ide> * @author Arjen Poutsma
<add> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide> */
<del>public class DataBufferDecoder extends AbstractDecoder<DataBuffer> {
<add>public class DataBufferDecoder extends AbstractDataBufferDecoder<DataBuffer> {
<add>
<ide>
<ide> public DataBufferDecoder() {
<ide> super(MimeTypeUtils.ALL);
<ide> public Flux<DataBuffer> decode(Publisher<DataBuffer> inputStream, ResolvableType
<ide> return Flux.from(inputStream);
<ide> }
<ide>
<add> @Override
<add> protected DataBuffer decodeDataBuffer(DataBuffer buffer, ResolvableType elementType,
<add> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
<add>
<add> return buffer;
<add> }
<add>
<ide> }
<ide><path>spring-core/src/main/java/org/springframework/core/codec/ResourceDecoder.java
<ide>
<ide> import org.reactivestreams.Publisher;
<ide> import reactor.core.publisher.Flux;
<del>import reactor.core.publisher.Mono;
<ide>
<ide> import org.springframework.core.ResolvableType;
<ide> import org.springframework.core.io.ByteArrayResource;
<ide> * Decoder for {@link Resource}s.
<ide> *
<ide> * @author Arjen Poutsma
<add> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide> */
<del>public class ResourceDecoder extends AbstractDecoder<Resource> {
<add>public class ResourceDecoder extends AbstractDataBufferDecoder<Resource> {
<add>
<ide>
<ide> public ResourceDecoder() {
<ide> super(MimeTypeUtils.ALL);
<ide> public Flux<Resource> decode(Publisher<DataBuffer> inputStream, ResolvableType e
<ide> }
<ide>
<ide> @Override
<del> public Mono<Resource> decodeToMono(Publisher<DataBuffer> inputStream, ResolvableType elementType,
<add> protected Resource decodeDataBuffer(DataBuffer dataBuffer, ResolvableType elementType,
<ide> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
<ide>
<add> byte[] bytes = new byte[dataBuffer.readableByteCount()];
<add> dataBuffer.read(bytes);
<add> DataBufferUtils.release(dataBuffer);
<add>
<ide> Class<?> clazz = elementType.getRawClass();
<ide> Assert.state(clazz != null, "No resource class");
<ide>
<del> Mono<byte[]> byteArray = Flux.from(inputStream).
<del> reduce(DataBuffer::write).
<del> map(dataBuffer -> {
<del> byte[] bytes = new byte[dataBuffer.readableByteCount()];
<del> dataBuffer.read(bytes);
<del> DataBufferUtils.release(dataBuffer);
<del> return bytes;
<del> });
<del>
<del>
<ide> if (InputStreamResource.class == clazz) {
<del> return Mono.from(byteArray.map(ByteArrayInputStream::new).map(InputStreamResource::new));
<add> return new InputStreamResource(new ByteArrayInputStream(bytes));
<ide> }
<ide> else if (clazz.isAssignableFrom(ByteArrayResource.class)) {
<del> return Mono.from(byteArray.map(ByteArrayResource::new));
<add> return new ByteArrayResource(bytes);
<ide> }
<ide> else {
<del> return Mono.error(new IllegalStateException("Unsupported resource class: " + clazz));
<add> throw new IllegalStateException("Unsupported resource class: " + clazz);
<ide> }
<ide> }
<ide>
<ide><path>spring-core/src/main/java/org/springframework/core/codec/StringDecoder.java
<ide>
<ide> import org.reactivestreams.Publisher;
<ide> import reactor.core.publisher.Flux;
<del>import reactor.core.publisher.Mono;
<ide>
<ide> import org.springframework.core.ResolvableType;
<ide> import org.springframework.core.io.buffer.DataBuffer;
<ide> * @since 5.0
<ide> * @see CharSequenceEncoder
<ide> */
<del>public class StringDecoder extends AbstractDecoder<String> {
<add>public class StringDecoder extends AbstractDataBufferDecoder<String> {
<ide>
<ide> public static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
<ide>
<ide> public Flux<String> decode(Publisher<DataBuffer> inputStream, ResolvableType ele
<ide> if (this.splitOnNewline) {
<ide> inputFlux = Flux.from(inputStream).flatMap(StringDecoder::splitOnNewline);
<ide> }
<del> return inputFlux.map(buffer -> decodeDataBuffer(buffer, mimeType));
<del> }
<del>
<del> @Override
<del> public Mono<String> decodeToMono(Publisher<DataBuffer> inputStream, ResolvableType elementType,
<del> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
<del>
<del> return Flux.from(inputStream)
<del> .reduce(DataBuffer::write)
<del> .map(buffer -> decodeDataBuffer(buffer, mimeType));
<add> return super.decode(inputFlux, elementType, mimeType, hints);
<ide> }
<ide>
<ide> private static Flux<DataBuffer> splitOnNewline(DataBuffer dataBuffer) {
<ide> private static Flux<DataBuffer> splitOnNewline(DataBuffer dataBuffer) {
<ide> return Flux.fromIterable(results);
<ide> }
<ide>
<del> private String decodeDataBuffer(DataBuffer dataBuffer, @Nullable MimeType mimeType) {
<add> @Override
<add> protected String decodeDataBuffer(DataBuffer dataBuffer, ResolvableType elementType,
<add> @Nullable MimeType mimeType, @Nullable Map<String, Object> hints) {
<add>
<ide> Charset charset = getCharset(mimeType);
<ide> CharBuffer charBuffer = charset.decode(dataBuffer.asByteBuffer());
<ide> DataBufferUtils.release(dataBuffer);
<ide><path>spring-core/src/test/java/org/springframework/core/codec/ByteArrayDecoderTests.java
<ide> /*
<del> * Copyright 2002-2016 the original author or authors.
<add> * Copyright 2002-2017 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import org.junit.Test;
<ide> import org.reactivestreams.Publisher;
<ide> import reactor.core.publisher.Flux;
<add>import reactor.core.publisher.Mono;
<ide> import reactor.test.StepVerifier;
<ide>
<ide> import org.springframework.core.ResolvableType;
<ide> public class ByteArrayDecoderTests extends AbstractDataBufferAllocatingTestCase
<ide>
<ide> private final ByteArrayDecoder decoder = new ByteArrayDecoder();
<ide>
<add>
<ide> @Test
<ide> public void canDecode() {
<ide> assertTrue(this.decoder.canDecode(ResolvableType.forClass(byte[].class),
<ide> public void decode() {
<ide> .verify();
<ide> }
<ide>
<add> @Test
<add> public void decodeToMono() {
<add> DataBuffer fooBuffer = stringBuffer("foo");
<add> DataBuffer barBuffer = stringBuffer("bar");
<add> Flux<DataBuffer> source = Flux.just(fooBuffer, barBuffer);
<add> Mono<byte[]> output = this.decoder.decodeToMono(source,
<add> ResolvableType.forClassWithGenerics(Publisher.class, byte[].class),
<add> null, Collections.emptyMap());
<add>
<add> StepVerifier.create(output)
<add> .consumeNextWith(bytes -> assertArrayEquals("foobar".getBytes(), bytes))
<add> .expectComplete()
<add> .verify();
<add> }
<add>
<ide> }
<ide>\ No newline at end of file
<ide><path>spring-core/src/test/java/org/springframework/core/codec/ByteBufferDecoderTests.java
<ide> import org.junit.Test;
<ide> import org.reactivestreams.Publisher;
<ide> import reactor.core.publisher.Flux;
<add>import reactor.core.publisher.Mono;
<ide> import reactor.test.StepVerifier;
<ide>
<ide> import org.springframework.core.ResolvableType;
<ide> public void decode() {
<ide> .expectComplete()
<ide> .verify();
<ide> }
<add>
<add> @Test
<add> public void decodeToMono() {
<add> DataBuffer fooBuffer = stringBuffer("foo");
<add> DataBuffer barBuffer = stringBuffer("bar");
<add> Flux<DataBuffer> source = Flux.just(fooBuffer, barBuffer);
<add> Mono<ByteBuffer> output = this.decoder.decodeToMono(source,
<add> ResolvableType.forClassWithGenerics(Publisher.class, ByteBuffer.class),
<add> null, Collections.emptyMap());
<add>
<add> StepVerifier.create(output)
<add> .expectNext(ByteBuffer.wrap("foobar".getBytes()))
<add> .expectComplete()
<add> .verify();
<add> }
<ide> }
<ide><path>spring-core/src/test/java/org/springframework/core/codec/DataBufferDecoderTests.java
<ide> package org.springframework.core.codec;
<ide>
<ide> import java.nio.ByteBuffer;
<add>import java.nio.charset.StandardCharsets;
<add>import java.time.Duration;
<ide> import java.util.Collections;
<ide>
<ide> import org.junit.Test;
<ide> import org.reactivestreams.Publisher;
<ide> import reactor.core.publisher.Flux;
<add>import reactor.core.publisher.Mono;
<ide>
<ide> import org.springframework.core.ResolvableType;
<ide> import org.springframework.core.io.buffer.AbstractDataBufferAllocatingTestCase;
<ide> import org.springframework.core.io.buffer.DataBuffer;
<add>import org.springframework.core.io.buffer.support.DataBufferTestUtils;
<ide> import org.springframework.util.MimeTypeUtils;
<ide>
<ide> import static org.junit.Assert.*;
<ide> public void decode() {
<ide> DataBuffer barBuffer = stringBuffer("bar");
<ide> Flux<DataBuffer> source = Flux.just(fooBuffer, barBuffer);
<ide> Flux<DataBuffer> output = this.decoder.decode(source,
<del> ResolvableType.forClassWithGenerics(Publisher.class, ByteBuffer.class),
<add> ResolvableType.forClassWithGenerics(Publisher.class, DataBuffer.class),
<ide> null, Collections.emptyMap());
<ide>
<ide> assertSame(source, output);
<ide>
<ide> release(fooBuffer, barBuffer);
<ide> }
<del>}
<add>
<add> @Test
<add> public void decodeToMono() {
<add> DataBuffer fooBuffer = stringBuffer("foo");
<add> DataBuffer barBuffer = stringBuffer("bar");
<add> Flux<DataBuffer> source = Flux.just(fooBuffer, barBuffer);
<add> Mono<DataBuffer> output = this.decoder.decodeToMono(source,
<add> ResolvableType.forClassWithGenerics(Publisher.class, DataBuffer.class),
<add> null, Collections.emptyMap());
<add>
<add> DataBuffer outputBuffer = output.block(Duration.ofSeconds(5));
<add> assertEquals("foobar", DataBufferTestUtils.dumpString(outputBuffer, StandardCharsets.UTF_8));
<add>
<add> release(outputBuffer);
<add> }}
| 9
|
PHP
|
PHP
|
emulate transaction nesting
|
2633e1249db313062c7b6c0b30bb84130ec88dd9
|
<ide><path>src/Illuminate/Database/Connection.php
<ide> class Connection implements ConnectionInterface {
<ide> */
<ide> protected $fetchMode = PDO::FETCH_ASSOC;
<ide>
<add> /**
<add> * The number of active transactions.
<add> *
<add> * @var int
<add> */
<add> protected $transactions = 0;
<add>
<ide> /**
<ide> * All of the queries run against the connection.
<ide> *
<ide> public function prepareBindings(array $bindings)
<ide> */
<ide> public function transaction(Closure $callback)
<ide> {
<del> $this->pdo->beginTransaction();
<add> $this->beginTransaction();
<ide>
<ide> // We'll simply execute the given callback within a try / catch block
<ide> // and if we catch any exception we can rollback the transaction
<ide> public function transaction(Closure $callback)
<ide> {
<ide> $result = $callback($this);
<ide>
<del> $this->pdo->commit();
<add> $this->commit();
<ide> }
<ide>
<ide> // If we catch an exception, we will roll back so nothing gets messed
<ide> // up in the database. Then we'll re-throw the exception so it can
<ide> // be handled how the developer sees fit for their applications.
<ide> catch (\Exception $e)
<ide> {
<del> $this->pdo->rollBack();
<add> $this->rollBack();
<ide>
<ide> throw $e;
<ide> }
<ide>
<ide> return $result;
<ide> }
<ide>
<add> /**
<add> * Start a new database transaction.
<add> *
<add> * @return void
<add> */
<add> public function beginTransaction()
<add> {
<add> ++$this->transactions;
<add>
<add> if ($this->transactions == 1)
<add> {
<add> $this->pdo->beginTransaction();
<add> }
<add> }
<add>
<add> /**
<add> * Commit the active database transaction.
<add> *
<add> * @return void
<add> */
<add> public function commit()
<add> {
<add> if ($this->transactions == 1) $this->pdo->commit();
<add>
<add> --$this->transactions;
<add> }
<add>
<add> /**
<add> * Rollback the active database transaction.
<add> *
<add> * @return void
<add> */
<add> public function rollBack()
<add> {
<add> if ($this->transactions == 1)
<add> {
<add> $this->transactions = 0;
<add>
<add> $this->pdo->rollBack();
<add> }
<add> else
<add> {
<add> --$this->transactions;
<add> }
<add> }
<add>
<ide> /**
<ide> * Execute the given callback in "dry run" mode.
<ide> *
| 1
|
Javascript
|
Javascript
|
cover dgram socket close during cluster bind
|
ac602a251a0ca385775a71357d52e9c752430dfb
|
<ide><path>test/parallel/test-dgram-cluster-close-during-bind.js
<add>'use strict';
<add>const common = require('../common');
<add>const assert = require('assert');
<add>const cluster = require('cluster');
<add>const dgram = require('dgram');
<add>
<add>if (common.isWindows) {
<add> common.skip('dgram clustering is currently not supported on windows.');
<add> return;
<add>}
<add>
<add>if (cluster.isMaster) {
<add> cluster.fork();
<add>} else {
<add> // When the socket attempts to bind, it requests a handle from the cluster.
<add> // Close the socket before returning the handle from the cluster.
<add> const socket = dgram.createSocket('udp4');
<add> const _getServer = cluster._getServer;
<add>
<add> cluster._getServer = common.mustCall(function(self, options, callback) {
<add> socket.close(common.mustCall(() => {
<add> _getServer.call(this, self, options, common.mustCall((err, handle) => {
<add> assert.strictEqual(err, 0);
<add>
<add> // When the socket determines that it was already closed, it will
<add> // close the handle. Use handle.close() to terminate the test.
<add> const close = handle.close;
<add>
<add> handle.close = common.mustCall(function() {
<add> setImmediate(() => cluster.worker.disconnect());
<add> return close.call(this);
<add> });
<add>
<add> callback(err, handle);
<add> }));
<add> }));
<add> });
<add>
<add> socket.bind(common.mustNotCall('Socket should not bind.'));
<add>}
| 1
|
Text
|
Text
|
fix documentation about applicationrecord
|
cbbb89f1c9edcf438625b10178c8b2faf6ef4114
|
<ide><path>guides/source/active_record_basics.md
<ide> What if you need to follow a different naming convention or need to use your
<ide> Rails application with a legacy database? No problem, you can easily override
<ide> the default conventions.
<ide>
<del>`ApplicationRecord` inherits from `ActionController::Base`, which defines a
<add>`ApplicationRecord` inherits from `ActiveRecord::Base`, which defines a
<ide> number of helpful methods. You can use the `ActiveRecord::Base.table_name=`
<ide> method to specify the table name that should be used:
<ide>
| 1
|
Javascript
|
Javascript
|
fold dispatch function into main event dispatch
|
b208042f520b2ee768a93323d6bdc65eabfb0d65
|
<ide><path>src/event.js
<ide> jQuery.event = {
<ide> // Discard the second event of a jQuery.event.trigger() and
<ide> // when an event is called after a page has unloaded
<ide> return typeof jQuery !== "undefined" && (!e || jQuery.event.triggered !== e.type) ?
<del> jQuery.event.handle.apply( eventHandle.elem, arguments ) :
<add> jQuery.event.dispatch.apply( eventHandle.elem, arguments ) :
<ide> undefined;
<ide> };
<ide> // Add elem as a property of the handle fn to prevent a memory leak with IE non-native events
<ide> jQuery.event = {
<ide> return event.result;
<ide> },
<ide>
<del> handle: function( event ) {
<add> dispatch: function( event ) {
<ide>
<ide> // Make a writable jQuery.Event from the native event object
<ide> event = jQuery.event.fix( event || window.event );
<ide>
<ide> var handlers = ((jQuery._data( this, "events" ) || {})[ event.type ] || []),
<ide> delegateCount = handlers.delegateCount,
<ide> args = [].slice.call( arguments, 0 ),
<add> run_all = !event.exclusive && !event.namespace,
<add> specialHandle = ( jQuery.event.special[ event.type ] || {} ).handle,
<ide> handlerQueue = [],
<del> i, cur, selMatch, matches, handleObj, sel, hit, related;
<add> i, j, cur, ret, selMatch, matches, handleObj, sel, hit, related;
<ide>
<ide> // Use the fix-ed jQuery.Event rather than the (read-only) native event
<ide> args[0] = event;
<ide> jQuery.event = {
<ide>
<ide> for ( i = 0; i < handlerQueue.length && !event.isPropagationStopped(); i++ ) {
<ide> matched = handlerQueue[ i ];
<del> dispatch( matched.elem, event, matched.matches, args );
<add> for ( j = 0; j < matched.matches.length && !event.isImmediatePropagationStopped(); j++ ) {
<add> handleObj = matched.matches[ j ];
<add>
<add> // Triggered event must either 1) be non-exclusive and have no namespace, or
<add> // 2) have namespace(s) a subset or equal to those in the bound event (both can have no namespace).
<add> if ( run_all || (!event.namespace && !handleObj.namespace) || event.namespace_re && event.namespace_re.test( handleObj.namespace ) ) {
<add>
<add> // Pass in a reference to the handler function itself
<add> // So that we can later remove it
<add> event.handler = handleObj.handler;
<add> event.data = handleObj.data;
<add> event.handleObj = handleObj;
<add>
<add> ret = ( specialHandle || handleObj.handler ).apply( matched.elem, args );
<add>
<add> if ( ret !== undefined ) {
<add> event.result = ret;
<add> if ( ret === false ) {
<add> event.preventDefault();
<add> event.stopPropagation();
<add> }
<add> }
<add> }
<add> }
<ide> }
<ide>
<ide> return event.result;
<ide> jQuery.event = {
<ide> if ( bubble ) {
<ide> jQuery.event.trigger( e, null, elem );
<ide> } else {
<del> jQuery.event.handle.call( elem, e );
<add> jQuery.event.dispatch.call( elem, e );
<ide> }
<ide> if ( e.isDefaultPrevented() ) {
<ide> event.preventDefault();
<ide> }
<ide> }
<ide> };
<ide>
<del>// Run jQuery handler functions; called from jQuery.event.handle
<del>function dispatch( target, event, handlers, args ) {
<del> var run_all = !event.exclusive && !event.namespace,
<del> specialHandle = ( jQuery.event.special[ event.type ] || {} ).handle,
<del> j, handleObj, ret;
<del>
<del> for ( j = 0; j < handlers.length && !event.isImmediatePropagationStopped(); j++ ) {
<del> handleObj = handlers[ j ];
<del>
<del> // Triggered event must either 1) be non-exclusive and have no namespace, or
<del> // 2) have namespace(s) a subset or equal to those in the bound event (both can have no namespace).
<del> if ( run_all || (!event.namespace && !handleObj.namespace) || event.namespace_re && event.namespace_re.test( handleObj.namespace ) ) {
<del>
<del> // Pass in a reference to the handler function itself
<del> // So that we can later remove it
<del> event.handler = handleObj.handler;
<del> event.data = handleObj.data;
<del> event.handleObj = handleObj;
<del>
<del> ret = ( specialHandle || handleObj.handler ).apply( target, args );
<del>
<del> if ( ret !== undefined ) {
<del> event.result = ret;
<del> if ( ret === false ) {
<del> event.preventDefault();
<del> event.stopPropagation();
<del> }
<del> }
<del> }
<del> }
<del>}
<del>
<ide> jQuery.removeEvent = document.removeEventListener ?
<ide> function( elem, type, handle ) {
<ide> if ( elem.removeEventListener ) {
| 1
|
PHP
|
PHP
|
remove duplicate docs
|
c36bf10602f20e4fb53797076ce185297db37bed
|
<ide><path>src/Routing/Router.php
<ide> public static function url($url = null, $full = false)
<ide> *
<ide> * ### Usage
<ide> *
<del> * - `Router::url('/posts/edit/1');` Returns the string with the base dir prepended.
<del> * This usage does not use reverser routing.
<del> * - `Router::url(['controller' => 'posts', 'action' => 'edit']);` Returns a URL
<del> * generated through reverse routing.
<del> * - `Router::url(['_name' => 'custom-name', ...]);` Returns a URL generated
<del> * through reverse routing. This form allows you to leverage named routes.
<del> *
<del> * There are a few 'special' parameters that can change the final URL string that is generated
<del> *
<del> * - `_base` - Set to false to remove the base path from the generated URL. If your application
<del> * is not in the root directory, this can be used to generate URLs that are 'cake relative'.
<del> * cake relative URLs are required when using requestAction.
<del> * - `_scheme` - Set to create links on different schemes like `webcal` or `ftp`. Defaults
<del> * to the current scheme.
<del> * - `_host` - Set the host to use for the link. Defaults to the current host.
<del> * - `_port` - Set the port if you need to create links on non-standard ports.
<del> * - `_full` - If true output of `Router::fullBaseUrl()` will be prepended to generated URLs.
<del> * - `#` - Allows you to set URL hash fragments.
<del> * - `_ssl` - Set to true to convert the generated URL to https, or false to force http.
<del> * - `_name` - Name of route. If you have setup named routes you can use this key
<del> * to specify it.
<add> * @see Router::url()
<ide> *
<ide> * @param string|array|null $url An array specifying any of the following:
<ide> * 'controller', 'action', 'plugin' additionally, you can provide routed
| 1
|
PHP
|
PHP
|
add maptodictionary tests
|
7d8d81693aaa18d07e3fe0cd6e380a7518361406
|
<ide><path>tests/Support/SupportCollectionTest.php
<ide> public function testFlatMap()
<ide> $this->assertEquals(['programming', 'basketball', 'music', 'powerlifting'], $data->all());
<ide> }
<ide>
<add> public function testMapToDictionary()
<add> {
<add> $data = new Collection([
<add> ['id' => 1, 'name' => 'A'],
<add> ['id' => 2, 'name' => 'B'],
<add> ['id' => 3, 'name' => 'C'],
<add> ['id' => 4, 'name' => 'B'],
<add> ]);
<add>
<add> $groups = $data->mapToDictionary(function ($item, $key) {
<add> return [$item['name'] => $item['id']];
<add> });
<add>
<add> $this->assertInstanceOf(Collection::class, $groups);
<add> $this->assertEquals(['A' => [1], 'B' => [2, 4], 'C' => [3]], $groups->toArray());
<add> $this->assertInternalType('array', $groups['A']);
<add> }
<add>
<add> public function testMapToDictionaryWithNumericKeys()
<add> {
<add> $data = new Collection([1, 2, 3, 2, 1]);
<add>
<add> $groups = $data->mapToDictionary(function ($item, $key) {
<add> return [$item => $key];
<add> });
<add>
<add> $this->assertEquals([1 => [0, 4], 2 => [1, 3], 3 => [2]], $groups->toArray());
<add> }
<add>
<ide> public function testMapToGroups()
<ide> {
<ide> $data = new Collection([
| 1
|
Javascript
|
Javascript
|
handle division by zero in partition
|
dce7571381c9be26b33d1964abc7f0b02d2b8aa7
|
<ide><path>src/layout/partition.js
<ide> d3.layout.partition = function() {
<ide> n = children.length,
<ide> c,
<ide> d;
<del> dx /= node.value;
<add> dx = node.value === 0
<add> ? 0 : dx / node.value;
<ide> while (++i < n) {
<ide> position(c = children[i], x, d = c.value * dx, dy);
<ide> x += d;
| 1
|
Javascript
|
Javascript
|
remove unused vars and fix typo.
|
48d3ae2dd6a326814914cc01f87f263d3afad38c
|
<ide><path>examples/with-apollo-auth/components/RegisterBox.js
<ide> const RegisterBox = ({ client }) => {
<ide>
<ide> name.value = email.value = password.value = ''
<ide> }}>
<del> {error && <p>Issue occured while registering :(</p>}
<add> {error && <p>Issue occurred while registering :(</p>}
<ide> <input name='name' placeholder='Name' ref={node => { name = node }} /><br />
<ide> <input name='email' placeholder='Email' ref={node => { email = node }} /><br />
<ide> <input name='password' placeholder='Password' ref={node => { password = node }} type='password' /><br />
<ide><path>test/integration/basic/pages/error-in-the-browser-global-scope.js
<ide> if (typeof window !== 'undefined') {
<del> throw new Error('An Expected error occured')
<add> throw new Error('An Expected error occurred')
<ide> }
<ide>
<ide> export default () => <div />
<ide><path>test/integration/basic/pages/error-inside-browser-page.js
<ide> import React from 'react'
<ide> export default class ErrorInRenderPage extends React.Component {
<ide> render () {
<ide> if (typeof window !== 'undefined') {
<del> throw new Error('An Expected error occured')
<add> throw new Error('An Expected error occurred')
<ide> }
<ide> return <div />
<ide> }
<ide><path>test/integration/basic/test/client-navigation.js
<ide> import webdriver from 'next-webdriver'
<ide> import {waitFor, getReactErrorOverlayContent} from 'next-test-utils'
<ide>
<del>export default (context, render) => {
<add>export default (context) => {
<ide> describe('Client Navigation', () => {
<ide> describe('with <Link/>', () => {
<ide> it('should navigate the page', async () => {
<ide> export default (context, render) => {
<ide> browser = await webdriver(context.appPort, '/error-inside-browser-page')
<ide> await waitFor(3000)
<ide> const text = await getReactErrorOverlayContent(browser)
<del> expect(text).toMatch(/An Expected error occured/)
<add> expect(text).toMatch(/An Expected error occurred/)
<ide> expect(text).toMatch(/pages\/error-inside-browser-page\.js:5/)
<ide> } finally {
<ide> if (browser) {
<ide> export default (context, render) => {
<ide> browser = await webdriver(context.appPort, '/error-in-the-browser-global-scope')
<ide> await waitFor(3000)
<ide> const text = await getReactErrorOverlayContent(browser)
<del> expect(text).toMatch(/An Expected error occured/)
<add> expect(text).toMatch(/An Expected error occurred/)
<ide> expect(text).toMatch(/error-in-the-browser-global-scope\.js:2/)
<ide> } finally {
<ide> if (browser) {
<ide><path>test/integration/basic/test/error-recovery.js
<ide> import webdriver from 'next-webdriver'
<ide> import { join } from 'path'
<ide> import { check, File, waitFor, getReactErrorOverlayContent, getBrowserBodyText } from 'next-test-utils'
<ide>
<del>export default (context, render) => {
<add>export default (context) => {
<ide> describe('Error Recovery', () => {
<ide> it('should recover from 404 after a page has been added', async () => {
<ide> let browser
<ide><path>test/integration/basic/test/process-env.js
<ide> /* eslint-env jest */
<ide> import webdriver from 'next-webdriver'
<ide>
<del>export default (context, render) => {
<add>export default (context) => {
<ide> describe('process.env', () => {
<ide> it('should set process.env.NODE_ENV in development', async () => {
<ide> const browser = await webdriver(context.appPort, '/process-env')
<ide><path>test/integration/basic/test/rendering.js
<ide> import cheerio from 'cheerio'
<ide> import {BUILD_MANIFEST, REACT_LOADABLE_MANIFEST} from 'next-server/constants'
<ide> import { join } from 'path'
<ide>
<del>export default function ({ app }, suiteName, render, fetch, appPort) {
<add>export default function ({ app }, suiteName, render, fetch) {
<ide> async function get$ (path, query) {
<ide> const html = await render(path, query)
<ide> return cheerio.load(html)
<ide><path>test/integration/production/pages/error-in-browser-render-status-code.js
<ide> import React from 'react'
<ide> export default class ErrorInRenderPage extends React.Component {
<ide> render () {
<ide> if (typeof window !== 'undefined') {
<del> const error = new Error('An Expected error occured')
<add> const error = new Error('An Expected error occurred')
<ide> // This will be extracted by getInitialProps in the _error page,
<ide> // which will result in a different error message being rendered.
<ide> error.statusCode = 404
| 8
|
Python
|
Python
|
tweak a sentence about broadcasting
|
9de802ab28252b1d1480382db96a917e7f519cfa
|
<ide><path>numpy/doc/broadcasting.py
<ide> General Broadcasting Rules
<ide> ==========================
<ide> When operating on two arrays, NumPy compares their shapes element-wise.
<del>It starts with the trailing dimensions and works its way forward. Two
<del>dimensions are compatible when
<add>It starts with the trailing (i.e. rightmost) dimensions and works its
<add>way left. Two dimensions are compatible when
<ide>
<ide> 1) they are equal, or
<ide> 2) one of them is 1
| 1
|
Ruby
|
Ruby
|
fix typo on the add_index
|
611020b4dd16799cc836ab15d7a010e6ad48c1ad
|
<ide><path>activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
<ide> def rename_column(table_name, column_name, new_column_name)
<ide> # Note: SQLite doesn't support index length
<ide> #
<ide> # ====== Creating an index with a sort order (desc or asc, asc is the default)
<del> # add_index(:accounts, [:branch_id, :party_id, :surname], :order => {:branch_id => :desc, :part_id => :asc})
<add> # add_index(:accounts, [:branch_id, :party_id, :surname], :order => {:branch_id => :desc, :party_id => :asc})
<ide> # generates
<ide> # CREATE INDEX by_branch_desc_party ON accounts(branch_id DESC, party_id ASC, surname)
<ide> #
| 1
|
Ruby
|
Ruby
|
remove potential `variants` mutation in `decorate`
|
bcf49299ff3afcb08ba5e155cedeb62a8b79f1fa
|
<ide><path>actionview/lib/action_view/template/resolver.rb
<ide> def decorate(templates, path_info, details, locals)
<ide> cached = nil
<ide> templates.each do |t|
<ide> t.locals = locals
<del> t.variants = details[:variants] || [] if t.variants.empty?
<ide> t.virtual_path ||= (cached ||= build_path(*path_info))
<ide> end
<ide> end
| 1
|
PHP
|
PHP
|
fix docstriing link
|
781466cea4a4766acf8f84a0dca1a22543a2be9d
|
<ide><path>src/Console/ConsoleIo.php
<ide> public function ask($prompt, $default = null)
<ide> *
<ide> * @param int $mode The output mode.
<ide> * @return void
<del> * @see \Cake\Console\ConsoleOutput::outputAs()
<add> * @see \Cake\Console\ConsoleOutput::setOutputAs()
<ide> */
<ide> public function setOutputAs($mode)
<ide> {
| 1
|
Javascript
|
Javascript
|
add reactfibererrordialog from react + tests
|
a9cab21010146eea7f6e5e22c026bd297468bbca
|
<ide><path>Libraries/Core/ReactFiberErrorDialog.js
<add>/**
<add> * Copyright (c) Facebook, Inc. and its affiliates.
<add> *
<add> * This source code is licensed under the MIT license found in the
<add> * LICENSE file in the root directory of this source tree.
<add> *
<add> * @format
<add> * @flow strict-local
<add> */
<add>
<add>export type CapturedError = {
<add> +componentName: ?string,
<add> +componentStack: string,
<add> +error: mixed,
<add> +errorBoundary: ?{},
<add> +errorBoundaryFound: boolean,
<add> +errorBoundaryName: string | null,
<add> +willRetry: boolean,
<add>};
<add>
<add>import {handleException} from './ExceptionsManager';
<add>
<add>/**
<add> * Intercept lifecycle errors and ensure they are shown with the correct stack
<add> * trace within the native redbox component.
<add> */
<add>export function showErrorDialog(capturedError: CapturedError): boolean {
<add> const {componentStack, error} = capturedError;
<add>
<add> let errorToHandle: Error;
<add>
<add> // Typically Errors are thrown but eg strings or null can be thrown as well.
<add> if (error instanceof Error) {
<add> const {message, name} = error;
<add>
<add> const summary = message ? `${name}: ${message}` : name;
<add>
<add> errorToHandle = error;
<add>
<add> try {
<add> errorToHandle.message = `${summary}\n\nThis error is located at:${componentStack}`;
<add> } catch (e) {}
<add> } else if (typeof error === 'string') {
<add> errorToHandle = new Error(
<add> `${error}\n\nThis error is located at:${componentStack}`,
<add> );
<add> } else {
<add> errorToHandle = new Error(`Unspecified error at:${componentStack}`);
<add> }
<add>
<add> handleException(errorToHandle, false);
<add>
<add> // Return false here to prevent ReactFiberErrorLogger default behavior of
<add> // logging error details to console.error. Calls to console.error are
<add> // automatically routed to the native redbox controller, which we've already
<add> // done above by calling ExceptionsManager.
<add> return false;
<add>}
<ide><path>Libraries/Core/__tests__/ReactFiberErrorDialog-test.js
<add>/**
<add> * Copyright (c) Facebook, Inc. and its affiliates.
<add> *
<add> * This source code is licensed under the MIT license found in the
<add> * LICENSE file in the root directory of this source tree.
<add> *
<add> * @format
<add> * @emails oncall+react_native
<add> */
<add>'use strict';
<add>
<add>const capturedErrorDefaults = {
<add> componentName: 'A',
<add> componentStack: '\n in A\n in B\n in C',
<add> errorBoundary: null,
<add> errorBoundaryFound: false,
<add> errorBoundaryName: null,
<add> willRetry: false,
<add>};
<add>
<add>describe('ReactFiberErrorDialog', () => {
<add> let ReactFiberErrorDialog, ExceptionsManager;
<add> beforeEach(() => {
<add> jest.resetModules();
<add> jest.mock('../ExceptionsManager', () => {
<add> return {
<add> handleException: jest.fn(),
<add> };
<add> });
<add> ReactFiberErrorDialog = require('../ReactFiberErrorDialog');
<add> ExceptionsManager = require('../ExceptionsManager');
<add> });
<add>
<add> describe('showErrorDialog', () => {
<add> test('forwards error instance to handleException', () => {
<add> const error = new ReferenceError('Some error happened');
<add> error.someCustomProp = 42;
<add> // Copy all the data we care about before any possible mutation.
<add> const {name, stack, message, someCustomProp} = error;
<add>
<add> const logToConsole = ReactFiberErrorDialog.showErrorDialog({
<add> ...capturedErrorDefaults,
<add> error,
<add> });
<add>
<add> expect(ExceptionsManager.handleException.mock.calls.length).toBe(1);
<add> const errorArg = ExceptionsManager.handleException.mock.calls[0][0];
<add> const isFatalArg = ExceptionsManager.handleException.mock.calls[0][1];
<add> // We intentionally don't test whether errorArg === error, because this
<add> // implementation detail might change. Instead, we test that they are
<add> // functionally equivalent.
<add> expect(errorArg).toBeInstanceOf(ReferenceError);
<add> expect(errorArg).toHaveProperty('name', name);
<add> expect(errorArg).toHaveProperty('stack', stack);
<add> expect(errorArg).toHaveProperty('someCustomProp', someCustomProp);
<add> expect(errorArg).toHaveProperty(
<add> 'message',
<add> 'ReferenceError: ' +
<add> message +
<add> '\n\n' +
<add> 'This error is located at:' +
<add> capturedErrorDefaults.componentStack,
<add> );
<add> expect(isFatalArg).toBe(false);
<add> expect(logToConsole).toBe(false);
<add> });
<add>
<add> test('wraps string in an Error and sends to handleException', () => {
<add> const message = 'Some error happened';
<add>
<add> const logToConsole = ReactFiberErrorDialog.showErrorDialog({
<add> ...capturedErrorDefaults,
<add> error: message,
<add> });
<add>
<add> expect(ExceptionsManager.handleException.mock.calls.length).toBe(1);
<add> const errorArg = ExceptionsManager.handleException.mock.calls[0][0];
<add> const isFatalArg = ExceptionsManager.handleException.mock.calls[0][1];
<add> expect(errorArg).toBeInstanceOf(Error);
<add> expect(errorArg).toHaveProperty(
<add> 'message',
<add> message +
<add> '\n\n' +
<add> 'This error is located at:' +
<add> capturedErrorDefaults.componentStack,
<add> );
<add> expect(isFatalArg).toBe(false);
<add> expect(logToConsole).toBe(false);
<add> });
<add>
<add> test('reports "Unspecified error" if error is null', () => {
<add> const logToConsole = ReactFiberErrorDialog.showErrorDialog({
<add> ...capturedErrorDefaults,
<add> error: null,
<add> });
<add>
<add> expect(ExceptionsManager.handleException.mock.calls.length).toBe(1);
<add> const errorArg = ExceptionsManager.handleException.mock.calls[0][0];
<add> const isFatalArg = ExceptionsManager.handleException.mock.calls[0][1];
<add> expect(errorArg).toBeInstanceOf(Error);
<add> expect(errorArg).toHaveProperty(
<add> 'message',
<add> 'Unspecified error at:' + capturedErrorDefaults.componentStack,
<add> );
<add> expect(isFatalArg).toBe(false);
<add> expect(logToConsole).toBe(false);
<add> });
<add> });
<add>});
<ide><path>Libraries/ReactPrivate/ReactNativePrivateInterface.js
<ide> module.exports = {
<ide> get flattenStyle() {
<ide> return require('../StyleSheet/flattenStyle');
<ide> },
<add> get ReactFiberErrorDialog() {
<add> return require('../Core/ReactFiberErrorDialog');
<add> },
<ide> };
| 3
|
Python
|
Python
|
add some options on windows
|
3bb1ced681fbff6b0282a1bbbea4c164399beed1
|
<ide><path>glances/main.py
<ide> def init_args(self):
<ide> dest='webserver', help='run Glances in web server mode (bottle needed)')
<ide> parser.add_argument('--cached-time', default=self.cached_time, type=int,
<ide> dest='cached_time', help='set the server cache time [default: {} sec]'.format(self.cached_time))
<del> parser.add_argument('--open-web-browser', action='store_true', default=False,
<del> dest='open_web_browser', help='try to open the Web UI in the default Web browser')
<add> if not WINDOWS:
<add> parser.add_argument('--open-web-browser', action='store_true', default=False,
<add> dest='open_web_browser', help='try to open the Web UI in the default Web browser')
<ide> # Display options
<del> parser.add_argument('-q', '--quiet', default=False, action='store_true',
<del> dest='quiet', help='do not display the curses interface')
<add> if not WINDOWS:
<add> parser.add_argument('-q', '--quiet', default=False, action='store_true',
<add> dest='quiet', help='do not display the curses interface')
<ide> parser.add_argument('-f', '--process-filter', default=None, type=str,
<ide> dest='process_filter', help='set the process filter pattern (regular expression)')
<ide> parser.add_argument('--process-short-name', action='store_true', default=False,
| 1
|
PHP
|
PHP
|
add type mapping for binary uuids
|
3324bcb842d1ebfb7a02c0ec9ccd73bedfa34a26
|
<ide><path>src/Database/Type.php
<ide> class Type implements TypeInterface
<ide> 'integer' => 'Cake\Database\Type\IntegerType',
<ide> 'biginteger' => 'Cake\Database\Type\IntegerType',
<ide> 'binary' => 'Cake\Database\Type\BinaryType',
<add> 'binaryuuid' => 'Cake\Database\Type\BinaryUuidType',
<ide> 'boolean' => 'Cake\Database\Type\BoolType',
<ide> 'date' => 'Cake\Database\Type\DateType',
<ide> 'datetime' => 'Cake\Database\Type\DateTimeType',
| 1
|
Ruby
|
Ruby
|
allow dashes in tap-formula names
|
fe9e2be8fe059d8dd67274f19fecc3fdca132054
|
<ide><path>Library/Homebrew/exceptions.rb
<ide> def dependent_s
<ide> end
<ide>
<ide> def to_s
<del> if name =~ %r{(\w+)/(\w+)/(\w+)} then <<-EOS.undent
<add> if name =~ %r{(\w+)/(\w+)/([^/]+)} then <<-EOS.undent
<ide> No available formula for #$3 #{dependent_s}
<ide> Please tap it and then try again: brew tap #$1/#$2
<ide> EOS
| 1
|
PHP
|
PHP
|
add test to show the handle of special formats
|
647d226715f9600e0e61f36f33e1465eff535524
|
<ide><path>tests/Database/DatabaseEloquentIntegrationTest.php
<ide> public function testTimestampsUsingOldSqlServerDateFormatFallbackToDefaultParsin
<ide> $this->assertFalse(Date::hasFormat('2017-11-14 08:23:19.734', $model->getDateFormat()));
<ide> }
<ide>
<add> public function testSpecialFormats()
<add> {
<add> $model = new EloquentTestUser;
<add> $model->setDateFormat('!Y-d-m \\Y');
<add> $model->setRawAttributes([
<add> 'updated_at' => '2017-05-11 Y',
<add> ]);
<add>
<add> $date = $model->getAttribute('updated_at');
<add> $this->assertSame('2017-11-05 00:00:00.000000', $date->format('Y-m-d H:i:s.u'), 'the date should respect the whole format');
<add>
<add> $model->setDateFormat('Y d m|');
<add> $model->setRawAttributes([
<add> 'updated_at' => '2020 11 09',
<add> ]);
<add>
<add> $date = $model->getAttribute('updated_at');
<add> $this->assertSame('2020-09-11 00:00:00.000000', $date->format('Y-m-d H:i:s.u'), 'the date should respect the whole format');
<add>
<add> $model->setDateFormat('Y d m|*');
<add> $model->setRawAttributes([
<add> 'updated_at' => '2020 11 09 foo',
<add> ]);
<add>
<add> $date = $model->getAttribute('updated_at');
<add> $this->assertSame('2020-09-11 00:00:00.000000', $date->format('Y-m-d H:i:s.u'), 'the date should respect the whole format');
<add> }
<add>
<ide> public function testUpdatingChildModelTouchesParent()
<ide> {
<ide> $before = Carbon::now();
| 1
|
Text
|
Text
|
add project tests
|
531c3f9997a4e91ad27a0c64a7676a5d14f60ece
|
<ide><path>curriculum/challenges/english/09-information-security/information-security-projects/secure-real-time-multiplayer-game.md
<ide> Develop a 2D real time multiplayer game using the HTML Canvas API and [Socket.io
<ide>
<ide> When you are done, make sure a working demo of your project is hosted somewhere public. Then submit the URL to it in the `Solution Link` field. Optionally, also submit a link to your project's source code in the `GitHub Link` field.
<ide>
<add># --instructions--
<add>
<add>**Note**: `helmet@^3.21.3` is needed for the user stories. This means you will need to use the previous version of Helmet's docs, for information on how to achieve the user stories.
<add>
<ide> # --hints--
<ide>
<ide> You can provide your own project, not the example URL.
<ide> Players can disconnect from the game at any time.
<ide> Prevent the client from trying to guess / sniff the MIME type.
<ide>
<ide> ```js
<del>
<add>async (getUserInput) => {
<add> const data = await fetch(getUserInput('url') + '/_api/app-info');
<add> const parsed = await data.json();
<add> assert.equal(parsed.headers['x-content-type-options'], 'nosniff');
<add>};
<ide> ```
<ide>
<ide> Prevent cross-site scripting (XSS) attacks.
<ide>
<ide> ```js
<del>
<add>async (getUserInput) => {
<add> const data = await fetch(getUserInput('url') + '/_api/app-info');
<add> const parsed = await data.json();
<add> assert.equal(parsed.headers['x-xss-protection'], '1; mode=block');
<add>};
<ide> ```
<ide>
<ide> Nothing from the website is cached in the client.
<ide>
<ide> ```js
<del>
<add>async (getUserInput) => {
<add> const data = await fetch(getUserInput('url') + '/_api/app-info');
<add> const parsed = await data.json();
<add> assert.equal(parsed.headers['surrogate-control'], 'no-store');
<add> assert.equal(
<add> parsed.headers['cache-control'],
<add> 'no-store, no-cache, must-revalidate, proxy-revalidate'
<add> );
<add> assert.equal(parsed.headers['pragma'], 'no-cache');
<add> assert.equal(parsed.headers['expires'], '0');
<add>};
<ide> ```
<ide>
<ide> The headers say that the site is powered by "PHP 7.4.3" even though it isn't (as a security measure).
<ide>
<ide> ```js
<del>
<add>async (getUserInput) => {
<add> const data = await fetch(getUserInput('url') + '/_api/app-info');
<add> const parsed = await data.json();
<add> assert.equal(parsed.headers['x-powered-by'], 'PHP 7.4.3');
<add>};
<ide> ```
<ide>
<ide> # --solutions--
| 1
|
Ruby
|
Ruby
|
add incineration by default
|
3c8fc4e9ae53af9739a0e84b1141a79e03552d4a
|
<ide><path>app/jobs/action_mailroom/inbound_email/incineration_job.rb
<add>class ActionMailroom::InboundEmail::IncinerationJob < ApplicationJob
<add> queue_as :action_mailroom_incineration
<add>
<add> def self.schedule(inbound_email)
<add> set(wait: ActionMailroom::InboundEmail::Incineratable::INCINERATABLE_AFTER).perform_later(inbound_email)
<add> end
<add>
<add> def perform(inbound_email)
<add> inbound_email.incinerate
<add> end
<add>end
<ide><path>app/models/action_mailroom/inbound_email.rb
<ide> require "mail"
<ide>
<ide> class ActionMailroom::InboundEmail < ActiveRecord::Base
<add> include Incineratable
<add>
<ide> self.table_name = "action_mailroom_inbound_emails"
<ide>
<ide> has_one_attached :raw_email
<ide><path>app/models/action_mailroom/inbound_email/incineratable.rb
<add>module ActionMailroom::InboundEmail::Incineratable
<add> extend ActiveSupport::Concern
<add>
<add> # TODO: Extract into framework configuration
<add> INCINERATABLE_AFTER = 30.days
<add>
<add> included do
<add> before_update :remember_to_incinerate_later
<add> after_update_commit :incinerate_later, if: :need_to_incinerate_later?
<add> end
<add>
<add> def incinerate
<add> Incineration.new(self).run
<add> end
<add>
<add> private
<add> # TODO: Use enum change tracking once merged into Active Support
<add> def remember_to_incinerate_later
<add> if status_changed? && (delivered? || failed?)
<add> @incinerate_later = true
<add> end
<add> end
<add>
<add> def need_to_incinerate_later?
<add> @incinerate_later
<add> end
<add>
<add> def incinerate_later
<add> ActionMailroom::InboundEmail::IncinerationJob.schedule(self)
<add> end
<add>end
<ide><path>app/models/action_mailroom/inbound_email/incineratable/incineration.rb
<add>class ActionMailroom::InboundEmail::Incineratable::Incineration
<add> def initialize(inbound_email)
<add> @inbound_email = inbound_email
<add> end
<add>
<add> def run
<add> @inbound_email.destroy if due? && processed?
<add> end
<add>
<add> private
<add> def due?
<add> @inbound_email.updated_at < ActionMailroom::InboundEmail::Incineratable::INCINERATABLE_AFTER.ago.end_of_day
<add> end
<add>
<add> def processed?
<add> @inbound_email.delivered? || @inbound_email.failed?
<add> end
<add>end
<ide><path>test/unit/inbound_email/incineration_test.rb
<add>require_relative '../../test_helper'
<add>
<add>class ActionMailroom::InboundEmail::IncinerationTest < ActiveSupport::TestCase
<add> include ActiveJob::TestHelper
<add>
<add> test "incinerate emails 30 days after they have been processed" do
<add> freeze_time
<add>
<add> assert_enqueued_with job: ActionMailroom::InboundEmail::IncinerationJob, at: 30.days.from_now do
<add> inbound_email = create_inbound_email("welcome.eml")
<add> inbound_email.delivered!
<add> end
<add> end
<add>end
| 5
|
Text
|
Text
|
fix syntax errors
|
fb45ef36795ff763646daf6c39ac46caa7e44f8b
|
<ide><path>docs/docs/10.1-animation.md
<ide> It is also possible to use custom class names for each of the steps in your tran
<ide> ```javascript
<ide> ...
<ide> <ReactCSSTransitionGroup
<del> transitionName={
<add> transitionName={{
<ide> enter: 'enter',
<ide> enterActive: 'enterActive',
<ide> leave: 'leave',
<ide> leaveActive: 'leaveActive',
<ide> appear: 'appear',
<ide> appearActive: 'appearActive'
<del> }>
<add> }}>
<ide> {item}
<ide> </ReactCSSTransitionGroup>
<ide>
<ide> <ReactCSSTransitionGroup
<del> transitionName={
<add> transitionName={{
<ide> enter: 'enter',
<ide> leave: 'leave',
<ide> appear: 'appear'
<del> }>
<add> }}>
<ide> {item2}
<ide> </ReactCSSTransitionGroup>
<ide> ...
| 1
|
Go
|
Go
|
remove unused pluginregistryservice
|
61599d0a4d61262e00142b9bc4d555350d70f7a5
|
<ide><path>plugin/manager.go
<ide> import (
<ide>
<ide> "github.com/containerd/containerd/content"
<ide> "github.com/containerd/containerd/content/local"
<del> "github.com/docker/distribution/reference"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/pkg/authorization"
<ide> "github.com/docker/docker/pkg/containerfs"
<ide> type controller struct {
<ide> timeoutInSecs int
<ide> }
<ide>
<del>// pluginRegistryService ensures that all resolved repositories
<del>// are of the plugin class.
<del>type pluginRegistryService struct {
<del> registry.Service
<del>}
<del>
<del>func (s pluginRegistryService) ResolveRepository(name reference.Named) (repoInfo *registry.RepositoryInfo, err error) {
<del> repoInfo, err = s.Service.ResolveRepository(name)
<del> if repoInfo != nil {
<del> repoInfo.Class = "plugin"
<del> }
<del> return
<del>}
<del>
<ide> // NewManager returns a new plugin manager.
<ide> func NewManager(config ManagerConfig) (*Manager, error) {
<del> if config.RegistryService != nil {
<del> config.RegistryService = pluginRegistryService{config.RegistryService}
<del> }
<ide> manager := &Manager{
<ide> config: config,
<ide> }
| 1
|
Javascript
|
Javascript
|
avoid duplicate attachtodom call
|
218eb57f3a2ada94d6691d2c60afc56556d7719a
|
<ide><path>spec/text-editor-element-spec.js
<ide> describe('TextEditorElement', () => {
<ide> spyOn(window, 'requestAnimationFrame').andCallFake(fn => fn())
<ide>
<ide> const element = buildTextEditorElement()
<del> jasmine.attachToDOM(element)
<ide>
<ide> expect(element.isUpdatedSynchronously()).toBe(false)
<ide>
| 1
|
Python
|
Python
|
test strict like=
|
15483daf1d047bb9929bf54a3b826f49d4555725
|
<ide><path>numpy/core/tests/test_overrides.py
<ide> def test_array_like(self, function, args, kwargs, numpy_ref):
<ide> assert array_like.function is my_func
<ide>
<ide> @pytest.mark.parametrize('function, args, kwargs', _array_tests)
<del> @pytest.mark.parametrize('numpy_ref', [True, False])
<add> @pytest.mark.parametrize('ref', [1, [1], MyNoArrayFunctionArray])
<ide> @requires_array_function
<del> def test_no_array_function_like(self, function, args, kwargs, numpy_ref):
<add> def test_no_array_function_like(self, function, args, kwargs, ref):
<ide> TestArrayLike.add_method('array', TestArrayLike.MyNoArrayFunctionArray)
<ide> TestArrayLike.add_method(function, TestArrayLike.MyNoArrayFunctionArray)
<ide> np_func = getattr(np, function)
<del> my_func = getattr(TestArrayLike.MyNoArrayFunctionArray, function)
<ide>
<del> if numpy_ref is True:
<del> ref = np.array(1)
<del> else:
<del> ref = TestArrayLike.MyNoArrayFunctionArray.array()
<add> # Instantiate ref if it's the MyNoArrayFunctionArray class
<add> if ref is TestArrayLike.MyNoArrayFunctionArray:
<add> ref = ref.array()
<ide>
<ide> like_args = tuple(a() if callable(a) else a for a in args)
<del> array_like = np_func(*like_args, **kwargs, like=ref)
<del>
<del> assert type(array_like) is np.ndarray
<del> if numpy_ref is True:
<del> np_args = tuple(a() if callable(a) else a for a in args)
<del> np_arr = np_func(*np_args, **kwargs)
<del>
<del> # Special-case np.empty to ensure values match
<del> if function == "empty":
<del> np_arr.fill(1)
<del> array_like.fill(1)
<ide>
<del> assert_equal(array_like, np_arr)
<add> with assert_raises(ValueError):
<add> np_func(*like_args, **kwargs, like=ref)
<ide>
<ide> @pytest.mark.parametrize('numpy_ref', [True, False])
<ide> def test_array_like_fromfile(self, numpy_ref):
| 1
|
Javascript
|
Javascript
|
allow tabs in input"
|
d9e250295bca90438d87a6f7bb85186ad75d2ba0
|
<ide><path>lib/readline.js
<ide> function Interface(input, output, completer, terminal) {
<ide> }
<ide> historySize = historySize || kHistorySize;
<ide>
<del> if (completer && typeof completer !== 'function') {
<add> completer = completer || function() { return []; };
<add>
<add> if (typeof completer !== 'function') {
<ide> throw new TypeError('Argument \'completer\' must be a function');
<ide> }
<ide>
<ide> function Interface(input, output, completer, terminal) {
<ide> this.historySize = historySize;
<ide>
<ide> // Check arity, 2 - for async, 1 for sync
<del> if (typeof completer === 'function') {
<del> this.completer = completer.length === 2 ? completer : function(v, cb) {
<del> cb(null, completer(v));
<del> };
<del> }
<add> this.completer = completer.length === 2 ? completer : function(v, callback) {
<add> callback(null, completer(v));
<add> };
<ide>
<ide> this.setPrompt('> ');
<ide>
<ide> Interface.prototype._normalWrite = function(b) {
<ide> };
<ide>
<ide> Interface.prototype._insertString = function(c) {
<add> //BUG: Problem when adding tabs with following content.
<add> // Perhaps the bug is in _refreshLine(). Not sure.
<add> // A hack would be to insert spaces instead of literal '\t'.
<ide> if (this.cursor < this.line.length) {
<ide> var beg = this.line.slice(0, this.cursor);
<ide> var end = this.line.slice(this.cursor, this.line.length);
<ide> Interface.prototype._ttyWrite = function(s, key) {
<ide> this._deleteRight();
<ide> break;
<ide>
<add> case 'tab': // tab completion
<add> this._tabComplete();
<add> break;
<add>
<ide> case 'left':
<ide> this._moveCursor(-1);
<ide> break;
<ide> Interface.prototype._ttyWrite = function(s, key) {
<ide> this._historyNext();
<ide> break;
<ide>
<del> case 'tab':
<del> // If tab completion enabled, do that...
<del> if (typeof this.completer === 'function') {
<del> this._tabComplete();
<del> break;
<del> }
<del> // falls through
<del>
<ide> default:
<ide> if (s instanceof Buffer)
<ide> s = s.toString('utf-8');
<ide><path>test/parallel/test-readline-interface.js
<ide> function isWarned(emitter) {
<ide> assert.equal(callCount, expectedLines.length);
<ide> rli.close();
<ide>
<del> // \t when there is no completer function should behave like an ordinary
<del> // character
<del> fi = new FakeInput();
<del> rli = new readline.Interface({ input: fi, output: fi, terminal: true });
<del> called = false;
<del> rli.on('line', function(line) {
<del> assert.equal(line, '\t');
<del> assert.strictEqual(called, false);
<del> called = true;
<del> });
<del> fi.emit('data', '\t');
<del> fi.emit('data', '\n');
<del> assert.ok(called);
<del> rli.close();
<del>
<del> // \t does not become part of the input when there is a completer function
<del> fi = new FakeInput();
<del> var completer = function(line) {
<del> return [[], line];
<del> };
<del> rli = new readline.Interface({
<del> input: fi,
<del> output: fi,
<del> terminal: true,
<del> completer: completer
<del> });
<del> called = false;
<del> rli.on('line', function(line) {
<del> assert.equal(line, 'foo');
<del> assert.strictEqual(called, false);
<del> called = true;
<del> });
<del> fi.emit('data', '\tfo\to\t');
<del> fi.emit('data', '\n');
<del> assert.ok(called);
<del> rli.close();
<del>
<del> // constructor throws if completer is not a function or undefined
<del> fi = new FakeInput();
<del> assert.throws(function() {
<del> readline.createInterface({
<del> input: fi,
<del> completer: 'string is not valid'
<del> });
<del> }, function(err) {
<del> if (err instanceof TypeError) {
<del> if (/Argument \'completer\' must be a function/.test(err)) {
<del> return true;
<del> }
<del> }
<del> return false;
<del> });
<del>
<ide> // sending a multi-byte utf8 char over multiple writes
<ide> var buf = Buffer('☮', 'utf8');
<ide> fi = new FakeInput();
| 2
|
Ruby
|
Ruby
|
fix fraction float assertions in time_ext_test
|
4484db409446016b89eb1144d96db4f553f296d5
|
<ide><path>activesupport/test/core_ext/time_ext_test.rb
<ide> def test_sec_fraction
<ide> assert_equal Rational(1, 1_000_000_000), time.sec_fraction
<ide>
<ide> time = Time.utc(2016, 4, 23, 0, 0, 0.000_000_001)
<del> assert_equal Rational(1, 1_000_000_000), time.sec_fraction
<add> assert_equal 0.000_000_001.to_r, time.sec_fraction
<ide>
<ide> time = Time.utc(2016, 4, 23, 0, 0, 0, Rational(1, 1_000))
<ide> assert_equal Rational(1, 1_000_000_000), time.sec_fraction
<ide>
<ide> time = Time.utc(2016, 4, 23, 0, 0, 0, 0.001)
<del> assert_equal Rational(1, 1_000_000_000), time.sec_fraction
<add> assert_equal 0.001.to_r / 1000000, time.sec_fraction
<ide> end
<ide>
<ide> def test_beginning_of_day
| 1
|
Text
|
Text
|
clarify more optional parameters in node-api
|
0b4d06657b1ad91617ab8516d148e6a2a953b358
|
<ide><path>doc/api/n-api.md
<ide> napi_status napi_create_arraybuffer(napi_env env,
<ide> * `[in] env`: The environment that the API is invoked under.
<ide> * `[in] length`: The length in bytes of the array buffer to create.
<ide> * `[out] data`: Pointer to the underlying byte buffer of the `ArrayBuffer`.
<add> `data` can optionally be ignored by passing `NULL`.
<ide> * `[out] result`: A `napi_value` representing a JavaScript `ArrayBuffer`.
<ide>
<ide> Returns `napi_ok` if the API succeeded.
<ide> napi_status napi_create_buffer(napi_env env,
<ide> * `[in] env`: The environment that the API is invoked under.
<ide> * `[in] size`: Size in bytes of the underlying buffer.
<ide> * `[out] data`: Raw pointer to the underlying buffer.
<add> `data` can optionally be ignored by passing `NULL`.
<ide> * `[out] result`: A `napi_value` representing a `node::Buffer`.
<ide>
<ide> Returns `napi_ok` if the API succeeded.
<ide> napi_status napi_create_buffer_copy(napi_env env,
<ide> of the new buffer).
<ide> * `[in] data`: Raw pointer to the underlying buffer to copy from.
<ide> * `[out] result_data`: Pointer to the new `Buffer`'s underlying data buffer.
<add> `result_data` can optionally be ignored by passing `NULL`.
<ide> * `[out] result`: A `napi_value` representing a `node::Buffer`.
<ide>
<ide> Returns `napi_ok` if the API succeeded.
<ide> napi_status napi_create_function(napi_env env,
<ide> ```
<ide>
<ide> * `[in] env`: The environment that the API is invoked under.
<del>* `[in] utf8Name`: The name of the function encoded as UTF8. This is visible
<del> within JavaScript as the new function object's `name` property.
<add>* `[in] utf8Name`: Optional name of the function encoded as UTF8. This is
<add> visible within JavaScript as the new function object's `name` property.
<ide> * `[in] length`: The length of the `utf8name` in bytes, or `NAPI_AUTO_LENGTH` if
<ide> it is null-terminated.
<ide> * `[in] cb`: The native function which should be called when this function
| 1
|
Go
|
Go
|
introduce a `cli` package for test-integration
|
50c4475df6304e0cf12ea95217eb00ab5d572e34
|
<ide><path>integration-cli/check_test.go
<ide> import (
<ide>
<ide> "github.com/docker/docker/api/types/swarm"
<ide> cliconfig "github.com/docker/docker/cli/config"
<add> "github.com/docker/docker/integration-cli/cli"
<ide> "github.com/docker/docker/integration-cli/daemon"
<ide> "github.com/docker/docker/integration-cli/environment"
<ide> "github.com/docker/docker/integration-cli/registry"
<ide> func init() {
<ide> }
<ide>
<ide> func TestMain(m *testing.M) {
<del> var err error
<del> if dockerBin := os.Getenv("DOCKER_BINARY"); dockerBin != "" {
<del> dockerBinary = dockerBin
<del> }
<del> dockerBinary, err = exec.LookPath(dockerBinary)
<del> if err != nil {
<del> fmt.Printf("ERROR: couldn't resolve full path to the Docker binary (%v)\n", err)
<del> os.Exit(1)
<del> }
<add> dockerBinary = testEnv.DockerBinary()
<ide>
<ide> if testEnv.LocalDaemon() {
<ide> fmt.Println("INFO: Testing against a local daemon")
<ide> func TestMain(m *testing.M) {
<ide> }
<ide>
<ide> func Test(t *testing.T) {
<add> cli.EnsureTestEnvIsLoaded(t)
<ide> cmd := exec.Command(dockerBinary, "images", "-f", "dangling=false", "--format", "{{.Repository}}:{{.Tag}}")
<ide> cmd.Env = appendBaseEnv(true)
<ide> out, err := cmd.CombinedOutput()
<ide><path>integration-cli/cli/build/build.go
<add>package build
<add>
<add>import (
<add> "strings"
<add>
<add> icmd "github.com/docker/docker/pkg/testutil/cmd"
<add>)
<add>
<add>// WithDockerfile creates / returns a CmdOperator to set the Dockerfile for a build operation
<add>func WithDockerfile(dockerfile string) func(*icmd.Cmd) func() {
<add> return func(cmd *icmd.Cmd) func() {
<add> cmd.Command = append(cmd.Command, "-")
<add> cmd.Stdin = strings.NewReader(dockerfile)
<add> return nil
<add> }
<add>}
<add>
<add>// WithoutCache makes the build ignore cache
<add>func WithoutCache(cmd *icmd.Cmd) func() {
<add> cmd.Command = append(cmd.Command, "--no-cache")
<add> return nil
<add>}
<add>
<add>// WithContextPath sets the build context path
<add>func WithContextPath(path string) func(*icmd.Cmd) func() {
<add> // WithContextPath sets the build context path
<add> return func(cmd *icmd.Cmd) func() {
<add> cmd.Command = append(cmd.Command, path)
<add> return nil
<add> }
<add>}
<ide><path>integration-cli/cli/cli.go
<add>package cli
<add>
<add>import (
<add> "fmt"
<add> "sync"
<add> "time"
<add>
<add> "github.com/docker/docker/integration-cli/daemon"
<add> "github.com/docker/docker/integration-cli/environment"
<add> icmd "github.com/docker/docker/pkg/testutil/cmd"
<add>)
<add>
<add>var (
<add> testEnv *environment.Execution
<add> onlyOnce sync.Once
<add>)
<add>
<add>// EnsureTestEnvIsLoaded makes sure the test environment is loaded for this package
<add>func EnsureTestEnvIsLoaded(t testingT) {
<add> var doIt bool
<add> var err error
<add> onlyOnce.Do(func() {
<add> doIt = true
<add> })
<add>
<add> if !doIt {
<add> return
<add> }
<add> testEnv, err = environment.New()
<add> if err != nil {
<add> t.Fatalf("error loading testenv : %v", err)
<add> }
<add>}
<add>
<add>// CmdOperator defines functions that can modify a command
<add>type CmdOperator func(*icmd.Cmd) func()
<add>
<add>type testingT interface {
<add> Fatalf(string, ...interface{})
<add>}
<add>
<add>// DockerCmd executes the specified docker command and expects success
<add>func DockerCmd(t testingT, command string, args ...string) *icmd.Result {
<add> return Docker(Cmd(command, args...)).Assert(t, icmd.Success)
<add>}
<add>
<add>// BuildCmd executes the specified docker build command and expects success
<add>func BuildCmd(t testingT, name string, cmdOperators ...CmdOperator) *icmd.Result {
<add> return Docker(Build(name), cmdOperators...).Assert(t, icmd.Success)
<add>}
<add>
<add>// InspectCmd executes the specified docker inspect command and expects success
<add>func InspectCmd(t testingT, name string, cmdOperators ...CmdOperator) *icmd.Result {
<add> return Docker(Inspect(name), cmdOperators...).Assert(t, icmd.Success)
<add>}
<add>
<add>// Docker executes the specified docker command
<add>func Docker(cmd icmd.Cmd, cmdOperators ...CmdOperator) *icmd.Result {
<add> for _, op := range cmdOperators {
<add> deferFn := op(&cmd)
<add> if deferFn != nil {
<add> defer deferFn()
<add> }
<add> }
<add> appendDocker(&cmd)
<add> return icmd.RunCmd(cmd)
<add>}
<add>
<add>// Build executes the specified docker build command
<add>func Build(name string) icmd.Cmd {
<add> return icmd.Command("build", "-t", name)
<add>}
<add>
<add>// Inspect executes the specified docker inspect command
<add>func Inspect(name string) icmd.Cmd {
<add> return icmd.Command("inspect", name)
<add>}
<add>
<add>// Format sets the specified format with --format flag
<add>func Format(format string) func(*icmd.Cmd) func() {
<add> return func(cmd *icmd.Cmd) func() {
<add> cmd.Command = append(
<add> []string{cmd.Command[0]},
<add> append([]string{"--format", fmt.Sprintf("{{%s}}", format)}, cmd.Command[1:]...)...,
<add> )
<add> return nil
<add> }
<add>}
<add>
<add>func appendDocker(cmd *icmd.Cmd) {
<add> cmd.Command = append([]string{testEnv.DockerBinary()}, cmd.Command...)
<add>}
<add>
<add>// Cmd builds an icmd.Cmd struct from the specified command and arguments
<add>func Cmd(command string, args ...string) icmd.Cmd {
<add> return icmd.Command(command, args...)
<add>}
<add>
<add>// Daemon points to the specified daemon
<add>func Daemon(d *daemon.Daemon) func(*icmd.Cmd) func() {
<add> return func(cmd *icmd.Cmd) func() {
<add> cmd.Command = append([]string{"--host", d.Sock()}, cmd.Command...)
<add> return nil
<add> }
<add>}
<add>
<add>// WithTimeout sets the timeout for the command to run
<add>func WithTimeout(timeout time.Duration) func(cmd *icmd.Cmd) func() {
<add> return func(cmd *icmd.Cmd) func() {
<add> cmd.Timeout = timeout
<add> return nil
<add> }
<add>}
<add>
<add>// WithEnvironmentVariables sets the specified environment variables for the command to run
<add>func WithEnvironmentVariables(envs ...string) func(cmd *icmd.Cmd) func() {
<add> return func(cmd *icmd.Cmd) func() {
<add> cmd.Env = envs
<add> return nil
<add> }
<add>}
<add>
<add>// WithFlags sets the specified flags for the command to run
<add>func WithFlags(flags ...string) func(*icmd.Cmd) func() {
<add> return func(cmd *icmd.Cmd) func() {
<add> cmd.Command = append(cmd.Command, flags...)
<add> return nil
<add> }
<add>}
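
For context (not part of the patch): a minimal sketch of how a test is expected to compose these helpers, in line with the call sites converted later in this commit. The test name, image name, and Dockerfile are arbitrary, and actually running it assumes the integration-cli environment (docker binary and daemon) is available.

```go
package main

import (
	"testing"
	"time"

	"github.com/docker/docker/integration-cli/cli"
	"github.com/docker/docker/integration-cli/cli/build"
	icmd "github.com/docker/docker/pkg/testutil/cmd"
)

func TestExampleUsage(t *testing.T) {
	// Load the test environment once; cli.Docker needs it to locate the docker binary.
	cli.EnsureTestEnvIsLoaded(t)

	// Build an image from an inline Dockerfile and require success.
	cli.BuildCmd(t, "cli-example", build.WithDockerfile("FROM busybox\nENV FOO bar"))

	// Run an arbitrary docker command with extra flags and a timeout,
	// then assert on the result explicitly.
	res := cli.Docker(cli.Cmd("images"),
		cli.WithFlags("--format", "{{.Repository}}"),
		cli.WithTimeout(30*time.Second))
	res.Assert(t, icmd.Success)

	// Inspect a field of the freshly built image.
	id := cli.InspectCmd(t, "cli-example", cli.Format(".Id")).Combined()
	if id == "" {
		t.Fatal("expected a non-empty image ID")
	}
}
```
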
<ide><path>integration-cli/docker_api_containers_test.go
<ide> import (
<ide> mounttypes "github.com/docker/docker/api/types/mount"
<ide> networktypes "github.com/docker/docker/api/types/network"
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/integration-cli/request"
<ide> "github.com/docker/docker/pkg/ioutils"
<ide> "github.com/docker/docker/pkg/mount"
<ide> func (s *DockerSuite) TestContainersAPICreateMountsCreate(c *check.C) {
<ide> )
<ide> if testEnv.DaemonPlatform() != "windows" {
<ide> testImg = "test-mount-config"
<del> buildImageSuccessfully(c, testImg, withDockerfile(`
<add> buildImageSuccessfully(c, testImg, build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN mkdir `+destPath+` && touch `+destPath+slash+`bar
<ide> CMD cat `+destPath+slash+`bar
<ide><path>integration-cli/docker_api_images_test.go
<ide> import (
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/api/types/image"
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/integration-cli/request"
<ide> "github.com/go-check/check"
<ide> )
<ide> func (s *DockerSuite) TestAPIImagesSaveAndLoad(c *check.C) {
<ide> // TODO Windows to Windows CI: Investigate further why this test fails.
<ide> testRequires(c, Network)
<ide> testRequires(c, DaemonIsLinux)
<del> buildImageSuccessfully(c, "saveandload", withDockerfile("FROM busybox\nENV FOO bar"))
<add> buildImageSuccessfully(c, "saveandload", build.WithDockerfile("FROM busybox\nENV FOO bar"))
<ide> id := getIDByName(c, "saveandload")
<ide>
<ide> res, body, err := request.Get("/images/" + id + "/get")
<ide> func (s *DockerSuite) TestAPIImagesSaveAndLoad(c *check.C) {
<ide> defer loadBody.Close()
<ide> c.Assert(res.StatusCode, checker.Equals, http.StatusOK)
<ide>
<del> inspectOut := inspectField(c, id, "Id")
<add> inspectOut := cli.InspectCmd(c, id, cli.Format(".Id")).Combined()
<ide> c.Assert(strings.TrimSpace(string(inspectOut)), checker.Equals, id, check.Commentf("load did not work properly"))
<ide> }
<ide>
<ide> func (s *DockerSuite) TestAPIImagesDelete(c *check.C) {
<ide> testRequires(c, Network)
<ide> }
<ide> name := "test-api-images-delete"
<del> buildImageSuccessfully(c, name, withDockerfile("FROM busybox\nENV FOO bar"))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM busybox\nENV FOO bar"))
<ide> id := getIDByName(c, name)
<ide>
<ide> dockerCmd(c, "tag", name, "test:tag1")
<ide> func (s *DockerSuite) TestAPIImagesHistory(c *check.C) {
<ide> testRequires(c, Network)
<ide> }
<ide> name := "test-api-images-history"
<del> buildImageSuccessfully(c, name, withDockerfile("FROM busybox\nENV FOO bar"))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM busybox\nENV FOO bar"))
<ide> id := getIDByName(c, name)
<ide>
<ide> status, body, err := request.SockRequest("GET", "/images/"+id+"/history", nil, daemonHost())
<ide><path>integration-cli/docker_cli_build_test.go
<ide> import (
<ide>
<ide> "github.com/docker/docker/builder/dockerfile/command"
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/pkg/archive"
<ide> "github.com/docker/docker/pkg/stringutils"
<ide> "github.com/docker/docker/pkg/testutil"
<ide> import (
<ide> )
<ide>
<ide> func (s *DockerSuite) TestBuildJSONEmptyRun(c *check.C) {
<del> buildImageSuccessfully(c, "testbuildjsonemptyrun", withDockerfile(`
<add> cli.BuildCmd(c, "testbuildjsonemptyrun", build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN []
<ide> `))
<ide> func (s *DockerSuite) TestBuildShCmdJSONEntrypoint(c *check.C) {
<ide> expected = "cmd /S /C echo test"
<ide> }
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM busybox
<ide> ENTRYPOINT ["echo"]
<ide> CMD echo test
<ide> func (s *DockerSuite) TestBuildEnvironmentReplacementUser(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildenvironmentreplacement"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM scratch
<ide> ENV user foo
<ide> USER ${user}
<ide> func (s *DockerSuite) TestBuildEnvironmentReplacementVolume(c *check.C) {
<ide> volumePath = "/quux"
<ide> }
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM `+minimalBaseImage()+`
<ide> ENV volume `+volumePath+`
<ide> VOLUME ${volume}
<ide> func (s *DockerSuite) TestBuildEnvironmentReplacementExpose(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildenvironmentreplacement"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM scratch
<ide> ENV port 80
<ide> EXPOSE ${port}
<ide> func (s *DockerSuite) TestBuildEnvironmentReplacementExpose(c *check.C) {
<ide> func (s *DockerSuite) TestBuildEnvironmentReplacementWorkdir(c *check.C) {
<ide> name := "testbuildenvironmentreplacement"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM busybox
<ide> ENV MYWORKDIR /work
<ide> RUN mkdir ${MYWORKDIR}
<ide> func (s *DockerSuite) TestBuildEnvironmentReplacementEnv(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildenvironmentreplacement"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM busybox
<ide> ENV foo zzz
<ide> ENV bar ${foo}
<ide> func (s *DockerSuite) TestBuildHandleEscapesInVolume(c *check.C) {
<ide> }
<ide>
<ide> for _, tc := range testCases {
<del> buildImageSuccessfully(c, name, withDockerfile(fmt.Sprintf(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(fmt.Sprintf(`
<ide> FROM scratch
<ide> ENV FOO bar
<ide> VOLUME %s
<ide> func (s *DockerSuite) TestBuildOnBuildLowercase(c *check.C) {
<ide> name := "testbuildonbuildlowercase"
<ide> name2 := "testbuildonbuildlowercase2"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM busybox
<ide> onbuild run echo quux
<ide> `))
<ide>
<del> result := buildImage(name2, withDockerfile(fmt.Sprintf(`
<add> result := buildImage(name2, build.WithDockerfile(fmt.Sprintf(`
<ide> FROM %s
<ide> `, name)))
<ide> result.Assert(c, icmd.Success)
<ide> func (s *DockerSuite) TestBuildEnvEscapes(c *check.C) {
<ide> // ENV expansions work differently in Windows
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildenvescapes"
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM busybox
<ide> ENV TEST foo
<ide> CMD echo \$
<ide> func (s *DockerSuite) TestBuildEnvOverwrite(c *check.C) {
<ide> // ENV expansions work differently in Windows
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildenvoverwrite"
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM busybox
<ide> ENV TEST foo
<ide> CMD echo ${TEST}
<ide> func (s *DockerSuite) TestBuildOnBuildCmdEntrypointJSON(c *check.C) {
<ide> name1 := "onbuildcmd"
<ide> name2 := "onbuildgenerated"
<ide>
<del> buildImageSuccessfully(c, name1, withDockerfile(`
<add> buildImageSuccessfully(c, name1, build.WithDockerfile(`
<ide> FROM busybox
<ide> ONBUILD CMD ["hello world"]
<ide> ONBUILD ENTRYPOINT ["echo"]
<ide> ONBUILD RUN ["true"]`))
<ide>
<del> buildImageSuccessfully(c, name2, withDockerfile(fmt.Sprintf(`FROM %s`, name1)))
<add> buildImageSuccessfully(c, name2, build.WithDockerfile(fmt.Sprintf(`FROM %s`, name1)))
<ide>
<ide> out, _ := dockerCmd(c, "run", name2)
<ide> if !regexp.MustCompile(`(?m)^hello world`).MatchString(out) {
<ide> func (s *DockerSuite) TestBuildOnBuildEntrypointJSON(c *check.C) {
<ide> name1 := "onbuildcmd"
<ide> name2 := "onbuildgenerated"
<ide>
<del> buildImageSuccessfully(c, name1, withDockerfile(`
<add> buildImageSuccessfully(c, name1, build.WithDockerfile(`
<ide> FROM busybox
<ide> ONBUILD ENTRYPOINT ["echo"]`))
<ide>
<del> buildImageSuccessfully(c, name2, withDockerfile(fmt.Sprintf("FROM %s\nCMD [\"hello world\"]\n", name1)))
<add> buildImageSuccessfully(c, name2, build.WithDockerfile(fmt.Sprintf("FROM %s\nCMD [\"hello world\"]\n", name1)))
<ide>
<ide> out, _ := dockerCmd(c, "run", name2)
<ide> if !regexp.MustCompile(`(?m)^hello world`).MatchString(out) {
<ide> func (s *DockerSuite) TestBuildCacheAdd(c *check.C) {
<ide> })
<ide> defer server.Close()
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(fmt.Sprintf(`FROM scratch
<add> cli.BuildCmd(c, name, build.WithDockerfile(fmt.Sprintf(`FROM scratch
<ide> ADD %s/robots.txt /`, server.URL())))
<ide>
<del> result := buildImage(name, withDockerfile(fmt.Sprintf(`FROM scratch
<add> result := cli.Docker(cli.Build(name), build.WithDockerfile(fmt.Sprintf(`FROM scratch
<ide> ADD %s/index.html /`, server.URL())))
<ide> result.Assert(c, icmd.Success)
<ide> if strings.Contains(result.Combined(), "Using cache") {
<ide> func (s *DockerSuite) TestBuildLastModified(c *check.C) {
<ide> ADD %s/file /`
<ide> dockerfile := fmt.Sprintf(dFmt, server.URL())
<ide>
<del> buildImageSuccessfully(c, name, withoutCache, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, name, build.WithoutCache, build.WithDockerfile(dockerfile))
<ide> out, _ = dockerCmd(c, "run", name, "ls", "-le", "/file")
<ide>
<ide> // Build it again and make sure the mtime of the file didn't change.
<ide> // Wait a few seconds to make sure the time changed enough to notice
<ide> time.Sleep(2 * time.Second)
<ide>
<del> buildImageSuccessfully(c, name, withoutCache, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, name, build.WithoutCache, build.WithDockerfile(dockerfile))
<ide> out2, _ = dockerCmd(c, "run", name, "ls", "-le", "/file")
<ide>
<ide> if out != out2 {
<ide> ADD %s/file /`
<ide> defer server.Close()
<ide>
<ide> dockerfile = fmt.Sprintf(dFmt, server.URL())
<del> buildImageSuccessfully(c, name, withoutCache, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, name, build.WithoutCache, build.WithDockerfile(dockerfile))
<ide> out2, _ = dockerCmd(c, "run", name, "ls", "-le", "/file")
<ide>
<ide> if out == out2 {
<ide> func (s *DockerSuite) TestBuildForceRm(c *check.C) {
<ide> containerCountBefore := getContainerCount(c)
<ide> name := "testbuildforcerm"
<ide>
<del> buildImage(name, withBuildFlags("--force-rm"), withBuildContext(c,
<add> buildImage(name, cli.WithFlags("--force-rm"), withBuildContext(c,
<ide> withFile("Dockerfile", `FROM `+minimalBaseImage()+`
<ide> RUN true
<ide> RUN thiswillfail`))).Assert(c, icmd.Expected{
<ide> func (s *DockerSuite) TestBuildRm(c *check.C) {
<ide> for _, tc := range testCases {
<ide> containerCountBefore := getContainerCount(c)
<ide>
<del> buildImageSuccessfully(c, name, withBuildFlags(tc.buildflags...), withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, cli.WithFlags(tc.buildflags...), build.WithDockerfile(`FROM busybox
<ide> RUN echo hello world`))
<ide>
<ide> containerCountAfter := getContainerCount(c)
<ide> func (s *DockerSuite) TestBuildWithVolumes(c *check.C) {
<ide> }
<ide> )
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM scratch
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM scratch
<ide> VOLUME /test1
<ide> VOLUME /test2
<ide> VOLUME /test3 /test4
<ide> func (s *DockerSuite) TestBuildWithVolumes(c *check.C) {
<ide> func (s *DockerSuite) TestBuildMaintainer(c *check.C) {
<ide> name := "testbuildmaintainer"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> MAINTAINER dockerio`))
<ide>
<ide> expected := "dockerio"
<ide> func (s *DockerSuite) TestBuildUser(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuilduser"
<ide> expected := "dockerio"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> RUN echo 'dockerio:x:1001:1001::/bin:/bin/false' >> /etc/passwd
<ide> USER dockerio
<ide> RUN [ $(whoami) = 'dockerio' ]`))
<ide> func (s *DockerSuite) TestBuildRelativeWorkdir(c *check.C) {
<ide> expectedFinal = `/test2/test3`
<ide> }
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> RUN sh -c "[ "$PWD" = "`+expected1+`" ]"
<ide> WORKDIR test1
<ide> RUN sh -c "[ "$PWD" = "`+expected2+`" ]"
<ide> func (s *DockerSuite) TestBuildRelativeWorkdir(c *check.C) {
<ide> // Windows semantics. Most path handling verifications are in unit tests
<ide> func (s *DockerSuite) TestBuildWindowsWorkdirProcessing(c *check.C) {
<ide> testRequires(c, DaemonIsWindows)
<del> buildImageSuccessfully(c, "testbuildwindowsworkdirprocessing", withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, "testbuildwindowsworkdirprocessing", build.WithDockerfile(`FROM busybox
<ide> WORKDIR C:\\foo
<ide> WORKDIR bar
<ide> RUN sh -c "[ "$PWD" = "C:/foo/bar" ]"
<ide> func (s *DockerSuite) TestBuildWorkdirWithEnvVariables(c *check.C) {
<ide> expected = `/test1/test2`
<ide> }
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> ENV DIRPATH /test1
<ide> ENV SUBDIRNAME test2
<ide> WORKDIR $DIRPATH
<ide> func (s *DockerSuite) TestBuildBlankName(c *check.C) {
<ide> }
<ide>
<ide> for _, tc := range testCases {
<del> buildImage(name, withDockerfile(fmt.Sprintf(`FROM busybox
<add> buildImage(name, build.WithDockerfile(fmt.Sprintf(`FROM busybox
<ide> %s`, tc.expression))).Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> Err: tc.expectedStderr,
<ide> func (s *DockerSuite) TestBuildEnv(c *check.C) {
<ide> testRequires(c, DaemonIsLinux) // ENV expansion is different in Windows
<ide> name := "testbuildenv"
<ide> expected := "[PATH=/test:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin PORT=2375]"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> ENV PATH /test:$PATH
<ide> ENV PORT 2375
<ide> RUN [ $(env | grep PORT) = 'PORT=2375' ]`))
<ide> func (s *DockerSuite) TestBuildPATH(c *check.C) {
<ide> defPath := "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
<ide>
<ide> fn := func(dockerfile string, expected string) {
<del> buildImageSuccessfully(c, "testbldpath", withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, "testbldpath", build.WithDockerfile(dockerfile))
<ide> res := inspectField(c, "testbldpath", "Config.Env")
<ide> if res != expected {
<ide> c.Fatalf("Env %q, expected %q for dockerfile:%q", res, expected, dockerfile)
<ide> func (s *DockerSuite) TestBuildContextCleanup(c *check.C) {
<ide> c.Fatalf("failed to list contents of tmp dir: %s", err)
<ide> }
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> ENTRYPOINT ["/bin/echo"]`))
<ide>
<ide> entriesFinal, err := ioutil.ReadDir(filepath.Join(testEnv.DockerBasePath(), "tmp"))
<ide> func (s *DockerSuite) TestBuildContextCleanupFailedBuild(c *check.C) {
<ide> c.Fatalf("failed to list contents of tmp dir: %s", err)
<ide> }
<ide>
<del> buildImage(name, withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImage(name, build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> RUN /non/existing/command`)).Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> })
<ide> func (s *DockerSuite) TestBuildCmd(c *check.C) {
<ide> name := "testbuildcmd"
<ide> expected := "[/bin/echo Hello World]"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> CMD ["/bin/echo", "Hello World"]`))
<ide>
<ide> res := inspectField(c, name, "Config.Cmd")
<ide> func (s *DockerSuite) TestBuildExpose(c *check.C) {
<ide> name := "testbuildexpose"
<ide> expected := "map[2375/tcp:{}]"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM scratch
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM scratch
<ide> EXPOSE 2375`))
<ide>
<ide> res := inspectField(c, name, "Config.ExposedPorts")
<ide> func (s *DockerSuite) TestBuildExposeMorePorts(c *check.C) {
<ide> tmpl.Execute(buf, portList)
<ide>
<ide> name := "testbuildexpose"
<del> buildImageSuccessfully(c, name, withDockerfile(buf.String()))
<add> buildImageSuccessfully(c, name, build.WithDockerfile(buf.String()))
<ide>
<ide> // check if all the ports are saved inside Config.ExposedPorts
<ide> res := inspectFieldJSON(c, name, "Config.ExposedPorts")
<ide> func (s *DockerSuite) TestBuildExposeMorePorts(c *check.C) {
<ide> func (s *DockerSuite) TestBuildExposeOrder(c *check.C) {
<ide> testRequires(c, DaemonIsLinux) // Expose not implemented on Windows
<ide> buildID := func(name, exposed string) string {
<del> buildImageSuccessfully(c, name, withDockerfile(fmt.Sprintf(`FROM scratch
<add> buildImageSuccessfully(c, name, build.WithDockerfile(fmt.Sprintf(`FROM scratch
<ide> EXPOSE %s`, exposed)))
<ide> id := inspectField(c, name, "Id")
<ide> return id
<ide> func (s *DockerSuite) TestBuildExposeUpperCaseProto(c *check.C) {
<ide> testRequires(c, DaemonIsLinux) // Expose not implemented on Windows
<ide> name := "testbuildexposeuppercaseproto"
<ide> expected := "map[5678/udp:{}]"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM scratch
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM scratch
<ide> EXPOSE 5678/UDP`))
<ide> res := inspectField(c, name, "Config.ExposedPorts")
<ide> if res != expected {
<ide> func (s *DockerSuite) TestBuildEmptyEntrypointInheritance(c *check.C) {
<ide> name := "testbuildentrypointinheritance"
<ide> name2 := "testbuildentrypointinheritance2"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> ENTRYPOINT ["/bin/echo"]`))
<ide> res := inspectField(c, name, "Config.Entrypoint")
<ide>
<ide> func (s *DockerSuite) TestBuildEmptyEntrypointInheritance(c *check.C) {
<ide> c.Fatalf("Entrypoint %s, expected %s", res, expected)
<ide> }
<ide>
<del> buildImageSuccessfully(c, name2, withDockerfile(fmt.Sprintf(`FROM %s
<add> buildImageSuccessfully(c, name2, build.WithDockerfile(fmt.Sprintf(`FROM %s
<ide> ENTRYPOINT []`, name)))
<ide> res = inspectField(c, name2, "Config.Entrypoint")
<ide>
<ide> func (s *DockerSuite) TestBuildEmptyEntrypoint(c *check.C) {
<ide> name := "testbuildentrypoint"
<ide> expected := "[]"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> ENTRYPOINT []`))
<ide>
<ide> res := inspectField(c, name, "Config.Entrypoint")
<ide> func (s *DockerSuite) TestBuildEntrypoint(c *check.C) {
<ide> name := "testbuildentrypoint"
<ide>
<ide> expected := "[/bin/echo]"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> ENTRYPOINT ["/bin/echo"]`))
<ide>
<ide> res := inspectField(c, name, "Config.Entrypoint")
<ide> func (s *DockerSuite) TestBuildEntrypoint(c *check.C) {
<ide>
<ide> // #6445 ensure ONBUILD triggers aren't committed to grandchildren
<ide> func (s *DockerSuite) TestBuildOnBuildLimitedInheritence(c *check.C) {
<del> buildImageSuccessfully(c, "testonbuildtrigger1", withDockerfile(`
<add> buildImageSuccessfully(c, "testonbuildtrigger1", build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN echo "GRANDPARENT"
<ide> ONBUILD RUN echo "ONBUILD PARENT"
<ide> `))
<ide> // ONBUILD should be run in second build.
<del> buildImage("testonbuildtrigger2", withDockerfile("FROM testonbuildtrigger1")).Assert(c, icmd.Expected{
<add> buildImage("testonbuildtrigger2", build.WithDockerfile("FROM testonbuildtrigger1")).Assert(c, icmd.Expected{
<ide> Out: "ONBUILD PARENT",
<ide> })
<ide> // ONBUILD should *not* be run in third build.
<del> result := buildImage("testonbuildtrigger3", withDockerfile("FROM testonbuildtrigger2"))
<add> result := buildImage("testonbuildtrigger3", build.WithDockerfile("FROM testonbuildtrigger2"))
<ide> result.Assert(c, icmd.Success)
<ide> if strings.Contains(result.Combined(), "ONBUILD PARENT") {
<ide> c.Fatalf("ONBUILD instruction ran in grandchild of ONBUILD parent")
<ide> func (s *DockerSuite) TestBuildSameDockerfileWithAndWithoutCache(c *check.C) {
<ide> MAINTAINER dockerio
<ide> EXPOSE 5432
<ide> ENTRYPOINT ["/bin/echo"]`
<del> buildImageSuccessfully(c, name, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, name, build.WithDockerfile(dockerfile))
<ide> id1 := getIDByName(c, name)
<del> buildImageSuccessfully(c, name, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, name, build.WithDockerfile(dockerfile))
<ide> id2 := getIDByName(c, name)
<del> buildImageSuccessfully(c, name, withoutCache, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, name, build.WithoutCache, build.WithDockerfile(dockerfile))
<ide> id3 := getIDByName(c, name)
<ide> if id1 != id2 {
<ide> c.Fatal("The cache should have been used but hasn't.")
<ide> func (s *DockerSuite) TestBuildAddMultipleLocalFileWithAndWithoutCache(c *check.
<ide> id1 := getIDByName(c, name)
<ide> buildImageSuccessfully(c, name, withExternalBuildContext(ctx))
<ide> id2 := getIDByName(c, name)
<del> buildImageSuccessfully(c, name, withoutCache, withExternalBuildContext(ctx))
<add> buildImageSuccessfully(c, name, build.WithoutCache, withExternalBuildContext(ctx))
<ide> id3 := getIDByName(c, name)
<ide> if id1 != id2 {
<ide> c.Fatal("The cache should have been used but hasn't.")
<ide> func (s *DockerSuite) TestBuildAddCurrentDirWithoutCache(c *check.C) {
<ide> defer ctx.Close()
<ide> buildImageSuccessfully(c, name, withExternalBuildContext(ctx))
<ide> id1 := getIDByName(c, name)
<del> buildImageSuccessfully(c, name, withoutCache, withExternalBuildContext(ctx))
<add> buildImageSuccessfully(c, name, build.WithoutCache, withExternalBuildContext(ctx))
<ide> id2 := getIDByName(c, name)
<ide> if id1 == id2 {
<ide> c.Fatal("The cache should have been invalided but hasn't.")
<ide> func (s *DockerSuite) TestBuildAddRemoteFileWithAndWithoutCache(c *check.C) {
<ide> dockerfile := fmt.Sprintf(`FROM `+minimalBaseImage()+`
<ide> MAINTAINER dockerio
<ide> ADD %s/baz /usr/lib/baz/quux`, server.URL())
<del> buildImageSuccessfully(c, name, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, name, build.WithDockerfile(dockerfile))
<ide> id1 := getIDByName(c, name)
<del> buildImageSuccessfully(c, name, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, name, build.WithDockerfile(dockerfile))
<ide> id2 := getIDByName(c, name)
<del> buildImageSuccessfully(c, name, withoutCache, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, name, build.WithoutCache, build.WithDockerfile(dockerfile))
<ide> id3 := getIDByName(c, name)
<ide>
<ide> if id1 != id2 {
<ide> func (s *DockerSuite) TestBuildAddLocalAndRemoteFilesWithAndWithoutCache(c *chec
<ide> id1 := getIDByName(c, name)
<ide> buildImageSuccessfully(c, name, withExternalBuildContext(ctx))
<ide> id2 := getIDByName(c, name)
<del> buildImageSuccessfully(c, name, withoutCache, withExternalBuildContext(ctx))
<add> buildImageSuccessfully(c, name, build.WithoutCache, withExternalBuildContext(ctx))
<ide> id3 := getIDByName(c, name)
<ide> if id1 != id2 {
<ide> c.Fatal("The cache should have been used but hasn't.")
<ide> func (s *DockerSuite) TestBuildWithVolumeOwnership(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildimg"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox:latest
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox:latest
<ide> RUN mkdir /test && chown daemon:daemon /test && chmod 0600 /test
<ide> VOLUME /test`))
<ide>
<ide> func (s *DockerSuite) TestBuildWithVolumeOwnership(c *check.C) {
<ide> // utilizing cache
<ide> func (s *DockerSuite) TestBuildEntrypointRunCleanup(c *check.C) {
<ide> name := "testbuildcmdcleanup"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> RUN echo "hello"`))
<ide>
<ide> buildImageSuccessfully(c, name, withBuildContext(c,
<ide> func (s *DockerSuite) TestBuildInheritance(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildinheritance"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM scratch
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM scratch
<ide> EXPOSE 2375`))
<ide> ports1 := inspectField(c, name, "Config.ExposedPorts")
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(fmt.Sprintf(`FROM %s
<add> buildImageSuccessfully(c, name, build.WithDockerfile(fmt.Sprintf(`FROM %s
<ide> ENTRYPOINT ["/bin/echo"]`, name)))
<ide>
<ide> res := inspectField(c, name, "Config.Entrypoint")
<ide> func (s *DockerSuite) TestBuildInheritance(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestBuildFails(c *check.C) {
<ide> name := "testbuildfails"
<del> buildImage(name, withDockerfile(`FROM busybox
<add> buildImage(name, build.WithDockerfile(`FROM busybox
<ide> RUN sh -c "exit 23"`)).Assert(c, icmd.Expected{
<ide> ExitCode: 23,
<ide> Err: "returned a non-zero code: 23",
<ide> func (s *DockerSuite) TestBuildFails(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestBuildOnBuild(c *check.C) {
<ide> name := "testbuildonbuild"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> ONBUILD RUN touch foobar`))
<del> buildImageSuccessfully(c, name, withDockerfile(fmt.Sprintf(`FROM %s
<add> buildImageSuccessfully(c, name, build.WithDockerfile(fmt.Sprintf(`FROM %s
<ide> RUN [ -f foobar ]`, name)))
<ide> }
<ide>
<ide> func (s *DockerSuite) TestBuildAddToSymlinkDest(c *check.C) {
<ide> func (s *DockerSuite) TestBuildEscapeWhitespace(c *check.C) {
<ide> name := "testbuildescapewhitespace"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> # ESCAPE=\
<ide> FROM busybox
<ide> MAINTAINER "Docker \
<ide> func (s *DockerSuite) TestBuildVerifyIntString(c *check.C) {
<ide> // Verify that strings that look like ints are still passed as strings
<ide> name := "testbuildstringing"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM busybox
<ide> MAINTAINER 123`))
<ide>
<ide> func (s *DockerSuite) TestBuildDockerignoringRenamedDockerfile(c *check.C) {
<ide> RUN ls /tmp/Dockerfile
<ide> RUN sh -c "! ls /tmp/MyDockerfile"
<ide> RUN ls /tmp/.dockerignore`
<del> buildImageSuccessfully(c, name, withBuildFlags("-f", "MyDockerfile"), withBuildContext(c,
<add> buildImageSuccessfully(c, name, cli.WithFlags("-f", "MyDockerfile"), withBuildContext(c,
<ide> withFile("Dockerfile", "Should not use me"),
<ide> withFile("MyDockerfile", dockerfile),
<ide> withFile(".dockerignore", "MyDockerfile\n"),
<ide> ))
<del> buildImageSuccessfully(c, name, withBuildFlags("-f", "MyDockerfile"), withBuildContext(c,
<add> buildImageSuccessfully(c, name, cli.WithFlags("-f", "MyDockerfile"), withBuildContext(c,
<ide> withFile("Dockerfile", "Should not use me"),
<ide> withFile("MyDockerfile", dockerfile),
<ide> withFile(".dockerignore", "./MyDockerfile\n"),
<ide> dir1/dir3/**
<ide> func (s *DockerSuite) TestBuildLineBreak(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildlinebreak"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> RUN sh -c 'echo root:testpass \
<ide> > /tmp/passwd'
<ide> RUN mkdir -p /var/run/sshd
<ide> RUN sh -c "[ "$(ls -d /var/run/sshd)" = "/var/run/sshd" ]"`))
<ide> func (s *DockerSuite) TestBuildEOLInLine(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildeolinline"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> RUN sh -c 'echo root:testpass > /tmp/passwd'
<ide> RUN echo "foo \n bar"; echo "baz"
<ide> RUN mkdir -p /var/run/sshd
<ide> RUN sh -c "[ "$(ls -d /var/run/sshd)" = "/var/run/sshd" ]"`))
<ide> func (s *DockerSuite) TestBuildCommentsShebangs(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildcomments"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> # This is an ordinary comment.
<ide> RUN { echo '#!/bin/sh'; echo 'echo hello world'; } > /hello.sh
<ide> RUN [ ! -x /hello.sh ]
<ide> RUN [ "$(/hello.sh)" = "hello world" ]`))
<ide> func (s *DockerSuite) TestBuildUsersAndGroups(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildusers"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide>
<ide> # Make sure our defaults work
<ide> RUN [ "$(id -u):$(id -g)/$(id -un):$(id -gn)" = '0:0/root:root' ]
<ide> func (s *DockerSuite) TestBuildFromGit(c *check.C) {
<ide> }, true)
<ide> defer git.Close()
<ide>
<del> buildImageSuccessfully(c, name, withBuildContextPath(git.RepoURL))
<add> buildImageSuccessfully(c, name, build.WithContextPath(git.RepoURL))
<ide>
<ide> res := inspectField(c, name, "Author")
<ide> if res != "docker" {
<ide> func (s *DockerSuite) TestBuildFromGitWithContext(c *check.C) {
<ide> }, true)
<ide> defer git.Close()
<ide>
<del> buildImageSuccessfully(c, name, withBuildContextPath(fmt.Sprintf("%s#master:docker", git.RepoURL)))
<add> buildImageSuccessfully(c, name, build.WithContextPath(fmt.Sprintf("%s#master:docker", git.RepoURL)))
<ide>
<ide> res := inspectField(c, name, "Author")
<ide> if res != "docker" {
<ide> func (s *DockerSuite) TestBuildFromGitwithF(c *check.C) {
<ide> }, true)
<ide> defer git.Close()
<ide>
<del> buildImage(name, withBuildFlags("-f", "myApp/myDockerfile"), withBuildContextPath(git.RepoURL)).Assert(c, icmd.Expected{
<add> buildImage(name, cli.WithFlags("-f", "myApp/myDockerfile"), build.WithContextPath(git.RepoURL)).Assert(c, icmd.Expected{
<ide> Out: "hi from Dockerfile",
<ide> })
<ide> }
<ide> func (s *DockerSuite) TestBuildFromRemoteTarball(c *check.C) {
<ide> })
<ide> defer server.Close()
<ide>
<del> buildImageSuccessfully(c, name, withBuildContextPath(server.URL()+"/testT.tar"))
<add> buildImageSuccessfully(c, name, build.WithContextPath(server.URL()+"/testT.tar"))
<ide>
<ide> res := inspectField(c, name, "Author")
<ide> if res != "docker" {
<ide> func (s *DockerSuite) TestBuildFromRemoteTarball(c *check.C) {
<ide> func (s *DockerSuite) TestBuildCleanupCmdOnEntrypoint(c *check.C) {
<ide> name := "testbuildcmdcleanuponentrypoint"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> CMD ["test"]
<ide> ENTRYPOINT ["echo"]`))
<del> buildImageSuccessfully(c, name, withDockerfile(fmt.Sprintf(`FROM %s
<add> buildImageSuccessfully(c, name, build.WithDockerfile(fmt.Sprintf(`FROM %s
<ide> ENTRYPOINT ["cat"]`, name)))
<ide>
<ide> res := inspectField(c, name, "Config.Cmd")
<ide> func (s *DockerSuite) TestBuildCleanupCmdOnEntrypoint(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestBuildClearCmd(c *check.C) {
<ide> name := "testbuildclearcmd"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> ENTRYPOINT ["/bin/bash"]
<ide> CMD []`))
<ide>
<ide> func (s *DockerSuite) TestBuildEmptyCmd(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide>
<ide> name := "testbuildemptycmd"
<del> buildImageSuccessfully(c, name, withDockerfile("FROM "+minimalBaseImage()+"\nMAINTAINER quux\n"))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM "+minimalBaseImage()+"\nMAINTAINER quux\n"))
<ide>
<ide> res := inspectFieldJSON(c, name, "Config.Cmd")
<ide> if res != "null" {
<ide> func (s *DockerSuite) TestBuildEmptyCmd(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestBuildOnBuildOutput(c *check.C) {
<ide> name := "testbuildonbuildparent"
<del> buildImageSuccessfully(c, name, withDockerfile("FROM busybox\nONBUILD RUN echo foo\n"))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM busybox\nONBUILD RUN echo foo\n"))
<ide>
<del> buildImage(name, withDockerfile("FROM "+name+"\nMAINTAINER quux\n")).Assert(c, icmd.Expected{
<add> buildImage(name, build.WithDockerfile("FROM "+name+"\nMAINTAINER quux\n")).Assert(c, icmd.Expected{
<ide> Out: "# Executing 1 build trigger",
<ide> })
<ide> }
<ide>
<ide> // FIXME(vdemeester) should be a unit test
<ide> func (s *DockerSuite) TestBuildInvalidTag(c *check.C) {
<ide> name := "abcd:" + stringutils.GenerateRandomAlphaOnlyString(200)
<del> buildImage(name, withDockerfile("FROM "+minimalBaseImage()+"\nMAINTAINER quux\n")).Assert(c, icmd.Expected{
<add> buildImage(name, build.WithDockerfile("FROM "+minimalBaseImage()+"\nMAINTAINER quux\n")).Assert(c, icmd.Expected{
<ide> ExitCode: 125,
<ide> Err: "invalid reference format",
<ide> })
<ide> }
<ide>
<ide> func (s *DockerSuite) TestBuildCmdShDashC(c *check.C) {
<ide> name := "testbuildcmdshc"
<del> buildImageSuccessfully(c, name, withDockerfile("FROM busybox\nCMD echo cmd\n"))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM busybox\nCMD echo cmd\n"))
<ide>
<ide> res := inspectFieldJSON(c, name, "Config.Cmd")
<ide> expected := `["/bin/sh","-c","echo cmd"]`
<ide> func (s *DockerSuite) TestBuildCmdSpaces(c *check.C) {
<ide> // look the same
<ide> name := "testbuildcmdspaces"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile("FROM busybox\nCMD [\"echo hi\"]\n"))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM busybox\nCMD [\"echo hi\"]\n"))
<ide> id1 := getIDByName(c, name)
<del> buildImageSuccessfully(c, name, withDockerfile("FROM busybox\nCMD [\"echo\", \"hi\"]\n"))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM busybox\nCMD [\"echo\", \"hi\"]\n"))
<ide> id2 := getIDByName(c, name)
<ide>
<ide> if id1 == id2 {
<ide> c.Fatal("Should not have resulted in the same CMD")
<ide> }
<ide>
<ide> // Now do the same with ENTRYPOINT
<del> buildImageSuccessfully(c, name, withDockerfile("FROM busybox\nENTRYPOINT [\"echo hi\"]\n"))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM busybox\nENTRYPOINT [\"echo hi\"]\n"))
<ide> id1 = getIDByName(c, name)
<del> buildImageSuccessfully(c, name, withDockerfile("FROM busybox\nENTRYPOINT [\"echo\", \"hi\"]\n"))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM busybox\nENTRYPOINT [\"echo\", \"hi\"]\n"))
<ide> id2 = getIDByName(c, name)
<ide>
<ide> if id1 == id2 {
<ide> func (s *DockerSuite) TestBuildCmdSpaces(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestBuildCmdJSONNoShDashC(c *check.C) {
<ide> name := "testbuildcmdjson"
<del> buildImageSuccessfully(c, name, withDockerfile("FROM busybox\nCMD [\"echo\", \"cmd\"]"))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM busybox\nCMD [\"echo\", \"cmd\"]"))
<ide>
<ide> res := inspectFieldJSON(c, name, "Config.Cmd")
<ide> expected := `["echo","cmd"]`
<ide> func (s *DockerSuite) TestBuildCmdJSONNoShDashC(c *check.C) {
<ide> }
<ide>
<ide> func (s *DockerSuite) TestBuildEntrypointCanBeOverridenByChild(c *check.C) {
<del> buildImageSuccessfully(c, "parent", withDockerfile(`
<add> buildImageSuccessfully(c, "parent", build.WithDockerfile(`
<ide> FROM busybox
<ide> ENTRYPOINT exit 130
<ide> `))
<ide> func (s *DockerSuite) TestBuildEntrypointCanBeOverridenByChild(c *check.C) {
<ide> ExitCode: 130,
<ide> })
<ide>
<del> buildImageSuccessfully(c, "child", withDockerfile(`
<add> buildImageSuccessfully(c, "child", build.WithDockerfile(`
<ide> FROM parent
<ide> ENTRYPOINT exit 5
<ide> `))
<ide> func (s *DockerSuite) TestBuildEntrypointCanBeOverridenByChildInspect(c *check.C
<ide> expected = `["cmd","/S","/C","echo quux"]`
<ide> }
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile("FROM busybox\nENTRYPOINT /foo/bar"))
<del> buildImageSuccessfully(c, name2, withDockerfile(fmt.Sprintf("FROM %s\nENTRYPOINT echo quux", name)))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM busybox\nENTRYPOINT /foo/bar"))
<add> buildImageSuccessfully(c, name2, build.WithDockerfile(fmt.Sprintf("FROM %s\nENTRYPOINT echo quux", name)))
<ide>
<ide> res := inspectFieldJSON(c, name2, "Config.Entrypoint")
<ide> if res != expected {
<ide> func (s *DockerSuite) TestBuildEntrypointCanBeOverridenByChildInspect(c *check.C
<ide>
<ide> func (s *DockerSuite) TestBuildRunShEntrypoint(c *check.C) {
<ide> name := "testbuildentrypoint"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> ENTRYPOINT echo`))
<ide> dockerCmd(c, "run", "--rm", name)
<ide> }
<ide> func (s *DockerSuite) TestBuildExoticShellInterpolation(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildexoticshellinterpolation"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM busybox
<ide>
<ide> ENV SOME_VAR a.b.c
<ide> func (s *DockerSuite) TestBuildVerifySingleQuoteFails(c *check.C) {
<ide> expectedExitCode = 127
<ide> }
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> CMD [ '/bin/sh', '-c', 'echo hi' ]`))
<ide>
<ide> icmd.RunCommand(dockerBinary, "run", "--rm", name).Assert(c, icmd.Expected{
<ide> func (s *DockerSuite) TestBuildVerboseOut(c *check.C) {
<ide> expected = "\n123\r\n"
<ide> }
<ide>
<del> buildImage(name, withDockerfile(`FROM busybox
<add> buildImage(name, build.WithDockerfile(`FROM busybox
<ide> RUN echo 123`)).Assert(c, icmd.Expected{
<ide> Out: expected,
<ide> })
<ide> }
<ide>
<ide> func (s *DockerSuite) TestBuildWithTabs(c *check.C) {
<ide> name := "testbuildwithtabs"
<del> buildImageSuccessfully(c, name, withDockerfile("FROM busybox\nRUN echo\tone\t\ttwo"))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM busybox\nRUN echo\tone\t\ttwo"))
<ide> res := inspectFieldJSON(c, name, "ContainerConfig.Cmd")
<ide> expected1 := `["/bin/sh","-c","echo\tone\t\ttwo"]`
<ide> expected2 := `["/bin/sh","-c","echo\u0009one\u0009\u0009two"]` // syntactically equivalent, and what Go 1.3 generates
<ide> func (s *DockerSuite) TestBuildWithTabs(c *check.C) {
<ide> func (s *DockerSuite) TestBuildLabels(c *check.C) {
<ide> name := "testbuildlabel"
<ide> expected := `{"License":"GPL","Vendor":"Acme"}`
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> LABEL Vendor=Acme
<ide> LABEL License GPL`))
<ide> res := inspectFieldJSON(c, name, "Config.Labels")
<ide> func (s *DockerSuite) TestBuildLabels(c *check.C) {
<ide> func (s *DockerSuite) TestBuildLabelsCache(c *check.C) {
<ide> name := "testbuildlabelcache"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> LABEL Vendor=Acme`))
<ide> id1 := getIDByName(c, name)
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> LABEL Vendor=Acme`))
<ide> id2 := getIDByName(c, name)
<ide> if id1 != id2 {
<ide> c.Fatalf("Build 2 should have worked & used cache(%s,%s)", id1, id2)
<ide> }
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> LABEL Vendor=Acme1`))
<ide> id2 = getIDByName(c, name)
<ide> if id1 == id2 {
<ide> c.Fatalf("Build 3 should have worked & NOT used cache(%s,%s)", id1, id2)
<ide> }
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> LABEL Vendor Acme`))
<ide> id2 = getIDByName(c, name)
<ide> if id1 != id2 {
<ide> c.Fatalf("Build 4 should have worked & used cache(%s,%s)", id1, id2)
<ide> }
<ide>
<ide> // Now make sure the cache isn't used by mistake
<del> buildImageSuccessfully(c, name, withoutCache, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithoutCache, build.WithDockerfile(`FROM busybox
<ide> LABEL f1=b1 f2=b2`))
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> LABEL f1=b1 f2=b2`))
<ide> id2 = getIDByName(c, name)
<ide> if id1 == id2 {
<ide> func (s *DockerSuite) TestBuildNotVerboseSuccess(c *check.C) {
<ide> // This test makes sure that -q works correctly when build is successful:
<ide> // stdout has only the image ID (long image ID) and stderr is empty.
<ide> outRegexp := regexp.MustCompile("^(sha256:|)[a-z0-9]{64}\\n$")
<del> buildFlags := withBuildFlags("-q")
<add> buildFlags := cli.WithFlags("-q")
<ide>
<ide> tt := []struct {
<ide> Name string
<ide> func (s *DockerSuite) TestBuildNotVerboseSuccess(c *check.C) {
<ide> {
<ide> Name: "quiet_build_stdin_success",
<ide> BuildFunc: func(name string) *icmd.Result {
<del> return buildImage(name, buildFlags, withDockerfile("FROM busybox"))
<add> return buildImage(name, buildFlags, build.WithDockerfile("FROM busybox"))
<ide> },
<ide> },
<ide> {
<ide> func (s *DockerSuite) TestBuildNotVerboseSuccess(c *check.C) {
<ide> git := newFakeGit(c, "repo", map[string]string{
<ide> "Dockerfile": "FROM busybox",
<ide> }, true)
<del> return buildImage(name, buildFlags, withBuildContextPath(git.RepoURL))
<add> return buildImage(name, buildFlags, build.WithContextPath(git.RepoURL))
<ide> },
<ide> },
<ide> }
<ide> func (s *DockerSuite) TestBuildNotVerboseFailureWithNonExistImage(c *check.C) {
<ide> testRequires(c, Network)
<ide> testName := "quiet_build_not_exists_image"
<ide> dockerfile := "FROM busybox11"
<del> quietResult := buildImage(testName, withBuildFlags("-q"), withDockerfile(dockerfile))
<add> quietResult := buildImage(testName, cli.WithFlags("-q"), build.WithDockerfile(dockerfile))
<ide> quietResult.Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> })
<del> result := buildImage(testName, withDockerfile(dockerfile))
<add> result := buildImage(testName, build.WithDockerfile(dockerfile))
<ide> result.Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> })
<ide> func (s *DockerSuite) TestBuildNotVerboseFailure(c *check.C) {
<ide> }
<ide>
<ide> for _, tc := range testCases {
<del> quietResult := buildImage(tc.testName, withBuildFlags("-q"), withDockerfile(tc.dockerfile))
<add> quietResult := buildImage(tc.testName, cli.WithFlags("-q"), build.WithDockerfile(tc.dockerfile))
<ide> quietResult.Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> })
<del> result := buildImage(tc.testName, withDockerfile(tc.dockerfile))
<add> result := buildImage(tc.testName, build.WithDockerfile(tc.dockerfile))
<ide> result.Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> })
<ide> func (s *DockerSuite) TestBuildNotVerboseFailureRemote(c *check.C) {
<ide> // TODO(vdemeester) with cobra, stdout has a carriage return too much so this test should not check stdout
<ide> URL := "http://something.invalid"
<ide> name := "quiet_build_wrong_remote"
<del> quietResult := buildImage(name, withBuildFlags("-q"), withBuildContextPath(URL))
<add> quietResult := buildImage(name, cli.WithFlags("-q"), build.WithContextPath(URL))
<ide> quietResult.Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> })
<del> result := buildImage(name, withBuildContextPath(URL))
<add> result := buildImage(name, build.WithContextPath(URL))
<ide> result.Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> })
<ide> func (s *DockerSuite) TestBuildStderr(c *check.C) {
<ide> // This test just makes sure that no non-error output goes
<ide> // to stderr
<ide> name := "testbuildstderr"
<del> result := buildImage(name, withDockerfile("FROM busybox\nRUN echo one"))
<add> result := buildImage(name, build.WithDockerfile("FROM busybox\nRUN echo one"))
<ide> result.Assert(c, icmd.Success)
<ide>
<ide> // Windows to non-Windows should have a security warning
<ide> func (s *DockerSuite) TestBuildSymlinkBreakout(c *check.C) {
<ide> w.Close()
<ide> f.Close()
<ide>
<del> buildImageSuccessfully(c, name, withoutCache, withExternalBuildContext(fakeContextFromDir(ctx)))
<add> buildImageSuccessfully(c, name, build.WithoutCache, withExternalBuildContext(fakeContextFromDir(ctx)))
<ide> if _, err := os.Lstat(filepath.Join(tmpdir, "inject")); err == nil {
<ide> c.Fatal("symlink breakout - inject")
<ide> } else if !os.IsNotExist(err) {
<ide> ADD xz /usr/local/sbin/
<ide> RUN chmod 755 /usr/local/sbin/xz
<ide> ADD test.xz /
<ide> RUN [ ! -e /injected ]`),
<del> withFile("test.xz", "\xfd\x37\x7a\x58\x5a\x00\x00\x04\xe6\xd6\xb4\x46\x02\x00"+
<del> "\x21\x01\x16\x00\x00\x00\x74\x2f\xe5\xa3\x01\x00\x3f\xfd"+
<del> "\x37\x7a\x58\x5a\x00\x00\x04\xe6\xd6\xb4\x46\x02\x00\x21"),
<add> withFile("test.xz", "\xfd\x37\x7a\x58\x5a\x00\x00\x04\xe6\xd6\xb4\x46\x02\x00"+"\x21\x01\x16\x00\x00\x00\x74\x2f\xe5\xa3\x01\x00\x3f\xfd"+"\x37\x7a\x58\x5a\x00\x00\x04\xe6\xd6\xb4\x46\x02\x00\x21"),
<ide> withFile("xz", "#!/bin/sh\ntouch /injected"),
<ide> ))
<ide> }
<ide> RUN echo from Dockerfile`,
<ide>
<ide> // Make sure that -f is ignored and that we don't use the Dockerfile
<ide> // that's in the current dir
<del> result := buildImage("test1", withBuildFlags("-f", "baz", server.URL()+"/baz"), func(cmd *icmd.Cmd) func() {
<add> result := buildImage("test1", cli.WithFlags("-f", "baz", server.URL()+"/baz"), func(cmd *icmd.Cmd) func() {
<ide> cmd.Dir = ctx.Dir
<ide> return nil
<ide> })
<ide> RUN echo "from Dockerfile"`,
<ide>
<ide> // Make sure that -f is ignored and that we don't use the Dockerfile
<ide> // that's in the current dir
<del> result := buildImage("test1", withBuildFlags("-f", "baz", "-"), func(cmd *icmd.Cmd) func() {
<add> result := buildImage("test1", cli.WithFlags("-f", "baz", "-"), func(cmd *icmd.Cmd) func() {
<ide> cmd.Dir = ctx.Dir
<ide> cmd.Stdin = strings.NewReader(`FROM busybox
<ide> RUN echo "from baz"
<ide> func (s *DockerSuite) TestBuildFromOfficialNames(c *check.C) {
<ide> }
<ide> for idx, fromName := range fromNames {
<ide> imgName := fmt.Sprintf("%s%d", name, idx)
<del> buildImageSuccessfully(c, imgName, withDockerfile("FROM "+fromName))
<add> buildImageSuccessfully(c, imgName, build.WithDockerfile("FROM "+fromName))
<ide> dockerCmd(c, "rmi", imgName)
<ide> }
<ide> }
<ide> func (s *DockerSuite) TestBuildSpaces(c *check.C) {
<ide> }
<ide>
<ide> ctx.Add("Dockerfile", "FROM busybox\n COPY")
<del> result2 = buildImage(name, withoutCache, withExternalBuildContext(ctx))
<add> result2 = buildImage(name, build.WithoutCache, withExternalBuildContext(ctx))
<ide> result2.Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> })
<ide> func (s *DockerSuite) TestBuildSpaces(c *check.C) {
<ide> }
<ide>
<ide> ctx.Add("Dockerfile", "FROM busybox\n COPY ")
<del> result2 = buildImage(name, withoutCache, withExternalBuildContext(ctx))
<add> result2 = buildImage(name, build.WithoutCache, withExternalBuildContext(ctx))
<ide> result2.Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> })
<ide> RUN echo " \
<ide> expected = "\" foo \""
<ide> }
<ide>
<del> buildImage(name, withDockerfile(dockerfile)).Assert(c, icmd.Expected{
<add> buildImage(name, build.WithDockerfile(dockerfile)).Assert(c, icmd.Expected{
<ide> Out: expected,
<ide> })
<ide> }
<ide>
<ide> // #4393
<ide> func (s *DockerSuite) TestBuildVolumeFileExistsinContainer(c *check.C) {
<ide> testRequires(c, DaemonIsLinux) // TODO Windows: This should error out
<del> buildImage("docker-test-errcreatevolumewithfile", withDockerfile(`
<add> buildImage("docker-test-errcreatevolumewithfile", build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN touch /foo
<ide> VOLUME /foo
<ide> func (s *DockerSuite) TestBuildMissingArgs(c *check.C) {
<ide> dockerfile = "FROM busybox\n" + cmd
<ide> }
<ide>
<del> buildImage("args", withDockerfile(dockerfile)).Assert(c, icmd.Expected{
<add> buildImage("args", build.WithDockerfile(dockerfile)).Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> Err: cmd + " requires",
<ide> })
<ide> func (s *DockerSuite) TestBuildMissingArgs(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestBuildEmptyScratch(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<del> buildImage("sc", withDockerfile("FROM scratch")).Assert(c, icmd.Expected{
<add> buildImage("sc", build.WithDockerfile("FROM scratch")).Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> Err: "No image was generated",
<ide> })
<ide> func (s *DockerSuite) TestBuildRUNoneJSON(c *check.C) {
<ide> testRequires(c, DaemonIsLinux) // No hello-world Windows image
<ide> name := "testbuildrunonejson"
<ide>
<del> buildImage(name, withDockerfile(`FROM hello-world:frozen
<add> buildImage(name, build.WithDockerfile(`FROM hello-world:frozen
<ide> RUN [ "/hello" ]`)).Assert(c, icmd.Expected{
<ide> Out: "Hello from Docker",
<ide> })
<ide> RUN [ "/hello" ]`)).Assert(c, icmd.Expected{
<ide> func (s *DockerSuite) TestBuildEmptyStringVolume(c *check.C) {
<ide> name := "testbuildemptystringvolume"
<ide>
<del> buildImage(name, withDockerfile(`
<add> buildImage(name, build.WithDockerfile(`
<ide> FROM busybox
<ide> ENV foo=""
<ide> VOLUME $foo
<ide> func (s *DockerSuite) TestBuildContainerWithCgroupParent(c *check.C) {
<ide> c.Fatalf("unable to find self memory cgroup path. CgroupsPath: %v", selfCgroupPaths)
<ide> }
<ide> result := buildImage("buildcgroupparent",
<del> withBuildFlags("--cgroup-parent", cgroupParent),
<del> withDockerfile(`
<add> cli.WithFlags("--cgroup-parent", cgroupParent),
<add> build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN cat /proc/self/cgroup
<ide> `))
<ide> func (s *DockerSuite) TestBuildNoDupOutput(c *check.C) {
<ide> // property - there was a bug that caused it to be duplicated on the
<ide> // Step X line
<ide> name := "testbuildnodupoutput"
<del> result := buildImage(name, withDockerfile(`
<add> result := buildImage(name, build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN env`))
<ide> result.Assert(c, icmd.Success)
<ide> func (s *DockerSuite) TestBuildNoDupOutput(c *check.C) {
<ide> func (s *DockerSuite) TestBuildStartsFromOne(c *check.C) {
<ide> // Explicit check to ensure that build starts from step 1 rather than 0
<ide> name := "testbuildstartsfromone"
<del> result := buildImage(name, withDockerfile(`FROM busybox`))
<add> result := buildImage(name, build.WithDockerfile(`FROM busybox`))
<ide> result.Assert(c, icmd.Success)
<ide> exp := "\nStep 1/1 : FROM busybox\n"
<ide> if !strings.Contains(result.Combined(), exp) {
<ide> func (s *DockerSuite) TestBuildRUNErrMsg(c *check.C) {
<ide> }
<ide> exp := fmt.Sprintf(`The command '%s badEXE a1 \& a2 a3' returned a non-zero code: %d`, shell, exitCode)
<ide>
<del> buildImage(name, withDockerfile(`
<add> buildImage(name, build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN badEXE a1 \& a2 a3`)).Assert(c, icmd.Expected{
<ide> ExitCode: exitCode,
<ide> func (s *DockerTrustSuite) TestTrustedBuild(c *check.C) {
<ide>
<ide> name := "testtrustedbuild"
<ide>
<del> buildImage(name, trustedBuild, withDockerfile(dockerFile)).Assert(c, icmd.Expected{
<add> buildImage(name, trustedBuild, build.WithDockerfile(dockerFile)).Assert(c, icmd.Expected{
<ide> Out: fmt.Sprintf("FROM %s@sha", repoName[:len(repoName)-7]),
<ide> })
<ide>
<ide> func (s *DockerTrustSuite) TestTrustedBuildUntrustedTag(c *check.C) {
<ide>
<ide> name := "testtrustedbuilduntrustedtag"
<ide>
<del> buildImage(name, trustedBuild, withDockerfile(dockerFile)).Assert(c, icmd.Expected{
<add> buildImage(name, trustedBuild, build.WithDockerfile(dockerFile)).Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> Err: "does not have trust data for",
<ide> })
<ide> func (s *DockerTrustSuite) TestTrustedBuildTagFromReleasesRole(c *check.C) {
<ide> RUN []
<ide> `, otherTag)
<ide> name := "testtrustedbuildreleasesrole"
<del> buildImage(name, trustedBuild, withDockerfile(dockerFile)).Assert(c, icmd.Expected{
<add> buildImage(name, trustedBuild, build.WithDockerfile(dockerFile)).Assert(c, icmd.Expected{
<ide> Out: fmt.Sprintf("FROM %s@sha", repoName),
<ide> })
<ide> }
<ide> func (s *DockerTrustSuite) TestTrustedBuildTagIgnoresOtherDelegationRoles(c *che
<ide> `, otherTag)
<ide>
<ide> name := "testtrustedbuildotherrole"
<del> buildImage(name, trustedBuild, withDockerfile(dockerFile)).Assert(c, icmd.Expected{
<add> buildImage(name, trustedBuild, build.WithDockerfile(dockerFile)).Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> })
<ide> }
<ide> func (s *DockerSuite) TestBuildNullStringInAddCopyVolume(c *check.C) {
<ide> func (s *DockerSuite) TestBuildStopSignal(c *check.C) {
<ide> testRequires(c, DaemonIsLinux) // Windows does not support STOPSIGNAL yet
<ide> imgName := "test_build_stop_signal"
<del> buildImageSuccessfully(c, imgName, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, imgName, build.WithDockerfile(`FROM busybox
<ide> STOPSIGNAL SIGKILL`))
<ide> res := inspectFieldJSON(c, imgName, "Config.StopSignal")
<ide> if res != `"SIGKILL"` {
<ide> func (s *DockerSuite) TestBuildBuildTimeArg(c *check.C) {
<ide>
<ide> }
<ide> buildImage(imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<add> build.WithDockerfile(dockerfile),
<ide> ).Assert(c, icmd.Expected{
<ide> Out: envVal,
<ide> })
<ide> func (s *DockerSuite) TestBuildBuildTimeArgHistory(c *check.C) {
<ide> dockerfile := fmt.Sprintf(`FROM busybox
<ide> ARG %s=%s`, envKey, envDef)
<ide> buildImage(imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<add> build.WithDockerfile(dockerfile),
<ide> ).Assert(c, icmd.Expected{
<ide> Out: envVal,
<ide> })
<ide> func (s *DockerSuite) TestBuildTimeArgHistoryExclusions(c *check.C) {
<ide> ARG %s
<ide> RUN echo "Testing Build Args!"`, envKey, explicitProxyKey)
<ide> buildImage(imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal),
<ide> "--build-arg", fmt.Sprintf("%s=%s", explicitProxyKey, explicitProxyVal),
<ide> "--build-arg", proxy),
<del> withDockerfile(dockerfile),
<add> build.WithDockerfile(dockerfile),
<ide> ).Assert(c, icmd.Success)
<ide>
<ide> out, _ := dockerCmd(c, "history", "--no-trunc", imgName)
<ide> func (s *DockerSuite) TestBuildBuildTimeArgCacheHit(c *check.C) {
<ide> ARG %s
<ide> RUN echo $%s`, envKey, envKey)
<ide> buildImageSuccessfully(c, imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<add> build.WithDockerfile(dockerfile),
<ide> )
<ide> origImgID := getIDByName(c, imgName)
<ide>
<ide> imgNameCache := "bldargtestcachehit"
<ide> buildImageSuccessfully(c, imgNameCache,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<add> build.WithDockerfile(dockerfile),
<ide> )
<ide> newImgID := getIDByName(c, imgName)
<ide> if newImgID != origImgID {
<ide> func (s *DockerSuite) TestBuildBuildTimeArgCacheMissExtraArg(c *check.C) {
<ide> ARG %s
<ide> RUN echo $%s`, envKey, extraEnvKey, envKey)
<ide> buildImageSuccessfully(c, imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<add> build.WithDockerfile(dockerfile),
<ide> )
<ide> origImgID := getIDByName(c, imgName)
<ide>
<ide> imgNameCache := "bldargtestcachemiss"
<ide> buildImageSuccessfully(c, imgNameCache,
<del> withBuildFlags(
<add> cli.WithFlags(
<ide> "--build-arg", fmt.Sprintf("%s=%s", envKey, envVal),
<ide> "--build-arg", fmt.Sprintf("%s=%s", extraEnvKey, extraEnvVal),
<ide> ),
<del> withDockerfile(dockerfile),
<add> build.WithDockerfile(dockerfile),
<ide> )
<ide> newImgID := getIDByName(c, imgNameCache)
<ide>
<ide> func (s *DockerSuite) TestBuildBuildTimeArgCacheMissSameArgDiffVal(c *check.C) {
<ide> ARG %s
<ide> RUN echo $%s`, envKey, envKey)
<ide> buildImageSuccessfully(c, imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<add> build.WithDockerfile(dockerfile),
<ide> )
<ide> origImgID := getIDByName(c, imgName)
<ide>
<ide> imgNameCache := "bldargtestcachemiss"
<ide> buildImageSuccessfully(c, imgNameCache,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, newEnvVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, newEnvVal)),
<add> build.WithDockerfile(dockerfile),
<ide> )
<ide> newImgID := getIDByName(c, imgNameCache)
<ide> if newImgID == origImgID {
<ide> func (s *DockerSuite) TestBuildBuildTimeArgOverrideArgDefinedBeforeEnv(c *check.
<ide> `, envKey, envKey, envValOveride, envKey, envKey)
<ide>
<ide> result := buildImage(imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<add> build.WithDockerfile(dockerfile),
<ide> )
<ide> result.Assert(c, icmd.Success)
<ide> if strings.Count(result.Combined(), envValOveride) != 2 {
<ide> func (s *DockerSuite) TestBuildBuildTimeArgOverrideEnvDefinedBeforeArg(c *check.
<ide> CMD echo $%s
<ide> `, envKey, envValOveride, envKey, envKey, envKey)
<ide> result := buildImage(imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<add> build.WithDockerfile(dockerfile),
<ide> )
<ide> result.Assert(c, icmd.Success)
<ide> if strings.Count(result.Combined(), envValOveride) != 2 {
<ide> func (s *DockerSuite) TestBuildBuildTimeArgExpansion(c *check.C) {
<ide> volVal := "/testVol/"
<ide>
<ide> buildImageSuccessfully(c, imgName,
<del> withBuildFlags(
<add> cli.WithFlags(
<ide> "--build-arg", fmt.Sprintf("%s=%s", wdVar, wdVal),
<ide> "--build-arg", fmt.Sprintf("%s=%s", addVar, addVal),
<ide> "--build-arg", fmt.Sprintf("%s=%s", copyVar, copyVal),
<ide> func (s *DockerSuite) TestBuildBuildTimeArgExpansionOverride(c *check.C) {
<ide> RUN echo $%s
<ide> CMD echo $%s`, envKey, envKey, envValOveride, envKey1, envKey, envKey1, envKey1)
<ide> result := buildImage(imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<add> build.WithDockerfile(dockerfile),
<ide> )
<ide> result.Assert(c, icmd.Success)
<ide> if strings.Count(result.Combined(), envValOveride) != 2 {
<ide> func (s *DockerSuite) TestBuildBuildTimeArgUntrustedDefinedAfterUse(c *check.C)
<ide> ARG %s
<ide> CMD echo $%s`, envKey, envKey, envKey)
<ide> result := buildImage(imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<add> build.WithDockerfile(dockerfile),
<ide> )
<ide> result.Assert(c, icmd.Success)
<ide> if strings.Contains(result.Combined(), envVal) {
<ide> func (s *DockerSuite) TestBuildBuildTimeArgBuiltinArg(c *check.C) {
<ide> CMD echo $%s`, envKey, envKey)
<ide>
<ide> result := buildImage(imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<add> build.WithDockerfile(dockerfile),
<ide> )
<ide> result.Assert(c, icmd.Success)
<ide> if !strings.Contains(result.Combined(), envVal) {
<ide> func (s *DockerSuite) TestBuildBuildTimeArgDefaultOverride(c *check.C) {
<ide> RUN echo $%s
<ide> CMD echo $%s`, envKey, envVal, envKey, envKey, envKey, envKey)
<ide> result := buildImage(imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envValOveride)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envValOveride)),
<add> build.WithDockerfile(dockerfile),
<ide> )
<ide> result.Assert(c, icmd.Success)
<ide> if strings.Count(result.Combined(), envValOveride) != 1 {
<ide> func (s *DockerSuite) TestBuildBuildTimeArgUnconsumedArg(c *check.C) {
<ide> CMD echo $%s`, envKey, envKey)
<ide> warnStr := "[Warning] One or more build-args"
<ide> buildImage(imgName,
<del> withBuildFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<del> withDockerfile(dockerfile),
<add> cli.WithFlags("--build-arg", fmt.Sprintf("%s=%s", envKey, envVal)),
<add> build.WithDockerfile(dockerfile),
<ide> ).Assert(c, icmd.Expected{
<ide> Out: warnStr,
<ide> })
<ide> func (s *DockerSuite) TestBuildBuildTimeArgEnv(c *check.C) {
<ide> RUN [ "$FO10" == "" ]
<ide> `
<ide> result := buildImage("testbuildtimeargenv",
<del> withBuildFlags(
<add> cli.WithFlags(
<ide> "--build-arg", fmt.Sprintf("FOO1=fromcmd"),
<ide> "--build-arg", fmt.Sprintf("FOO2="),
<ide> "--build-arg", fmt.Sprintf("FOO3"), // set in env
<ide> func (s *DockerSuite) TestBuildBuildTimeArgEnv(c *check.C) {
<ide> "--build-arg", fmt.Sprintf("FOO9"), // should produce a warning
<ide> "--build-arg", fmt.Sprintf("FO10"), // not set in env, empty value
<ide> ),
<del> withEnvironmentVariales(append(os.Environ(),
<add> cli.WithEnvironmentVariables(append(os.Environ(),
<ide> "FOO1=fromenv",
<ide> "FOO2=fromenv",
<ide> "FOO3=fromenv")...),
<ide> func (s *DockerSuite) TestBuildBuildTimeArgQuotedValVariants(c *check.C) {
<ide> RUN [ "$%s" != "$%s" ]`, envKey, envKey1, envKey2, envKey3,
<ide> envKey, envKey2, envKey, envKey3, envKey1, envKey2, envKey1, envKey3,
<ide> envKey2, envKey3)
<del> buildImageSuccessfully(c, imgName, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, imgName, build.WithDockerfile(dockerfile))
<ide> }
<ide>
<ide> func (s *DockerSuite) TestBuildBuildTimeArgEmptyValVariants(c *check.C) {
<ide> func (s *DockerSuite) TestBuildBuildTimeArgEmptyValVariants(c *check.C) {
<ide> RUN [ "$%s" == "$%s" ]
<ide> RUN [ "$%s" == "$%s" ]
<ide> RUN [ "$%s" == "$%s" ]`, envKey, envKey1, envKey2, envKey, envKey1, envKey1, envKey2, envKey, envKey2)
<del> buildImageSuccessfully(c, imgName, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, imgName, build.WithDockerfile(dockerfile))
<ide> }
<ide>
<ide> func (s *DockerSuite) TestBuildBuildTimeArgDefintionWithNoEnvInjection(c *check.C) {
<ide> func (s *DockerSuite) TestBuildBuildTimeArgDefintionWithNoEnvInjection(c *check.
<ide> ARG %s
<ide> RUN env`, envKey)
<ide>
<del> result := buildImage(imgName, withDockerfile(dockerfile))
<add> result := buildImage(imgName, build.WithDockerfile(dockerfile))
<ide> result.Assert(c, icmd.Success)
<ide> if strings.Count(result.Combined(), envKey) != 1 {
<ide> c.Fatalf("unexpected number of occurrences of the arg in output: %q expected: 1", result.Combined())
<ide> func (s *DockerSuite) TestBuildNoNamedVolume(c *check.C) {
<ide> VOLUME ` + volName + `
<ide> RUN ls /foo/oops
<ide> `
<del> buildImage("test", withDockerfile(dockerFile)).Assert(c, icmd.Expected{
<add> buildImage("test", build.WithDockerfile(dockerFile)).Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> })
<ide> }
<ide> func (s *DockerSuite) TestBuildTagEvent(c *check.C) {
<ide> dockerFile := `FROM busybox
<ide> RUN echo events
<ide> `
<del> buildImageSuccessfully(c, "test", withDockerfile(dockerFile))
<add> buildImageSuccessfully(c, "test", build.WithDockerfile(dockerFile))
<ide>
<ide> until := daemonUnixTime(c)
<ide> out, _ := dockerCmd(c, "events", "--since", since, "--until", until, "--filter", "type=image")
<ide> func (s *DockerSuite) TestBuildMultipleTags(c *check.C) {
<ide> FROM busybox
<ide> MAINTAINER test-15780
<ide> `
<del> buildImageSuccessfully(c, "tag1", withBuildFlags("-t", "tag2:v2", "-t", "tag1:latest", "-t", "tag1"), withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, "tag1", cli.WithFlags("-t", "tag2:v2", "-t", "tag1:latest", "-t", "tag1"), build.WithDockerfile(dockerfile))
<ide>
<ide> id1 := getIDByName(c, "tag1")
<ide> id2 := getIDByName(c, "tag2:v2")
<ide> func (s *DockerSuite) TestBuildCacheRootSource(c *check.C) {
<ide>
<ide> // #19375
<ide> func (s *DockerSuite) TestBuildFailsGitNotCallable(c *check.C) {
<del> buildImage("gitnotcallable", withEnvironmentVariales("PATH="),
<del> withBuildContextPath("github.com/docker/v1.10-migrator.git")).Assert(c, icmd.Expected{
<add> buildImage("gitnotcallable", cli.WithEnvironmentVariables("PATH="),
<add> build.WithContextPath("github.com/docker/v1.10-migrator.git")).Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> Err: "unable to prepare context: unable to find 'git': ",
<ide> })
<ide>
<del> buildImage("gitnotcallable", withEnvironmentVariales("PATH="),
<del> withBuildContextPath("https://github.com/docker/v1.10-migrator.git")).Assert(c, icmd.Expected{
<add> buildImage("gitnotcallable", cli.WithEnvironmentVariables("PATH="),
<add> build.WithContextPath("https://github.com/docker/v1.10-migrator.git")).Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> Err: "unable to prepare context: unable to find 'git': ",
<ide> })
<ide> func (s *DockerSuite) TestBuildFailsGitNotCallable(c *check.C) {
<ide> func (s *DockerSuite) TestBuildWorkdirWindowsPath(c *check.C) {
<ide> testRequires(c, DaemonIsWindows)
<ide> name := "testbuildworkdirwindowspath"
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM `+testEnv.MinimalBaseImage()+`
<ide> RUN mkdir C:\\work
<ide> WORKDIR C:\\work
<ide> func (s *DockerSuite) TestBuildLabel(c *check.C) {
<ide> name := "testbuildlabel"
<ide> testLabel := "foo"
<ide>
<del> buildImageSuccessfully(c, name, withBuildFlags("--label", testLabel),
<del> withDockerfile(`
<add> buildImageSuccessfully(c, name, cli.WithFlags("--label", testLabel),
<add> build.WithDockerfile(`
<ide> FROM `+minimalBaseImage()+`
<ide> LABEL default foo
<ide> `))
<ide> func (s *DockerSuite) TestBuildLabel(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestBuildLabelOneNode(c *check.C) {
<ide> name := "testbuildlabel"
<del> buildImageSuccessfully(c, name, withBuildFlags("--label", "foo=bar"),
<del> withDockerfile("FROM busybox"))
<add> buildImageSuccessfully(c, name, cli.WithFlags("--label", "foo=bar"),
<add> build.WithDockerfile("FROM busybox"))
<ide>
<ide> var labels map[string]string
<ide> inspectFieldAndUnmarshall(c, name, "Config.Labels", &labels)
<ide> func (s *DockerSuite) TestBuildLabelCacheCommit(c *check.C) {
<ide> name := "testbuildlabelcachecommit"
<ide> testLabel := "foo"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM `+minimalBaseImage()+`
<ide> LABEL default foo
<ide> `))
<del> buildImageSuccessfully(c, name, withBuildFlags("--label", testLabel),
<del> withDockerfile(`
<add> buildImageSuccessfully(c, name, cli.WithFlags("--label", testLabel),
<add> build.WithDockerfile(`
<ide> FROM `+minimalBaseImage()+`
<ide> LABEL default foo
<ide> `))
<ide> func (s *DockerSuite) TestBuildLabelMultiple(c *check.C) {
<ide> labelArgs = append(labelArgs, "--label", k+"="+v)
<ide> }
<ide>
<del> buildImageSuccessfully(c, name, withBuildFlags(labelArgs...),
<del> withDockerfile(`
<add> buildImageSuccessfully(c, name, cli.WithFlags(labelArgs...),
<add> build.WithDockerfile(`
<ide> FROM `+minimalBaseImage()+`
<ide> LABEL default foo
<ide> `))
<ide> func (s *DockerRegistryAuthHtpasswdSuite) TestBuildFromAuthenticatedRegistry(c *
<ide> dockerCmd(c, "login", "-u", s.reg.Username(), "-p", s.reg.Password(), privateRegistryURL)
<ide> baseImage := privateRegistryURL + "/baseimage"
<ide>
<del> buildImageSuccessfully(c, baseImage, withDockerfile(`
<add> buildImageSuccessfully(c, baseImage, build.WithDockerfile(`
<ide> FROM busybox
<ide> ENV env1 val1
<ide> `))
<ide>
<ide> dockerCmd(c, "push", baseImage)
<ide> dockerCmd(c, "rmi", baseImage)
<ide>
<del> buildImageSuccessfully(c, baseImage, withDockerfile(fmt.Sprintf(`
<add> buildImageSuccessfully(c, baseImage, build.WithDockerfile(fmt.Sprintf(`
<ide> FROM %s
<ide> ENV env2 val2
<ide> `, baseImage)))
<ide> func (s *DockerSuite) TestBuildLabelsOverride(c *check.C) {
<ide> // Command line option labels will always override
<ide> name := "scratchy"
<ide> expected := `{"bar":"from-flag","foo":"from-flag"}`
<del> buildImageSuccessfully(c, name, withBuildFlags("--label", "foo=from-flag", "--label", "bar=from-flag"),
<del> withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, name, cli.WithFlags("--label", "foo=from-flag", "--label", "bar=from-flag"),
<add> build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> LABEL foo=from-dockerfile`))
<ide> res := inspectFieldJSON(c, name, "Config.Labels")
<ide> if res != expected {
<ide> func (s *DockerSuite) TestBuildLabelsOverride(c *check.C) {
<ide>
<ide> name = "from"
<ide> expected = `{"foo":"from-dockerfile"}`
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> LABEL foo from-dockerfile`))
<ide> res = inspectFieldJSON(c, name, "Config.Labels")
<ide> if res != expected {
<ide> func (s *DockerSuite) TestBuildLabelsOverride(c *check.C) {
<ide> // Command line option label will override even via `FROM`
<ide> name = "new"
<ide> expected = `{"bar":"from-dockerfile2","foo":"new"}`
<del> buildImageSuccessfully(c, name, withBuildFlags("--label", "foo=new"),
<del> withDockerfile(`FROM from
<add> buildImageSuccessfully(c, name, cli.WithFlags("--label", "foo=new"),
<add> build.WithDockerfile(`FROM from
<ide> LABEL bar from-dockerfile2`))
<ide> res = inspectFieldJSON(c, name, "Config.Labels")
<ide> if res != expected {
<ide> func (s *DockerSuite) TestBuildLabelsOverride(c *check.C) {
<ide> // will be treated as --label foo="", --label bar=""
<ide> name = "scratchy2"
<ide> expected = `{"bar":"","foo":""}`
<del> buildImageSuccessfully(c, name, withBuildFlags("--label", "foo", "--label", "bar="),
<del> withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, name, cli.WithFlags("--label", "foo", "--label", "bar="),
<add> build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> LABEL foo=from-dockerfile`))
<ide> res = inspectFieldJSON(c, name, "Config.Labels")
<ide> if res != expected {
<ide> func (s *DockerSuite) TestBuildLabelsOverride(c *check.C) {
<ide> // This time is for inherited images
<ide> name = "new2"
<ide> expected = `{"bar":"","foo":""}`
<del> buildImageSuccessfully(c, name, withBuildFlags("--label", "foo=", "--label", "bar"),
<del> withDockerfile(`FROM from
<add> buildImageSuccessfully(c, name, cli.WithFlags("--label", "foo=", "--label", "bar"),
<add> build.WithDockerfile(`FROM from
<ide> LABEL bar from-dockerfile2`))
<ide> res = inspectFieldJSON(c, name, "Config.Labels")
<ide> if res != expected {
<ide> func (s *DockerSuite) TestBuildLabelsOverride(c *check.C) {
<ide> // Command line option labels with only `FROM`
<ide> name = "scratchy"
<ide> expected = `{"bar":"from-flag","foo":"from-flag"}`
<del> buildImageSuccessfully(c, name, withBuildFlags("--label", "foo=from-flag", "--label", "bar=from-flag"),
<del> withDockerfile(`FROM `+minimalBaseImage()))
<add> buildImageSuccessfully(c, name, cli.WithFlags("--label", "foo=from-flag", "--label", "bar=from-flag"),
<add> build.WithDockerfile(`FROM `+minimalBaseImage()))
<ide> res = inspectFieldJSON(c, name, "Config.Labels")
<ide> if res != expected {
<ide> c.Fatalf("Labels %s, expected %s", res, expected)
<ide> func (s *DockerSuite) TestBuildLabelsOverride(c *check.C) {
<ide> // Command line option labels with env var
<ide> name = "scratchz"
<ide> expected = `{"bar":"$PATH"}`
<del> buildImageSuccessfully(c, name, withBuildFlags("--label", "bar=$PATH"),
<del> withDockerfile(`FROM `+minimalBaseImage()))
<add> buildImageSuccessfully(c, name, cli.WithFlags("--label", "bar=$PATH"),
<add> build.WithDockerfile(`FROM `+minimalBaseImage()))
<ide> res = inspectFieldJSON(c, name, "Config.Labels")
<ide> if res != expected {
<ide> c.Fatalf("Labels %s, expected %s", res, expected)
<ide> func (s *DockerSuite) TestBuildLabelsOverride(c *check.C) {
<ide> // Test case for #22855
<ide> func (s *DockerSuite) TestBuildDeleteCommittedFile(c *check.C) {
<ide> name := "test-delete-committed-file"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> RUN echo test > file
<ide> RUN test -e file
<ide> RUN rm file
<ide> func (s *DockerSuite) TestBuildWithUTF8BOMDockerignore(c *check.C) {
<ide> func (s *DockerSuite) TestBuildShellUpdatesConfig(c *check.C) {
<ide> name := "testbuildshellupdatesconfig"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> SHELL ["foo", "-bar"]`))
<ide> expected := `["foo","-bar","#(nop) ","SHELL [foo -bar]"]`
<ide> res := inspectFieldJSON(c, name, "ContainerConfig.Cmd")
<ide> func (s *DockerSuite) TestBuildShellUpdatesConfig(c *check.C) {
<ide> func (s *DockerSuite) TestBuildShellMultiple(c *check.C) {
<ide> name := "testbuildshellmultiple"
<ide>
<del> result := buildImage(name, withDockerfile(`FROM busybox
<add> result := buildImage(name, build.WithDockerfile(`FROM busybox
<ide> RUN echo defaultshell
<ide> SHELL ["echo"]
<ide> RUN echoshell
<ide> func (s *DockerSuite) TestBuildShellMultiple(c *check.C) {
<ide> func (s *DockerSuite) TestBuildShellEntrypoint(c *check.C) {
<ide> name := "testbuildshellentrypoint"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> SHELL ["ls"]
<ide> ENTRYPOINT -l`))
<ide> // A container started from the image uses the shell-form ENTRYPOINT.
<ide> func (s *DockerSuite) TestBuildShellEntrypoint(c *check.C) {
<ide> // #22489 Shell test to confirm shell is inherited in a subsequent build
<ide> func (s *DockerSuite) TestBuildShellInherited(c *check.C) {
<ide> name1 := "testbuildshellinherited1"
<del> buildImageSuccessfully(c, name1, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name1, build.WithDockerfile(`FROM busybox
<ide> SHELL ["ls"]`))
<ide> name2 := "testbuildshellinherited2"
<del> buildImage(name2, withDockerfile(`FROM `+name1+`
<add> buildImage(name2, build.WithDockerfile(`FROM `+name1+`
<ide> RUN -l`)).Assert(c, icmd.Expected{
<ide> // ls -l has "total " followed by some number in it, ls without -l does not.
<ide> Out: "total ",
<ide> func (s *DockerSuite) TestBuildShellInherited(c *check.C) {
<ide> func (s *DockerSuite) TestBuildShellNotJSON(c *check.C) {
<ide> name := "testbuildshellnotjson"
<ide>
<del> buildImage(name, withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImage(name, build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> sHeLl exec -form`, // Casing explicit to ensure error is upper-cased.
<ide> )).Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> func (s *DockerSuite) TestBuildShellNotJSON(c *check.C) {
<ide> func (s *DockerSuite) TestBuildShellWindowsPowershell(c *check.C) {
<ide> testRequires(c, DaemonIsWindows)
<ide> name := "testbuildshellpowershell"
<del> buildImage(name, withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImage(name, build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> SHELL ["powershell", "-command"]
<ide> RUN Write-Host John`)).Assert(c, icmd.Expected{
<ide> Out: "\nJohn\n",
<ide> func (s *DockerSuite) TestBuildShellWindowsPowershell(c *check.C) {
<ide> func (s *DockerSuite) TestBuildEscapeNotBackslashWordTest(c *check.C) {
<ide> testRequires(c, DaemonIsWindows)
<ide> name := "testbuildescapenotbackslashwordtesta"
<del> buildImage(name, withDockerfile(`# escape= `+"`"+`
<add> buildImage(name, build.WithDockerfile(`# escape= `+"`"+`
<ide> FROM `+minimalBaseImage()+`
<ide> WORKDIR c:\windows
<ide> RUN dir /w`)).Assert(c, icmd.Expected{
<ide> Out: "[System32]",
<ide> })
<ide>
<ide> name = "testbuildescapenotbackslashwordtestb"
<del> buildImage(name, withDockerfile(`# escape= `+"`"+`
<add> buildImage(name, build.WithDockerfile(`# escape= `+"`"+`
<ide> FROM `+minimalBaseImage()+`
<ide> SHELL ["powershell.exe"]
<ide> WORKDIR c:\foo
<ide> func (s *DockerSuite) TestBuildEscapeNotBackslashWordTest(c *check.C) {
<ide> func (s *DockerSuite) TestBuildCmdShellArgsEscaped(c *check.C) {
<ide> testRequires(c, DaemonIsWindows)
<ide> name := "testbuildcmdshellescaped"
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM `+minimalBaseImage()+`
<ide> CMD "ipconfig"
<ide> `))
<ide> func (s *DockerSuite) TestBuildCmdShellArgsEscaped(c *check.C) {
<ide> func (s *DockerSuite) TestBuildStepsWithProgress(c *check.C) {
<ide> name := "testbuildstepswithprogress"
<ide> totalRun := 5
<del> result := buildImage(name, withDockerfile("FROM busybox\n"+strings.Repeat("RUN echo foo\n", totalRun)))
<add> result := buildImage(name, build.WithDockerfile("FROM busybox\n"+strings.Repeat("RUN echo foo\n", totalRun)))
<ide> result.Assert(c, icmd.Success)
<ide> c.Assert(result.Combined(), checker.Contains, fmt.Sprintf("Step 1/%d : FROM busybox", 1+totalRun))
<ide> for i := 2; i <= 1+totalRun; i++ {
<ide> func (s *DockerSuite) TestBuildWithFailure(c *check.C) {
<ide>
<ide> // First test case can only detect `nobody` in runtime so all steps will show up
<ide> dockerfile := "FROM busybox\nRUN nobody"
<del> result := buildImage(name, withDockerfile(dockerfile))
<add> result := buildImage(name, build.WithDockerfile(dockerfile))
<ide> c.Assert(result.Error, checker.NotNil)
<ide> c.Assert(result.Stdout(), checker.Contains, "Step 1/2 : FROM busybox")
<ide> c.Assert(result.Stdout(), checker.Contains, "Step 2/2 : RUN nobody")
<ide>
<ide> // Second test case `FFOM` should have been detected before build runs so no steps
<ide> dockerfile = "FFOM nobody\nRUN nobody"
<del> result = buildImage(name, withDockerfile(dockerfile))
<add> result = buildImage(name, build.WithDockerfile(dockerfile))
<ide> c.Assert(result.Error, checker.NotNil)
<ide> c.Assert(result.Stdout(), checker.Not(checker.Contains), "Step 1/2 : FROM busybox")
<ide> c.Assert(result.Stdout(), checker.Not(checker.Contains), "Step 2/2 : RUN nobody")
<ide> func (s *DockerSuite) TestBuildCacheFromEqualDiffIDsLength(c *check.C) {
<ide> id1 := getIDByName(c, "build1")
<ide>
<ide> // rebuild with cache-from
<del> result := buildImage("build2", withBuildFlags("--cache-from=build1"), withExternalBuildContext(ctx))
<add> result := buildImage("build2", cli.WithFlags("--cache-from=build1"), withExternalBuildContext(ctx))
<ide> result.Assert(c, icmd.Success)
<ide> id2 := getIDByName(c, "build2")
<ide> c.Assert(id1, checker.Equals, id2)
<ide> func (s *DockerSuite) TestBuildCacheFrom(c *check.C) {
<ide> id1 := getIDByName(c, "build1")
<ide>
<ide> // rebuild with cache-from
<del> result := buildImage("build2", withBuildFlags("--cache-from=build1"), withExternalBuildContext(ctx))
<add> result := buildImage("build2", cli.WithFlags("--cache-from=build1"), withExternalBuildContext(ctx))
<ide> result.Assert(c, icmd.Success)
<ide> id2 := getIDByName(c, "build2")
<ide> c.Assert(id1, checker.Equals, id2)
<ide> c.Assert(strings.Count(result.Combined(), "Using cache"), checker.Equals, 3)
<ide> dockerCmd(c, "rmi", "build2")
<ide>
<ide> // no cache match with unknown source
<del> result = buildImage("build2", withBuildFlags("--cache-from=nosuchtag"), withExternalBuildContext(ctx))
<add> result = buildImage("build2", cli.WithFlags("--cache-from=nosuchtag"), withExternalBuildContext(ctx))
<ide> result.Assert(c, icmd.Success)
<ide> id2 = getIDByName(c, "build2")
<ide> c.Assert(id1, checker.Not(checker.Equals), id2)
<ide> func (s *DockerSuite) TestBuildCacheFrom(c *check.C) {
<ide> c.Assert(strings.TrimSpace(parentID), checker.Equals, "")
<ide>
<ide> // cache still applies without parents
<del> result = buildImage("build2", withBuildFlags("--cache-from=build1"), withExternalBuildContext(ctx))
<add> result = buildImage("build2", cli.WithFlags("--cache-from=build1"), withExternalBuildContext(ctx))
<ide> result.Assert(c, icmd.Success)
<ide> id2 = getIDByName(c, "build2")
<ide> c.Assert(id1, checker.Equals, id2)
<ide> c.Assert(strings.Count(result.Combined(), "Using cache"), checker.Equals, 3)
<ide> history1, _ := dockerCmd(c, "history", "-q", "build2")
<ide>
<ide> // Retry, no new intermediate images
<del> result = buildImage("build3", withBuildFlags("--cache-from=build1"), withExternalBuildContext(ctx))
<add> result = buildImage("build3", cli.WithFlags("--cache-from=build1"), withExternalBuildContext(ctx))
<ide> result.Assert(c, icmd.Success)
<ide> id3 := getIDByName(c, "build3")
<ide> c.Assert(id1, checker.Equals, id3)
<ide> func (s *DockerSuite) TestBuildCacheFrom(c *check.C) {
<ide> err = ioutil.WriteFile(filepath.Join(ctx.Dir, "Dockerfile"), []byte(dockerfile), 0644)
<ide> c.Assert(err, checker.IsNil)
<ide>
<del> result = buildImage("build2", withBuildFlags("--cache-from=build1"), withExternalBuildContext(ctx))
<add> result = buildImage("build2", cli.WithFlags("--cache-from=build1"), withExternalBuildContext(ctx))
<ide> result.Assert(c, icmd.Success)
<ide> id2 = getIDByName(c, "build2")
<ide> c.Assert(id1, checker.Not(checker.Equals), id2)
<ide> func (s *DockerSuite) TestBuildCacheFrom(c *check.C) {
<ide> func (s *DockerSuite) TestBuildNetNone(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuildnetnone"
<del> buildImage(name, withBuildFlags("--network=none"), withDockerfile(`
<add> buildImage(name, cli.WithFlags("--network=none"), build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN ping -c 1 8.8.8.8
<ide> `)).Assert(c, icmd.Expected{
<ide> func (s *DockerSuite) TestBuildNetContainer(c *check.C) {
<ide> id, _ := dockerCmd(c, "run", "--hostname", "foobar", "-d", "busybox", "nc", "-ll", "-p", "1234", "-e", "hostname")
<ide>
<ide> name := "testbuildnetcontainer"
<del> buildImageSuccessfully(c, name, withBuildFlags("--network=container:"+strings.TrimSpace(id)),
<del> withDockerfile(`
<add> buildImageSuccessfully(c, name, cli.WithFlags("--network=container:"+strings.TrimSpace(id)),
<add> build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN nc localhost 1234 > /otherhost
<ide> `))
<ide> func (s *DockerSuite) TestBuildWithExtraHost(c *check.C) {
<ide>
<ide> name := "testbuildwithextrahost"
<ide> buildImageSuccessfully(c, name,
<del> withBuildFlags(
<add> cli.WithFlags(
<ide> "--add-host", "foo:127.0.0.1",
<ide> "--add-host", "bar:127.0.0.1",
<ide> ),
<del> withDockerfile(`
<add> build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN ping -c 1 foo
<ide> RUN ping -c 1 bar
<ide> func (s *DockerSuite) TestBuildWithExtraHostInvalidFormat(c *check.C) {
<ide> }
<ide>
<ide> for _, tc := range testCases {
<del> result := buildImage(tc.testName, withBuildFlags(tc.buildFlag), withDockerfile(tc.dockerfile))
<add> result := buildImage(tc.testName, cli.WithFlags(tc.buildFlag), build.WithDockerfile(tc.dockerfile))
<ide> result.Assert(c, icmd.Expected{
<ide> ExitCode: 125,
<ide> })
<ide> func (s *DockerSuite) TestBuildSquashParent(c *check.C) {
<ide> `
<ide> // build and get the ID that we can use later for history comparison
<ide> name := "test"
<del> buildImageSuccessfully(c, name, withDockerfile(dockerFile))
<add> buildImageSuccessfully(c, name, build.WithDockerfile(dockerFile))
<ide> origID := getIDByName(c, name)
<ide>
<ide> // build with squash
<del> buildImageSuccessfully(c, name, withBuildFlags("--squash"), withDockerfile(dockerFile))
<add> buildImageSuccessfully(c, name, cli.WithFlags("--squash"), build.WithDockerfile(dockerFile))
<ide> id := getIDByName(c, name)
<ide>
<ide> out, _ := dockerCmd(c, "run", "--rm", id, "/bin/sh", "-c", "cat /hello")
<ide> func (s *DockerSuite) TestBuildSquashParent(c *check.C) {
<ide> func (s *DockerSuite) TestBuildContChar(c *check.C) {
<ide> name := "testbuildcontchar"
<ide>
<del> buildImage(name, withDockerfile(`FROM busybox\`)).Assert(c, icmd.Expected{
<add> buildImage(name, build.WithDockerfile(`FROM busybox\`)).Assert(c, icmd.Expected{
<ide> Out: "Step 1/1 : FROM busybox",
<ide> })
<ide>
<del> result := buildImage(name, withDockerfile(`FROM busybox
<add> result := buildImage(name, build.WithDockerfile(`FROM busybox
<ide> RUN echo hi \`))
<ide> result.Assert(c, icmd.Success)
<ide> c.Assert(result.Combined(), checker.Contains, "Step 1/2 : FROM busybox")
<ide> c.Assert(result.Combined(), checker.Contains, "Step 2/2 : RUN echo hi\n")
<ide>
<del> result = buildImage(name, withDockerfile(`FROM busybox
<add> result = buildImage(name, build.WithDockerfile(`FROM busybox
<ide> RUN echo hi \\`))
<ide> result.Assert(c, icmd.Success)
<ide> c.Assert(result.Combined(), checker.Contains, "Step 1/2 : FROM busybox")
<ide> c.Assert(result.Combined(), checker.Contains, "Step 2/2 : RUN echo hi \\\n")
<ide>
<del> result = buildImage(name, withDockerfile(`FROM busybox
<add> result = buildImage(name, build.WithDockerfile(`FROM busybox
<ide> RUN echo hi \\\`))
<ide> result.Assert(c, icmd.Success)
<ide> c.Assert(result.Combined(), checker.Contains, "Step 1/2 : FROM busybox")
<ide> func (s *DockerSuite) TestBuildOpaqueDirectory(c *check.C) {
<ide> `
<ide> // Test that build succeeds, last command fails if opaque directory
<ide> // was not handled correctly
<del> buildImageSuccessfully(c, "testopaquedirectory", withDockerfile(dockerFile))
<add> buildImageSuccessfully(c, "testopaquedirectory", build.WithDockerfile(dockerFile))
<ide> }
<ide>
<ide> // Windows test for USER in dockerfile
<ide> func (s *DockerSuite) TestBuildWindowsUser(c *check.C) {
<ide> testRequires(c, DaemonIsWindows)
<ide> name := "testbuildwindowsuser"
<del> buildImage(name, withDockerfile(`FROM `+testEnv.MinimalBaseImage()+`
<add> buildImage(name, build.WithDockerfile(`FROM `+testEnv.MinimalBaseImage()+`
<ide> RUN net user user /add
<ide> USER user
<ide> RUN set username
<ide> RUN ["cat", "/foo/file"]
<ide> func (s *DockerSuite) TestBuildWindowsEnvCaseInsensitive(c *check.C) {
<ide> testRequires(c, DaemonIsWindows)
<ide> name := "testbuildwindowsenvcaseinsensitive"
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM `+testEnv.MinimalBaseImage()+`
<ide> ENV FOO=bar foo=baz
<ide> `))
<ide> func (s *DockerSuite) TestBuildWindowsEnvCaseInsensitive(c *check.C) {
<ide> // Test case for 29667
<ide> func (s *DockerSuite) TestBuildWorkdirImageCmd(c *check.C) {
<ide> image := "testworkdirimagecmd"
<del> buildImageSuccessfully(c, image, withDockerfile(`
<add> buildImageSuccessfully(c, image, build.WithDockerfile(`
<ide> FROM busybox
<ide> WORKDIR /foo/bar
<ide> `))
<ide> WORKDIR /foo/bar
<ide> c.Assert(strings.TrimSpace(out), checker.Equals, lookingFor)
<ide>
<ide> image = "testworkdirlabelimagecmd"
<del> buildImageSuccessfully(c, image, withDockerfile(`
<add> buildImageSuccessfully(c, image, build.WithDockerfile(`
<ide> FROM busybox
<ide> WORKDIR /foo/bar
<ide> LABEL a=b
<ide> func (s *DockerSuite) TestBuildWorkdirCmd(c *check.C) {
<ide> FROM busybox
<ide> WORKDIR /
<ide> `
<del> buildImageSuccessfully(c, name, withDockerfile(dockerFile))
<del> result := buildImage(name, withDockerfile(dockerFile))
<add> buildImageSuccessfully(c, name, build.WithDockerfile(dockerFile))
<add> result := buildImage(name, build.WithDockerfile(dockerFile))
<ide> result.Assert(c, icmd.Success)
<ide> c.Assert(strings.Count(result.Combined(), "Using cache"), checker.Equals, 1)
<ide> }
<ide>
<ide> // FIXME(vdemeester) should be a unit test
<ide> func (s *DockerSuite) TestBuildLineErrorOnBuild(c *check.C) {
<ide> name := "test_build_line_error_onbuild"
<del> buildImage(name, withDockerfile(`FROM busybox
<add> buildImage(name, build.WithDockerfile(`FROM busybox
<ide> ONBUILD
<ide> `)).Assert(c, icmd.Expected{
<ide> ExitCode: 1,
<ide> func (s *DockerSuite) TestBuildLineErrorOnBuild(c *check.C) {
<ide> // FIXME(vdemeester) should be a unit test
<ide> func (s *DockerSuite) TestBuildLineErrorUknownInstruction(c *check.C) {
<ide> name := "test_build_line_error_unknown_instruction"
<del> buildImage(name, withDockerfile(`FROM busybox
<add> buildImage(name, build.WithDockerfile(`FROM busybox
<ide> RUN echo hello world
<ide> NOINSTRUCTION echo ba
<ide> RUN echo hello
<ide> func (s *DockerSuite) TestBuildLineErrorUknownInstruction(c *check.C) {
<ide> // FIXME(vdemeester) should be a unit test
<ide> func (s *DockerSuite) TestBuildLineErrorWithEmptyLines(c *check.C) {
<ide> name := "test_build_line_error_with_empty_lines"
<del> buildImage(name, withDockerfile(`
<add> buildImage(name, build.WithDockerfile(`
<ide> FROM busybox
<ide>
<ide> RUN echo hello world
<ide> func (s *DockerSuite) TestBuildLineErrorWithEmptyLines(c *check.C) {
<ide> // FIXME(vdemeester) should be a unit test
<ide> func (s *DockerSuite) TestBuildLineErrorWithComments(c *check.C) {
<ide> name := "test_build_line_error_with_comments"
<del> buildImage(name, withDockerfile(`FROM busybox
<add> buildImage(name, build.WithDockerfile(`FROM busybox
<ide> # This will print hello world
<ide> # and then ba
<ide> RUN echo hello world
<ide><path>integration-cli/docker_cli_by_digest_test.go
<ide> import (
<ide> "github.com/docker/distribution/manifest/schema2"
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/pkg/stringutils"
<ide> "github.com/go-check/check"
<ide> "github.com/opencontainers/go-digest"
<ide> func (s *DockerRegistrySuite) TestBuildByDigest(c *check.C) {
<ide>
<ide> // do the build
<ide> name := "buildbydigest"
<del> buildImageSuccessfully(c, name, withDockerfile(fmt.Sprintf(
<add> buildImageSuccessfully(c, name, build.WithDockerfile(fmt.Sprintf(
<ide> `FROM %s
<ide> CMD ["/bin/echo", "Hello World"]`, imageReference)))
<ide> c.Assert(err, checker.IsNil)
<ide> func (s *DockerRegistrySuite) TestPsListContainersFilterAncestorImageByDigest(c
<ide>
<ide> // build an image from it
<ide> imageName1 := "images_ps_filter_test"
<del> buildImageSuccessfully(c, imageName1, withDockerfile(fmt.Sprintf(
<add> buildImageSuccessfully(c, imageName1, build.WithDockerfile(fmt.Sprintf(
<ide> `FROM %s
<ide> LABEL match me 1`, imageReference)))
<ide>
<ide><path>integration-cli/docker_cli_commit_test.go
<ide> import (
<ide> "strings"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli"
<ide> "github.com/go-check/check"
<ide> )
<ide>
<ide> func (s *DockerSuite) TestCommitAfterContainerIsDone(c *check.C) {
<del> out, _ := dockerCmd(c, "run", "-i", "-a", "stdin", "busybox", "echo", "foo")
<add> out := cli.DockerCmd(c, "run", "-i", "-a", "stdin", "busybox", "echo", "foo").Combined()
<ide>
<ide> cleanedContainerID := strings.TrimSpace(out)
<ide>
<del> dockerCmd(c, "wait", cleanedContainerID)
<add> cli.DockerCmd(c, "wait", cleanedContainerID)
<ide>
<del> out, _ = dockerCmd(c, "commit", cleanedContainerID)
<add> out = cli.DockerCmd(c, "commit", cleanedContainerID).Combined()
<ide>
<ide> cleanedImageID := strings.TrimSpace(out)
<ide>
<del> dockerCmd(c, "inspect", cleanedImageID)
<add> cli.DockerCmd(c, "inspect", cleanedImageID)
<ide> }
<ide>
<ide> func (s *DockerSuite) TestCommitWithoutPause(c *check.C) {
<ide><path>integration-cli/docker_cli_create_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/pkg/stringid"
<ide> "github.com/docker/docker/pkg/testutil"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> func (s *DockerSuite) TestCreateLabels(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestCreateLabelFromImage(c *check.C) {
<ide> imageName := "testcreatebuildlabel"
<del> buildImageSuccessfully(c, imageName, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, imageName, build.WithDockerfile(`FROM busybox
<ide> LABEL k1=v1 k2=v2`))
<ide>
<ide> name := "test_create_labels_from_image"
<ide> func (s *DockerSuite) TestCreateModeIpcContainer(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestCreateByImageID(c *check.C) {
<ide> imageName := "testcreatebyimageid"
<del> buildImageSuccessfully(c, imageName, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, imageName, build.WithDockerfile(`FROM busybox
<ide> MAINTAINER dockerio`))
<ide> imageID := getIDByName(c, imageName)
<ide> truncatedImageID := stringid.TruncateID(imageID)
<ide><path>integration-cli/docker_cli_daemon_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli"
<ide> "github.com/docker/docker/integration-cli/daemon"
<ide> "github.com/docker/docker/pkg/mount"
<ide> "github.com/docker/docker/pkg/stringid"
<ide> func (s *DockerDaemonSuite) TestLegacyDaemonCommand(c *check.C) {
<ide> func (s *DockerDaemonSuite) TestDaemonRestartWithRunningContainersPorts(c *check.C) {
<ide> s.d.StartWithBusybox(c)
<ide>
<del> if out, err := s.d.Cmd("run", "-d", "--name", "top1", "-p", "1234:80", "--restart", "always", "busybox:latest", "top"); err != nil {
<del> c.Fatalf("Could not run top1: err=%v\n%s", err, out)
<del> }
<del> // --restart=no by default
<del> if out, err := s.d.Cmd("run", "-d", "--name", "top2", "-p", "80", "busybox:latest", "top"); err != nil {
<del> c.Fatalf("Could not run top2: err=%v\n%s", err, out)
<del> }
<add> cli.Docker(
<add> cli.Cmd("run", "-d", "--name", "top1", "-p", "1234:80", "--restart", "always", "busybox:latest", "top"),
<add> cli.Daemon(s.d),
<add> ).Assert(c, icmd.Success)
<add>
<add> cli.Docker(
<add> cli.Cmd("run", "-d", "--name", "top2", "-p", "80", "busybox:latest", "top"),
<add> cli.Daemon(s.d),
<add> ).Assert(c, icmd.Success)
<ide>
<ide> testRun := func(m map[string]bool, prefix string) {
<ide> var format string
<ide> for cont, shouldRun := range m {
<del> out, err := s.d.Cmd("ps")
<del> if err != nil {
<del> c.Fatalf("Could not run ps: err=%v\n%q", err, out)
<del> }
<add> out := cli.Docker(cli.Cmd("ps"), cli.Daemon(s.d)).Assert(c, icmd.Success).Combined()
<ide> if shouldRun {
<ide> format = "%scontainer %q is not running"
<ide> } else {
<ide><path>integration-cli/docker_cli_events_test.go
<ide> import (
<ide> eventtypes "github.com/docker/docker/api/types/events"
<ide> eventstestutils "github.com/docker/docker/daemon/events/testutils"
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/integration-cli/request"
<ide> "github.com/docker/docker/pkg/testutil"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> func (s *DockerSuite) TestEventsFilterImageLabels(c *check.C) {
<ide> label := "io.docker.testing=image"
<ide>
<ide> // Build a test image.
<del> buildImageSuccessfully(c, name, withDockerfile(fmt.Sprintf(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(fmt.Sprintf(`
<ide> FROM busybox:latest
<ide> LABEL %s`, label)))
<ide> dockerCmd(c, "tag", name, "labelfiltertest:tag1")
<ide> func (s *DockerSuite) TestEventsCommit(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestEventsCopy(c *check.C) {
<ide> // Build a test image.
<del> buildImageSuccessfully(c, "cpimg", withDockerfile(`
<add> buildImageSuccessfully(c, "cpimg", build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN echo HI > /file`))
<ide> id := getIDByName(c, "cpimg")
<ide> func (s *DockerSuite) TestEventsFilterType(c *check.C) {
<ide> label := "io.docker.testing=image"
<ide>
<ide> // Build a test image.
<del> buildImageSuccessfully(c, name, withDockerfile(fmt.Sprintf(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(fmt.Sprintf(`
<ide> FROM busybox:latest
<ide> LABEL %s`, label)))
<ide> dockerCmd(c, "tag", name, "labelfiltertest:tag1")
<ide><path>integration-cli/docker_cli_events_unix_test.go
<ide> import (
<ide> "unicode"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/go-check/check"
<ide> "github.com/kr/pty"
<ide> )
<ide> func (s *DockerSuite) TestEventsOOMDisableTrue(c *check.C) {
<ide> case <-time.After(20 * time.Second):
<ide> observer.CheckEventError(c, containerID, "oom", matcher)
<ide> case <-testActions["oom"]:
<del> // ignore, done
<add> // ignore, done
<ide> case errRun := <-errChan:
<ide> if errRun != nil {
<ide> c.Fatalf("%v", errRun)
<ide> func (s *DockerSuite) TestEventsImageUntagDelete(c *check.C) {
<ide> defer observer.Stop()
<ide>
<ide> name := "testimageevents"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM scratch
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM scratch
<ide> MAINTAINER "docker"`))
<ide> imageID := getIDByName(c, name)
<ide> c.Assert(deleteImages(name), checker.IsNil)
<ide><path>integration-cli/docker_cli_exec_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/integration-cli/request"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> "github.com/go-check/check"
<ide> func (s *DockerSuite) TestExecWithImageUser(c *check.C) {
<ide> // Not applicable on Windows
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testbuilduser"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> RUN echo 'dockerio:x:1001:1001::/bin:/bin/false' >> /etc/passwd
<ide> USER dockerio`))
<ide> dockerCmd(c, "run", "-d", "--name", "dockerioexec", name, "top")
<ide><path>integration-cli/docker_cli_health_test.go
<ide> package main
<ide>
<ide> import (
<ide> "encoding/json"
<del>
<ide> "strconv"
<ide> "strings"
<ide> "time"
<ide>
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/go-check/check"
<ide> )
<ide>
<ide> func (s *DockerSuite) TestHealth(c *check.C) {
<ide> testRequires(c, DaemonIsLinux) // busybox doesn't work on Windows
<ide>
<ide> imageName := "testhealth"
<del> buildImageSuccessfully(c, imageName, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, imageName, build.WithDockerfile(`FROM busybox
<ide> RUN echo OK > /status
<ide> CMD ["/bin/sleep", "120"]
<ide> STOPSIGNAL SIGKILL
<ide> func (s *DockerSuite) TestHealth(c *check.C) {
<ide>
<ide> // Inspect the options
<ide> out, _ = dockerCmd(c, "inspect",
<del> "--format=timeout={{.Config.Healthcheck.Timeout}} "+
<del> "interval={{.Config.Healthcheck.Interval}} "+
<del> "retries={{.Config.Healthcheck.Retries}} "+
<del> "test={{.Config.Healthcheck.Test}}", name)
<add> "--format=timeout={{.Config.Healthcheck.Timeout}} interval={{.Config.Healthcheck.Interval}} retries={{.Config.Healthcheck.Retries}} test={{.Config.Healthcheck.Test}}", name)
<ide> c.Check(out, checker.Equals, "timeout=30s interval=1s retries=0 test=[CMD-SHELL cat /status]\n")
<ide>
<ide> // Start
<ide> func (s *DockerSuite) TestHealth(c *check.C) {
<ide> dockerCmd(c, "rm", "noh")
<ide>
<ide> // Disable the check with a new build
<del> buildImageSuccessfully(c, "no_healthcheck", withDockerfile(`FROM testhealth
<add> buildImageSuccessfully(c, "no_healthcheck", build.WithDockerfile(`FROM testhealth
<ide> HEALTHCHECK NONE`))
<ide>
<ide> out, _ = dockerCmd(c, "inspect", "--format={{.ContainerConfig.Healthcheck.Test}}", "no_healthcheck")
<ide> func (s *DockerSuite) TestHealth(c *check.C) {
<ide> dockerCmd(c, "rm", "-f", "test")
<ide>
<ide> // Check JSON-format
<del> buildImageSuccessfully(c, imageName, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, imageName, build.WithDockerfile(`FROM busybox
<ide> RUN echo OK > /status
<ide> CMD ["/bin/sleep", "120"]
<ide> STOPSIGNAL SIGKILL
<ide><path>integration-cli/docker_cli_history_test.go
<ide> import (
<ide> "strings"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/go-check/check"
<ide> )
<ide>
<ide> // This is a heisen-test. Because the created timestamp of images and the behavior of
<ide> // sort is not predictable, it doesn't always fail.
<ide> func (s *DockerSuite) TestBuildHistory(c *check.C) {
<ide> name := "testbuildhistory"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> LABEL label.A="A"
<ide> LABEL label.B="B"
<ide> LABEL label.C="C"
<ide><path>integration-cli/docker_cli_images_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/pkg/stringid"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> "github.com/go-check/check"
<ide> func (s *DockerSuite) TestImagesEnsureImageWithBadTagIsNotListed(c *check.C) {
<ide> }
<ide>
<ide> func (s *DockerSuite) TestImagesOrderedByCreationDate(c *check.C) {
<del> buildImageSuccessfully(c, "order:test_a", withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, "order:test_a", build.WithDockerfile(`FROM busybox
<ide> MAINTAINER dockerio1`))
<ide> id1 := getIDByName(c, "order:test_a")
<ide> time.Sleep(1 * time.Second)
<del> buildImageSuccessfully(c, "order:test_c", withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, "order:test_c", build.WithDockerfile(`FROM busybox
<ide> MAINTAINER dockerio2`))
<ide> id2 := getIDByName(c, "order:test_c")
<ide> time.Sleep(1 * time.Second)
<del> buildImageSuccessfully(c, "order:test_b", withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, "order:test_b", build.WithDockerfile(`FROM busybox
<ide> MAINTAINER dockerio3`))
<ide> id3 := getIDByName(c, "order:test_b")
<ide>
<ide> func (s *DockerSuite) TestImagesFilterLabelMatch(c *check.C) {
<ide> imageName1 := "images_filter_test1"
<ide> imageName2 := "images_filter_test2"
<ide> imageName3 := "images_filter_test3"
<del> buildImageSuccessfully(c, imageName1, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, imageName1, build.WithDockerfile(`FROM busybox
<ide> LABEL match me`))
<ide> image1ID := getIDByName(c, imageName1)
<ide>
<del> buildImageSuccessfully(c, imageName2, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, imageName2, build.WithDockerfile(`FROM busybox
<ide> LABEL match="me too"`))
<ide> image2ID := getIDByName(c, imageName2)
<ide>
<del> buildImageSuccessfully(c, imageName3, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, imageName3, build.WithDockerfile(`FROM busybox
<ide> LABEL nomatch me`))
<ide> image3ID := getIDByName(c, imageName3)
<ide>
<ide> func (s *DockerSuite) TestImagesFilterLabelWithCommit(c *check.C) {
<ide> }
<ide>
<ide> func (s *DockerSuite) TestImagesFilterSinceAndBefore(c *check.C) {
<del> buildImageSuccessfully(c, "image:1", withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, "image:1", build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> LABEL number=1`))
<ide> imageID1 := getIDByName(c, "image:1")
<del> buildImageSuccessfully(c, "image:2", withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, "image:2", build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> LABEL number=2`))
<ide> imageID2 := getIDByName(c, "image:2")
<del> buildImageSuccessfully(c, "image:3", withDockerfile(`FROM `+minimalBaseImage()+`
<add> buildImageSuccessfully(c, "image:3", build.WithDockerfile(`FROM `+minimalBaseImage()+`
<ide> LABEL number=3`))
<ide> imageID3 := getIDByName(c, "image:3")
<ide>
<ide> func assertImageList(out string, expected []string) bool {
<ide> func (s *DockerSuite) TestImagesFilterSpaceTrimCase(c *check.C) {
<ide> imageName := "images_filter_test"
<ide> // Build an image and fail to build so that we have dangling images?
<del> buildImage(imageName, withDockerfile(`FROM busybox
<add> buildImage(imageName, build.WithDockerfile(`FROM busybox
<ide> RUN touch /test/foo
<ide> RUN touch /test/bar
<ide> RUN touch /test/baz`)).Assert(c, icmd.Expected{
<ide> func (s *DockerSuite) TestImagesEnsureOnlyHeadsImagesShown(c *check.C) {
<ide> MAINTAINER docker
<ide> ENV foo bar`
<ide> name := "scratch-image"
<del> result := buildImage(name, withDockerfile(dockerfile))
<add> result := buildImage(name, build.WithDockerfile(dockerfile))
<ide> result.Assert(c, icmd.Success)
<ide> id := getIDByName(c, name)
<ide>
<ide> func (s *DockerSuite) TestImagesEnsureImagesFromScratchShown(c *check.C) {
<ide> MAINTAINER docker`
<ide>
<ide> name := "scratch-image"
<del> buildImageSuccessfully(c, name, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, name, build.WithDockerfile(dockerfile))
<ide> id := getIDByName(c, name)
<ide>
<ide> out, _ := dockerCmd(c, "images")
<ide> func (s *DockerSuite) TestImagesEnsureImagesFromBusyboxShown(c *check.C) {
<ide> MAINTAINER docker`
<ide> name := "busybox-image"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, name, build.WithDockerfile(dockerfile))
<ide> id := getIDByName(c, name)
<ide>
<ide> out, _ := dockerCmd(c, "images")
<ide><path>integration-cli/docker_cli_nat_test.go
<ide> func getExternalAddress(c *check.C) net.IP {
<ide> return ifaceIP
<ide> }
<ide>
<del>func getContainerLogs(c *check.C, containerID string) string {
<del> out, _ := dockerCmd(c, "logs", containerID)
<del> return strings.Trim(out, "\r\n")
<del>}
<del>
<del>func getContainerStatus(c *check.C, containerID string) string {
<del> out := inspectField(c, containerID, "State.Running")
<del> return out
<del>}
<del>
<ide> func (s *DockerSuite) TestNetworkNat(c *check.C) {
<ide> testRequires(c, DaemonIsLinux, SameHostDaemon)
<ide> msg := "it works"
<ide><path>integration-cli/docker_cli_ps_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/pkg/stringid"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> "github.com/go-check/check"
<ide> func (s *DockerSuite) TestPsListContainersFilterName(c *check.C) {
<ide> func (s *DockerSuite) TestPsListContainersFilterAncestorImage(c *check.C) {
<ide> // Build images
<ide> imageName1 := "images_ps_filter_test1"
<del> buildImageSuccessfully(c, imageName1, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, imageName1, build.WithDockerfile(`FROM busybox
<ide> LABEL match me 1`))
<ide> imageID1 := getIDByName(c, imageName1)
<ide>
<ide> imageName1Tagged := "images_ps_filter_test1:tag"
<del> buildImageSuccessfully(c, imageName1Tagged, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, imageName1Tagged, build.WithDockerfile(`FROM busybox
<ide> LABEL match me 1 tagged`))
<ide> imageID1Tagged := getIDByName(c, imageName1Tagged)
<ide>
<ide> imageName2 := "images_ps_filter_test2"
<del> buildImageSuccessfully(c, imageName2, withDockerfile(fmt.Sprintf(`FROM %s
<add> buildImageSuccessfully(c, imageName2, build.WithDockerfile(fmt.Sprintf(`FROM %s
<ide> LABEL match me 2`, imageName1)))
<ide> imageID2 := getIDByName(c, imageName2)
<ide>
<ide><path>integration-cli/docker_cli_pull_local_test.go
<ide> import (
<ide> "github.com/docker/distribution/manifest/manifestlist"
<ide> "github.com/docker/distribution/manifest/schema2"
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> "github.com/go-check/check"
<ide> "github.com/opencontainers/go-digest"
<ide> func testConcurrentPullWholeRepo(c *check.C) {
<ide> repos := []string{}
<ide> for _, tag := range []string{"recent", "fresh", "todays"} {
<ide> repo := fmt.Sprintf("%v:%v", repoName, tag)
<del> buildImageSuccessfully(c, repo, withDockerfile(fmt.Sprintf(`
<add> buildImageSuccessfully(c, repo, build.WithDockerfile(fmt.Sprintf(`
<ide> FROM busybox
<ide> ENTRYPOINT ["/bin/echo"]
<ide> ENV FOO foo
<ide> func testConcurrentPullMultipleTags(c *check.C) {
<ide> repos := []string{}
<ide> for _, tag := range []string{"recent", "fresh", "todays"} {
<ide> repo := fmt.Sprintf("%v:%v", repoName, tag)
<del> buildImageSuccessfully(c, repo, withDockerfile(fmt.Sprintf(`
<add> buildImageSuccessfully(c, repo, build.WithDockerfile(fmt.Sprintf(`
<ide> FROM busybox
<ide> ENTRYPOINT ["/bin/echo"]
<ide> ENV FOO foo
<ide> func testPullIDStability(c *check.C) {
<ide> derivedImage := privateRegistryURL + "/dockercli/id-stability"
<ide> baseImage := "busybox"
<ide>
<del> buildImageSuccessfully(c, derivedImage, withDockerfile(fmt.Sprintf(`
<add> buildImageSuccessfully(c, derivedImage, build.WithDockerfile(fmt.Sprintf(`
<ide> FROM %s
<ide> ENV derived true
<ide> ENV asdf true
<ide> func (s *DockerSchema1RegistrySuite) TestPullIDStability(c *check.C) {
<ide> func testPullNoLayers(c *check.C) {
<ide> repoName := fmt.Sprintf("%v/dockercli/scratch", privateRegistryURL)
<ide>
<del> buildImageSuccessfully(c, repoName, withDockerfile(`
<add> buildImageSuccessfully(c, repoName, build.WithDockerfile(`
<ide> FROM scratch
<ide> ENV foo bar`))
<ide> dockerCmd(c, "push", repoName)
<ide><path>integration-cli/docker_cli_pull_trusted_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/pkg/testutil"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> "github.com/go-check/check"
<ide> func (s *DockerTrustSuite) TestTrustedOfflinePull(c *check.C) {
<ide> func (s *DockerTrustSuite) TestTrustedPullDelete(c *check.C) {
<ide> repoName := fmt.Sprintf("%v/dockercli/%s:latest", privateRegistryURL, "trusted-pull-delete")
<ide> // tag the image and upload it to the private registry
<del> buildImageSuccessfully(c, repoName, withDockerfile(`
<add> buildImageSuccessfully(c, repoName, build.WithDockerfile(`
<ide> FROM busybox
<ide> CMD echo trustedpulldelete
<ide> `))
<ide><path>integration-cli/docker_cli_push_test.go
<ide> import (
<ide> "github.com/docker/distribution/reference"
<ide> cliconfig "github.com/docker/docker/cli/config"
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/pkg/testutil"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> "github.com/go-check/check"
<ide> func testConcurrentPush(c *check.C) {
<ide> repos := []string{}
<ide> for _, tag := range []string{"push1", "push2", "push3"} {
<ide> repo := fmt.Sprintf("%v:%v", repoName, tag)
<del> buildImageSuccessfully(c, repo, withDockerfile(fmt.Sprintf(`
<add> buildImageSuccessfully(c, repo, build.WithDockerfile(fmt.Sprintf(`
<ide> FROM busybox
<ide> ENTRYPOINT ["/bin/echo"]
<ide> ENV FOO foo
<ide><path>integration-cli/docker_cli_rm_test.go
<ide> import (
<ide> "os"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/go-check/check"
<ide> )
<ide>
<ide> func (s *DockerSuite) TestRmContainerOrphaning(c *check.C) {
<ide> MAINTAINER Integration Tests`
<ide>
<ide> // build first dockerfile
<del> buildImageSuccessfully(c, img, withDockerfile(dockerfile1))
<add> buildImageSuccessfully(c, img, build.WithDockerfile(dockerfile1))
<ide> img1 := getIDByName(c, img)
<ide> // run container on first image
<ide> dockerCmd(c, "run", img)
<ide> // rebuild dockerfile with a small addition at the end
<del> buildImageSuccessfully(c, img, withDockerfile(dockerfile2))
<add> buildImageSuccessfully(c, img, build.WithDockerfile(dockerfile2))
<ide> // try to remove the image, should not error out.
<ide> out, _, err := dockerCmdWithError("rmi", img)
<ide> c.Assert(err, check.IsNil, check.Commentf("Expected to removing the image, but failed: %s", out))
<ide><path>integration-cli/docker_cli_rmi_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/pkg/stringid"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> "github.com/go-check/check"
<ide> func (s *DockerSuite) TestRmiImgIDForce(c *check.C) {
<ide> // See https://github.com/docker/docker/issues/14116
<ide> func (s *DockerSuite) TestRmiImageIDForceWithRunningContainersAndMultipleTags(c *check.C) {
<ide> dockerfile := "FROM busybox\nRUN echo test 14116\n"
<del> buildImageSuccessfully(c, "test-14116", withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, "test-14116", build.WithDockerfile(dockerfile))
<ide> imgID := getIDByName(c, "test-14116")
<ide>
<ide> newTag := "newtag"
<ide> func (s *DockerSuite) TestRmiForceWithMultipleRepositories(c *check.C) {
<ide> tag1 := imageName + ":tag1"
<ide> tag2 := imageName + ":tag2"
<ide>
<del> buildImageSuccessfully(c, tag1, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, tag1, build.WithDockerfile(`FROM busybox
<ide> MAINTAINER "docker"`))
<ide> dockerCmd(c, "tag", tag1, tag2)
<ide>
<ide> func (s *DockerSuite) TestRmiContainerImageNotFound(c *check.C) {
<ide> imageIds := make([]string, 2)
<ide> for i, name := range imageNames {
<ide> dockerfile := fmt.Sprintf("FROM busybox\nMAINTAINER %s\nRUN echo %s\n", name, name)
<del> buildImageSuccessfully(c, name, withoutCache, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, name, build.WithoutCache, build.WithDockerfile(dockerfile))
<ide> id := getIDByName(c, name)
<ide> imageIds[i] = id
<ide> }
<ide> RUN echo 0 #layer0
<ide> RUN echo 1 #layer1
<ide> RUN echo 2 #layer2
<ide> `
<del> buildImageSuccessfully(c, image, withoutCache, withDockerfile(dockerfile))
<add> buildImageSuccessfully(c, image, build.WithoutCache, build.WithDockerfile(dockerfile))
<ide> out, _ := dockerCmd(c, "history", "-q", image)
<ide> ids := strings.Split(out, "\n")
<ide> idToTag := ids[2]
<ide> RUN echo 2 #layer2
<ide> }
<ide>
<ide> func (*DockerSuite) TestRmiParentImageFail(c *check.C) {
<del> buildImageSuccessfully(c, "test", withDockerfile(`
<add> buildImageSuccessfully(c, "test", build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN echo hello`))
<ide>
<ide><path>integration-cli/docker_cli_run_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/pkg/mount"
<ide> "github.com/docker/docker/pkg/stringid"
<ide> "github.com/docker/docker/pkg/stringutils"
<ide> func (s *DockerSuite) TestRunCreateVolumesInSymlinkDir(c *check.C) {
<ide> containerPath = "/test/test"
<ide> cmd = "true"
<ide> }
<del> buildImageSuccessfully(c, name, withDockerfile(dockerFile))
<add> buildImageSuccessfully(c, name, build.WithDockerfile(dockerFile))
<ide> dockerCmd(c, "run", "-v", containerPath, name, cmd)
<ide> }
<ide>
<ide> func (s *DockerSuite) TestRunCreateVolumesInSymlinkDir2(c *check.C) {
<ide> containerPath = "/test/test"
<ide> cmd = "true"
<ide> }
<del> buildImageSuccessfully(c, name, withDockerfile(dockerFile))
<add> buildImageSuccessfully(c, name, build.WithDockerfile(dockerFile))
<ide> dockerCmd(c, "run", "-v", containerPath, name, cmd)
<ide> }
<ide>
<ide> func (s *DockerSuite) TestRunCopyVolumeUIDGID(c *check.C) {
<ide> // Not applicable on Windows as it does not support uid or gid in this way
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testrunvolumesuidgid"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> RUN echo 'dockerio:x:1001:1001::/bin:/bin/false' >> /etc/passwd
<ide> RUN echo 'dockerio:x:1001:' >> /etc/group
<ide> RUN mkdir -p /hello && touch /hello/test && chown dockerio.dockerio /hello`))
<ide> func (s *DockerSuite) TestRunCopyVolumeContent(c *check.C) {
<ide> // that copies from the image to the volume.
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testruncopyvolumecontent"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> RUN mkdir -p /hello/local && echo hello > /hello/local/world`))
<ide>
<ide> // Test that the content is copied from the image to the volume
<ide> func (s *DockerSuite) TestRunCopyVolumeContent(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestRunCleanupCmdOnEntrypoint(c *check.C) {
<ide> name := "testrunmdcleanuponentrypoint"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> ENTRYPOINT ["echo"]
<ide> CMD ["testingpoint"]`))
<ide>
<ide> func (s *DockerSuite) TestVolumesNoCopyData(c *check.C) {
<ide> // are pre-populated such as is built in the dockerfile used in this test.
<ide> testRequires(c, DaemonIsLinux)
<ide> prefix, slash := getPrefixAndSlashFromDaemonPlatform()
<del> buildImageSuccessfully(c, "dataimage", withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, "dataimage", build.WithDockerfile(`FROM busybox
<ide> RUN ["mkdir", "-p", "/foo"]
<ide> RUN ["touch", "/foo/bar"]`))
<ide> dockerCmd(c, "run", "--name", "test", "-v", prefix+slash+"foo", "busybox")
<ide> func (s *DockerSuite) TestRunNoOutputFromPullInStdout(c *check.C) {
<ide> func (s *DockerSuite) TestRunVolumesCleanPaths(c *check.C) {
<ide> testRequires(c, SameHostDaemon)
<ide> prefix, slash := getPrefixAndSlashFromDaemonPlatform()
<del> buildImageSuccessfully(c, "run_volumes_clean_paths", withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, "run_volumes_clean_paths", build.WithDockerfile(`FROM busybox
<ide> VOLUME `+prefix+`/foo/`))
<ide> dockerCmd(c, "run", "-v", prefix+"/foo", "-v", prefix+"/bar/", "--name", "dark_helmet", "run_volumes_clean_paths")
<ide>
<ide> func (s *DockerSuite) TestRunInitLayerPathOwnership(c *check.C) {
<ide> // Not applicable on Windows as it does not support Linux uid/gid ownership
<ide> testRequires(c, DaemonIsLinux)
<ide> name := "testetcfileownership"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> RUN echo 'dockerio:x:1001:1001::/bin:/bin/false' >> /etc/passwd
<ide> RUN echo 'dockerio:x:1001:' >> /etc/group
<ide> RUN chown dockerio:dockerio /etc`))
<ide> func (s *DockerSuite) TestRunNamedVolumeCopyImageData(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide>
<ide> testImg := "testvolumecopy"
<del> buildImageSuccessfully(c, testImg, withDockerfile(`
<add> buildImageSuccessfully(c, testImg, build.WithDockerfile(`
<ide> FROM busybox
<ide> RUN mkdir -p /foo && echo hello > /foo/hello
<ide> `))
<ide> func (s *DockerSuite) TestRunVolumeWithOneCharacter(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestRunVolumeCopyFlag(c *check.C) {
<ide> testRequires(c, DaemonIsLinux) // Windows does not support copying data from image to the volume
<del> buildImageSuccessfully(c, "volumecopy", withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, "volumecopy", build.WithDockerfile(`FROM busybox
<ide> RUN mkdir /foo && echo hello > /foo/bar
<ide> CMD cat /foo/bar`))
<ide> dockerCmd(c, "volume", "create", "test")
<ide><path>integration-cli/docker_cli_run_unix_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/pkg/homedir"
<ide> "github.com/docker/docker/pkg/mount"
<ide> "github.com/docker/docker/pkg/parsers"
<ide> func (s *DockerSuite) TestRunTmpfsMounts(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestRunTmpfsMountsOverrideImageVolumes(c *check.C) {
<ide> name := "img-with-volumes"
<del> buildImageSuccessfully(c, name, withDockerfile(`
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`
<ide> FROM busybox
<ide> VOLUME /run
<ide> RUN touch /run/stuff
<ide><path>integration-cli/docker_cli_save_load_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/pkg/testutil"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> "github.com/go-check/check"
<ide> func (s *DockerSuite) TestSaveDirectoryPermissions(c *check.C) {
<ide> os.Mkdir(extractionDirectory, 0777)
<ide>
<ide> defer os.RemoveAll(tmpDir)
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> RUN adduser -D user && mkdir -p /opt/a/b && chown -R user:user /opt/a
<ide> RUN touch /opt/a/b/c && chown user:user /opt/a/b/c`))
<ide>
<ide> func (s *DockerSuite) TestSaveLoadNoTag(c *check.C) {
<ide>
<ide> name := "saveloadnotag"
<ide>
<del> buildImageSuccessfully(c, name, withDockerfile("FROM busybox\nENV foo=bar"))
<add> buildImageSuccessfully(c, name, build.WithDockerfile("FROM busybox\nENV foo=bar"))
<ide> id := inspectField(c, name, "Id")
<ide>
<ide> // Test to make sure that save w/o name just shows imageID during load
<ide><path>integration-cli/docker_cli_save_load_unix_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> "github.com/go-check/check"
<ide> "github.com/kr/pty"
<ide> func (s *DockerSuite) TestSaveAndLoadRepoStdout(c *check.C) {
<ide>
<ide> func (s *DockerSuite) TestSaveAndLoadWithProgressBar(c *check.C) {
<ide> name := "test-load"
<del> buildImageSuccessfully(c, name, withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, name, build.WithDockerfile(`FROM busybox
<ide> RUN touch aa
<ide> `))
<ide>
<ide><path>integration-cli/docker_cli_swarm_test.go
<ide> import (
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/api/types/swarm"
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli"
<ide> "github.com/docker/docker/integration-cli/daemon"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> "github.com/docker/libnetwork/driverapi"
<ide> func (s *DockerSwarmSuite) TestSwarmInit(c *check.C) {
<ide> return sw.Spec
<ide> }
<ide>
<del> out, err := d.Cmd("swarm", "init", "--cert-expiry", "30h", "--dispatcher-heartbeat", "11s")
<del> c.Assert(err, checker.IsNil, check.Commentf("out: %v", out))
<add> cli.Docker(cli.Cmd("swarm", "init", "--cert-expiry", "30h", "--dispatcher-heartbeat", "11s"),
<add> cli.Daemon(d.Daemon)).Assert(c, icmd.Success)
<ide>
<ide> spec := getSpec()
<ide> c.Assert(spec.CAConfig.NodeCertExpiry, checker.Equals, 30*time.Hour)
<ide> c.Assert(spec.Dispatcher.HeartbeatPeriod, checker.Equals, 11*time.Second)
<ide>
<ide> c.Assert(d.Leave(true), checker.IsNil)
<ide> time.Sleep(500 * time.Millisecond) // https://github.com/docker/swarmkit/issues/1421
<del> out, err = d.Cmd("swarm", "init")
<del> c.Assert(err, checker.IsNil, check.Commentf("out: %v", out))
<add> cli.Docker(cli.Cmd("swarm", "init"), cli.Daemon(d.Daemon)).Assert(c, icmd.Success)
<ide>
<ide> spec = getSpec()
<ide> c.Assert(spec.CAConfig.NodeCertExpiry, checker.Equals, 90*24*time.Hour)
<ide> func (s *DockerSwarmSuite) TestSwarmInit(c *check.C) {
<ide> func (s *DockerSwarmSuite) TestSwarmInitIPv6(c *check.C) {
<ide> testRequires(c, IPv6)
<ide> d1 := s.AddDaemon(c, false, false)
<del> out, err := d1.Cmd("swarm", "init", "--listen-addr", "::1")
<del> c.Assert(err, checker.IsNil, check.Commentf("out: %v", out))
<add>	cli.Docker(cli.Cmd("swarm", "init", "--listen-addr", "::1"), cli.Daemon(d1.Daemon)).Assert(c, icmd.Success)
<ide>
<ide> d2 := s.AddDaemon(c, false, false)
<del> out, err = d2.Cmd("swarm", "join", "::1")
<del> c.Assert(err, checker.IsNil, check.Commentf("out: %v", out))
<add> cli.Docker(cli.Cmd("swarm", "join", "::1"), cli.Daemon(d2.Daemon)).Assert(c, icmd.Success)
<ide>
<del> out, err = d2.Cmd("info")
<del> c.Assert(err, checker.IsNil, check.Commentf("out: %v", out))
<add> out := cli.Docker(cli.Cmd("info"), cli.Daemon(d2.Daemon)).Assert(c, icmd.Success).Combined()
<ide> c.Assert(out, checker.Contains, "Swarm: active")
<ide> }
<ide>
<ide><path>integration-cli/docker_cli_tag_test.go
<ide> import (
<ide> "strings"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/pkg/stringid"
<ide> "github.com/docker/docker/pkg/stringutils"
<ide> "github.com/go-check/check"
<ide> func (s *DockerSuite) TestTagInvalidRepoName(c *check.C) {
<ide>
<ide> // ensure tags cannot create ambiguity with image ids
<ide> func (s *DockerSuite) TestTagTruncationAmbiguity(c *check.C) {
<del> buildImageSuccessfully(c, "notbusybox:latest", withDockerfile(`FROM busybox
<add> buildImageSuccessfully(c, "notbusybox:latest", build.WithDockerfile(`FROM busybox
<ide> MAINTAINER dockerio`))
<ide> imageID := getIDByName(c, "notbusybox:latest")
<ide> truncatedImageID := stringid.TruncateID(imageID)
<ide><path>integration-cli/docker_cli_volume_test.go
<ide> import (
<ide> "strings"
<ide>
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/integration-cli/request"
<ide> icmd "github.com/docker/docker/pkg/testutil/cmd"
<ide> "github.com/go-check/check"
<ide> func (s *DockerSuite) TestDuplicateMountpointsForVolumesFrom(c *check.C) {
<ide> testRequires(c, DaemonIsLinux)
<ide>
<ide> image := "vimage"
<del> buildImageSuccessfully(c, image, withDockerfile(`
<add> buildImageSuccessfully(c, image, build.WithDockerfile(`
<ide> FROM busybox
<ide> VOLUME ["/tmp/data"]`))
<ide>
<ide> func (s *DockerSuite) TestDuplicateMountpointsForVolumesFromAndBind(c *check.C)
<ide> testRequires(c, DaemonIsLinux)
<ide>
<ide> image := "vimage"
<del> buildImageSuccessfully(c, image, withDockerfile(`
<add> buildImageSuccessfully(c, image, build.WithDockerfile(`
<ide> FROM busybox
<ide> VOLUME ["/tmp/data"]`))
<ide>
<ide> func (s *DockerSuite) TestDuplicateMountpointsForVolumesFromAndMounts(c *check.C
<ide> testRequires(c, SameHostDaemon, DaemonIsLinux)
<ide>
<ide> image := "vimage"
<del> buildImageSuccessfully(c, image, withDockerfile(`
<add> buildImageSuccessfully(c, image, build.WithDockerfile(`
<ide> FROM busybox
<ide> VOLUME ["/tmp/data"]`))
<ide>
<ide><path>integration-cli/docker_utils_test.go
<ide> import (
<ide>
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/integration-cli/checker"
<add> "github.com/docker/docker/integration-cli/cli"
<add> "github.com/docker/docker/integration-cli/cli/build"
<ide> "github.com/docker/docker/integration-cli/daemon"
<ide> "github.com/docker/docker/integration-cli/registry"
<ide> "github.com/docker/docker/integration-cli/request"
<ide> func newRemoteFileServer(c *check.C, ctx *FakeContext) *remoteFileServer {
<ide> // Build the image
<ide> fakeContextAddDockerfile(c, ctx, `FROM httpserver
<ide> COPY . /static`)
<del> buildImageSuccessfully(c, image, withoutCache, withExternalBuildContext(ctx))
<add> buildImageSuccessfully(c, image, build.WithoutCache, withExternalBuildContext(ctx))
<ide>
<ide> // Start the container
<ide> dockerCmd(c, "run", "-d", "-P", "--name", container, image)
<ide> func getIDByName(c *check.C, name string) string {
<ide> return id
<ide> }
<ide>
<del>func buildImageSuccessfully(c *check.C, name string, cmdOperators ...func(*icmd.Cmd) func()) {
<add>// Deprecated: use cli.Build
<add>func buildImageSuccessfully(c *check.C, name string, cmdOperators ...cli.CmdOperator) {
<ide> buildImage(name, cmdOperators...).Assert(c, icmd.Success)
<ide> }
<ide>
<del>func buildImage(name string, cmdOperators ...func(*icmd.Cmd) func()) *icmd.Result {
<del> cmd := icmd.Command(dockerBinary, "build", "-t", name)
<del> for _, op := range cmdOperators {
<del> deferFn := op(&cmd)
<del> if deferFn != nil {
<del> defer deferFn()
<del> }
<del> }
<del> return icmd.RunCmd(cmd)
<del>}
<del>
<del>func withBuildContextPath(path string) func(*icmd.Cmd) func() {
<del> return func(cmd *icmd.Cmd) func() {
<del> cmd.Command = append(cmd.Command, path)
<del> return nil
<del> }
<add>// Deprecated: use cli.Build
<add>func buildImage(name string, cmdOperators ...cli.CmdOperator) *icmd.Result {
<add> return cli.Docker(cli.Build(name), cmdOperators...)
<ide> }
<ide>
<ide> func withExternalBuildContext(ctx *FakeContext) func(*icmd.Cmd) func() {
<ide> func withBuildContext(c *check.C, contextOperators ...func(*FakeContext) error)
<ide> }
<ide> }
<ide>
<del>func withBuildFlags(flags ...string) func(*icmd.Cmd) func() {
<del> return func(cmd *icmd.Cmd) func() {
<del> cmd.Command = append(cmd.Command, flags...)
<del> return nil
<del> }
<del>}
<del>
<del>func withoutCache(cmd *icmd.Cmd) func() {
<del> cmd.Command = append(cmd.Command, "--no-cache")
<del> return nil
<del>}
<del>
<ide> func withFile(name, content string) func(*FakeContext) error {
<ide> return func(ctx *FakeContext) error {
<ide> return ctx.Add(name, content)
<ide> func closeBuildContext(c *check.C, ctx *FakeContext) func() {
<ide> }
<ide> }
<ide>
<del>func withDockerfile(dockerfile string) func(*icmd.Cmd) func() {
<del> return func(cmd *icmd.Cmd) func() {
<del> cmd.Command = append(cmd.Command, "-")
<del> cmd.Stdin = strings.NewReader(dockerfile)
<del> return nil
<del> }
<del>}
<del>
<ide> func trustedBuild(cmd *icmd.Cmd) func() {
<ide> trustedCmd(cmd)
<ide> return nil
<ide> }
<ide>
<del>func withEnvironmentVariales(envs ...string) func(cmd *icmd.Cmd) func() {
<del> return func(cmd *icmd.Cmd) func() {
<del> cmd.Env = envs
<del> return nil
<del> }
<del>}
<del>
<ide> type gitServer interface {
<ide> URL() string
<ide> Close() error
<ide><path>integration-cli/environment/environment.go
<ide> import (
<ide> "fmt"
<ide> "io/ioutil"
<ide> "os"
<add> "os/exec"
<ide> "path/filepath"
<ide> "strconv"
<ide> "strings"
<ide>
<ide> "github.com/docker/docker/api/types"
<ide> "github.com/docker/docker/api/types/container"
<ide> "github.com/docker/docker/client"
<add> "github.com/docker/docker/opts"
<ide> "golang.org/x/net/context"
<ide> )
<ide>
<add>const (
<add> // DefaultDockerBinary is the name of the docker binary
<add> DefaultDockerBinary = "docker"
<add>)
<add>
<ide> // Execution holds information about the test execution environment.
<ide> type Execution struct {
<ide> daemonPlatform string
<ide> type Execution struct {
<ide> containerStoragePath string
<ide> // baseImage is the name of the base image for testing
<ide> // Environment variable WINDOWS_BASE_IMAGE can override this
<del> baseImage string
<add> baseImage string
<add> dockerBinary string
<ide>
<ide> protectedElements protectedElements
<ide> }
<ide> func New() (*Execution, error) {
<ide> daemonPid = int(p)
<ide> }
<ide> }
<add>
<add> var dockerBinary = DefaultDockerBinary
<add> if dockerBin := os.Getenv("DOCKER_BINARY"); dockerBin != "" {
<add> dockerBinary = dockerBin
<add> }
<add> dockerBinary, err = exec.LookPath(dockerBinary)
<add> if err != nil {
<add> return nil, err
<add> }
<add>
<ide> return &Execution{
<ide> localDaemon: localDaemon,
<ide> daemonPlatform: daemonPlatform,
<ide> func New() (*Execution, error) {
<ide> daemonPid: daemonPid,
<ide> experimentalDaemon: info.ExperimentalBuild,
<ide> baseImage: baseImage,
<add> dockerBinary: dockerBinary,
<ide> protectedElements: protectedElements{
<ide> images: map[string]struct{}{},
<ide> },
<ide> func (e *Execution) DaemonKernelVersionNumeric() int {
<ide> v, _ := strconv.Atoi(strings.Split(e.daemonKernelVersion, " ")[1])
<ide> return v
<ide> }
<add>
<add>// DockerBinary returns the docker binary for this testing environment
<add>func (e *Execution) DockerBinary() string {
<add> return e.dockerBinary
<add>}
<add>
<add>// DaemonHost returns the daemon host string for this test execution
<add>func DaemonHost() string {
<add> daemonURLStr := "unix://" + opts.DefaultUnixSocket
<add> if daemonHostVar := os.Getenv("DOCKER_HOST"); daemonHostVar != "" {
<add> daemonURLStr = daemonHostVar
<add> }
<add> return daemonURLStr
<add>}
<ide><path>pkg/testutil/cmd/command.go
<ide> type Result struct {
<ide>
<ide> // Assert compares the Result against the Expected struct, and fails the test if
<ide> // any of the expectations are not met.
<del>func (r *Result) Assert(t testingT, exp Expected) {
<add>func (r *Result) Assert(t testingT, exp Expected) *Result {
<ide> err := r.Compare(exp)
<ide> if err == nil {
<del> return
<add> return r
<ide> }
<ide> _, file, line, ok := runtime.Caller(1)
<ide> if ok {
<ide> t.Fatalf("at %s:%d - %s", filepath.Base(file), line, err.Error())
<ide> } else {
<ide> t.Fatalf("(no file/line info) - %s", err.Error())
<ide> }
<add> return nil
<ide> }
<ide>
<ide> // Compare returns a formatted error with the command, stdout, stderr, exit
| 33
|
Javascript
|
Javascript
|
restore footer logo
|
6a676c8d9bbf3adb8011606bf5f89b879addaf3f
|
<ide><path>website/docusaurus.config.js
<ide> module.exports = {
<ide> ]
<ide> }
<ide> ],
<add> logo: {
<add> alt: 'Redux Logo',
<add> src: 'img/redux_white.svg',
<add> href: 'https://redux.js.org/',
<add> },
<ide> copyright:
<ide> 'Copyright (c) 2015-present Dan Abramov and the Redux documentation authors.'
<ide> },
| 1
|
Ruby
|
Ruby
|
deprecate pathname#cp and pathname#chmod_r
|
96195295a3462070a26e03148f5140bbf0ea4dfd
|
<ide><path>Library/Homebrew/extend/pathname.rb
<ide> def default_stat
<ide> private :default_stat
<ide>
<ide> def cp dst
<add> opoo "Pathname#cp is deprecated, use FileUtils.cp"
<ide> if file?
<ide> FileUtils.cp to_s, dst
<ide> else
<ide> def rmdir_if_possible
<ide> end
<ide>
<ide> def chmod_R perms
<add> opoo "Pathname#chmod_R is deprecated, use FileUtils.chmod_R"
<ide> require 'fileutils'
<ide> FileUtils.chmod_R perms, to_s
<ide> end
<ide><path>Library/Homebrew/test/test_pathname.rb
<ide> def test_write_does_not_overwrite
<ide> assert_raises(RuntimeError) { @file.write('CONTENT') }
<ide> end
<ide>
<del> def test_chmod_R
<del> perms = 0777
<del> FileUtils.expects(:chmod_R).with(perms, @dir.to_s)
<del> @dir.chmod_R(perms)
<del> end
<del>
<ide> def test_atomic_write
<ide> touch @file
<ide> @file.atomic_write('CONTENT')
<ide> def test_atomic_write_preserves_default_permissions
<ide> assert_equal sentinel.stat.mode, @file.stat.mode
<ide> end
<ide>
<del> def test_cp
<del> touch @file
<del> mkdir_p @dir
<del>
<del> @file.cp(@dir)
<del> assert @file.file?
<del> assert((@dir+@file.basename).file?)
<del>
<del> @dir.cp(@dst)
<del> assert @dir.directory?
<del> assert((@dst+@dir.basename).directory?)
<del> end
<del>
<ide> def test_ensure_writable
<ide> touch @file
<ide> chmod 0555, @file
| 2
|
Ruby
|
Ruby
|
update diff_start_sha1 unconditionally
|
d1222971f61b5e882571e303750da34fdee5a0cf
|
<ide><path>Library/Homebrew/cmd/test-bot.rb
<ide> def brew_update
<ide> # Use Travis CI Git variables for master or branch jobs.
<ide> elsif ENV["TRAVIS_COMMIT_RANGE"]
<ide> diff_start_sha1, diff_end_sha1 = ENV["TRAVIS_COMMIT_RANGE"].split "..."
<del> diff_start_sha1 = git("merge-base", diff_start_sha1, diff_end_sha1).strip
<ide> # Otherwise just use the current SHA-1 (which may be overriden later)
<ide> else
<ide> diff_end_sha1 = diff_start_sha1 = current_sha1
<ide> end
<ide>
<add> diff_start_sha1 = git("merge-base", diff_start_sha1, diff_end_sha1).strip
<add>
<ide> # Handle no arguments being passed on the command-line e.g. `brew test-bot`.
<ide> if no_args?
<ide> if diff_start_sha1 == diff_end_sha1 || \
| 1
|
Python
|
Python
|
add fenwick tree
|
d68d0ef05c024cb90f96b379c1206b3487847cbc
|
<ide><path>data_structures/Binary Tree/FenwickTree.py
<add>class FenwickTree:
<add>
<add> def __init__(self, SIZE): # create fenwick tree with size SIZE
<add> self.Size = SIZE
<add> self.ft = [0 for i in range (0,SIZE)]
<add>
<add> def update(self, i, val): # update data (adding) in index i in O(lg N)
<add> while (i < self.Size):
<add> self.ft[i] += val
<add> i += i & (-i)
<add>
<add> def query(self, i): # query cumulative data from index 0 to i in O(lg N)
<add> ret = 0
<add> while (i > 0):
<add> ret += self.ft[i]
<add> i -= i & (-i)
<add> return ret
<add>
<add>if __name__ == '__main__':
<add> f = FenwickTree(100)
<add> f.update(1,20)
<add> f.update(4,4)
<add> print (f.query(1))
<add> print (f.query(3))
<add> print (f.query(4))
<add> f.update(2,-5)
<add> print (f.query(1))
<add> print (f.query(3))
| 1
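
As an illustrative aside on the Fenwick tree patch above (separate from the original commit): both `update` and `query` move between indices by adding or removing the lowest set bit, `i & (-i)`, which is why the structure is effectively 1-indexed — index 0 would never advance. A minimal sketch of the index path a prefix query follows:

```python
# Illustrative sketch only: the indices query(i) visits in the tree above.
def lowbit_path(i: int) -> list:
    """Return the 1-indexed positions summed by a Fenwick prefix query."""
    path = []
    while i > 0:
        path.append(i)
        i -= i & (-i)  # drop the lowest set bit, e.g. 7 -> 6 -> 4 -> 0
    return path

print(lowbit_path(7))  # [7, 6, 4]: query(7) adds ft[7] + ft[6] + ft[4]
```

Each step clears one set bit, so both operations touch at most O(log N) cells.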
|
Python
|
Python
|
fix atmos tests and azure blobs
|
c45fa041a44567e1f811e6bf618c0e7d8206b677
|
<ide><path>libcloud/test/storage/test_atmos.py
<ide> from libcloud.storage.drivers.atmos import AtmosConnection, AtmosDriver
<ide> from libcloud.storage.drivers.dummy import DummyIterator
<ide>
<del>from libcloud.test import MockHttp, generate_random_data
<add>from libcloud.test import MockHttp, generate_random_data, make_response
<ide> from libcloud.test.file_fixtures import StorageFileFixtures
<ide>
<ide>
<ide> def test_get_object_not_found(self):
<ide> self.fail('Exception was not thrown')
<ide>
<ide> def test_delete_object_success(self):
<add> AtmosMockHttp.type = 'DELETE'
<ide> container = Container(name='foo_bar_container', extra={},
<ide> driver=self.driver)
<ide> obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
<ide> def test_delete_object_success(self):
<ide> self.assertTrue(status)
<ide>
<ide> def test_delete_object_escaped_success(self):
<add> AtmosMockHttp.type = 'DELETE'
<ide> container = Container(name='foo & bar_container', extra={},
<ide> driver=self.driver)
<ide> obj = Object(name='foo & bar_object', size=1000, hash=None, extra={},
<ide> def test_upload_object_success(self):
<ide> def upload_file(self, object_name=None, content_type=None,
<ide> request_path=None, request_method=None,
<ide> headers=None, file_path=None, stream=None):
<del> return {'response': MockResponse(200, headers={'etag': '0cc175b9c0f1b6a831c399e269772661'}),
<add> return {'response': make_response(200, headers={'etag': '0cc175b9c0f1b6a831c399e269772661'}),
<ide> 'bytes_transferred': 1000,
<ide> 'data_hash': '0cc175b9c0f1b6a831c399e269772661'}
<ide>
<ide> def _rest_namespace_test_container_not_found_metadata_system(self, method,
<ide> return (httplib.NOT_FOUND, body, {},
<ide> httplib.responses[httplib.NOT_FOUND])
<ide>
<del> def _rest_namespace_foo_bar_container_foo_bar_object(self, method, url,
<del> body, headers):
<add> def _rest_namespace_foo_bar_container_foo_bar_object_DELETE(self, method, url,
<add> body, headers):
<ide> return (httplib.OK, '', {}, httplib.responses[httplib.OK])
<ide>
<del> def _rest_namespace_foo_20_26_20bar_container_foo_20_26_20bar_object(
<add> def _rest_namespace_foo_20_26_20bar_container_foo_20_26_20bar_object_DELETE(
<ide> self, method, url,
<ide> body, headers):
<ide> return (httplib.OK, '', {}, httplib.responses[httplib.OK])
<ide><path>libcloud/test/storage/test_azure_blobs.py
<ide> def _test2_test_list_containers(self, method, url, body, headers):
<ide> headers = {'content-type': 'application/zip',
<ide> 'etag': '"e31208wqsdoj329jd"',
<ide> 'x-amz-meta-rabbits': 'monkeys',
<del> 'content-length': 12345,
<add> 'content-length': '12345',
<ide> 'last-modified': 'Thu, 13 Sep 2012 07:13:22 GMT'
<ide> }
<ide>
| 2
|
Python
|
Python
|
update token auth view. closes
|
0539b1be01fd9c6b842a4a8bbdf23b65af8357d1
|
<ide><path>rest_framework/authtoken/views.py
<ide> class ObtainAuthToken(APIView):
<ide> permission_classes = ()
<ide> parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
<ide> renderer_classes = (renderers.JSONRenderer,)
<del> serializer_class = AuthTokenSerializer
<del> model = Token
<ide>
<ide> def post(self, request):
<del> serializer = self.serializer_class(data=request.data)
<del> if serializer.is_valid():
<del> user = serializer.validated_data['user']
<del> token, created = Token.objects.get_or_create(user=user)
<del> return Response({'token': token.key})
<del> return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
<add> serializer = AuthTokenSerializer(data=request.data)
<add> serializer.is_valid(raise_exception=True)
<add> user = serializer.validated_data['user']
<add> token, created = Token.objects.get_or_create(user=user)
<add> return Response({'token': token.key})
<ide>
<ide>
<ide> obtain_auth_token = ObtainAuthToken.as_view()
| 1
|
Javascript
|
Javascript
|
add better python path description
|
57531d75fe9356a693680eac0396bf6f5595989f
|
<ide><path>script/utils/verify-requirements.js
<ide> function verifyPython27() {
<ide> }
<ide>
<ide> if (!fs.existsSync(pythonPath)) {
<del> console.warn("Python 2.7 is required to build Atom. Python 2.7 must be installed at '" + pythonPath + "' or the PYTHON env var must be set to '/path/to/executable/python2.7'");
<add> console.warn("Python 2.7 is required to build Atom. Python 2.7 must be installed at '" + pythonPath + "' or the PYTHON env var must be set to '/path/to/Python27/python.exe'");
<ide> process.exit(1);
<ide> }
<ide> }
| 1
|
Javascript
|
Javascript
|
remove unnecessary escape character for lint
|
214c885b90722708db6a3febbbb80f90c8750eb2
|
<ide><path>server/build/webpack.js
<ide> export default async function createCompiler (dir, { hotReload = false, dev = fa
<ide> }
<ide>
<ide> const babelRuntimePath = require.resolve('babel-runtime/package')
<del> .replace(/[\\\/]package\.json$/, '')
<add> .replace(/[\\/]package\.json$/, '')
<ide>
<ide> const loaders = [{
<ide> test: /\.js$/,
| 1
|
Ruby
|
Ruby
|
eliminate the usage of `any?` (#638)
|
a8566c9848122474b92fc8989eec196d5f4fb69b
|
<ide><path>Library/Homebrew/cmd/audit.rb
<ide> def audit_revision
<ide>
<ide> fv = FormulaVersions.new(formula, :max_depth => 10)
<ide> revision_map = fv.revision_map("origin/master")
<del> if (revisions = revision_map[formula.version]).any?
<add> revisions = revision_map[formula.version]
<add> if !revisions.empty?
<ide> problem "revision should not decrease" if formula.revision < revisions.max
<ide> elsif formula.revision != 0
<ide> if formula.stable
<ide> def audit_revision
<ide> def audit_legacy_patches
<ide> return unless formula.respond_to?(:patches)
<ide> legacy_patches = Patch.normalize_legacy_patches(formula.patches).grep(LegacyPatch)
<del> if legacy_patches.any?
<add> unless legacy_patches.empty?
<ide> problem "Use the patch DSL instead of defining a 'patches' method"
<ide> legacy_patches.each { |p| audit_patch(p) }
<ide> end
<ide><path>Library/Homebrew/cmd/bottle.rb
<ide> def print_filename(string, filename)
<ide> next if Metafiles::EXTENSIONS.include? file.extname
<ide>
<ide> linked_libraries = Keg.file_linked_libraries(file, string)
<del> result ||= linked_libraries.any?
<add> result ||= !linked_libraries.empty?
<ide>
<ide> if ARGV.verbose?
<del> print_filename(string, file) if linked_libraries.any?
<add> print_filename(string, file) unless linked_libraries.empty?
<ide> linked_libraries.each do |lib|
<ide> puts " #{Tty.gray}-->#{Tty.reset} links to #{lib}"
<ide> end
<ide> def print_filename(string, filename)
<ide> end
<ide> end
<ide>
<del> if ARGV.verbose? && text_matches.any?
<add> if ARGV.verbose? && !text_matches.empty?
<ide> print_filename string, file
<ide> text_matches.first(MAXIMUM_STRING_MATCHES).each do |match, offset|
<ide> puts " #{Tty.gray}-->#{Tty.reset} match '#{match}' at offset #{Tty.em}0x#{offset}#{Tty.reset}"
<ide> def bottle_formula(f)
<ide> versions = FormulaVersions.new(f)
<ide> bottle_revisions = versions.bottle_version_map("origin/master")[f.pkg_version]
<ide> bottle_revisions.pop if bottle_revisions.last.to_i > 0
<del> bottle_revision = bottle_revisions.any? ? bottle_revisions.max.to_i + 1 : 0
<add> bottle_revision = bottle_revisions.empty? ? 0 : bottle_revisions.max.to_i + 1
<ide> end
<ide>
<ide> filename = Bottle::Filename.create(f, Utils::Bottles.tag, bottle_revision)
<ide> def bottle_formula(f)
<ide> old_spec.send(field) != bottle.send(field)
<ide> end
<ide> bad_fields.delete(:cellar) if old_spec.cellar == :any && bottle.cellar == :any_skip_relocation
<del> if bad_fields.any?
<add> unless bad_fields.empty?
<ide> bottle_path.unlink if bottle_path.exist?
<ide> odie "--keep-old is passed but there are changes in: #{bad_fields.join ", "}"
<ide> end
<ide> def merge
<ide> next if key == "cellar" && old_value == "any" && value == "any_skip_relocation"
<ide> mismatches << key if old_value.empty? || value != old_value
<ide> end
<del> if mismatches.any?
<add> unless mismatches.empty?
<ide> odie "--keep-old was passed but there were changes in #{mismatches.join(", ")}!"
<ide> end
<ide> output = bottle_output bottle
<ide><path>Library/Homebrew/cmd/info.rb
<ide> def info_formula(f)
<ide> attrs << "pinned at #{f.pinned_version}" if f.pinned?
<ide> attrs << "keg-only" if f.keg_only?
<ide>
<del> puts "#{f.full_name}: #{specs * ", "}#{" [#{attrs * ", "}]" if attrs.any?}"
<add> puts "#{f.full_name}: #{specs * ", "}#{" [#{attrs * ", "}]" unless attrs.empty?}"
<ide> puts f.desc if f.desc
<ide> puts "#{Tty.em}#{f.homepage}#{Tty.reset}" if f.homepage
<ide>
<ide> conflicts = f.conflicts.map(&:name).sort!
<ide> puts "Conflicts with: #{conflicts*", "}" unless conflicts.empty?
<ide>
<ide> kegs = f.installed_kegs.sort_by(&:version)
<del> if kegs.any?
<add> if kegs.empty?
<add> puts "Not installed"
<add> else
<ide> kegs.each do |keg|
<ide> puts "#{keg} (#{keg.abv})#{" *" if keg.linked?}"
<ide> tab = Tab.for_keg(keg).to_s
<ide> puts " #{tab}" unless tab.empty?
<ide> end
<del> else
<del> puts "Not installed"
<ide> end
<ide>
<ide> puts "From: #{Tty.em}#{github_info(f)}#{Tty.reset}"
<ide><path>Library/Homebrew/cmd/install.rb
<ide> def install
<ide> begin
<ide> formulae = []
<ide>
<del> if ARGV.casks.any?
<add> unless ARGV.casks.empty?
<ide> args = []
<ide> args << "--force" if ARGV.force?
<ide> args << "--debug" if ARGV.debug?
<ide><path>Library/Homebrew/cmd/list.rb
<ide> def list
<ide> # Unbrewed uses the PREFIX, which will exist
<ide> # Things below use the CELLAR, which doesn't until the first formula is installed.
<ide> unless HOMEBREW_CELLAR.exist?
<del> raise NoSuchKegError.new(ARGV.named.first) if ARGV.named.any?
<add> raise NoSuchKegError.new(ARGV.named.first) unless ARGV.named.empty?
<ide> return
<ide> end
<ide>
<ide><path>Library/Homebrew/cmd/outdated.rb
<ide>
<ide> module Homebrew
<ide> def outdated
<del> formulae = ARGV.resolved_formulae.any? ? ARGV.resolved_formulae : Formula.installed
<add> formulae = if ARGV.resolved_formulae.empty?
<add> Formula.installed
<add> else
<add> ARGV.resolved_formulae
<add> end
<ide> if ARGV.json == "v1"
<ide> outdated = print_outdated_json(formulae)
<ide> else
<ide> outdated = print_outdated(formulae)
<ide> end
<del> Homebrew.failed = ARGV.resolved_formulae.any? && outdated.any?
<add> Homebrew.failed = !ARGV.resolved_formulae.empty? && !outdated.empty?
<ide> end
<ide>
<ide> def print_outdated(formulae)
<ide><path>Library/Homebrew/cmd/readall.rb
<ide> def readall
<ide> end
<ide>
<ide> options = { :aliases => ARGV.include?("--aliases") }
<del> taps = if ARGV.named.any?
<del> [Tap.fetch(ARGV.named.first)]
<del> else
<add> taps = if ARGV.named.empty?
<ide> Tap
<add> else
<add> [Tap.fetch(ARGV.named.first)]
<ide> end
<ide> taps.each do |tap|
<ide> Homebrew.failed = true unless Readall.valid_tap?(tap, options)
<ide><path>Library/Homebrew/cmd/search.rb
<ide> def search
<ide> arg.include?(char) && !arg.start_with?("/")
<ide> end
<ide> end
<del> if ARGV.any? && bad_regex
<add> if !ARGV.empty? && bad_regex
<ide> ohai "Did you mean to perform a regular expression search?"
<ide> ohai "Surround your query with /slashes/ to search by regex."
<ide> end
<ide><path>Library/Homebrew/cmd/upgrade.rb
<ide> def upgrade
<ide> if ARGV.named.empty?
<ide> outdated = Formula.installed.select(&:outdated?)
<ide> exit 0 if outdated.empty?
<del> elsif ARGV.named.any?
<add> else
<ide> outdated = ARGV.resolved_formulae.select(&:outdated?)
<ide>
<ide> (ARGV.resolved_formulae - outdated).each do |f|
<ide> versions = f.installed_kegs.map { |keg| keg.version }
<del> if versions.any?
<add> if versions.empty?
<add> onoe "#{f.full_name} not installed"
<add> else
<ide> version = versions.max
<ide> onoe "#{f.full_name} #{version} already installed"
<del> else
<del> onoe "#{f.full_name} not installed"
<ide> end
<ide> end
<ide> exit 1 if outdated.empty?
<ide><path>Library/Homebrew/dev-cmd/bump-formula-pr.rb
<ide> def inreplace_pairs(path, replacement_pairs)
<ide> end
<ide> contents.gsub!(old, new)
<ide> end
<del> if contents.errors.any?
<add> unless contents.errors.empty?
<ide> raise Utils::InreplaceError, path => contents.errors
<ide> end
<ide> contents
<ide><path>Library/Homebrew/dev-cmd/test-bot.rb
<ide> def diff_formulae(start_revision, end_revision, path, filter)
<ide> @name = "#{diff_start_sha1}-#{diff_end_sha1}"
<ide> end
<ide> # Handle formulae arguments being passed on the command-line e.g. `brew test-bot wget fish`.
<del> elsif @formulae && @formulae.any?
<add> elsif @formulae && !@formulae.empty?
<ide> @name = "#{@formulae.first}-#{diff_end_sha1}"
<ide> diff_start_sha1 = diff_end_sha1
<ide> # Handle a hash being passed on the command-line e.g. `brew test-bot 1a2b3c`.
<ide> def formula(formula_name)
<ide> test "brew", "uninstall", "--force", formula_name
<ide> FileUtils.ln bottle_filename, HOMEBREW_CACHE/bottle_filename, :force => true
<ide> @formulae.delete(formula_name)
<del> if unchanged_build_dependencies.any?
<add> unless unchanged_build_dependencies.empty?
<ide> test "brew", "uninstall", "--force", *unchanged_build_dependencies
<ide> unchanged_dependencies -= unchanged_build_dependencies
<ide> end
<ide> def formula(formula_name)
<ide> test "brew", "uninstall", "--devel", "--force", formula_name
<ide> end
<ide> end
<del> test "brew", "uninstall", "--force", *unchanged_dependencies if unchanged_dependencies.any?
<add> test "brew", "uninstall", "--force", *unchanged_dependencies unless unchanged_dependencies.empty?
<ide> end
<ide>
<ide> def homebrew
<ide><path>Library/Homebrew/exceptions.rb
<ide> def dump
<ide> end
<ide> end
<ide> puts
<del> if RUBY_VERSION >= "1.8.7" && issues && issues.any?
<add> if RUBY_VERSION >= "1.8.7" && issues && !issues.empty?
<ide> puts "These open issues may also help:"
<ide> puts issues.map { |i| "#{i["title"]} #{i["html_url"]}" }.join("\n")
<ide> end
<ide><path>Library/Homebrew/formula.rb
<ide> def outdated_versions
<ide>
<ide> # @private
<ide> def outdated?
<del> outdated_versions.any?
<add> !outdated_versions.empty?
<ide> rescue Migrator::MigrationNeededError
<ide> true
<ide> end
<ide> def eligible_kegs_for_cleanup
<ide> installed_kegs.select { |k| pkg_version > k.version }
<ide> end
<ide>
<del> if eligible_kegs.any?
<add> unless eligible_kegs.empty?
<ide> eligible_kegs.each do |keg|
<ide> if keg.linked?
<ide> opoo "Skipping (old) #{keg} due to it being linked"
<ide> def eligible_kegs_for_cleanup
<ide> end
<ide> end
<ide> end
<del> elsif installed_prefixes.any? && !pinned?
<add> elsif !installed_prefixes.empty? && !pinned?
<ide> # If the cellar only has one version installed, don't complain
<ide> # that we can't tell which one to keep. Don't complain at all if the
<ide> # only installed version is a pinned formula.
<ide><path>Library/Homebrew/formula_pin.rb
<ide> def pinned?
<ide> end
<ide>
<ide> def pinnable?
<del> @f.installed_prefixes.any?
<add> !@f.installed_prefixes.empty?
<ide> end
<ide>
<ide> def pinned_version
<ide><path>Library/Homebrew/keg.rb
<ide> def completion_installed?(shell)
<ide> when :zsh then path.join("share", "zsh", "site-functions")
<ide> when :fish then path.join("share", "fish", "vendor_completions.d")
<ide> end
<del> dir && dir.directory? && dir.children.any?
<add> dir && dir.directory? && !dir.children.empty?
<ide> end
<ide>
<ide> def plist_installed?
<del> Dir["#{path}/*.plist"].any?
<add> !Dir["#{path}/*.plist"].empty?
<ide> end
<ide>
<ide> def python_site_packages_installed?
<ide> path.join("lib", "python2.7", "site-packages").directory?
<ide> end
<ide>
<ide> def python_pth_files_installed?
<del> Dir["#{path}/lib/python2.7/site-packages/*.pth"].any?
<add> !Dir["#{path}/lib/python2.7/site-packages/*.pth"].empty?
<ide> end
<ide>
<ide> def apps
<ide><path>Library/Homebrew/language/python.rb
<ide> def virtualenv_create(venv_root, python = "python", formula = self)
<ide> dep_site_packages = Formula[d.name].opt_lib/"python#{xy}/site-packages"
<ide> next unless dep_site_packages.exist?
<ide> "import site; site.addsitedir('#{dep_site_packages}')\n"
<del> end
<del> if pth_contents.any?
<add> end.compact
<add> unless pth_contents.empty?
<ide> (venv_root/"lib/python#{xy}/site-packages/homebrew_deps.pth").write pth_contents.join
<ide> end
<ide>
<ide><path>Library/Homebrew/software_spec.rb
<ide> def bottle_disable_reason
<ide> end
<ide>
<ide> def bottle_defined?
<del> bottle_specification.collector.keys.any?
<add> !bottle_specification.collector.keys.empty?
<ide> end
<ide>
<ide> def bottled?
<ide><path>Library/Homebrew/utils/github.rb
<ide> def print_pull_requests_matching(query)
<ide> open_or_closed_prs = issues_matching(query, :type => "pr")
<ide>
<ide> open_prs = open_or_closed_prs.select { |i| i["state"] == "open" }
<del> if open_prs.any?
<add> if !open_prs.empty?
<ide> puts "Open pull requests:"
<ide> prs = open_prs
<del> elsif open_or_closed_prs.any?
<add> elsif !open_or_closed_prs.empty?
<ide> puts "Closed pull requests:"
<ide> prs = open_or_closed_prs
<ide> else
<ide><path>Library/Homebrew/utils/inreplace.rb
<ide> def inreplace(paths, before = nil, after = nil, audit_result = true)
<ide> s.gsub!(before, after, audit_result)
<ide> end
<ide>
<del> errors[path] = s.errors if s.errors.any?
<add> errors[path] = s.errors unless s.errors.empty?
<ide>
<ide> Pathname(path).atomic_write(s)
<ide> end
<ide>
<del> raise InreplaceError.new(errors) if errors.any?
<add> raise InreplaceError.new(errors) unless errors.empty?
<ide> end
<ide> module_function :inreplace
<ide> end
| 19
|
Python
|
Python
|
use a separate method to detect an installation cd
|
ca7fb9090155c661dcb5c7d5fa83fa07924ccf1d
|
<ide><path>libcloud/compute/drivers/cloudsigma.py
<ide> def create_node(self, name, size, image, ex_metadata=None,
<ide> and 1 with the provided VLAN.
<ide> :type ex_vlan: ``str``
<ide> """
<del> # Only pre-installed images can be used with create_node
<add> is_installation_cd = self._is_installation_cd(image=image)
<ide>
<ide> if ex_vnc_password:
<ide> vnc_password = ex_vnc_password
<ide> def create_node(self, name, size, image, ex_metadata=None,
<ide> # 2. Resize drive to the desired disk size if the desired disk size is
<ide> # larger than the cloned drive size.
<ide> if drive_size > drive.size:
<del> pass
<del> #drive = self.ex_resize_drive(drive=drive, size=drive_size)
<add> drive = self.ex_resize_drive(drive=drive, size=drive_size)
<ide>
<ide> # Wait for drive resize to finish
<ide> drive = self._wait_for_drive_state_transition(drive=drive,
<ide> def create_node(self, name, size, image, ex_metadata=None,
<ide> nics.append(nic)
<ide>
<ide> # Need to use IDE for installation CDs
<del> if isinstance(image, CloudSigmaDrive) and image.media == 'cdrom':
<add> if is_installation_cd:
<ide> device_type = 'ide'
<ide> else:
<ide> device_type = 'virtio'
<ide> def create_node(self, name, size, image, ex_metadata=None,
<ide> 'drive': drive.id
<ide> }
<ide>
<del> # ide for cdrom
<del>
<ide> drives = [drive]
<ide>
<ide> data['nics'] = nics
<ide> def _perform_action(self, path, action, method='POST', params=None,
<ide> params=params, data=data)
<ide> return response
<ide>
<add> def _is_installation_cd(self, image):
<add> """
<add> Detect if the provided image is an installation CD.
<add>
<add> :rtype: ``bool``
<add> """
<add> if isinstance(image, CloudSigmaDrive) and image.media == 'cdrom':
<add> return True
<add>
<add> return False
<add>
<ide> def _extract_values(self, obj, keys):
<ide> """
<ide> Extract values from a dictionary and return a new dictionary with
| 1
|
PHP
|
PHP
|
add support for newqueryforrestoration from queues
|
8766d9909007bdb318f390a1d157898bd56b8082
|
<ide><path>src/Illuminate/Database/Eloquent/Model.php
<ide> public function newQueryWithoutScope($scope)
<ide> return $builder->withoutGlobalScope($scope);
<ide> }
<ide>
<add> /**
<add> * Get a new query to restore one or more models by the queueable IDs.
<add> *
<add> * @param array|int $ids
<add> *
<add> * @return \Illuminate\Database\Eloquent\Builder
<add> */
<add> public function newQueryForRestoration($ids)
<add> {
<add> if (is_array($ids)) {
<add> return $this->newQueryWithoutScopes()->whereIn(
<add> $this->getQualifiedKeyName(),
<add> $ids
<add> );
<add> }
<add>
<add> return $this->newQueryWithoutScopes()->whereKey($ids);
<add> }
<add>
<ide> /**
<ide> * Create a new Eloquent query builder for the model.
<ide> *
<ide><path>src/Illuminate/Queue/SerializesAndRestoresModelIdentifiers.php
<ide> protected function getRestoredPropertyValue($value)
<ide> return $value;
<ide> }
<ide>
<add> $model = (new $value->class)->setConnection($value->connection);
<add>
<ide> return is_array($value->id)
<del> ? $this->restoreCollection($value)
<del> : $this->getQueryForModelRestoration((new $value->class)->setConnection($value->connection))
<del> ->useWritePdo()->findOrFail($value->id);
<add> ? $this->restoreCollection($value)
<add> : $this->getQueryForModelRestoration($model, $value->id)
<add> ->useWritePdo()->firstOrFail();
<ide> }
<ide>
<ide> /**
<ide> protected function restoreCollection($value)
<ide>
<ide> $model = (new $value->class)->setConnection($value->connection);
<ide>
<del> return $this->getQueryForModelRestoration($model)->useWritePdo()
<del> ->whereIn($model->getQualifiedKeyName(), $value->id)->get();
<add> return $this->getQueryForModelRestoration($model, $value->id)
<add> ->useWritePdo()->get();
<ide> }
<ide>
<ide> /**
<ide> * Get the query for restoration.
<ide> *
<ide> * @param \Illuminate\Database\Eloquent\Model $model
<add> * @param array|int $ids
<ide> * @return \Illuminate\Database\Eloquent\Builder
<ide> */
<del> protected function getQueryForModelRestoration($model)
<add> protected function getQueryForModelRestoration($model, $ids)
<ide> {
<del> return $model->newQueryWithoutScopes();
<add> return $model->newQueryForRestoration($ids);
<ide> }
<ide> }
| 2
|
Text
|
Text
|
elevate diagnostic report to tier1
|
6ad65aed12dc58ca7e389d06cb515c97b565b1d1
|
<ide><path>doc/guides/diagnostic-tooling-support-tiers.md
<ide> The tools are currently assigned to Tiers as follows:
<ide>
<ide> | Tool Type | Tool/API Name | Regular Testing in Node.js CI | Integrated with Node.js | Target Tier |
<ide> |-----------|---------------------------|-------------------------------|-------------------------|-------------|
<del> | | | | | |
<add> | FFDC | diagnostic report | Yes | Yes | 1 |
<add> | | | | | |
<ide>
<ide> ## Tier 2
<ide>
| 1
|
Mixed
|
Python
|
add patience sort
|
79d57552aa4d8c4777dbcf5126dbb37142866469
|
<ide><path>DIRECTORY.md
<ide> * [Odd Even Transposition Parallel](https://github.com/TheAlgorithms/Python/blob/master/sorts/odd_even_transposition_parallel.py)
<ide> * [Odd Even Transposition Single Threaded](https://github.com/TheAlgorithms/Python/blob/master/sorts/odd_even_transposition_single_threaded.py)
<ide> * [Pancake Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/pancake_sort.py)
<add> * [Patience Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/patience_sort.py)
<ide> * [Pigeon Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/pigeon_sort.py)
<ide> * [Pigeonhole Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/pigeonhole_sort.py)
<ide> * [Quick Sort](https://github.com/TheAlgorithms/Python/blob/master/sorts/quick_sort.py)
<ide><path>sorts/patience_sort.py
<add>from bisect import bisect_left
<add>from functools import total_ordering
<add>from heapq import merge
<add>
<add>"""
<add>A pure Python implementation of the patience sort algorithm
<add>
<add>For more information: https://en.wikipedia.org/wiki/Patience_sorting
<add>
<add>This algorithm is based on the card game patience
<add>
<add>For doctests run following command:
<add>python3 -m doctest -v patience_sort.py
<add>
<add>For manual testing run:
<add>python3 patience_sort.py
<add>"""
<add>
<add>
<add>@total_ordering
<add>class Stack(list):
<add> def __lt__(self, other):
<add> return self[-1] < other[-1]
<add>
<add> def __eq__(self, other):
<add> return self[-1] == other[-1]
<add>
<add>
<add>def patience_sort(collection: list) -> list:
<add>    """A pure implementation of the patience sort algorithm in Python
<add>
<add> :param collection: some mutable ordered collection with heterogeneous
<add> comparable items inside
<add> :return: the same collection ordered by ascending
<add>
<add> Examples:
<add> >>> patience_sort([1, 9, 5, 21, 17, 6])
<add> [1, 5, 6, 9, 17, 21]
<add>
<add> >>> patience_sort([])
<add> []
<add>
<add> >>> patience_sort([-3, -17, -48])
<add> [-48, -17, -3]
<add> """
<add> stacks = []
<add> # sort into stacks
<add> for element in collection:
<add> new_stacks = Stack([element])
<add> i = bisect_left(stacks, new_stacks)
<add> if i != len(stacks):
<add> stacks[i].append(element)
<add> else:
<add> stacks.append(new_stacks)
<add>
<add> # use a heap-based merge to merge stack efficiently
<add> collection[:] = merge(*[reversed(stack) for stack in stacks])
<add> return collection
<add>
<add>
<add>if __name__ == "__main__":
<add> user_input = input("Enter numbers separated by a comma:\n").strip()
<add> unsorted = [int(item) for item in user_input.split(",")]
<add> print(patience_sort(unsorted))
| 2
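
An illustrative aside on the patience sort patch above (separate from the commit itself): because each `Stack` compares by its top card, `bisect_left` locates the leftmost pile whose top is at least the incoming element, and a new pile is started only when no such pile exists. A small sketch of that placement rule using plain lists:

```python
# Illustrative sketch only: the pile-selection rule used by patience_sort.
from bisect import bisect_left

pile_tops = [1, 5, 9]             # tops of existing piles, always kept sorted
print(bisect_left(pile_tops, 6))  # 2 -> drop 6 on the pile topped by 9
print(bisect_left(pile_tops, 17)) # 3 == len(pile_tops) -> start a new pile
```

Each pile is non-increasing from bottom to top, so reversing it yields a sorted run, and `heapq.merge` combines those runs into the final ordering.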
|
Text
|
Text
|
add detail for how to tell git to track new files
|
e32ff6485381f033141d4b5365515a94f0109ec5
|
<ide><path>guide/english/git/git-stash/index.md
<ide> git stash save "optional message for yourself"
<ide>
<ide> This saves your changes and reverts the working directory to what it looked like for the latest commit. Stashed changes are available from any branch in that repository.
<ide>
<del>Note that changes you want to stash need to be on tracked files. If you created a new file and try to stash your changes, you may get the error `No local changes to save`.
<add>Note that changes you want to stash need to be on tracked files. If you created a new file and try to stash your changes, you may get the error `No local changes to save`. To tell git to track a new file you created, run the command:
<add>
<add>```shell
<add>git add <name of the new file>
<add>```
<ide>
<ide> ### View Stashed Changes
<ide> To see what is in your stash, run the command:
| 1
|
Python
|
Python
|
remove unnecessary dash for the dividing line
|
379ffd443c12ef45f178a30605a237198689f29b
|
<ide><path>numpy/core/fromnumeric.py
<ide> def clip(a, a_min, a_max, out=None, **kwargs):
<ide> :ref:`ufuncs-output-type`
<ide>
<ide> Notes
<del> --------
<add> -----
<ide> When `a_min` is greater than `a_max`, `clip` returns an
<ide> array in which all values are equal to `a_max`,
<ide> as shown in the second example.
| 1
|
Text
|
Text
|
add table of contents to release guide
|
b62349137106a0bb8a418947b76216868886e87c
|
<ide><path>doc/releases.md
<ide> The intended audience is those who have been authorized by the Node.js
<ide> Foundation Technical Steering Committee (TSC) to create, promote, and sign
<ide> official release builds for Node.js, hosted on <https://nodejs.org/>.
<ide>
<add>## Table of Contents
<add>
<add>* [Who can make a release?](#who-can-make-a-release)
<add> * [1. Jenkins Release Access](#1-jenkins-release-access)
<add> * [2. <nodejs.org> Access](#2-nodejsorg-access)
<add> * [3. A Publicly Listed GPG Key](#3-a-publicly-listed-gpg-key)
<add>* [How to create a release](#how-to-create-a-release)
<add> * [0. Pre-release steps](#0-pre-release-steps)
<add> * [1. Update the staging branch](#1-update-the-staging-branch)
<add> * [2. Create a new branch for the release](#2-create-a-new-branch-for-the-release)
<add> * [3. Update `src/node_version.h`](#3-update-srcnode_versionh)
<add> * [4. Update the Changelog](#4-update-the-changelog)
<add> * [5. Create Release Commit](#5-create-release-commit)
<add> * [6. Propose Release on GitHub](#6-propose-release-on-github)
<add> * [7. Ensure that the Release Branch is Stable](#7-ensure-that-the-release-branch-is-stable)
<add> * [8. Produce a Nightly Build _(optional)_](#8-produce-a-nightly-build-optional)
<add> * [9. Produce Release Builds](#9-produce-release-builds)
<add> * [10. Test the Build](#10-test-the-build)
<add> * [11. Tag and Sign the Release Commit](#11-tag-and-sign-the-release-commit)
<add> * [12. Set Up For the Next Release](#12-set-up-for-the-next-release)
<add> * [13. Promote and Sign the Release Builds](#13-promote-and-sign-the-release-builds)
<add> * [14. Check the Release](#14-check-the-release)
<add> * [15. Create a Blog Post](#15-create-a-blog-post)
<add> * [16. Create the release on GitHub](#16-create-the-release-on-github)
<add> * [17. Cleanup](#17-cleanup)
<add> * [18. Announce](#18-announce)
<add> * [19. Celebrate](#19-celebrate)
<add>
<ide> ## Who can make a release?
<ide>
<ide> Release authorization is given by the Node.js TSC. Once authorized, an
<ide> This script will use the promoted builds and changelog to generate the post. Run
<ide> - Changes to `master` on the nodejs.org repo will trigger a new build of
<ide> nodejs.org so your changes should appear in a few minutes after pushing.
<ide>
<del>### 16. Announce
<add>### 16. Create the release on GitHub
<add>
<add>- Go to the [New release page](https://github.com/nodejs/node/releases/new).
<add>- Select the tag version you pushed earlier.
<add>- For release title, copy the title from the changelog.
<add>- For the description, copy the rest of the changelog entry.
<add>- Click on the "Publish release" button.
<add>
<add>### 17. Cleanup
<add>
<add>Close your release proposal PR and delete the proposal branch.
<add>
<add>### 18. Announce
<ide>
<ide> The nodejs.org website will automatically rebuild and include the new version.
<ide> To announce the build on Twitter through the official @nodejs account, email
<ide> To ensure communication goes out with the timing of the blog post, please allow
<ide> will be shared with the community in the email to coordinate these
<ide> announcements.
<ide>
<del>### 17. Create the release on GitHub
<del>
<del>- Got to the [New release page](https://github.com/nodejs/node/releases/new).
<del>- Select the tag version you pushed earlier.
<del>- For release title, copy the title from the changelog.
<del>- For the description, copy the rest of the changelog entry.
<del>- Click on the "Publish release" button.
<del>
<del>### 18. Cleanup
<del>
<del>Close your release proposal PR and delete the proposal branch.
<del>
<ide> ### 19. Celebrate
<ide>
<ide> _In whatever form you do this..._
| 1
|
Text
|
Text
|
update known issues
|
f130a365e6c7106a9a05707b1b861356c47fff95
|
<ide><path>docs/KnownIssues.md
<ide> However, you can still use the Console feature of the devtools, and debugging Ja
<ide>
<ide> ### Missing Modules and Native Views
<ide>
<del>This is an initial release of React Native Android and therefore not all of the views present on iOS are released on Android. We are very much interested in the communities' feedback on the next set of modules and views for Open Source. Not all native views between iOS and Android have a 100% equivalent representation, here it will be necessary to use a counterpart eg using ProgressBar on Android in place of ActivityIndicator on iOS.
<del>
<del>Our provisional plan for common views and modules includes:
<add>The work on React Native for Android started later than React Native for iOS. Not all of the views present on iOS have been released on Android yet.
<ide>
<ide> #### Views
<ide>
<del>```
<del>Maps
<del>Modal
<del>Spinner (http://developer.android.com/guide/topics/ui/controls/spinner.html)
<del>Slider (known as SeekBar)
<del>```
<add>- Maps - Please use Leland Richardson's [react-native-maps](https://github.com/lelandrichardson/react-native-maps) as it is more feature-complete than our internal implementation at fb.
<add>- Modal
<add>- Slider (also known as SeekBar)
<ide>
<ide> #### Modules
<ide>
<del>```
<del>Camera Roll
<del>Media
<del>PushNotificationIOS
<del>```
<add>- Media
<add>- PushNotificationIOS
<ide>
<ide> ### Some props are only supported on one platform
<ide>
| 1
|
PHP
|
PHP
|
add macroable trait to database factory
|
aa19ecd0ceef93b6e8a96764cdb07d7c878871b0
|
<ide><path>src/Illuminate/Database/Eloquent/Factories/Factory.php
<ide> use Illuminate\Support\Collection;
<ide> use Illuminate\Support\Str;
<ide> use Illuminate\Support\Traits\ForwardsCalls;
<add>use Illuminate\Support\Traits\Macroable;
<ide> use Throwable;
<ide>
<ide> abstract class Factory
<ide> {
<del> use ForwardsCalls;
<add> use ForwardsCalls, Macroable {
<add> __call as macroCall;
<add> }
<ide>
<ide> /**
<ide> * The name of the factory's corresponding model.
<ide> protected static function appNamespace()
<ide> */
<ide> public function __call($method, $parameters)
<ide> {
<add> if (static::hasMacro($method)) {
<add> return $this->macroCall($method, $parameters);
<add> }
<add>
<ide> if (! Str::startsWith($method, ['for', 'has'])) {
<ide> static::throwBadMethodCallException($method);
<ide> }
<ide><path>tests/Database/DatabaseEloquentFactoryTest.php
<ide> public function test_dynamic_has_and_for_methods()
<ide> $this->assertCount(2, $post->comments);
<ide> }
<ide>
<add> public function test_can_be_macroable()
<add> {
<add> $factory = FactoryTestUserFactory::new();
<add> $factory->macro('getFoo', function () {
<add> return 'Hello World';
<add> });
<add>
<add> $this->assertEquals('Hello World', $factory->getFoo());
<add> }
<add>
<ide> /**
<ide> * Get a database connection instance.
<ide> *
| 2
|
Go
|
Go
|
handle external mounts outside of lxc
|
45d7dcfea276841cce782feced3a2eb3eab01208
|
<ide><path>container.go
<ide> type Container struct {
<ide> network *NetworkInterface
<ide> NetworkSettings *NetworkSettings
<ide>
<del> SysInitPath string
<ide> ResolvConfPath string
<ide> HostnamePath string
<ide> HostsPath string
<ide> func (container *Container) generateEnvConfig(env []string) error {
<ide> if err != nil {
<ide> return err
<ide> }
<del> ioutil.WriteFile(container.EnvConfigPath(), data, 0600)
<add> p, err := container.EnvConfigPath()
<add> if err != nil {
<add> return err
<add> }
<add> ioutil.WriteFile(p, data, 0600)
<ide> return nil
<ide> }
<ide>
<ide> func (container *Container) Start() (err error) {
<ide> }
<ide> }
<ide>
<add> mounts, err := runtime.getMounts(container)
<add> if err != nil {
<add> return err
<add> }
<add>
<add> for _, m := range mounts {
<add> if err := m.Mount(container.RootfsPath()); err != nil {
<add> return err
<add> }
<add> }
<add>
<ide> container.cmd = exec.Command(params[0], params[1:]...)
<ide>
<ide> // Setup logging of stdout and stderr to disk
<ide> func (container *Container) GetImage() (*Image, error) {
<ide> }
<ide>
<ide> func (container *Container) Unmount() error {
<add> mounts, err := container.runtime.getMounts(container)
<add> if err != nil {
<add> return err
<add> }
<add> for _, m := range mounts {
<add> if lastError := m.Unmount(container.RootfsPath()); lastError != nil {
<add> err = lastError
<add> }
<add> }
<add> if err != nil {
<add> return err
<add> }
<ide> return container.runtime.Unmount(container)
<ide> }
<ide>
<ide> func (container *Container) jsonPath() string {
<ide> return path.Join(container.root, "config.json")
<ide> }
<ide>
<del>func (container *Container) EnvConfigPath() string {
<del> return path.Join(container.root, "config.env")
<add>func (container *Container) EnvConfigPath() (string, error) {
<add> p := path.Join(container.root, "config.env")
<add> if _, err := os.Stat(p); err != nil {
<add> if os.IsNotExist(err) {
<add> f, err := os.Create(p)
<add> if err != nil {
<add> return "", err
<add> }
<add> f.Close()
<add> } else {
<add> return "", err
<add> }
<add> }
<add> return p, nil
<ide> }
<ide>
<ide> func (container *Container) lxcConfigPath() string {
<ide><path>graphdriver/driver.go
<ide> import (
<ide> "github.com/dotcloud/docker/utils"
<ide> "os"
<ide> "path"
<add> "strings"
<add> "syscall"
<ide> )
<ide>
<ide> type InitFunc func(root string) (Driver, error)
<ide> type Differ interface {
<ide> DiffSize(id string) (bytes int64, err error)
<ide> }
<ide>
<add>type Mount struct {
<add> Device string
<add> Target string
<add> Type string
<add> Options string
<add>}
<add>
<ide> var (
<ide> DefaultDriver string
<ide> // All registred drivers
<ide> func New(root string) (driver Driver, err error) {
<ide> }
<ide> return nil, err
<ide> }
<add>
<add>func (m *Mount) Mount(root string) error {
<add> var (
<add> flag int
<add> data []string
<add> target = path.Join(root, m.Target)
<add> )
<add>
<add> if mounted, err := Mounted(target); err != nil || mounted {
<add> return err
<add> }
<add>
<add> flags := map[string]struct {
<add> clear bool
<add> flag int
<add> }{
<add> "defaults": {false, 0},
<add> "ro": {false, syscall.MS_RDONLY},
<add> "rw": {true, syscall.MS_RDONLY},
<add> "suid": {true, syscall.MS_NOSUID},
<add> "nosuid": {false, syscall.MS_NOSUID},
<add> "dev": {true, syscall.MS_NODEV},
<add> "nodev": {false, syscall.MS_NODEV},
<add> "exec": {true, syscall.MS_NOEXEC},
<add> "noexec": {false, syscall.MS_NOEXEC},
<add> "sync": {false, syscall.MS_SYNCHRONOUS},
<add> "async": {true, syscall.MS_SYNCHRONOUS},
<add> "dirsync": {false, syscall.MS_DIRSYNC},
<add> "remount": {false, syscall.MS_REMOUNT},
<add> "mand": {false, syscall.MS_MANDLOCK},
<add> "nomand": {true, syscall.MS_MANDLOCK},
<add> "atime": {true, syscall.MS_NOATIME},
<add> "noatime": {false, syscall.MS_NOATIME},
<add> "diratime": {true, syscall.MS_NODIRATIME},
<add> "nodiratime": {false, syscall.MS_NODIRATIME},
<add> "bind": {false, syscall.MS_BIND},
<add> "rbind": {false, syscall.MS_BIND | syscall.MS_REC},
<add> "relatime": {false, syscall.MS_RELATIME},
<add> "norelatime": {true, syscall.MS_RELATIME},
<add> "strictatime": {false, syscall.MS_STRICTATIME},
<add> "nostrictatime": {true, syscall.MS_STRICTATIME},
<add> }
<add>
<add> for _, o := range strings.Split(m.Options, ",") {
<add> // If the option does not exist in the flags table then it is a
<add> // data value for a specific fs type
<add> if f, exists := flags[o]; exists {
<add> if f.clear {
<add> flag &= ^f.flag
<add> } else {
<add> flag |= f.flag
<add> }
<add> } else {
<add> data = append(data, o)
<add> }
<add> }
<add>
<add> if err := syscall.Mount(m.Device, target, m.Type, uintptr(flag), strings.Join(data, ",")); err != nil {
<add> panic(err)
<add> }
<add> return nil
<add>}
<add>
<add>func (m *Mount) Unmount(root string) error {
<add> target := path.Join(root, m.Target)
<add> if mounted, err := Mounted(target); err != nil || !mounted {
<add> return err
<add> }
<add> return syscall.Unmount(target, 0)
<add>}
<add>
<add>func Mounted(mountpoint string) (bool, error) {
<add> mntpoint, err := os.Stat(mountpoint)
<add> if err != nil {
<add> if os.IsNotExist(err) {
<add> return false, nil
<add> }
<add> return false, err
<add> }
<add> parent, err := os.Stat(path.Join(mountpoint, ".."))
<add> if err != nil {
<add> return false, err
<add> }
<add> mntpointSt := mntpoint.Sys().(*syscall.Stat_t)
<add> parentSt := parent.Sys().(*syscall.Stat_t)
<add>
<add> return mntpointSt.Dev != parentSt.Dev, nil
<add>}
<ide><path>lxc_template.go
<ide> lxc.network.mtu = 1500
<ide> {{$ROOTFS := .RootfsPath}}
<ide> lxc.rootfs = {{$ROOTFS}}
<ide>
<del>{{if and .HostnamePath .HostsPath}}
<del># enable domain name support
<del>lxc.mount.entry = {{escapeFstabSpaces .HostnamePath}} {{escapeFstabSpaces $ROOTFS}}/etc/hostname none bind,ro 0 0
<del>lxc.mount.entry = {{escapeFstabSpaces .HostsPath}} {{escapeFstabSpaces $ROOTFS}}/etc/hosts none bind,ro 0 0
<del>{{end}}
<del>
<ide> # use a dedicated pts for the container (and limit the number of pseudo terminal
<ide> # available)
<ide> lxc.pts = 1024
<ide> lxc.cgroup.devices.allow = c 10:200 rwm
<ide> # standard mount point
<ide> # Use mnt.putold as per https://bugs.launchpad.net/ubuntu/+source/lxc/+bug/986385
<ide> lxc.pivotdir = lxc_putold
<add>
<add># NOTICE: These mounts must be applied within the namespace
<add>
<ide> # WARNING: procfs is a known attack vector and should probably be disabled
<ide> # if your userspace allows it. eg. see http://blog.zx2c4.com/749
<ide> lxc.mount.entry = proc {{escapeFstabSpaces $ROOTFS}}/proc proc nosuid,nodev,noexec 0 0
<del># WARNING: sysfs is a known attack vector and should probably be disabled
<del># if your userspace allows it. eg. see http://bit.ly/T9CkqJ
<add>
<add># WARNING: sysfs is a known attack vector and should probably be disabled
<add># if your userspace allows it. eg. see http://bit.ly/T9CkqJ
<ide> lxc.mount.entry = sysfs {{escapeFstabSpaces $ROOTFS}}/sys sysfs nosuid,nodev,noexec 0 0
<add>
<ide> lxc.mount.entry = devpts {{escapeFstabSpaces $ROOTFS}}/dev/pts devpts newinstance,ptmxmode=0666,nosuid,noexec 0 0
<del>#lxc.mount.entry = varrun {{escapeFstabSpaces $ROOTFS}}/var/run tmpfs mode=755,size=4096k,nosuid,nodev,noexec 0 0
<del>#lxc.mount.entry = varlock {{escapeFstabSpaces $ROOTFS}}/var/lock tmpfs size=1024k,nosuid,nodev,noexec 0 0
<ide> lxc.mount.entry = shm {{escapeFstabSpaces $ROOTFS}}/dev/shm tmpfs size=65536k,nosuid,nodev,noexec 0 0
<ide>
<del># Inject dockerinit
<del>lxc.mount.entry = {{escapeFstabSpaces .SysInitPath}} {{escapeFstabSpaces $ROOTFS}}/.dockerinit none bind,ro 0 0
<del>
<del># Inject env
<del>lxc.mount.entry = {{escapeFstabSpaces .EnvConfigPath}} {{escapeFstabSpaces $ROOTFS}}/.dockerenv none bind,ro 0 0
<del>
<del># In order to get a working DNS environment, mount bind (ro) the host's /etc/resolv.conf into the container
<del>lxc.mount.entry = {{escapeFstabSpaces .ResolvConfPath}} {{escapeFstabSpaces $ROOTFS}}/etc/resolv.conf none bind,ro 0 0
<del>{{if .Volumes}}
<del>{{ $rw := .VolumesRW }}
<del>{{range $virtualPath, $realPath := .Volumes}}
<del>lxc.mount.entry = {{escapeFstabSpaces $realPath}} {{escapeFstabSpaces $ROOTFS}}/{{escapeFstabSpaces $virtualPath}} none bind,{{ if index $rw $virtualPath }}rw{{else}}ro{{end}} 0 0
<del>{{end}}
<del>{{end}}
<del>
<ide> {{if (getHostConfig .).Privileged}}
<ide> {{if (getCapabilities .).AppArmor}}
<ide> lxc.aa_profile = unconfined
<ide><path>runtime.go
<ide> func (runtime *Runtime) Create(config *Config, name string) (*Container, []strin
<ide> hostConfig: &HostConfig{},
<ide> Image: img.ID, // Always use the resolved image id
<ide> NetworkSettings: &NetworkSettings{},
<del> // FIXME: do we need to store this in the container?
<del> SysInitPath: runtime.sysInitPath,
<del> Name: name,
<del> Driver: runtime.driver.String(),
<add> Name: name,
<add> Driver: runtime.driver.String(),
<ide> }
<ide> container.root = runtime.containerRoot(container.ID)
<ide> // Step 1: create the container directory.
<ide> func (runtime *Runtime) Close() error {
<ide> return nil
<ide> }
<ide>
<add>func (runtime *Runtime) getMounts(container *Container) ([]*graphdriver.Mount, error) {
<add> // Generate additional bind mounts
<add> envPath, err := container.EnvConfigPath()
<add> if err != nil {
<add> return nil, err
<add> }
<add> mounts := []*graphdriver.Mount{
<add> {
<add> Device: runtime.sysInitPath,
<add> Target: "/.dockerinit",
<add> Type: "none",
<add> Options: "bind,ro",
<add> },
<add> {
<add> Device: envPath,
<add> Target: "/.dockerenv",
<add> Type: "none",
<add> Options: "bind,ro",
<add> },
<add> // In order to get a working DNS environment, mount bind (ro) the host's /etc/resolv.conf into the container
<add> {
<add> Device: container.ResolvConfPath,
<add> Target: "/etc/resolv.conf",
<add> Type: "none",
<add> Options: "bind,ro",
<add> },
<add> }
<add>
<add> if container.HostnamePath != "" && container.HostsPath != "" {
<add> mounts = append(mounts,
<add> &graphdriver.Mount{
<add> Device: container.HostnamePath,
<add> Target: "/etc/hostname",
<add> Type: "none",
<add> Options: "bind,ro",
<add> },
<add> &graphdriver.Mount{
<add> Device: container.HostsPath,
<add> Target: "/etc/hosts",
<add> Type: "none",
<add> Options: "bind,ro",
<add> })
<add> }
<add>
<add> for r, v := range container.Volumes {
<add> mountAs := "ro"
<add> if container.VolumesRW[v] {
<add> mountAs = "rw"
<add> }
<add>
<add> mounts = append(mounts,
<add> &graphdriver.Mount{
<add> Device: v,
<add> Target: r,
<add> Type: "none",
<add> Options: fmt.Sprintf("bind,%s", mountAs),
<add> })
<add> }
<add> return mounts, nil
<add>}
<add>
<ide> func (runtime *Runtime) Mount(container *Container) error {
<ide> dir, err := runtime.driver.Get(container.ID)
<ide> if err != nil {
| 4
|
Python
|
Python
|
add slow generate tests for pretrained lm models
|
17c45c39ed5b3e26bf6240ef4a95bc97b4e7fe09
|
<ide><path>src/transformers/modeling_xlm.py
<ide> def forward(
<ide> inputs_embeds = self.embeddings(input_ids)
<ide>
<ide> tensor = inputs_embeds + self.position_embeddings(position_ids).expand_as(inputs_embeds)
<del> if langs is not None and self.use_lang_emb:
<add> if langs is not None and self.use_lang_emb and self.n_langs > 1:
<ide> tensor = tensor + self.lang_embeddings(langs)
<ide> if token_type_ids is not None:
<ide> tensor = tensor + self.embeddings(token_type_ids)
<ide><path>tests/test_modeling_common.py
<ide> def _check_generated_tokens(self, output_ids):
<ide>
<ide>
<ide> def ids_tensor(shape, vocab_size, rng=None, name=None):
<del> """Creates a random int32 tensor of the shape within the vocab size."""
<add> # Creates a random int32 tensor of the shape within the vocab size
<ide> if rng is None:
<ide> rng = global_rng
<ide>
<ide><path>tests/test_modeling_ctrl.py
<ide>
<ide>
<ide> if is_torch_available():
<add> import torch
<ide> from transformers import CTRLConfig, CTRLModel, CTRL_PRETRAINED_MODEL_ARCHIVE_MAP, CTRLLMHeadModel
<ide>
<ide>
<ide> def test_model_from_pretrained(self):
<ide> for model_name in list(CTRL_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
<ide> model = CTRLModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
<ide> self.assertIsNotNone(model)
<add>
<add>
<add>class CTRLModelLanguageGenerationTest(unittest.TestCase):
<add> @slow
<add> def test_lm_generate_ctrl(self):
<add> model = CTRLLMHeadModel.from_pretrained("ctrl")
<add> input_ids = torch.Tensor([[11859, 586, 20984, 8]]).long() # Legal My neighbor is
<add> expected_output_ids = [
<add> 11859,
<add> 586,
<add> 20984,
<add> 8,
<add> 13391,
<add> 3,
<add> 980,
<add> 8258,
<add> 72,
<add> 327,
<add> 148,
<add> 2,
<add> 53,
<add> 29,
<add> 226,
<add> 3,
<add> 780,
<add> 49,
<add> 3,
<add> 980,
<add> ] # Legal My neighbor is refusing to pay rent after 2 years and we are having to force him to pay
<add> torch.manual_seed(0)
<add>
<add> output_ids = model.generate(input_ids)
<add> self.assertListEqual(output_ids[0].tolist(), expected_output_ids)
<ide><path>tests/test_modeling_gpt2.py
<ide>
<ide>
<ide> if is_torch_available():
<add> import torch
<ide> from transformers import (
<ide> GPT2Config,
<ide> GPT2Model,
<ide> def create_and_check_gpt2_model(self, config, input_ids, input_mask, head_mask,
<ide> "presents": presents,
<ide> }
<ide> self.parent.assertListEqual(
<del> list(result["sequence_output"].size()), [self.batch_size, self.seq_length, self.hidden_size]
<add> list(result["sequence_output"].size()), [self.batch_size, self.seq_length, self.hidden_size],
<ide> )
<ide> self.parent.assertEqual(len(result["presents"]), config.n_layer)
<ide>
<ide> def create_and_check_lm_head_model(self, config, input_ids, input_mask, head_mas
<ide>
<ide> self.parent.assertListEqual(list(result["loss"].size()), [])
<ide> self.parent.assertListEqual(
<del> list(result["lm_logits"].size()), [self.batch_size, self.seq_length, self.vocab_size]
<add> list(result["lm_logits"].size()), [self.batch_size, self.seq_length, self.vocab_size],
<ide> )
<ide>
<ide> def create_and_check_double_lm_head_model(
<ide> def create_and_check_double_lm_head_model(
<ide>
<ide> self.parent.assertListEqual(list(result["loss"].size()), [])
<ide> self.parent.assertListEqual(
<del> list(result["lm_logits"].size()), [self.batch_size, self.num_choices, self.seq_length, self.vocab_size]
<add> list(result["lm_logits"].size()),
<add> [self.batch_size, self.num_choices, self.seq_length, self.vocab_size],
<ide> )
<ide> self.parent.assertListEqual(list(result["mc_logits"].size()), [self.batch_size, self.num_choices])
<ide>
<ide> def prepare_config_and_inputs_for_common(self):
<ide> choice_labels,
<ide> ) = config_and_inputs
<ide>
<del> inputs_dict = {"input_ids": input_ids, "token_type_ids": token_type_ids, "head_mask": head_mask}
<add> inputs_dict = {
<add> "input_ids": input_ids,
<add> "token_type_ids": token_type_ids,
<add> "head_mask": head_mask,
<add> }
<ide>
<ide> return config, inputs_dict
<ide>
<ide> def test_model_from_pretrained(self):
<ide> for model_name in list(GPT2_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
<ide> model = GPT2Model.from_pretrained(model_name, cache_dir=CACHE_DIR)
<ide> self.assertIsNotNone(model)
<add>
<add>
<add>def prepare_generation_special_tokens():
<add> return {"bos_token_id": 50256, "eos_token_id": 50256}
<add>
<add>
<add>class GPT2ModelLanguageGenerationTest(unittest.TestCase):
<add>
<add> special_tokens = prepare_generation_special_tokens()
<add>
<add> @slow
<add> def test_lm_generate_gpt2(self):
<add> model = GPT2LMHeadModel.from_pretrained("gpt2")
<add> input_ids = torch.Tensor([[464, 3290, 318, 13779]]).long() # The dog is cute
<add> expected_output_ids = [
<add> 464,
<add> 3290,
<add> 318,
<add> 13779,
<add> 1165,
<add> 13,
<add> 632,
<add> 7832,
<add> 284,
<add> 6437,
<add> 319,
<add> 502,
<add> 290,
<add> 318,
<add> 922,
<add> 329,
<add> 502,
<add> 357,
<add> 1169,
<add> 3290,
<add> ] # The dog is cute too. It likes to rub on me and is good for me (the dog
<add> torch.manual_seed(0)
<add>
<add> output_ids = model.generate(
<add> input_ids,
<add> bos_token_id=self.special_tokens["bos_token_id"],
<add> eos_token_ids=self.special_tokens["eos_token_id"],
<add> )
<add>
<add> self.assertListEqual(output_ids[0].tolist(), expected_output_ids)
<add>
<add> @slow
<add> def test_lm_generate_distilgpt2(self):
<add> model = GPT2LMHeadModel.from_pretrained("distilgpt2")
<add> input_ids = torch.Tensor([[464, 3290, 318, 13779]]).long() # The dog is cute
<add> expected_output_ids = [
<add> 464,
<add> 3290,
<add> 318,
<add> 13779,
<add> 996,
<add> 339,
<add> 460,
<add> 3360,
<add> 655,
<add> 2513,
<add> 287,
<add> 262,
<add> 3952,
<add> 13,
<add> 632,
<add> 318,
<add> 407,
<add> 845,
<add> 3621,
<add> 284,
<add> ] # The dog is cute though he can sometimes just walk in the park. It is not very nice to
<add> torch.manual_seed(0)
<add>
<add> output_ids = model.generate(
<add> input_ids,
<add> bos_token_id=self.special_tokens["bos_token_id"],
<add> eos_token_ids=self.special_tokens["eos_token_id"],
<add> )
<add>
<add> self.assertListEqual(output_ids[0].tolist(), expected_output_ids)
<ide><path>tests/test_modeling_openai.py
<ide>
<ide>
<ide> if is_torch_available():
<add> import torch
<ide> from transformers import (
<ide> OpenAIGPTConfig,
<ide> OpenAIGPTModel,
<ide> def test_model_from_pretrained(self):
<ide> for model_name in list(OPENAI_GPT_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
<ide> model = OpenAIGPTModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
<ide> self.assertIsNotNone(model)
<add>
<add>
<add>class OPENAIGPTModelLanguageGenerationTest(unittest.TestCase):
<add> @slow
<add> def test_lm_generate_openai_gpt(self):
<add> model = OpenAIGPTLMHeadModel.from_pretrained("openai-gpt")
<add> input_ids = torch.Tensor([[481, 2585, 544, 4957]]).long() # The dog is cute
<add> expected_output_ids = [
<add> 481,
<add> 2585,
<add> 544,
<add> 4957,
<add> 669,
<add> 512,
<add> 761,
<add> 5990,
<add> 271,
<add> 645,
<add> 487,
<add> 535,
<add> 976,
<add> 2479,
<add> 240,
<add> 487,
<add> 804,
<add> 1296,
<add> 2891,
<add> 512,
<add> ] # the dog is cute when you're annoyed : if he's really stupid, he 'll stop fighting you
<add> torch.manual_seed(0)
<add>
<add> output_ids = model.generate(input_ids)
<add> self.assertListEqual(output_ids[0].tolist(), expected_output_ids)
<ide><path>tests/test_modeling_transfo_xl.py
<ide> def test_model_from_pretrained(self):
<ide> for model_name in list(TRANSFO_XL_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
<ide> model = TransfoXLModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
<ide> self.assertIsNotNone(model)
<add>
<add>
<add>def prepare_generation_special_tokens():
<add> return {"eos_token_id": 0}
<add>
<add>
<add>class TransfoXLModelLanguageGenerationTest(unittest.TestCase):
<add>
<add> special_tokens = prepare_generation_special_tokens()
<add>
<add> @slow
<add> def test_lm_generate_transfo_xl_wt103(self):
<add> model = TransfoXLLMHeadModel.from_pretrained("transfo-xl-wt103")
<add> input_ids = torch.Tensor(
<add> [
<add> [
<add> 33,
<add> 1297,
<add> 2,
<add> 1,
<add> 1009,
<add> 4,
<add> 1109,
<add> 11739,
<add> 4762,
<add> 358,
<add> 5,
<add> 25,
<add> 245,
<add> 22,
<add> 1706,
<add> 17,
<add> 20098,
<add> 5,
<add> 3215,
<add> 21,
<add> 37,
<add> 1110,
<add> 3,
<add> 13,
<add> 1041,
<add> 4,
<add> 24,
<add> 603,
<add> 490,
<add> 2,
<add> 71477,
<add> 20098,
<add> 104447,
<add> 2,
<add> 20961,
<add> 1,
<add> 2604,
<add> 4,
<add> 1,
<add> 329,
<add> 3,
<add> 6224,
<add> 831,
<add> 16002,
<add> 2,
<add> 8,
<add> 603,
<add> 78967,
<add> 29546,
<add> 23,
<add> 803,
<add> 20,
<add> 25,
<add> 416,
<add> 5,
<add> 8,
<add> 232,
<add> 4,
<add> 277,
<add> 6,
<add> 1855,
<add> 4601,
<add> 3,
<add> 29546,
<add> 54,
<add> 8,
<add> 3609,
<add> 5,
<add> 57211,
<add> 49,
<add> 4,
<add> 1,
<add> 277,
<add> 18,
<add> 8,
<add> 1755,
<add> 15691,
<add> 3,
<add> 341,
<add> 25,
<add> 416,
<add> 693,
<add> 42573,
<add> 71,
<add> 17,
<add> 401,
<add> 94,
<add> 31,
<add> 17919,
<add> 2,
<add> 29546,
<add> 7873,
<add> 18,
<add> 1,
<add> 435,
<add> 23,
<add> 11011,
<add> 755,
<add> 5,
<add> 5167,
<add> 3,
<add> 7983,
<add> 98,
<add> 84,
<add> 2,
<add> 29546,
<add> 3267,
<add> 8,
<add> 3609,
<add> 4,
<add> 1,
<add> 4865,
<add> 1075,
<add> 2,
<add> 6087,
<add> 71,
<add> 6,
<add> 346,
<add> 8,
<add> 5854,
<add> 3,
<add> 29546,
<add> 824,
<add> 1400,
<add> 1868,
<add> 2,
<add> 19,
<add> 160,
<add> 2,
<add> 311,
<add> 8,
<add> 5496,
<add> 2,
<add> 20920,
<add> 17,
<add> 25,
<add> 15097,
<add> 3,
<add> 24,
<add> 24,
<add> 0,
<add> ]
<add> ]
<add> ).long()
<add> # In 1991 , the remains of Russian Tsar Nicholas II and his family
<add> # ( except for Alexei and Maria ) are discovered .
<add> # The voice of Nicholas's young son , Tsarevich Alexei Nikolaevich , narrates the
<add> # remainder of the story . 1883 Western Siberia ,
<add> # a young Grigori Rasputin is asked by his father and a group of men to perform magic .
<add> # Rasputin has a vision and denounces one of the men as a horse thief . Although his
<add> # father initially slaps him for making such an accusation , Rasputin watches as the
<add> # man is chased outside and beaten . Twenty years later , Rasputin sees a vision of
<add> # the Virgin Mary , prompting him to become a priest . Rasputin quickly becomes famous ,
<add> # with people , even a bishop , begging for his blessing . <eod> </s> <eos>
<add>
<add> expected_output_ids = [
<add> 33,
<add> 1297,
<add> 2,
<add> 1,
<add> 1009,
<add> 4,
<add> 1109,
<add> 11739,
<add> 4762,
<add> 358,
<add> 5,
<add> 25,
<add> 245,
<add> 22,
<add> 1706,
<add> 17,
<add> 20098,
<add> 5,
<add> 3215,
<add> 21,
<add> 37,
<add> 1110,
<add> 3,
<add> 13,
<add> 1041,
<add> 4,
<add> 24,
<add> 603,
<add> 490,
<add> 2,
<add> 71477,
<add> 20098,
<add> 104447,
<add> 2,
<add> 20961,
<add> 1,
<add> 2604,
<add> 4,
<add> 1,
<add> 329,
<add> 3,
<add> 6224,
<add> 831,
<add> 16002,
<add> 2,
<add> 8,
<add> 603,
<add> 78967,
<add> 29546,
<add> 23,
<add> 803,
<add> 20,
<add> 25,
<add> 416,
<add> 5,
<add> 8,
<add> 232,
<add> 4,
<add> 277,
<add> 6,
<add> 1855,
<add> 4601,
<add> 3,
<add> 29546,
<add> 54,
<add> 8,
<add> 3609,
<add> 5,
<add> 57211,
<add> 49,
<add> 4,
<add> 1,
<add> 277,
<add> 18,
<add> 8,
<add> 1755,
<add> 15691,
<add> 3,
<add> 341,
<add> 25,
<add> 416,
<add> 693,
<add> 42573,
<add> 71,
<add> 17,
<add> 401,
<add> 94,
<add> 31,
<add> 17919,
<add> 2,
<add> 29546,
<add> 7873,
<add> 18,
<add> 1,
<add> 435,
<add> 23,
<add> 11011,
<add> 755,
<add> 5,
<add> 5167,
<add> 3,
<add> 7983,
<add> 98,
<add> 84,
<add> 2,
<add> 29546,
<add> 3267,
<add> 8,
<add> 3609,
<add> 4,
<add> 1,
<add> 4865,
<add> 1075,
<add> 2,
<add> 6087,
<add> 71,
<add> 6,
<add> 346,
<add> 8,
<add> 5854,
<add> 3,
<add> 29546,
<add> 824,
<add> 1400,
<add> 1868,
<add> 2,
<add> 19,
<add> 160,
<add> 2,
<add> 311,
<add> 8,
<add> 5496,
<add> 2,
<add> 20920,
<add> 17,
<add> 25,
<add> 15097,
<add> 3,
<add> 24,
<add> 24,
<add> 0,
<add> 29546,
<add> 40,
<add> 1092,
<add> 18,
<add> 8,
<add> 5854,
<add> 7,
<add> 1143,
<add> 2,
<add> 7,
<add> 1,
<add> 159,
<add> 99,
<add> 16,
<add> 1,
<add> 1009,
<add> 4,
<add> 1109,
<add> 11739,
<add> 4762,
<add> 358,
<add> 5,
<add> 25,
<add> 245,
<add> 28,
<add> 1110,
<add> 3,
<add> 57,
<add> 629,
<add> 38,
<add> 3493,
<add> 47,
<add> 1094,
<add> 7,
<add> 1297,
<add> 3,
<add> 0,
<add> ]
<add> # In 1991, the remains of Russian Tsar Nicholas II and his family (
<add> # except for Alexei and Maria ) are discovered. The voice of young son,
<add> # Tsarevich Alexei Nikolaevich, narrates the remainder of the story.
<add> # 1883 Western Siberia, a young Grigori Rasputin is asked by his father
<add> # and a group of men to perform magic. Rasputin has a vision and
<add> # denounces one of the men as a horse thief. Although his father initially
<add> # slaps him for making such an accusation, Rasputin watches as the man
<add> # is chased outside and beaten. Twenty years later, Rasputin sees a vision
<add> # of the Virgin Mary, prompting him to become a priest.
<add> # Rasputin quickly becomes famous, with people, even a bishop, begging for
<add> # his blessing. Rasputin first appears as a priest in 1996, in the same year
<add> # that the remains of Russian Tsar Nicholas II and his family were discovered. H
<add>
<add> torch.manual_seed(0)
<add>
<add> output_ids = model.generate(input_ids, eos_token_ids=self.special_tokens["eos_token_id"], max_length=200)
<add>
<add> self.assertListEqual(output_ids[0].tolist(), expected_output_ids)
<ide><path>tests/test_modeling_xlm.py
<ide>
<ide>
<ide> if is_torch_available():
<add> import torch
<ide> from transformers import (
<ide> XLMConfig,
<ide> XLMModel,
<ide> def test_model_from_pretrained(self):
<ide> for model_name in list(XLM_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
<ide> model = XLMModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
<ide> self.assertIsNotNone(model)
<add>
<add>
<add>def prepare_generation_special_tokens():
<add> return {"bos_token_id": 0, "pad_token_id": 2}
<add>
<add>
<add>class XLMModelLanguageGenerationTest(unittest.TestCase):
<add>
<add> special_tokens = prepare_generation_special_tokens()
<add>
<add> @slow
<add> def test_lm_generate_xlm_mlm_en_2048(self):
<add> model = XLMWithLMHeadModel.from_pretrained("xlm-mlm-en-2048")
<add> input_ids = torch.Tensor([[1, 14, 2232, 26, 1]]).long() # The dog is cute
<add> expected_output_ids = [
<add> 1,
<add> 14,
<add> 2232,
<add> 26,
<add> 1,
<add> 567,
<add> 26,
<add> 32,
<add> 149,
<add> 149,
<add> 149,
<add> 149,
<add> 149,
<add> 149,
<add> 149,
<add> 149,
<add> 149,
<add> 149,
<add> 149,
<add> 149,
<add> ] # The dog is nothing is it!!!!!!!!!!!! TODO (PVP): this sentence (and others I tried) does not make much sense, there seems to be a problem with xlm language generation.
<add> torch.manual_seed(0)
<add>
<add> output_ids = model.generate(
<add> input_ids,
<add> bos_token_id=self.special_tokens["bos_token_id"],
<add> pad_token_id=self.special_tokens["pad_token_id"],
<add> )
<add>
<add> self.assertListEqual(output_ids[0].tolist(), expected_output_ids)
<ide><path>tests/test_modeling_xlnet.py
<ide> def test_model_from_pretrained(self):
<ide> for model_name in list(XLNET_PRETRAINED_MODEL_ARCHIVE_MAP.keys())[:1]:
<ide> model = XLNetModel.from_pretrained(model_name, cache_dir=CACHE_DIR)
<ide> self.assertIsNotNone(model)
<add>
<add>
<add>def prepare_generation_special_tokens():
<add> return {"bos_token_id": 1, "pad_token_id": 5, "eos_token_id": 2}
<add>
<add>
<add>class XLNetModelLanguageGenerationTest(unittest.TestCase):
<add>
<add> special_tokens = prepare_generation_special_tokens()
<add>
<add> @slow
<add> def test_lm_generate_xlnet_base_cased(self):
<add> model = XLNetLMHeadModel.from_pretrained("xlnet-base-cased")
<add> input_ids = torch.Tensor(
<add> [
<add> [
<add> 67,
<add> 2840,
<add> 19,
<add> 18,
<add> 1484,
<add> 20,
<add> 965,
<add> 29077,
<add> 8719,
<add> 1273,
<add> 21,
<add> 45,
<add> 273,
<add> 17,
<add> 10,
<add> 15048,
<add> 28,
<add> 27511,
<add> 21,
<add> 4185,
<add> 11,
<add> 41,
<add> 2444,
<add> 9,
<add> 32,
<add> 1025,
<add> 20,
<add> 8719,
<add> 26,
<add> 23,
<add> 673,
<add> 966,
<add> 19,
<add> 29077,
<add> 20643,
<add> 27511,
<add> 20822,
<add> 20643,
<add> 19,
<add> 17,
<add> 6616,
<add> 17511,
<add> 18,
<add> 8978,
<add> 20,
<add> 18,
<add> 777,
<add> 9,
<add> 19233,
<add> 1527,
<add> 17669,
<add> 19,
<add> 24,
<add> 673,
<add> 17,
<add> 28756,
<add> 150,
<add> 12943,
<add> 4354,
<add> 153,
<add> 27,
<add> 442,
<add> 37,
<add> 45,
<add> 668,
<add> 21,
<add> 24,
<add> 256,
<add> 20,
<add> 416,
<add> 22,
<add> 2771,
<add> 4901,
<add> 9,
<add> 12943,
<add> 4354,
<add> 153,
<add> 51,
<add> 24,
<add> 3004,
<add> 21,
<add> 28142,
<add> 23,
<add> 65,
<add> 20,
<add> 18,
<add> 416,
<add> 34,
<add> 24,
<add> 2958,
<add> 22947,
<add> 9,
<add> 1177,
<add> 45,
<add> 668,
<add> 3097,
<add> 13768,
<add> 23,
<add> 103,
<add> 28,
<add> 441,
<add> 148,
<add> 48,
<add> 20522,
<add> 19,
<add> 12943,
<add> 4354,
<add> 153,
<add> 12860,
<add> 34,
<add> 18,
<add> 326,
<add> 27,
<add> 17492,
<add> 684,
<add> 21,
<add> 6709,
<add> 9,
<add> 8585,
<add> 123,
<add> 266,
<add> 19,
<add> 12943,
<add> 4354,
<add> 153,
<add> 6872,
<add> 24,
<add> 3004,
<add> 20,
<add> 18,
<add> 9225,
<add> 2198,
<add> 19,
<add> 12717,
<add> 103,
<add> 22,
<add> 401,
<add> 24,
<add> 6348,
<add> 9,
<add> 12943,
<add> 4354,
<add> 153,
<add> 1068,
<add> 2768,
<add> 2286,
<add> 19,
<add> 33,
<add> 104,
<add> 19,
<add> 176,
<add> 24,
<add> 9313,
<add> 19,
<add> 20086,
<add> 28,
<add> 45,
<add> 10292,
<add> 9,
<add> 4,
<add> 3,
<add> ]
<add> ]
<add> ).long()
<add> # In 1991, the remains of Russian Tsar Nicholas II and his family
<add> # (except for Alexei and Maria) are discovered.
<add> # The voice of Nicholas's young son, Tsarevich Alexei Nikolaevich, narrates the
<add> # remainder of the story. 1883 Western Siberia,
<add> # a young Grigori Rasputin is asked by his father and a group of men to perform magic.
<add> # Rasputin has a vision and denounces one of the men as a horse thief. Although his
<add> # father initially slaps him for making such an accusation, Rasputin watches as the
<add> # man is chased outside and beaten. Twenty years later, Rasputin sees a vision of
<add> # the Virgin Mary, prompting him to become a priest. Rasputin quickly becomes famous,
<add> # with people, even a bishop, begging for his blessing. """
<add>
<add> expected_output_ids = [
<add> 67,
<add> 2840,
<add> 19,
<add> 18,
<add> 1484,
<add> 20,
<add> 965,
<add> 29077,
<add> 8719,
<add> 1273,
<add> 21,
<add> 45,
<add> 273,
<add> 17,
<add> 10,
<add> 15048,
<add> 28,
<add> 27511,
<add> 21,
<add> 4185,
<add> 11,
<add> 41,
<add> 2444,
<add> 9,
<add> 32,
<add> 1025,
<add> 20,
<add> 8719,
<add> 26,
<add> 23,
<add> 673,
<add> 966,
<add> 19,
<add> 29077,
<add> 20643,
<add> 27511,
<add> 20822,
<add> 20643,
<add> 19,
<add> 17,
<add> 6616,
<add> 17511,
<add> 18,
<add> 8978,
<add> 20,
<add> 18,
<add> 777,
<add> 9,
<add> 19233,
<add> 1527,
<add> 17669,
<add> 19,
<add> 24,
<add> 673,
<add> 17,
<add> 28756,
<add> 150,
<add> 12943,
<add> 4354,
<add> 153,
<add> 27,
<add> 442,
<add> 37,
<add> 45,
<add> 668,
<add> 21,
<add> 24,
<add> 256,
<add> 20,
<add> 416,
<add> 22,
<add> 2771,
<add> 4901,
<add> 9,
<add> 12943,
<add> 4354,
<add> 153,
<add> 51,
<add> 24,
<add> 3004,
<add> 21,
<add> 28142,
<add> 23,
<add> 65,
<add> 20,
<add> 18,
<add> 416,
<add> 34,
<add> 24,
<add> 2958,
<add> 22947,
<add> 9,
<add> 1177,
<add> 45,
<add> 668,
<add> 3097,
<add> 13768,
<add> 23,
<add> 103,
<add> 28,
<add> 441,
<add> 148,
<add> 48,
<add> 20522,
<add> 19,
<add> 12943,
<add> 4354,
<add> 153,
<add> 12860,
<add> 34,
<add> 18,
<add> 326,
<add> 27,
<add> 17492,
<add> 684,
<add> 21,
<add> 6709,
<add> 9,
<add> 8585,
<add> 123,
<add> 266,
<add> 19,
<add> 12943,
<add> 4354,
<add> 153,
<add> 6872,
<add> 24,
<add> 3004,
<add> 20,
<add> 18,
<add> 9225,
<add> 2198,
<add> 19,
<add> 12717,
<add> 103,
<add> 22,
<add> 401,
<add> 24,
<add> 6348,
<add> 9,
<add> 12943,
<add> 4354,
<add> 153,
<add> 1068,
<add> 2768,
<add> 2286,
<add> 19,
<add> 33,
<add> 104,
<add> 19,
<add> 176,
<add> 24,
<add> 9313,
<add> 19,
<add> 20086,
<add> 28,
<add> 45,
<add> 10292,
<add> 9,
<add> 4,
<add> 3,
<add> 1722,
<add> 19,
<add> 24,
<add> 6348,
<add> 61,
<add> 977,
<add> 176,
<add> 1772,
<add> 33,
<add> 45,
<add> 970,
<add> 19,
<add> 4185,
<add> 19,
<add> 27,
<add> 442,
<add> 22,
<add> 2771,
<add> 4901,
<add> 25,
<add> 18,
<add> 2059,
<add> 20,
<add> 24,
<add> 303,
<add> 1775,
<add> 691,
<add> 9,
<add> 1147,
<add> 19,
<add> 634,
<add> 19,
<add> 43,
<add> 51,
<add> 54,
<add> 6157,
<add> 2999,
<add> 33,
<add> 4185,
<add> ]
<add> # In 1991, the remains of Russian Tsar Nicholas II and his family (except for Alexei and Maria)
<add> # are discovered. The voice of Nicholas's young son, Tsarevich Alexei Nikolaevich,
<add> # narrates the remainder of the story. 1883 Western Siberia, a young Grigori Rasputin
<add> # is asked by his father and a group of men to perform magic. Rasputin has a vision and
<add> # denounces one of the men as a horse thief. Although his father initially slaps
<add> # him for making such an accusation, Rasputin watches as the man is chased outside and beaten.
<add> # Twenty years later, Rasputin sees a vision of the Virgin Mary, prompting him to become a priest.
<add> # Rasputin quickly becomes famous, with people, even a bishop, begging for his blessing.
<add> # 1990, a priest who cannot even walk with his wife, Maria, is asked to perform magic
<add> # in the presence of a local religious leader.
<add> # Since, however, he has had difficulty walking with Maria
<add>
<add> torch.manual_seed(0)
<add> output_ids = model.generate(
<add> input_ids,
<add> bos_token_id=self.special_tokens["bos_token_id"],
<add> pad_token_id=self.special_tokens["pad_token_id"],
<add> eos_token_ids=self.special_tokens["eos_token_id"],
<add> max_length=200,
<add> )
<add>
<add> self.assertListEqual(output_ids[0].tolist(), expected_output_ids)
| 8
|
Java
|
Java
|
remove groupby with selector
|
9c50bdc5f5587dfddf4c09191fae9e8132fbf1ab
|
<ide><path>rxjava-core/src/main/java/rx/Observable.java
<ide> public final <K> Observable<GroupedObservable<K, T>> groupBy(final Func1<? super
<ide> return lift(new OperatorGroupBy<K, T>(keySelector));
<ide> }
<ide>
<del> /**
<del> * Groups the items emitted by an Observable according to a specified criterion, and emits these grouped
<del> * items, transformed by a selector, within {@link GroupedObservable}s, one {@code GroupedObservable} per
<del> * group.
<del> * <p>
<del> * <img width="640" src="https://raw.github.com/wiki/Netflix/RxJava/images/rx-operators/groupBy.png">
<del> *
<del> * @param keySelector
<del> * a function that extracts the key from an item
<del> * @param elementSelector
<del> * a function to map a source item to an item emitted by a {@link GroupedObservable}
<del> * @param <K>
<del> * the key type
<del> * @param <R>
<del> * the type of items emitted by the resulting {@link GroupedObservable}s
<del> * @return an Observable that emits {@link GroupedObservable}s, each of which corresponds to a unique key
<del> * value and emits transformed items corresponding to items from the source Observable that share
<del> * that key value
<del> * @see <a href="https://github.com/Netflix/RxJava/wiki/Transforming-Observables#wiki-groupby-and-groupbyuntil">RxJava Wiki: groupBy</a>
<del> */
<del> public final <K, R> Observable<GroupedObservable<K, R>> groupBy(final Func1<? super T, ? extends K> keySelector, final Func1<? super T, ? extends R> elementSelector) {
<del> return null;
<del> }
<del>
<ide> /**
<ide> * Groups the items emitted by an Observable according to a specified key selector function until the
<ide> * duration Observable expires for the key.
| 1
|
Python
|
Python
|
fix bad conflict detection during makemigrations
|
28779abb759d5687a27011b40c0d2f34ada815e9
|
<ide><path>django/core/management/commands/makemigrations.py
<ide> def handle(self, *app_labels, **options):
<ide> # Load the current graph state. Takes a connection, but it's not used
<ide> # (makemigrations doesn't look at the database state).
<ide> # Also make sure the graph is built without unmigrated apps shoehorned in.
<del> loader = MigrationLoader(connections[DEFAULT_DB_ALIAS])
<add> loader = MigrationLoader(connections[DEFAULT_DB_ALIAS], load=False)
<ide> loader.build_graph(ignore_unmigrated=True)
<ide>
<ide> # Before anything else, see if there's conflicting apps and drop out
<ide><path>django/db/migrations/loader.py
<ide> def build_graph(self, ignore_unmigrated=False):
<ide> if parent[1] == "__first__" and parent not in self.graph:
<ide> if parent[0] in self.unmigrated_apps:
<ide> if ignore_unmigrated:
<del> migration.dependencies.remove(parent)
<ide> parent = None
<ide> else:
<ide> # This app isn't migrated, but something depends on it.
| 2
|
Go
|
Go
|
fix apparmor not being applied to exec processes
|
8f3308ae10ec9ad0dd4edfb46fde53a0e1e19b34
|
<ide><path>daemon/exec_linux.go
<ide> func (daemon *Daemon) execSetPlatformOpt(c *container.Container, ec *exec.Config
<ide> if c.AppArmorProfile != "" {
<ide> appArmorProfile = c.AppArmorProfile
<ide> } else if c.HostConfig.Privileged {
<add> // `docker exec --privileged` does not currently disable AppArmor
<add> // profiles. Privileged configuration of the container is inherited
<ide> appArmorProfile = "unconfined"
<ide> } else {
<ide> appArmorProfile = "docker-default"
<ide> func (daemon *Daemon) execSetPlatformOpt(c *container.Container, ec *exec.Config
<ide> return err
<ide> }
<ide> }
<add> p.ApparmorProfile = appArmorProfile
<ide> }
<ide> daemon.setRlimits(&specs.Spec{Process: p}, c)
<ide> return nil
<ide><path>daemon/exec_linux_test.go
<add>// +build linux
<add>
<add>package daemon
<add>
<add>import (
<add> "testing"
<add>
<add> containertypes "github.com/docker/docker/api/types/container"
<add> "github.com/docker/docker/container"
<add> "github.com/docker/docker/daemon/exec"
<add> "github.com/gotestyourself/gotestyourself/assert"
<add> "github.com/opencontainers/runc/libcontainer/apparmor"
<add> "github.com/opencontainers/runtime-spec/specs-go"
<add>)
<add>
<add>func TestExecSetPlatformOpt(t *testing.T) {
<add> if !apparmor.IsEnabled() {
<add> t.Skip("requires AppArmor to be enabled")
<add> }
<add> d := &Daemon{}
<add> c := &container.Container{AppArmorProfile: "my-custom-profile"}
<add> ec := &exec.Config{}
<add> p := &specs.Process{}
<add>
<add> err := d.execSetPlatformOpt(c, ec, p)
<add> assert.NilError(t, err)
<add> assert.Equal(t, "my-custom-profile", p.ApparmorProfile)
<add>}
<add>
<add>// TestExecSetPlatformOptPrivileged verifies that `docker exec --privileged`
<add>// does not disable AppArmor profiles. Exec currently inherits the `Privileged`
<add>// configuration of the container. See https://github.com/moby/moby/pull/31773#discussion_r105586900
<add>//
<add>// This behavior may change in future, but test for the behavior to prevent it
<add>// from being changed accidentally.
<add>func TestExecSetPlatformOptPrivileged(t *testing.T) {
<add> if !apparmor.IsEnabled() {
<add> t.Skip("requires AppArmor to be enabled")
<add> }
<add> d := &Daemon{}
<add> c := &container.Container{AppArmorProfile: "my-custom-profile"}
<add> ec := &exec.Config{Privileged: true}
<add> p := &specs.Process{}
<add>
<add> err := d.execSetPlatformOpt(c, ec, p)
<add> assert.NilError(t, err)
<add> assert.Equal(t, "my-custom-profile", p.ApparmorProfile)
<add>
<add> c.HostConfig = &containertypes.HostConfig{Privileged: true}
<add> err = d.execSetPlatformOpt(c, ec, p)
<add> assert.NilError(t, err)
<add> assert.Equal(t, "unconfined", p.ApparmorProfile)
<add>}
| 2
|
Javascript
|
Javascript
|
coerce pie values to numbers
|
4a0c3842b9baa76f2989ea5157a8523171abf89e
|
<ide><path>d3.layout.js
<ide> d3.layout.pie = function() {
<ide> });
<ide>
<ide> // Compute the numeric values for each data element.
<del> var values = data.map(value);
<add> var values = data.map(function(d, i) { return +value.call(pie, d, i); });
<ide>
<ide> // Convert k into a scale factor from value to angle, using the sum.
<ide> k /= values.reduce(function(p, d) { return p + d; }, 0);
<ide><path>d3.layout.min.js
<del>(function(){function a(a){var b=a.source,d=a.target,e=c(b,d),f=[b];while(b!==e)b=b.parent,f.push(b);var g=f.length;while(d!==e)f.splice(g,0,d),d=d.parent;return f}function b(a){var b=[],c=a.parent;while(c!=null)b.push(a),a=c,c=c.parent;return b.push(a),b}function c(a,c){if(a===c)return a;var d=b(a),e=b(c),f=d.pop(),g=e.pop(),h=null;while(f===g)h=f,f=d.pop(),g=e.pop();return h}function g(a){a.fixed|=2}function h(a){a!==f&&(a.fixed&=1)}function i(){j(),f.fixed&=1,e=f=null}function j(){f.px+=d3.event.dx,f.py+=d3.event.dy,e.resume()}function k(a,b,c){var d=0,e=0;a.charge=0;if(!a.leaf){var f=a.nodes,g=f.length,h=-1,i;while(++h<g){i=f[h];if(i==null)continue;k(i,b,c),a.charge+=i.charge,d+=i.charge*i.cx,e+=i.charge*i.cy}}if(a.point){a.leaf||(a.point.x+=Math.random()-.5,a.point.y+=Math.random()-.5);var j=b*c[a.point.index];a.charge+=a.pointCharge=j,d+=j*a.point.x,e+=j*a.point.y}a.cx=d/a.charge,a.cy=e/a.charge}function l(a){return 20}function m(a){return 1}function n(a){return a.x}function o(a){return a.y}function p(a,b,c){a.y0=b,a.y=c}function s(a){var b=1,c=0,d=a[0][1],e,f=a.length;for(;b<f;++b)(e=a[b][1])>d&&(c=b,d=e);return c}function t(a){return a.reduce(u,0)}function u(a,b){return a+b[1]}function v(a,b){return w(a,Math.ceil(Math.log(b.length)/Math.LN2+1))}function w(a,b){var c=-1,d=+a[0],e=(a[1]-d)/b,f=[];while(++c<=b)f[c]=e*c+d;return f}function x(a){return[d3.min(a),d3.max(a)]}function y(a,b){return a.sort=d3.rebind(a,b.sort),a.children=d3.rebind(a,b.children),a.links=C,a.value=d3.rebind(a,b.value),a.nodes=function(b){return D=!0,(a.nodes=a)(b)},a}function z(a){return a.children}function A(a){return a.value}function B(a,b){return b.value-a.value}function C(a){return d3.merge(a.map(function(a){return(a.children||[]).map(function(b){return{source:a,target:b}})}))}function E(a,b){return a.value-b.value}function F(a,b){var c=a._pack_next;a._pack_next=b,b._pack_prev=a,b._pack_next=c,c._pack_prev=b}function G(a,b){a._pack_next=b,b._pack_prev=a}function H(a,b){var c=b.x-a.x,d=b.y-a.y,e=a.r+b.r;return e*e-c*c-d*d>.001}function I(a){function l(a){b=Math.min(a.x-a.r,b),c=Math.max(a.x+a.r,c),d=Math.min(a.y-a.r,d),e=Math.max(a.y+a.r,e)}var b=Infinity,c=-Infinity,d=Infinity,e=-Infinity,f=a.length,g,h,i,j,k;a.forEach(J),g=a[0],g.x=-g.r,g.y=0,l(g);if(f>1){h=a[1],h.x=h.r,h.y=0,l(h);if(f>2){i=a[2],N(g,h,i),l(i),F(g,i),g._pack_prev=i,F(i,h),h=g._pack_next;for(var m=3;m<f;m++){N(g,h,i=a[m]);var n=0,o=1,p=1;for(j=h._pack_next;j!==h;j=j._pack_next,o++)if(H(j,i)){n=1;break}if(n==1)for(k=g._pack_prev;k!==j._pack_prev;k=k._pack_prev,p++)if(H(k,i)){p<o&&(n=-1,j=k);break}n==0?(F(g,i),h=i,l(i)):n>0?(G(g,j),h=j,m--):(G(j,h),g=j,m--)}}}var q=(b+c)/2,r=(d+e)/2,s=0;for(var m=0;m<f;m++){var t=a[m];t.x-=q,t.y-=r,s=Math.max(s,t.r+Math.sqrt(t.x*t.x+t.y*t.y))}return a.forEach(K),s}function J(a){a._pack_next=a._pack_prev=a}function K(a){delete a._pack_next,delete a._pack_prev}function L(a){var b=a.children;b&&b.length?(b.forEach(L),a.r=I(b)):a.r=Math.sqrt(a.value)}function M(a,b,c,d){var e=a.children;a.x=b+=d*a.x,a.y=c+=d*a.y,a.r*=d;if(e){var f=-1,g=e.length;while(++f<g)M(e[f],b,c,d)}}function N(a,b,c){var d=a.r+c.r,e=b.x-a.x,f=b.y-a.y;if(d&&(e||f)){var g=b.r+c.r,h=Math.sqrt(e*e+f*f),i=Math.max(-1,Math.min(1,(d*d+h*h-g*g)/(2*d*h))),j=Math.acos(i),k=i*(d/=h),l=Math.sin(j)*d;c.x=a.x+k*e+l*f,c.y=a.y+k*f-l*e}else c.x=a.x+d,c.y=a.y}function O(a){return 1+d3.max(a,function(a){return a.y})}function P(a){return a.reduce(function(a,b){return a+b.x},0)/a.length}function Q(a){var b=a.children;return b&&b.length?Q(b[0]):a}function 
R(a){var b=a.children,c;return b&&(c=b.length)?R(b[c-1]):a}function S(a,b){return a.parent==b.parent?1:2}function T(a){var b=a.children;return b&&b.length?b[0]:a._tree.thread}function U(a){var b=a.children,c;return b&&(c=b.length)?b[c-1]:a._tree.thread}function V(a,b){var c=a.children;if(c&&(e=c.length)){var d,e,f=-1;while(++f<e)b(d=V(c[f],b),a)>0&&(a=d)}return a}function W(a,b){return a.x-b.x}function X(a,b){return b.x-a.x}function Y(a,b){return a.depth-b.depth}function Z(a,b){function c(a,d){var e=a.children;if(e&&(i=e.length)){var f,g=null,h=-1,i;while(++h<i)f=e[h],c(f,g),g=f}b(a,d)}c(a,null)}function $(a){var b=0,c=0,d=a.children,e=d.length,f;while(--e>=0)f=d[e]._tree,f.prelim+=b,f.mod+=b,b+=f.shift+(c+=f.change)}function _(a,b,c){a=a._tree,b=b._tree;var d=c/(b.number-a.number);a.change+=d,b.change-=d,b.shift+=c,b.prelim+=c,b.mod+=c}function ba(a,b,c){return a._tree.ancestor.parent==b.parent?a._tree.ancestor:c}function bb(a){return{x:a.x,y:a.y,dx:a.dx,dy:a.dy}}function bc(a,b){var c=a.x+b[3],d=a.y+b[0],e=a.dx-b[1]-b[3],f=a.dy-b[0]-b[2];return e<0&&(c+=e/2,e=0),f<0&&(d+=f/2,f=0),{x:c,y:d,dx:e,dy:f}}d3.layout={},d3.layout.bundle=function(){return function(b){var c=[],d=-1,e=b.length;while(++d<e)c.push(a(b[d]));return c}},d3.layout.chord=function(){function j(){var a={},j=[],l=d3.range(e),m=[],n,o,p,q,r;b=[],c=[],n=0,q=-1;while(++q<e){o=0,r=-1;while(++r<e)o+=d[q][r];j.push(o),m.push(d3.range(e)),n+=o}g&&l.sort(function(a,b){return g(j[a],j[b])}),h&&m.forEach(function(a,b){a.sort(function(a,c){return h(d[b][a],d[b][c])})}),n=(2*Math.PI-f*e)/n,o=0,q=-1;while(++q<e){p=o,r=-1;while(++r<e){var s=l[q],t=m[s][r],u=d[s][t];a[s+"-"+t]={index:s,subindex:t,startAngle:o,endAngle:o+=u*n,value:u}}c.push({index:s,startAngle:p,endAngle:o,value:(o-p)/n}),o+=f}q=-1;while(++q<e){r=q-1;while(++r<e){var v=a[q+"-"+r],w=a[r+"-"+q];(v.value||w.value)&&b.push(v.value<w.value?{source:w,target:v}:{source:v,target:w})}}i&&k()}function k(){b.sort(function(a,b){return i((a.source.value+a.target.value)/2,(b.source.value+b.target.value)/2)})}var a={},b,c,d,e,f=0,g,h,i;return a.matrix=function(f){return arguments.length?(e=(d=f)&&d.length,b=c=null,a):d},a.padding=function(d){return arguments.length?(f=d,b=c=null,a):f},a.sortGroups=function(d){return arguments.length?(g=d,b=c=null,a):g},a.sortSubgroups=function(c){return arguments.length?(h=c,b=null,a):h},a.sortChords=function(c){return arguments.length?(i=c,b&&k(),a):i},a.chords=function(){return b||j(),b},a.groups=function(){return c||j(),c},a},d3.layout.force=function(){function A(a){return function(b,c,d,e,f){if(b.point!==a){var g=b.cx-a.x,h=b.cy-a.y,i=1/Math.sqrt(g*g+h*h);if((e-c)*i<t){var j=b.charge*i*i;return a.px-=g*j,a.py-=h*j,!0}if(b.point&&isFinite(i)){var j=b.pointCharge*i*i;a.px-=g*j,a.py-=h*j}}return!b.charge}}function B(){var a=v.length,d=w.length,e,f,g,h,i,j,l,m,p;for(f=0;f<d;++f){g=w[f],h=g.source,i=g.target,m=i.x-h.x,p=i.y-h.y;if(j=m*m+p*p)j=n*y[f]*((j=Math.sqrt(j))-x[f])/j,m*=j,p*=j,i.x-=m*(l=h.weight/(i.weight+h.weight)),i.y-=p*l,h.x+=m*(l=1-l),h.y+=p*l}if(l=n*s){m=c[0]/2,p=c[1]/2,f=-1;if(l)while(++f<a)g=v[f],g.x+=(m-g.x)*l,g.y+=(p-g.y)*l}if(r){k(e=d3.geom.quadtree(v),n,z),f=-1;while(++f<a)(g=v[f]).fixed||e.visit(A(g))}f=-1;while(++f<a)g=v[f],g.fixed?(g.x=g.px,g.y=g.py):(g.x-=(g.px-(g.px=g.x))*o,g.y-=(g.py-(g.py=g.y))*o);return b.tick.dispatch({type:"tick",alpha:n}),(n*=.99)<.005}function C(b){g(f=b),e=a}var a={},b=d3.dispatch("tick"),c=[1,1],d,n,o=.9,p=l,q=m,r=-30,s=.1,t=.8,u,v=[],w=[],x,y,z;return a.on=function(c,d){return 
b[c].add(d),a},a.nodes=function(b){return arguments.length?(v=b,a):v},a.links=function(b){return arguments.length?(w=b,a):w},a.size=function(b){return arguments.length?(c=b,a):c},a.linkDistance=function(b){return arguments.length?(p=d3.functor(b),a):p},a.distance=a.linkDistance,a.linkStrength=function(b){return arguments.length?(q=d3.functor(b),a):q},a.friction=function(b){return arguments.length?(o=b,a):o},a.charge=function(b){return arguments.length?(r=typeof b=="function"?b:+b,a):r},a.gravity=function(b){return arguments.length?(s=b,a):s},a.theta=function(b){return arguments.length?(t=b,a):t},a.start=function(){function k(a,c){var d=l(b),e=-1,f=d.length,g;while(++e<f)if(!isNaN(g=d[e][a]))return g;return Math.random()*c}function l(){if(!i){i=[];for(d=0;d<e;++d)i[d]=[];for(d=0;d<f;++d){var a=w[d];i[a.source.index].push(a.target),i[a.target.index].push(a.source)}}return i[b]}var b,d,e=v.length,f=w.length,g=c[0],h=c[1],i,j;for(b=0;b<e;++b)(j=v[b]).index=b,j.weight=0;x=[],y=[];for(b=0;b<f;++b)j=w[b],typeof j.source=="number"&&(j.source=v[j.source]),typeof j.target=="number"&&(j.target=v[j.target]),x[b]=p.call(this,j,b),y[b]=q.call(this,j,b),++j.source.weight,++j.target.weight;for(b=0;b<e;++b)j=v[b],isNaN(j.x)&&(j.x=k("x",g)),isNaN(j.y)&&(j.y=k("y",h)),isNaN(j.px)&&(j.px=j.x),isNaN(j.py)&&(j.py=j.y);z=[];if(typeof r=="function")for(b=0;b<e;++b)z[b]=+r.call(this,v[b],b);else for(b=0;b<e;++b)z[b]=r;return a.resume()},a.resume=function(){return n=.1,d3.timer(B),a},a.stop=function(){return n=0,a},a.drag=function(){d||(d=d3.behavior.drag().on("dragstart",C).on("drag",j).on("dragend",i)),this.on("mouseover.force",g).on("mouseout.force",h).call(d)},a};var e,f;d3.layout.partition=function(){function c(a,b,d,e){var f=a.children;a.x=b,a.y=a.depth*e,a.dx=d,a.dy=e;if(f&&(h=f.length)){var g=-1,h,i,j;d=a.value?d/a.value:0;while(++g<h)c(i=f[g],b,j=i.value*d,e),b+=j}}function d(a){var b=a.children,c=0;if(b&&(f=b.length)){var e=-1,f;while(++e<f)c=Math.max(c,d(b[e]))}return 1+c}function e(e,f){var g=a.call(this,e,f);return c(g[0],0,b[0],b[1]/d(g[0])),g}var a=d3.layout.hierarchy(),b=[1,1];return e.size=function(a){return arguments.length?(b=a,e):b},y(e,a)},d3.layout.pie=function(){function f(f,g){var h=+(typeof c=="function"?c.apply(this,arguments):c),i=(typeof e=="function"?e.apply(this,arguments):e)-c,j=d3.range(f.length);b!=null&&j.sort(function(a,c){return b(f[a],f[c])});var k=f.map(a);i/=k.reduce(function(a,b){return a+b},0);var l=j.map(function(a){return{data:f[a],value:d=k[a],startAngle:h,endAngle:h+=d*i}});return f.map(function(a,b){return l[j[b]]})}var a=Number,b=null,c=0,e=2*Math.PI;return f.value=function(b){return arguments.length?(a=b,f):a},f.sort=function(a){return arguments.length?(b=a,f):b},f.startAngle=function(a){return arguments.length?(c=a,f):c},f.endAngle=function(a){return arguments.length?(e=a,f):e},f},d3.layout.stack=function(){function g(h,i){var j=h.map(function(b,c){return a.call(g,b,c)}),k=j.map(function(a,b){return a.map(function(a,b){return[e.call(g,a,b),f.call(g,a,b)]})}),l=b.call(g,k,i);j=d3.permute(j,l),k=d3.permute(k,l);var m=c.call(g,k,i),n=j.length,o=j[0].length,p,q,r;for(q=0;q<o;++q){d.call(g,j[0][q],r=m[q],k[0][q][1]);for(p=1;p<n;++p)d.call(g,j[p][q],r+=k[p-1][q][1],k[p][q][1])}return h}var a=Object,b=q["default"],c=r.zero,d=p,e=n,f=o;return g.values=function(b){return arguments.length?(a=b,g):a},g.order=function(a){return arguments.length?(b=typeof a=="function"?a:q[a],g):b},g.offset=function(a){return arguments.length?(c=typeof 
a=="function"?a:r[a],g):c},g.x=function(a){return arguments.length?(e=a,g):e},g.y=function(a){return arguments.length?(f=a,g):f},g.out=function(a){return arguments.length?(d=a,g):d},g};var q={"inside-out":function(a){var b=a.length,c,d,e=a.map(s),f=a.map(t),g=d3.range(b).sort(function(a,b){return e[a]-e[b]}),h=0,i=0,j=[],k=[];for(c=0;c<b;++c)d=g[c],h<i?(h+=f[d],j.push(d)):(i+=f[d],k.push(d));return k.reverse().concat(j)},reverse:function(a){return d3.range(a.length).reverse()},"default":function(a){return d3.range(a.length)}},r={silhouette:function(a){var b=a.length,c=a[0].length,d=[],e=0,f,g,h,i=[];for(g=0;g<c;++g){for(f=0,h=0;f<b;f++)h+=a[f][g][1];h>e&&(e=h),d.push(h)}for(g=0;g<c;++g)i[g]=(e-d[g])/2;return i},wiggle:function(a){var b=a.length,c=a[0],d=c.length,e=0,f,g,h,i,j,k,l,m,n,o=[];o[0]=m=n=0;for(g=1;g<d;++g){for(f=0,i=0;f<b;++f)i+=a[f][g][1];for(f=0,j=0,l=c[g][0]-c[g-1][0];f<b;++f){for(h=0,k=(a[f][g][1]-a[f][g-1][1])/(2*l);h<f;++h)k+=(a[h][g][1]-a[h][g-1][1])/l;j+=k*a[f][g][1]}o[g]=m-=i?j/i*l:0,m<n&&(n=m)}for(g=0;g<d;++g)o[g]-=n;return o},expand:function(a){var b=a.length,c=a[0].length,d=1/b,e,f,g,h=[];for(f=0;f<c;++f){for(e=0,g=0;e<b;e++)g+=a[e][f][1];if(g)for(e=0;e<b;e++)a[e][f][1]/=g;else for(e=0;e<b;e++)a[e][f][1]=d}for(f=0;f<c;++f)h[f]=0;return h},zero:function(a){var b=-1,c=a[0].length,d=[];while(++b<c)d[b]=0;return d}};d3.layout.histogram=function(){function e(e,f){var g=[],h=e.map(b,this),i=c.call(this,h,f),j=d.call(this,i,h,f),k,f=-1,l=h.length,m=j.length-1,n=a?1:1/l,o;while(++f<m)k=g[f]=[],k.dx=j[f+1]-(k.x=j[f]),k.y=0;f=-1;while(++f<l)o=h[f],o>=i[0]&&o<=i[1]&&(k=g[d3.bisect(j,o,1,m)-1],k.y+=n,k.push(e[f]));return g}var a=!0,b=Number,c=x,d=v;return e.value=function(a){return arguments.length?(b=a,e):b},e.range=function(a){return arguments.length?(c=d3.functor(a),e):c},e.bins=function(a){return arguments.length?(d=typeof a=="number"?function(b){return w(b,a)}:d3.functor(a),e):d},e.frequency=function(b){return arguments.length?(a=!!b,e):a},e},d3.layout.hierarchy=function(){function e(f,h,i){var j=b.call(g,f,h),k=D?f:{data:f};k.depth=h,i.push(k);if(j&&(m=j.length)){var l=-1,m,n=k.children=[],o=0,p=h+1;while(++l<m)d=e(j[l],p,i),d.parent=k,n.push(d),o+=d.value;a&&n.sort(a),c&&(k.value=o)}else c&&(k.value=+c.call(g,f,h)||0);return k}function f(a,b){var d=a.children,e=0;if(d&&(i=d.length)){var h=-1,i,j=b+1;while(++h<i)e+=f(d[h],j)}else c&&(e=+c.call(g,D?a:a.data,b)||0);return c&&(a.value=e),e}function g(a){var b=[];return e(a,0,b),b}var a=B,b=z,c=A;return g.sort=function(b){return arguments.length?(a=b,g):a},g.children=function(a){return arguments.length?(b=a,g):b},g.value=function(a){return arguments.length?(c=a,g):c},g.revalue=function(a){return f(a,0),a},g};var D=!1;d3.layout.pack=function(){function c(c,d){var e=a.call(this,c,d),f=e[0];f.x=0,f.y=0,L(f);var g=b[0],h=b[1],i=1/Math.max(2*f.r/g,2*f.r/h);return M(f,g/2,h/2,i),e}var a=d3.layout.hierarchy().sort(E),b=[1,1];return c.size=function(a){return arguments.length?(b=a,c):b},y(c,a)},d3.layout.cluster=function(){function d(d,e){var f=a.call(this,d,e),g=f[0],h,i=0,j,k;Z(g,function(a){var c=a.children;c&&c.length?(a.x=P(c),a.y=O(c)):(a.x=h?i+=b(a,h):0,a.y=0,h=a)});var l=Q(g),m=R(g),n=l.x-b(l,m)/2,o=m.x+b(m,l)/2;return Z(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=(1-a.y/g.y)*c[1]}),f}var a=d3.layout.hierarchy().sort(null).value(null),b=S,c=[1,1];return d.separation=function(a){return arguments.length?(b=a,d):b},d.size=function(a){return arguments.length?(c=a,d):c},y(d,a)},d3.layout.tree=function(){function d(d,e){function 
h(a,c){var d=a.children,e=a._tree;if(d&&(f=d.length)){var f,g=d[0],i,k=g,l,m=-1;while(++m<f)l=d[m],h(l,i),k=j(l,i,k),i=l;$(a);var n=.5*(g._tree.prelim+l._tree.prelim);c?(e.prelim=c._tree.prelim+b(a,c),e.mod=e.prelim-n):e.prelim=n}else c&&(e.prelim=c._tree.prelim+b(a,c))}function i(a,b){a.x=a._tree.prelim+b;var c=a.children;if(c&&(e=c.length)){var d=-1,e;b+=a._tree.mod;while(++d<e)i(c[d],b)}}function j(a,c,d){if(c){var e=a,f=a,g=c,h=a.parent.children[0],i=e._tree.mod,j=f._tree.mod,k=g._tree.mod,l=h._tree.mod,m;while(g=U(g),e=T(e),g&&e)h=T(h),f=U(f),f._tree.ancestor=a,m=g._tree.prelim+k-e._tree.prelim-i+b(g,e),m>0&&(_(ba(g,a,d),a,m),i+=m,j+=m),k+=g._tree.mod,i+=e._tree.mod,l+=h._tree.mod,j+=f._tree.mod;g&&!U(f)&&(f._tree.thread=g,f._tree.mod+=k-j),e&&!T(h)&&(h._tree.thread=e,h._tree.mod+=i-l,d=a)}return d}var f=a.call(this,d,e),g=f[0];Z(g,function(a,b){a._tree={ancestor:a,prelim:0,mod:0,change:0,shift:0,number:b?b._tree.number+1:0}}),h(g),i(g,-g._tree.prelim);var k=V(g,X),l=V(g,W),m=V(g,Y),n=k.x-b(k,l)/2,o=l.x+b(l,k)/2,p=m.depth||1;return Z(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=a.depth/p*c[1],delete a._tree}),f}var a=d3.layout.hierarchy().sort(null).value(null),b=S,c=[1,1];return d.separation=function(a){return arguments.length?(b=a,d):b},d.size=function(a){return arguments.length?(c=a,d):c},y(d,a)},d3.layout.treemap=function(){function i(a,b){var c=-1,d=a.length,e,f;while(++c<d)f=(e=a[c]).value*(b<0?0:b),e.area=isNaN(f)||f<=0?0:f}function j(a){var b=a.children;if(b&&b.length){var c=e(a),d=[],f=b.slice(),g,h=Infinity,k,n=Math.min(c.dx,c.dy),o;i(f,c.dx*c.dy/a.value),d.area=0;while((o=f.length)>0)d.push(g=f[o-1]),d.area+=g.area,(k=l(d,n))<=h?(f.pop(),h=k):(d.area-=d.pop().area,m(d,n,c,!1),n=Math.min(c.dx,c.dy),d.length=d.area=0,h=Infinity);d.length&&(m(d,n,c,!0),d.length=d.area=0),b.forEach(j)}}function k(a){var b=a.children;if(b&&b.length){var c=e(a),d=b.slice(),f,g=[];i(d,c.dx*c.dy/a.value),g.area=0;while(f=d.pop())g.push(f),g.area+=f.area,f.z!=null&&(m(g,f.z?c.dx:c.dy,c,!d.length),g.length=g.area=0);b.forEach(k)}}function l(a,b){var c=a.area,d,e=0,f=Infinity,g=-1,i=a.length;while(++g<i){if(!(d=a[g].area))continue;d<f&&(f=d),d>e&&(e=d)}return c*=c,b*=b,c?Math.max(b*e*h/c,c/(b*f*h)):Infinity}function m(a,c,d,e){var f=-1,g=a.length,h=d.x,i=d.y,j=c?b(a.area/c):0,k;if(c==d.dx){if(e||j>d.dy)j=j?d.dy:0;while(++f<g)k=a[f],k.x=h,k.y=i,k.dy=j,h+=k.dx=j?b(k.area/j):0;k.z=!0,k.dx+=d.x+d.dx-h,d.y+=j,d.dy-=j}else{if(e||j>d.dx)j=j?d.dx:0;while(++f<g)k=a[f],k.x=h,k.y=i,k.dx=j,i+=k.dy=j?b(k.area/j):0;k.z=!1,k.dy+=d.y+d.dy-i,d.x+=j,d.dx-=j}}function n(b){var d=g||a(b),e=d[0];return e.x=0,e.y=0,e.dx=c[0],e.dy=c[1],g&&a.revalue(e),i([e],e.dx*e.dy/e.value),(g?k:j)(e),f&&(g=d),d}var a=d3.layout.hierarchy(),b=Math.round,c=[1,1],d=null,e=bb,f=!1,g,h=.5*(1+Math.sqrt(5));return n.size=function(a){return arguments.length?(c=a,n):c},n.padding=function(a){function b(b){var c=a.call(n,b,b.depth);return c==null?bb(b):bc(b,typeof c=="number"?[c,c,c,c]:c)}function c(b){return bc(b,a)}if(!arguments.length)return d;var f;return e=(d=a)==null?bb:(f=typeof a)==="function"?b:f==="number"?(a=[a,a,a,a],c):c,n},n.round=function(a){return arguments.length?(b=a?Math.round:Number,n):b!=Number},n.sticky=function(a){return arguments.length?(f=a,g=null,n):f},n.ratio=function(a){return arguments.length?(h=a,n):h},y(n,a)}})();
<ide>\ No newline at end of file
<add>(function(){function a(a){var b=a.source,d=a.target,e=c(b,d),f=[b];while(b!==e)b=b.parent,f.push(b);var g=f.length;while(d!==e)f.splice(g,0,d),d=d.parent;return f}function b(a){var b=[],c=a.parent;while(c!=null)b.push(a),a=c,c=c.parent;return b.push(a),b}function c(a,c){if(a===c)return a;var d=b(a),e=b(c),f=d.pop(),g=e.pop(),h=null;while(f===g)h=f,f=d.pop(),g=e.pop();return h}function g(a){a.fixed|=2}function h(a){a!==f&&(a.fixed&=1)}function i(){j(),f.fixed&=1,e=f=null}function j(){f.px+=d3.event.dx,f.py+=d3.event.dy,e.resume()}function k(a,b,c){var d=0,e=0;a.charge=0;if(!a.leaf){var f=a.nodes,g=f.length,h=-1,i;while(++h<g){i=f[h];if(i==null)continue;k(i,b,c),a.charge+=i.charge,d+=i.charge*i.cx,e+=i.charge*i.cy}}if(a.point){a.leaf||(a.point.x+=Math.random()-.5,a.point.y+=Math.random()-.5);var j=b*c[a.point.index];a.charge+=a.pointCharge=j,d+=j*a.point.x,e+=j*a.point.y}a.cx=d/a.charge,a.cy=e/a.charge}function l(a){return 20}function m(a){return 1}function n(a){return a.x}function o(a){return a.y}function p(a,b,c){a.y0=b,a.y=c}function s(a){var b=1,c=0,d=a[0][1],e,f=a.length;for(;b<f;++b)(e=a[b][1])>d&&(c=b,d=e);return c}function t(a){return a.reduce(u,0)}function u(a,b){return a+b[1]}function v(a,b){return w(a,Math.ceil(Math.log(b.length)/Math.LN2+1))}function w(a,b){var c=-1,d=+a[0],e=(a[1]-d)/b,f=[];while(++c<=b)f[c]=e*c+d;return f}function x(a){return[d3.min(a),d3.max(a)]}function y(a,b){return a.sort=d3.rebind(a,b.sort),a.children=d3.rebind(a,b.children),a.links=C,a.value=d3.rebind(a,b.value),a.nodes=function(b){return D=!0,(a.nodes=a)(b)},a}function z(a){return a.children}function A(a){return a.value}function B(a,b){return b.value-a.value}function C(a){return d3.merge(a.map(function(a){return(a.children||[]).map(function(b){return{source:a,target:b}})}))}function E(a,b){return a.value-b.value}function F(a,b){var c=a._pack_next;a._pack_next=b,b._pack_prev=a,b._pack_next=c,c._pack_prev=b}function G(a,b){a._pack_next=b,b._pack_prev=a}function H(a,b){var c=b.x-a.x,d=b.y-a.y,e=a.r+b.r;return e*e-c*c-d*d>.001}function I(a){function l(a){b=Math.min(a.x-a.r,b),c=Math.max(a.x+a.r,c),d=Math.min(a.y-a.r,d),e=Math.max(a.y+a.r,e)}var b=Infinity,c=-Infinity,d=Infinity,e=-Infinity,f=a.length,g,h,i,j,k;a.forEach(J),g=a[0],g.x=-g.r,g.y=0,l(g);if(f>1){h=a[1],h.x=h.r,h.y=0,l(h);if(f>2){i=a[2],N(g,h,i),l(i),F(g,i),g._pack_prev=i,F(i,h),h=g._pack_next;for(var m=3;m<f;m++){N(g,h,i=a[m]);var n=0,o=1,p=1;for(j=h._pack_next;j!==h;j=j._pack_next,o++)if(H(j,i)){n=1;break}if(n==1)for(k=g._pack_prev;k!==j._pack_prev;k=k._pack_prev,p++)if(H(k,i)){p<o&&(n=-1,j=k);break}n==0?(F(g,i),h=i,l(i)):n>0?(G(g,j),h=j,m--):(G(j,h),g=j,m--)}}}var q=(b+c)/2,r=(d+e)/2,s=0;for(var m=0;m<f;m++){var t=a[m];t.x-=q,t.y-=r,s=Math.max(s,t.r+Math.sqrt(t.x*t.x+t.y*t.y))}return a.forEach(K),s}function J(a){a._pack_next=a._pack_prev=a}function K(a){delete a._pack_next,delete a._pack_prev}function L(a){var b=a.children;b&&b.length?(b.forEach(L),a.r=I(b)):a.r=Math.sqrt(a.value)}function M(a,b,c,d){var e=a.children;a.x=b+=d*a.x,a.y=c+=d*a.y,a.r*=d;if(e){var f=-1,g=e.length;while(++f<g)M(e[f],b,c,d)}}function N(a,b,c){var d=a.r+c.r,e=b.x-a.x,f=b.y-a.y;if(d&&(e||f)){var g=b.r+c.r,h=Math.sqrt(e*e+f*f),i=Math.max(-1,Math.min(1,(d*d+h*h-g*g)/(2*d*h))),j=Math.acos(i),k=i*(d/=h),l=Math.sin(j)*d;c.x=a.x+k*e+l*f,c.y=a.y+k*f-l*e}else c.x=a.x+d,c.y=a.y}function O(a){return 1+d3.max(a,function(a){return a.y})}function P(a){return a.reduce(function(a,b){return a+b.x},0)/a.length}function Q(a){var b=a.children;return b&&b.length?Q(b[0]):a}function 
R(a){var b=a.children,c;return b&&(c=b.length)?R(b[c-1]):a}function S(a,b){return a.parent==b.parent?1:2}function T(a){var b=a.children;return b&&b.length?b[0]:a._tree.thread}function U(a){var b=a.children,c;return b&&(c=b.length)?b[c-1]:a._tree.thread}function V(a,b){var c=a.children;if(c&&(e=c.length)){var d,e,f=-1;while(++f<e)b(d=V(c[f],b),a)>0&&(a=d)}return a}function W(a,b){return a.x-b.x}function X(a,b){return b.x-a.x}function Y(a,b){return a.depth-b.depth}function Z(a,b){function c(a,d){var e=a.children;if(e&&(i=e.length)){var f,g=null,h=-1,i;while(++h<i)f=e[h],c(f,g),g=f}b(a,d)}c(a,null)}function $(a){var b=0,c=0,d=a.children,e=d.length,f;while(--e>=0)f=d[e]._tree,f.prelim+=b,f.mod+=b,b+=f.shift+(c+=f.change)}function _(a,b,c){a=a._tree,b=b._tree;var d=c/(b.number-a.number);a.change+=d,b.change-=d,b.shift+=c,b.prelim+=c,b.mod+=c}function ba(a,b,c){return a._tree.ancestor.parent==b.parent?a._tree.ancestor:c}function bb(a){return{x:a.x,y:a.y,dx:a.dx,dy:a.dy}}function bc(a,b){var c=a.x+b[3],d=a.y+b[0],e=a.dx-b[1]-b[3],f=a.dy-b[0]-b[2];return e<0&&(c+=e/2,e=0),f<0&&(d+=f/2,f=0),{x:c,y:d,dx:e,dy:f}}d3.layout={},d3.layout.bundle=function(){return function(b){var c=[],d=-1,e=b.length;while(++d<e)c.push(a(b[d]));return c}},d3.layout.chord=function(){function j(){var a={},j=[],l=d3.range(e),m=[],n,o,p,q,r;b=[],c=[],n=0,q=-1;while(++q<e){o=0,r=-1;while(++r<e)o+=d[q][r];j.push(o),m.push(d3.range(e)),n+=o}g&&l.sort(function(a,b){return g(j[a],j[b])}),h&&m.forEach(function(a,b){a.sort(function(a,c){return h(d[b][a],d[b][c])})}),n=(2*Math.PI-f*e)/n,o=0,q=-1;while(++q<e){p=o,r=-1;while(++r<e){var s=l[q],t=m[s][r],u=d[s][t];a[s+"-"+t]={index:s,subindex:t,startAngle:o,endAngle:o+=u*n,value:u}}c.push({index:s,startAngle:p,endAngle:o,value:(o-p)/n}),o+=f}q=-1;while(++q<e){r=q-1;while(++r<e){var v=a[q+"-"+r],w=a[r+"-"+q];(v.value||w.value)&&b.push(v.value<w.value?{source:w,target:v}:{source:v,target:w})}}i&&k()}function k(){b.sort(function(a,b){return i((a.source.value+a.target.value)/2,(b.source.value+b.target.value)/2)})}var a={},b,c,d,e,f=0,g,h,i;return a.matrix=function(f){return arguments.length?(e=(d=f)&&d.length,b=c=null,a):d},a.padding=function(d){return arguments.length?(f=d,b=c=null,a):f},a.sortGroups=function(d){return arguments.length?(g=d,b=c=null,a):g},a.sortSubgroups=function(c){return arguments.length?(h=c,b=null,a):h},a.sortChords=function(c){return arguments.length?(i=c,b&&k(),a):i},a.chords=function(){return b||j(),b},a.groups=function(){return c||j(),c},a},d3.layout.force=function(){function A(a){return function(b,c,d,e,f){if(b.point!==a){var g=b.cx-a.x,h=b.cy-a.y,i=1/Math.sqrt(g*g+h*h);if((e-c)*i<t){var j=b.charge*i*i;return a.px-=g*j,a.py-=h*j,!0}if(b.point&&isFinite(i)){var j=b.pointCharge*i*i;a.px-=g*j,a.py-=h*j}}return!b.charge}}function B(){var a=v.length,d=w.length,e,f,g,h,i,j,l,m,p;for(f=0;f<d;++f){g=w[f],h=g.source,i=g.target,m=i.x-h.x,p=i.y-h.y;if(j=m*m+p*p)j=n*y[f]*((j=Math.sqrt(j))-x[f])/j,m*=j,p*=j,i.x-=m*(l=h.weight/(i.weight+h.weight)),i.y-=p*l,h.x+=m*(l=1-l),h.y+=p*l}if(l=n*s){m=c[0]/2,p=c[1]/2,f=-1;if(l)while(++f<a)g=v[f],g.x+=(m-g.x)*l,g.y+=(p-g.y)*l}if(r){k(e=d3.geom.quadtree(v),n,z),f=-1;while(++f<a)(g=v[f]).fixed||e.visit(A(g))}f=-1;while(++f<a)g=v[f],g.fixed?(g.x=g.px,g.y=g.py):(g.x-=(g.px-(g.px=g.x))*o,g.y-=(g.py-(g.py=g.y))*o);return b.tick.dispatch({type:"tick",alpha:n}),(n*=.99)<.005}function C(b){g(f=b),e=a}var a={},b=d3.dispatch("tick"),c=[1,1],d,n,o=.9,p=l,q=m,r=-30,s=.1,t=.8,u,v=[],w=[],x,y,z;return a.on=function(c,d){return 
b[c].add(d),a},a.nodes=function(b){return arguments.length?(v=b,a):v},a.links=function(b){return arguments.length?(w=b,a):w},a.size=function(b){return arguments.length?(c=b,a):c},a.linkDistance=function(b){return arguments.length?(p=d3.functor(b),a):p},a.distance=a.linkDistance,a.linkStrength=function(b){return arguments.length?(q=d3.functor(b),a):q},a.friction=function(b){return arguments.length?(o=b,a):o},a.charge=function(b){return arguments.length?(r=typeof b=="function"?b:+b,a):r},a.gravity=function(b){return arguments.length?(s=b,a):s},a.theta=function(b){return arguments.length?(t=b,a):t},a.start=function(){function k(a,c){var d=l(b),e=-1,f=d.length,g;while(++e<f)if(!isNaN(g=d[e][a]))return g;return Math.random()*c}function l(){if(!i){i=[];for(d=0;d<e;++d)i[d]=[];for(d=0;d<f;++d){var a=w[d];i[a.source.index].push(a.target),i[a.target.index].push(a.source)}}return i[b]}var b,d,e=v.length,f=w.length,g=c[0],h=c[1],i,j;for(b=0;b<e;++b)(j=v[b]).index=b,j.weight=0;x=[],y=[];for(b=0;b<f;++b)j=w[b],typeof j.source=="number"&&(j.source=v[j.source]),typeof j.target=="number"&&(j.target=v[j.target]),x[b]=p.call(this,j,b),y[b]=q.call(this,j,b),++j.source.weight,++j.target.weight;for(b=0;b<e;++b)j=v[b],isNaN(j.x)&&(j.x=k("x",g)),isNaN(j.y)&&(j.y=k("y",h)),isNaN(j.px)&&(j.px=j.x),isNaN(j.py)&&(j.py=j.y);z=[];if(typeof r=="function")for(b=0;b<e;++b)z[b]=+r.call(this,v[b],b);else for(b=0;b<e;++b)z[b]=r;return a.resume()},a.resume=function(){return n=.1,d3.timer(B),a},a.stop=function(){return n=0,a},a.drag=function(){d||(d=d3.behavior.drag().on("dragstart",C).on("drag",j).on("dragend",i)),this.on("mouseover.force",g).on("mouseout.force",h).call(d)},a};var e,f;d3.layout.partition=function(){function c(a,b,d,e){var f=a.children;a.x=b,a.y=a.depth*e,a.dx=d,a.dy=e;if(f&&(h=f.length)){var g=-1,h,i,j;d=a.value?d/a.value:0;while(++g<h)c(i=f[g],b,j=i.value*d,e),b+=j}}function d(a){var b=a.children,c=0;if(b&&(f=b.length)){var e=-1,f;while(++e<f)c=Math.max(c,d(b[e]))}return 1+c}function e(e,f){var g=a.call(this,e,f);return c(g[0],0,b[0],b[1]/d(g[0])),g}var a=d3.layout.hierarchy(),b=[1,1];return e.size=function(a){return arguments.length?(b=a,e):b},y(e,a)},d3.layout.pie=function(){function f(g,h){var i=+(typeof c=="function"?c.apply(this,arguments):c),j=(typeof e=="function"?e.apply(this,arguments):e)-c,k=d3.range(g.length);b!=null&&k.sort(function(a,c){return b(g[a],g[c])});var l=g.map(function(b,c){return+a.call(f,b,c)});j/=l.reduce(function(a,b){return a+b},0);var m=k.map(function(a){return{data:g[a],value:d=l[a],startAngle:i,endAngle:i+=d*j}});return g.map(function(a,b){return m[k[b]]})}var a=Number,b=null,c=0,e=2*Math.PI;return f.value=function(b){return arguments.length?(a=b,f):a},f.sort=function(a){return arguments.length?(b=a,f):b},f.startAngle=function(a){return arguments.length?(c=a,f):c},f.endAngle=function(a){return arguments.length?(e=a,f):e},f},d3.layout.stack=function(){function g(h,i){var j=h.map(function(b,c){return a.call(g,b,c)}),k=j.map(function(a,b){return a.map(function(a,b){return[e.call(g,a,b),f.call(g,a,b)]})}),l=b.call(g,k,i);j=d3.permute(j,l),k=d3.permute(k,l);var m=c.call(g,k,i),n=j.length,o=j[0].length,p,q,r;for(q=0;q<o;++q){d.call(g,j[0][q],r=m[q],k[0][q][1]);for(p=1;p<n;++p)d.call(g,j[p][q],r+=k[p-1][q][1],k[p][q][1])}return h}var a=Object,b=q["default"],c=r.zero,d=p,e=n,f=o;return g.values=function(b){return arguments.length?(a=b,g):a},g.order=function(a){return arguments.length?(b=typeof a=="function"?a:q[a],g):b},g.offset=function(a){return arguments.length?(c=typeof 
a=="function"?a:r[a],g):c},g.x=function(a){return arguments.length?(e=a,g):e},g.y=function(a){return arguments.length?(f=a,g):f},g.out=function(a){return arguments.length?(d=a,g):d},g};var q={"inside-out":function(a){var b=a.length,c,d,e=a.map(s),f=a.map(t),g=d3.range(b).sort(function(a,b){return e[a]-e[b]}),h=0,i=0,j=[],k=[];for(c=0;c<b;++c)d=g[c],h<i?(h+=f[d],j.push(d)):(i+=f[d],k.push(d));return k.reverse().concat(j)},reverse:function(a){return d3.range(a.length).reverse()},"default":function(a){return d3.range(a.length)}},r={silhouette:function(a){var b=a.length,c=a[0].length,d=[],e=0,f,g,h,i=[];for(g=0;g<c;++g){for(f=0,h=0;f<b;f++)h+=a[f][g][1];h>e&&(e=h),d.push(h)}for(g=0;g<c;++g)i[g]=(e-d[g])/2;return i},wiggle:function(a){var b=a.length,c=a[0],d=c.length,e=0,f,g,h,i,j,k,l,m,n,o=[];o[0]=m=n=0;for(g=1;g<d;++g){for(f=0,i=0;f<b;++f)i+=a[f][g][1];for(f=0,j=0,l=c[g][0]-c[g-1][0];f<b;++f){for(h=0,k=(a[f][g][1]-a[f][g-1][1])/(2*l);h<f;++h)k+=(a[h][g][1]-a[h][g-1][1])/l;j+=k*a[f][g][1]}o[g]=m-=i?j/i*l:0,m<n&&(n=m)}for(g=0;g<d;++g)o[g]-=n;return o},expand:function(a){var b=a.length,c=a[0].length,d=1/b,e,f,g,h=[];for(f=0;f<c;++f){for(e=0,g=0;e<b;e++)g+=a[e][f][1];if(g)for(e=0;e<b;e++)a[e][f][1]/=g;else for(e=0;e<b;e++)a[e][f][1]=d}for(f=0;f<c;++f)h[f]=0;return h},zero:function(a){var b=-1,c=a[0].length,d=[];while(++b<c)d[b]=0;return d}};d3.layout.histogram=function(){function e(e,f){var g=[],h=e.map(b,this),i=c.call(this,h,f),j=d.call(this,i,h,f),k,f=-1,l=h.length,m=j.length-1,n=a?1:1/l,o;while(++f<m)k=g[f]=[],k.dx=j[f+1]-(k.x=j[f]),k.y=0;f=-1;while(++f<l)o=h[f],o>=i[0]&&o<=i[1]&&(k=g[d3.bisect(j,o,1,m)-1],k.y+=n,k.push(e[f]));return g}var a=!0,b=Number,c=x,d=v;return e.value=function(a){return arguments.length?(b=a,e):b},e.range=function(a){return arguments.length?(c=d3.functor(a),e):c},e.bins=function(a){return arguments.length?(d=typeof a=="number"?function(b){return w(b,a)}:d3.functor(a),e):d},e.frequency=function(b){return arguments.length?(a=!!b,e):a},e},d3.layout.hierarchy=function(){function e(f,h,i){var j=b.call(g,f,h),k=D?f:{data:f};k.depth=h,i.push(k);if(j&&(m=j.length)){var l=-1,m,n=k.children=[],o=0,p=h+1;while(++l<m)d=e(j[l],p,i),d.parent=k,n.push(d),o+=d.value;a&&n.sort(a),c&&(k.value=o)}else c&&(k.value=+c.call(g,f,h)||0);return k}function f(a,b){var d=a.children,e=0;if(d&&(i=d.length)){var h=-1,i,j=b+1;while(++h<i)e+=f(d[h],j)}else c&&(e=+c.call(g,D?a:a.data,b)||0);return c&&(a.value=e),e}function g(a){var b=[];return e(a,0,b),b}var a=B,b=z,c=A;return g.sort=function(b){return arguments.length?(a=b,g):a},g.children=function(a){return arguments.length?(b=a,g):b},g.value=function(a){return arguments.length?(c=a,g):c},g.revalue=function(a){return f(a,0),a},g};var D=!1;d3.layout.pack=function(){function c(c,d){var e=a.call(this,c,d),f=e[0];f.x=0,f.y=0,L(f);var g=b[0],h=b[1],i=1/Math.max(2*f.r/g,2*f.r/h);return M(f,g/2,h/2,i),e}var a=d3.layout.hierarchy().sort(E),b=[1,1];return c.size=function(a){return arguments.length?(b=a,c):b},y(c,a)},d3.layout.cluster=function(){function d(d,e){var f=a.call(this,d,e),g=f[0],h,i=0,j,k;Z(g,function(a){var c=a.children;c&&c.length?(a.x=P(c),a.y=O(c)):(a.x=h?i+=b(a,h):0,a.y=0,h=a)});var l=Q(g),m=R(g),n=l.x-b(l,m)/2,o=m.x+b(m,l)/2;return Z(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=(1-a.y/g.y)*c[1]}),f}var a=d3.layout.hierarchy().sort(null).value(null),b=S,c=[1,1];return d.separation=function(a){return arguments.length?(b=a,d):b},d.size=function(a){return arguments.length?(c=a,d):c},y(d,a)},d3.layout.tree=function(){function d(d,e){function 
h(a,c){var d=a.children,e=a._tree;if(d&&(f=d.length)){var f,g=d[0],i,k=g,l,m=-1;while(++m<f)l=d[m],h(l,i),k=j(l,i,k),i=l;$(a);var n=.5*(g._tree.prelim+l._tree.prelim);c?(e.prelim=c._tree.prelim+b(a,c),e.mod=e.prelim-n):e.prelim=n}else c&&(e.prelim=c._tree.prelim+b(a,c))}function i(a,b){a.x=a._tree.prelim+b;var c=a.children;if(c&&(e=c.length)){var d=-1,e;b+=a._tree.mod;while(++d<e)i(c[d],b)}}function j(a,c,d){if(c){var e=a,f=a,g=c,h=a.parent.children[0],i=e._tree.mod,j=f._tree.mod,k=g._tree.mod,l=h._tree.mod,m;while(g=U(g),e=T(e),g&&e)h=T(h),f=U(f),f._tree.ancestor=a,m=g._tree.prelim+k-e._tree.prelim-i+b(g,e),m>0&&(_(ba(g,a,d),a,m),i+=m,j+=m),k+=g._tree.mod,i+=e._tree.mod,l+=h._tree.mod,j+=f._tree.mod;g&&!U(f)&&(f._tree.thread=g,f._tree.mod+=k-j),e&&!T(h)&&(h._tree.thread=e,h._tree.mod+=i-l,d=a)}return d}var f=a.call(this,d,e),g=f[0];Z(g,function(a,b){a._tree={ancestor:a,prelim:0,mod:0,change:0,shift:0,number:b?b._tree.number+1:0}}),h(g),i(g,-g._tree.prelim);var k=V(g,X),l=V(g,W),m=V(g,Y),n=k.x-b(k,l)/2,o=l.x+b(l,k)/2,p=m.depth||1;return Z(g,function(a){a.x=(a.x-n)/(o-n)*c[0],a.y=a.depth/p*c[1],delete a._tree}),f}var a=d3.layout.hierarchy().sort(null).value(null),b=S,c=[1,1];return d.separation=function(a){return arguments.length?(b=a,d):b},d.size=function(a){return arguments.length?(c=a,d):c},y(d,a)},d3.layout.treemap=function(){function i(a,b){var c=-1,d=a.length,e,f;while(++c<d)f=(e=a[c]).value*(b<0?0:b),e.area=isNaN(f)||f<=0?0:f}function j(a){var b=a.children;if(b&&b.length){var c=e(a),d=[],f=b.slice(),g,h=Infinity,k,n=Math.min(c.dx,c.dy),o;i(f,c.dx*c.dy/a.value),d.area=0;while((o=f.length)>0)d.push(g=f[o-1]),d.area+=g.area,(k=l(d,n))<=h?(f.pop(),h=k):(d.area-=d.pop().area,m(d,n,c,!1),n=Math.min(c.dx,c.dy),d.length=d.area=0,h=Infinity);d.length&&(m(d,n,c,!0),d.length=d.area=0),b.forEach(j)}}function k(a){var b=a.children;if(b&&b.length){var c=e(a),d=b.slice(),f,g=[];i(d,c.dx*c.dy/a.value),g.area=0;while(f=d.pop())g.push(f),g.area+=f.area,f.z!=null&&(m(g,f.z?c.dx:c.dy,c,!d.length),g.length=g.area=0);b.forEach(k)}}function l(a,b){var c=a.area,d,e=0,f=Infinity,g=-1,i=a.length;while(++g<i){if(!(d=a[g].area))continue;d<f&&(f=d),d>e&&(e=d)}return c*=c,b*=b,c?Math.max(b*e*h/c,c/(b*f*h)):Infinity}function m(a,c,d,e){var f=-1,g=a.length,h=d.x,i=d.y,j=c?b(a.area/c):0,k;if(c==d.dx){if(e||j>d.dy)j=j?d.dy:0;while(++f<g)k=a[f],k.x=h,k.y=i,k.dy=j,h+=k.dx=j?b(k.area/j):0;k.z=!0,k.dx+=d.x+d.dx-h,d.y+=j,d.dy-=j}else{if(e||j>d.dx)j=j?d.dx:0;while(++f<g)k=a[f],k.x=h,k.y=i,k.dx=j,i+=k.dy=j?b(k.area/j):0;k.z=!1,k.dy+=d.y+d.dy-i,d.x+=j,d.dx-=j}}function n(b){var d=g||a(b),e=d[0];return e.x=0,e.y=0,e.dx=c[0],e.dy=c[1],g&&a.revalue(e),i([e],e.dx*e.dy/e.value),(g?k:j)(e),f&&(g=d),d}var a=d3.layout.hierarchy(),b=Math.round,c=[1,1],d=null,e=bb,f=!1,g,h=.5*(1+Math.sqrt(5));return n.size=function(a){return arguments.length?(c=a,n):c},n.padding=function(a){function b(b){var c=a.call(n,b,b.depth);return c==null?bb(b):bc(b,typeof c=="number"?[c,c,c,c]:c)}function c(b){return bc(b,a)}if(!arguments.length)return d;var f;return e=(d=a)==null?bb:(f=typeof a)==="function"?b:f==="number"?(a=[a,a,a,a],c):c,n},n.round=function(a){return arguments.length?(b=a?Math.round:Number,n):b!=Number},n.sticky=function(a){return arguments.length?(f=a,g=null,n):f},n.ratio=function(a){return arguments.length?(h=a,n):h},y(n,a)}})();
<ide>\ No newline at end of file
<ide><path>src/layout/pie.js
<ide> d3.layout.pie = function() {
<ide> });
<ide>
<ide> // Compute the numeric values for each data element.
<del> var values = data.map(value);
<add> var values = data.map(function(d, i) { return +value.call(pie, d, i); });
<ide>
<ide> // Convert k into a scale factor from value to angle, using the sum.
<ide> k /= values.reduce(function(p, d) { return p + d; }, 0);
| 3
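The pie.js hunk above changes the value accessor so it is invoked with the layout as `this` and with the element index, instead of being handed straight to `Array.prototype.map`. A minimal sketch of how such an accessor is typically supplied (the data shape and field names are illustrative, not from the commit):

```javascript
// Sketch only: d3.layout.pie as patched above, with a custom value accessor.
var pie = d3.layout.pie()
    .value(function(d, i) { return d.count; }); // receives (datum, index), called with the layout as `this`

var data = [{label: "a", count: 4}, {label: "b", count: 8}, {label: "c", count: 2}];
var arcs = pie(data);

// Each arc keeps the original datum and gets startAngle/endAngle computed from the values.
arcs.forEach(function(arc) {
  console.log(arc.data.label, arc.startAngle, arc.endAngle);
});
```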
|
Python
|
Python
|
set version to v2.2.0
|
2eb31012e7af1ad752cefdfecace13a257e4282c
|
<ide><path>spacy/about.py
<ide> # fmt: off
<ide> __title__ = "spacy"
<del>__version__ = "2.2.0.dev19"
<add>__version__ = "2.2.0"
<ide> __release__ = True
<ide> __download_url__ = "https://github.com/explosion/spacy-models/releases/download"
<ide> __compatibility__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json"
| 1
|
Java
|
Java
|
add basic authentication consumer
|
6bcf6ffb06da7193e93cf990b5d86a7deb208781
|
<ide><path>spring-web/src/main/java/org/springframework/http/HttpHeaders.java
<ide> import java.net.InetSocketAddress;
<ide> import java.net.URI;
<ide> import java.nio.charset.Charset;
<add>import java.nio.charset.CharsetEncoder;
<add>import java.nio.charset.StandardCharsets;
<ide> import java.text.DecimalFormat;
<ide> import java.text.DecimalFormatSymbols;
<ide> import java.time.Instant;
<ide> import java.time.format.DateTimeFormatter;
<ide> import java.time.format.DateTimeParseException;
<ide> import java.util.ArrayList;
<add>import java.util.Base64;
<ide> import java.util.Collection;
<ide> import java.util.Collections;
<ide> import java.util.EnumSet;
<ide> import java.util.Locale;
<ide> import java.util.Map;
<ide> import java.util.Set;
<add>import java.util.function.Consumer;
<add>import java.util.function.Supplier;
<ide> import java.util.regex.Matcher;
<ide> import java.util.regex.Pattern;
<ide> import java.util.stream.Collectors;
<ide> public static HttpHeaders readOnlyHttpHeaders(HttpHeaders headers) {
<ide> return (headers.readOnly ? headers : new HttpHeaders(headers, true));
<ide> }
<ide>
<add> /**
<add> * Returns a {@code HttpHeaders} consumer that adds Basic Authentication.
<add> * More specifically: a consumer that adds an {@linkplain #AUTHORIZATION
<add> * Authorization} header based on the given username and password. Meant
<add> * to be used in combination with
<add> * {@link org.springframework.web.reactive.function.client.WebClient.RequestHeadersSpec#headers(java.util.function.Consumer)}.
<add> * <p>Note that Basic Authentication only supports characters in the
<add> * {@linkplain StandardCharsets#ISO_8859_1 ISO-8859-1} character set.
<add> *
<add> * @param username the username
<add> * @param password the password
<add> * @return a consumer that adds a Basic Authentication header
<add> */
<add> public static Consumer<HttpHeaders> basicAuthenticationConsumer(String username,String password) {
<add> return basicAuthenticationConsumer(() -> username, () -> password);
<add>
<add> }
<add>
<add> /**
<add> * Returns a {@code HttpHeaders} consumer that adds Basic Authentication.
<add> * More specifically: a consumer that adds an {@linkplain #AUTHORIZATION
<add> * Authorization} header based on the given username and password
<add> * suppliers. Meant to be used in combination with
<add> * {@link org.springframework.web.reactive.function.client.WebClient.RequestHeadersSpec#headers(java.util.function.Consumer)}.
<add> * <p>Note that Basic Authentication only supports characters in the
<add> * {@linkplain StandardCharsets#ISO_8859_1 ISO-8859-1} character set.
<add> *
<add> * @param usernameSupplier supplier for the username
<add> * @param passwordSupplier supplier for the password
<add> * @return a consumer that adds a Basic Authentication header
<add> */
<add> public static Consumer<HttpHeaders> basicAuthenticationConsumer(Supplier<String> usernameSupplier,
<add> Supplier<String> passwordSupplier) {
<add>
<add> Assert.notNull(usernameSupplier, "Username Supplier must not be null");
<add> Assert.notNull(passwordSupplier, "Password Supplier must not be null");
<add>
<add> return new BasicAuthenticationConsumer(usernameSupplier, passwordSupplier);
<add> }
<add>
<add>
<add> /**
<add> * @see #basicAuthenticationConsumer
<add> */
<add> private static class BasicAuthenticationConsumer implements Consumer<HttpHeaders> {
<add>
<add> private final Supplier<String> usernameSupplier;
<add>
<add> private final Supplier<String> passwordSupplier;
<add>
<add> public BasicAuthenticationConsumer(Supplier<String> usernameSupplier,
<add> Supplier<String> passwordSupplier) {
<add> this.usernameSupplier = usernameSupplier;
<add> this.passwordSupplier = passwordSupplier;
<add> }
<add>
<add> @Override
<add> public void accept(HttpHeaders httpHeaders) {
<add> String username = this.usernameSupplier.get();
<add> String password = this.passwordSupplier.get();
<add>
<add> Assert.state(username != null, "Supplied username is null");
<add> Assert.state(password != null, "Supplied password is null");
<add>
<add> checkIllegalCharacters(username, password);
<add>
<add> String credentialsString = username + ":" + password;
<add> byte[] credentialBytes = credentialsString.getBytes(StandardCharsets.ISO_8859_1);
<add> byte[] encodedBytes = Base64.getEncoder().encode(credentialBytes);
<add> String encodedCredentials = new String(encodedBytes, StandardCharsets.ISO_8859_1);
<add>
<add> httpHeaders.set(HttpHeaders.AUTHORIZATION, "Basic " + encodedCredentials);
<add> }
<add>
<add> private static void checkIllegalCharacters(String username, String password) {
<add> // Basic authentication only supports ISO 8859-1
<add> CharsetEncoder encoder = StandardCharsets.ISO_8859_1.newEncoder();
<add> if (!encoder.canEncode(username) || !encoder.canEncode(password)) {
<add> throw new IllegalArgumentException(
<add> "Username or password contains characters that cannot be encoded to ISO-8859-1");
<add> }
<add> }
<add>
<add> }
<ide> }
<ide><path>spring-web/src/test/java/org/springframework/http/HttpHeadersTests.java
<ide> import java.time.ZonedDateTime;
<ide> import java.util.ArrayList;
<ide> import java.util.Arrays;
<add>import java.util.Base64;
<ide> import java.util.Calendar;
<ide> import java.util.Collections;
<ide> import java.util.EnumSet;
<ide> import java.util.GregorianCalendar;
<ide> import java.util.List;
<ide> import java.util.Locale;
<ide> import java.util.TimeZone;
<add>import java.util.function.Consumer;
<ide>
<ide> import org.hamcrest.Matchers;
<ide> import org.junit.Test;
<ide>
<del>import static java.time.format.DateTimeFormatter.*;
<del>import static org.hamcrest.Matchers.*;
<add>import static java.time.format.DateTimeFormatter.RFC_1123_DATE_TIME;
<add>import static org.hamcrest.Matchers.is;
<ide> import static org.junit.Assert.*;
<ide>
<ide> /**
<ide> public void firstZonedDateTime() {
<ide> assertTrue(headers.getFirstZonedDateTime(HttpHeaders.DATE).isEqual(date));
<ide> }
<ide>
<add> @Test
<add> public void basicAuthenticationConsumer() throws Exception {
<add>
<add> String username = "foo";
<add> String password = "bar";
<add>
<add> Consumer<HttpHeaders> consumer =
<add> HttpHeaders.basicAuthenticationConsumer(username, password);
<add>
<add> HttpHeaders headers = new HttpHeaders();
<add> assertFalse(headers.containsKey(HttpHeaders.AUTHORIZATION));
<add> consumer.accept(headers);
<add> String authorization = headers.getFirst(HttpHeaders.AUTHORIZATION);
<add> assertNotNull(authorization);
<add> assertTrue(authorization.startsWith("Basic "));
<add> byte[] result = Base64.getDecoder().decode(authorization.substring(6).getBytes(StandardCharsets.ISO_8859_1));
<add> assertEquals("foo:bar", new String(result, StandardCharsets.ISO_8859_1));
<add>
<add> }
<ide> }
| 2
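The consumer added above builds a standard Basic Authentication header. As a language-agnostic illustration of the value it produces (this is not Spring code; Node's `Buffer` stands in for the Java Base64 encoder):

```javascript
// Basic auth header = "Basic " + base64(username + ":" + password), encoded as ISO-8859-1 (latin1).
var username = 'foo';
var password = 'bar';
var header = 'Basic ' + Buffer.from(username + ':' + password, 'latin1').toString('base64');
console.log(header); // "Basic Zm9vOmJhcg==", the value the test above decodes back to "foo:bar"
```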
|
Python
|
Python
|
correct infinite loop in browser mode issue#547
|
558d7a22a10eef03265fb30e0f8d941ac565f386
|
<ide><path>glances/core/glances_client.py
<ide> def serve_forever(self):
<ide>
<ide> # Export stats using export modules
<ide> self.stats.export(self.stats)
<del> finally:
<add> except Exception as e:
<add> logger.critical(e)
<ide> self.end()
<ide>
<ide> return self.client_mode
<ide><path>glances/outputs/glances_curses.py
<ide> def update(self, servers_list):
<ide> servers_list: Dict of dict with servers stats
<ide> """
<ide> # Flush display
<add> logger.debug("Servers list: {}".format(servers_list))
<ide> self.flush(servers_list)
<ide>
<ide> # Wait
| 2
|
Ruby
|
Ruby
|
add regression test for broken filter
|
bf1fe429186505bbdfb04f4d0a845d1f52581f84
|
<ide><path>railties/test/application/test_runner_test.rb
<ide> class PostTest < ActiveSupport::TestCase
<ide> assert true
<ide> end
<ide>
<del> test "second filter" do
<del> puts 'PostTest:SecondFilter'
<add> test "line filter does not run this" do
<ide> assert true
<ide> end
<ide>
<del> test "line filter does not run this" do
<add> test "second filter" do
<add> puts 'PostTest:SecondFilter'
<ide> assert true
<ide> end
<ide> end
<ide> RUBY
<ide>
<del> run_test_command("test/models/post_test.rb:4:9").tap do |output|
<add> run_test_command("test/models/post_test.rb:4:13").tap do |output|
<ide> assert_match "PostTest:FirstFilter", output
<ide> assert_match "PostTest:SecondFilter", output
<ide> assert_match "2 runs, 2 assertions", output
| 1
|
Text
|
Text
|
add docs for global font options
|
9cf31da4c92fc6200c4983127700f08f29a8c161
|
<ide><path>docs/00-Getting-Started.md
<ide> hover |-|-|-
<ide> *hover*.animationDuration | Number | 400 | Duration in milliseconds it takes to animate hover style changes
<ide> onClick | Function | null | Called if the event is of type 'mouseup' or 'click'. Called in the context of the chart and passed an array of active elements
<ide> defaultColor | Color | 'rgba(0,0,0,0.1)' |
<add>defaultFontColor | Color | '#666' | Default font color for all text
<add>defaultFontFamily | String | "'Helvetica Neue', 'Helvetica', 'Arial', sans-serif" | Default font family for all text
<add>defaultFontSize | Number | 12 | Default font size (in px) for text. Does not apply to radialLinear scale point labels
<add>defaultFontStyle | String | 'normal' | Default font style. Does not apply to tooltip title or footer. Does not apply to chart title
<ide> legendCallback | Function | ` function (chart) { // the chart object to generate a legend from. }` | Function to generate a legend. Default implementation returns an HTML string.
<ide>
<ide> The global options for the chart title is defined in `Chart.defaults.global.title`
| 1
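The rows added above document global settings. A hedged sketch of applying them, assuming Chart.js 2.x's `Chart.defaults.global` namespace (the chart construction itself is only illustrative):

```javascript
// Global font defaults, applied before any chart is created.
Chart.defaults.global.defaultFontColor = '#444';
Chart.defaults.global.defaultFontFamily = "'Helvetica Neue', 'Helvetica', 'Arial', sans-serif";
Chart.defaults.global.defaultFontSize = 14;      // px; does not apply to radialLinear point labels
Chart.defaults.global.defaultFontStyle = 'bold'; // does not apply to tooltip title/footer or chart title

var chart = new Chart(ctx, { type: 'line', data: data }); // ctx and data assumed to exist
```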
|
Python
|
Python
|
use exception chaining
|
6a4bf9eec13e035cf10ecc16e462049b4c967e41
|
<ide><path>src/flask/app.py
<ide> def async_to_sync(
<ide> except ImportError:
<ide> raise RuntimeError(
<ide> "Install Flask with the 'async' extra in order to use async views."
<del> )
<add> ) from None
<ide>
<ide> # Check that Werkzeug isn't using its fallback ContextVar class.
<ide> if ContextVar.__module__ == "werkzeug.local":
<ide> def make_response(self, rv: ResponseReturnValue) -> Response:
<ide> " response. The return type must be a string,"
<ide> " dict, tuple, Response instance, or WSGI"
<ide> f" callable, but it was a {type(rv).__name__}."
<del> ).with_traceback(sys.exc_info()[2])
<add> ).with_traceback(sys.exc_info()[2]) from None
<ide> else:
<ide> raise TypeError(
<ide> "The view function did not return a valid"
<ide><path>src/flask/cli.py
<ide> def find_best_app(script_info, module):
<ide>
<ide> if isinstance(app, Flask):
<ide> return app
<del> except TypeError:
<add> except TypeError as e:
<ide> if not _called_with_wrong_args(app_factory):
<ide> raise
<add>
<ide> raise NoAppException(
<ide> f"Detected factory {attr_name!r} in module {module.__name__!r},"
<ide> " but could not call it without arguments. Use"
<ide> f" \"FLASK_APP='{module.__name__}:{attr_name}(args)'\""
<ide> " to specify arguments."
<del> )
<add> ) from e
<ide>
<ide> raise NoAppException(
<ide> "Failed to find Flask application or factory in module"
<ide> def find_app_by_string(script_info, module, app_name):
<ide> except SyntaxError:
<ide> raise NoAppException(
<ide> f"Failed to parse {app_name!r} as an attribute name or function call."
<del> )
<add> ) from None
<ide>
<ide> if isinstance(expr, ast.Name):
<ide> name = expr.id
<ide> def find_app_by_string(script_info, module, app_name):
<ide> # message with the full expression instead.
<ide> raise NoAppException(
<ide> f"Failed to parse arguments as literal values: {app_name!r}."
<del> )
<add> ) from None
<ide> else:
<ide> raise NoAppException(
<ide> f"Failed to parse {app_name!r} as an attribute name or function call."
<ide> )
<ide>
<ide> try:
<ide> attr = getattr(module, name)
<del> except AttributeError:
<add> except AttributeError as e:
<ide> raise NoAppException(
<ide> f"Failed to find attribute {name!r} in {module.__name__!r}."
<del> )
<add> ) from e
<ide>
<ide> # If the attribute is a function, call it with any args and kwargs
<ide> # to get the real application.
<ide> if inspect.isfunction(attr):
<ide> try:
<ide> app = call_factory(script_info, attr, args, kwargs)
<del> except TypeError:
<add> except TypeError as e:
<ide> if not _called_with_wrong_args(attr):
<ide> raise
<ide>
<ide> raise NoAppException(
<ide> f"The factory {app_name!r} in module"
<ide> f" {module.__name__!r} could not be called with the"
<ide> " specified arguments."
<del> )
<add> ) from e
<ide> else:
<ide> app = attr
<ide>
<ide> def locate_app(script_info, module_name, app_name, raise_if_not_found=True):
<ide>
<ide> try:
<ide> __import__(module_name)
<del> except ImportError:
<add> except ImportError as e:
<ide> # Reraise the ImportError if it occurred within the imported module.
<ide> # Determine this by checking whether the trace has a depth > 1.
<ide> if sys.exc_info()[2].tb_next:
<ide> raise NoAppException(
<del> f"While importing {module_name!r}, an ImportError was"
<del> f" raised:\n\n{traceback.format_exc()}"
<del> )
<add> f"While importing {module_name!r}, an ImportError was raised."
<add> ) from e
<ide> elif raise_if_not_found:
<del> raise NoAppException(f"Could not import {module_name!r}.")
<add> raise NoAppException(f"Could not import {module_name!r}.") from e
<ide> else:
<ide> return
<ide>
<ide> def convert(self, value, param, ctx):
<ide> "Using ad-hoc certificates requires the cryptography library.",
<ide> ctx,
<ide> param,
<del> )
<add> ) from None
<ide>
<ide> return value
<ide>
<ide><path>src/flask/debughelpers.py
<ide> class newcls(oldcls):
<ide> def __getitem__(self, key):
<ide> try:
<ide> return oldcls.__getitem__(self, key)
<del> except KeyError:
<add> except KeyError as e:
<ide> if key not in request.form:
<ide> raise
<del> raise DebugFilesKeyError(request, key)
<add>
<add> raise DebugFilesKeyError(request, key) from e
<ide>
<ide> newcls.__name__ = oldcls.__name__
<ide> newcls.__module__ = oldcls.__module__
<ide><path>src/flask/scaffold.py
<ide> def register_error_handler(
<ide> f"'{code_or_exception}' is not a recognized HTTP error"
<ide> " code. Use a subclass of HTTPException with that code"
<ide> " instead."
<del> )
<add> ) from None
<ide>
<ide> self.error_handler_spec[None][code][exc_class] = t.cast(
<ide> "ErrorHandlerCallable[Exception]", f
<ide><path>src/flask/signals.py
<ide> def _fail(self, *args: t.Any, **kwargs: t.Any) -> t.Any:
<ide> raise RuntimeError(
<ide> "Signalling support is unavailable because the blinker"
<ide> " library is not installed."
<del> )
<add> ) from None
<ide>
<ide> connect = connect_via = connected_to = temporarily_connected_to = _fail
<ide> disconnect = _fail
| 5
|
Text
|
Text
|
fix links in docs
|
a4cfe5c57369441bb6377d7cfb790302e13dbd10
|
<ide><path>README.md
<ide> the community.
<ide>
<ide> # React Native [](https://magnum.travis-ci.com/facebook/react-native)
<ide>
<del>Our first React Native implementation is `ReactKit`, targeting iOS. We are also
<del>working on an Android implementation which we will release later. `ReactKit`
<add>Our first React Native implementation is `React`, targeting iOS. We are also
<add>working on an Android implementation which we will release later. `React`
<ide> apps are built using the [React JS](https://github.com/facebook/react) framework, and render directly to
<ide> native UIKit elements using a fully asynchronous architecture. There is no
<ide> browser and no HTML. We have picked what we think is the best set of features
<ide><path>docs/NativeModulesIOS.md
<ide> CalendarManager.addEvent('Birthday Party', {
<ide>
<ide> > **NOTE**: About array and map
<ide> >
<del>> React Native doesn't provide any guarantees about the types of values in these structures. Your native module might expect array of strings, but if JavaScript calls your method with an array that contains number and string you'll get `NSArray` with `NSNumber` and `NSString`. It's developer's responsibility to check array/map values types (see [`RCTConvert`](https://github.com/facebook/react-native/blob/master/ReactKit/Base/RCTConvert.h) for helper methods).
<add>> React Native doesn't provide any guarantees about the types of values in these structures. Your native module might expect array of strings, but if JavaScript calls your method with an array that contains number and string you'll get `NSArray` with `NSNumber` and `NSString`. It's developer's responsibility to check array/map values types (see [`RCTConvert`](https://github.com/facebook/react-native/blob/master/React/Base/RCTConvert.h) for helper methods).
<ide>
<ide> # Callbacks
<ide>
<ide> CalendarManager.findEvents((error, events) => {
<ide> })
<ide> ```
<ide>
<del>Native module is supposed to invoke callback only once. It can, however, store the callback as an ivar and invoke it later. This pattern is often used to wrap iOS APIs that require delegate. See [`RCTAlertManager`](https://github.com/facebook/react-native/blob/master/ReactKit/Modules/RCTAlertManager.m).
<add>Native module is supposed to invoke callback only once. It can, however, store the callback as an ivar and invoke it later. This pattern is often used to wrap iOS APIs that require delegate. See [`RCTAlertManager`](https://github.com/facebook/react-native/blob/master/React/Modules/RCTAlertManager.m).
<ide>
<del>If you want to pass error-like object to JavaScript, use `RCTMakeError` from [`RCTUtils.h`](https://github.com/facebook/react-native/blob/master/ReactKit/Base/RCTUtils.h).
<add>If you want to pass error-like object to JavaScript, use `RCTMakeError` from [`RCTUtils.h`](https://github.com/facebook/react-native/blob/master/React/Base/RCTUtils.h).
<ide>
<ide> ## Implementing native module
<ide>
| 2
|
Python
|
Python
|
deprecate unused argument
|
2b3eae5f08ab7669c0003027f715abac422bc153
|
<ide><path>keras/layers/core.py
<ide> def get_config(self):
<ide> return dict(list(base_config.items()) + list(config.items()))
<ide>
<ide> @classmethod
<del> def from_config(cls, config, custom_objects={}):
<add> def from_config(cls, config, custom_objects=None):
<ide> # Insert custom objects into globals.
<ide> if custom_objects:
<ide> globs = globals().copy()
<ide> class Highway(Layer):
<ide> or alternatively, Theano function to use for weights
<ide> initialization. This parameter is only relevant
<ide> if you don't pass a `weights` argument.
<del> transform_bias: value for the bias to take on initially (default -2)
<ide> activation: name of activation function to use
<ide> (see [activations](../activations.md)),
<ide> or alternatively, elementwise Theano function.
<ide> class Highway(Layer):
<ide> '''
<ide> def __init__(self,
<ide> init='glorot_uniform',
<del> transform_bias=-2,
<ide> activation=None,
<ide> weights=None,
<ide> W_regularizer=None,
<ide> def __init__(self,
<ide> bias=True,
<ide> input_dim=None,
<ide> **kwargs):
<add> if 'transform_bias' in kwargs:
<add> kwargs.pop('transform_bias')
<add> warnings.warn('`transform_bias` argument is deprecated and '
<add> 'will be removed after 5/2017.')
<ide> self.init = initializations.get(init)
<del> self.transform_bias = transform_bias
<ide> self.activation = activations.get(activation)
<ide>
<ide> self.W_regularizer = regularizers.get(W_regularizer)
<ide> def call(self, x, mask=None):
<ide>
<ide> def get_config(self):
<ide> config = {'init': self.init.__name__,
<del> 'transform_bias': self.transform_bias,
<ide> 'activation': self.activation.__name__,
<ide> 'W_regularizer': self.W_regularizer.get_config() if self.W_regularizer else None,
<ide> 'b_regularizer': self.b_regularizer.get_config() if self.b_regularizer else None,
| 1
|
Text
|
Text
|
add advantages for arrow examples
|
f53940c5c0f05899547127cb43b44c43962f266c
|
<ide><path>client/src/guide/english/javascript/es6/arrow-functions/index.md
<ide> let newOneWithOneParam = a => {
<ide> }
<ide> ```
<ide>
<del>An incredible advantage of the arrows function is that you can not rebind an arrow function. It will always be called with the context in which it was defined. Just use a normal function.
<del>```javascript
<del>// Old Syntax
<del>axios.get(url).then(function(response) {
<del> this.data = response.data;
<del>}).bind(this);
<del>
<del>// New Syntax
<del>axios.get(url).then(response => {
<del> this.data = response.data;
<del>});
<add>### Advantages of arrow syntax
<add>1. An arrow function is always called with the context in which it was defined, so it does not get its own `this` binding.
<ide>
<add>```javascript
<add> // Old Syntax
<add> axios.get(url).then(function(response) {
<add> this.data = response.data;
<add> }).bind(this);
<add>
<add> // New Syntax
<add> axios.get(url).then(response => {
<add> this.data = response.data;
<add> });
<add> ```
<add> * before arrow functions, each function had its own `this` context
<add>2. Shorter, more readable functions
<add>```javascript
<add> // Old Syntax
<add> const sumValues = function (a, b) {
<add> return a+b;
<add> }
<add>
<add> // New Syntax
<add> const sumValues = (a,b) => a+b;
<ide> ```
<add> * since it's a one-line return, we can omit the brackets
<ide>
<ide>
<del>
<del>I don’t think I need to give an explanation for this. It's straightforward.
| 1
|
Javascript
|
Javascript
|
dump the tree structure to a string for debugging
|
ba7275dc4fb6e21f44977bfe09c5432af868bd9e
|
<ide><path>src/native-watcher-registry.js
<ide> class RegistryTree {
<ide> return this.root
<ide> }
<ide>
<add> print () {
<add> return this.root.print()
<add> }
<add>
<ide> }
<ide>
<ide> // Private: Non-leaf node in a tree used by the {NativeWatcherRegistry} to cover the allocated {Watcher} instances with
<ide> class RegistryNode {
<ide> }
<ide> return results
<ide> }
<add>
<add> print (indent = 0) {
<add> let spaces = ''
<add> for (let i = 0; i < indent; i++) {
<add> spaces += ' '
<add> }
<add>
<add> let result = ''
<add> for (const p of Object.keys(this.children)) {
<add> result += `${spaces}${p}\n${this.children[p].print(indent + 2)}`
<add> }
<add> return result
<add> }
<ide> }
<ide>
<ide> // Private: Leaf node within a {NativeWatcherRegistry} tree. Represents a directory that is covered by a
<ide> class RegistryWatcherNode {
<ide> leaves (prefix) {
<ide> return [{node: this, path: prefix}]
<ide> }
<add>
<add> print (indent = 0) {
<add> let result = ''
<add> for (let i = 0; i < indent; i++) {
<add> result += ' '
<add> }
<add> result += '[watcher'
<add> if (this.childPaths.size > 0) {
<add> result += ` +${this.childPaths.size}`
<add> }
<add> result += ']\n'
<add>
<add> return result
<add> }
<ide> }
<ide>
<ide> // Private: A {RegisteryNode} traversal result that's returned when neither a directory, its children, nor its parents
| 1
|
Mixed
|
Python
|
add featureextractor from thinc
|
a22215f427308c3d311b2e1de7fe0e690ed78215
|
<ide><path>spacy/ml/featureextractor.py
<add>from typing import List, Union, Callable, Tuple
<add>from thinc.types import Ints2d, Doc
<add>from thinc.api import Model, registry
<add>
<add>
<add>
<add>@registry.layers("spacy.FeatureExtractor.v1")
<add>def FeatureExtractor(columns: List[Union[int, str]]) -> Model[List[Doc], List[Ints2d]]:
<add> return Model("extract_features", forward, attrs={"columns": columns})
<add>
<add>
<add>def forward(model: Model[List[Doc], List[Ints2d]], docs, is_train: bool) -> Tuple[List[Ints2d], Callable]:
<add> columns = model.attrs["columns"]
<add> features: List[Ints2d] = []
<add> for doc in docs:
<add> if hasattr(doc, "to_array"):
<add> attrs = doc.to_array(columns)
<add> else:
<add> attrs = doc.doc.to_array(columns)[doc.start : doc.end]
<add> if attrs.ndim == 1:
<add> attrs = attrs.reshape((attrs.shape[0], 1))
<add> features.append(model.ops.asarray2i(attrs, dtype="uint64"))
<add>
<add> backprop: Callable[[List[Ints2d]], List] = lambda d_features: []
<add> return features, backprop
<ide><path>spacy/ml/models/textcat.py
<ide> from thinc.api import chain, concatenate, clone, Dropout, ParametricAttention
<ide> from thinc.api import SparseLinear, Softmax, softmax_activation, Maxout, reduce_sum
<ide> from thinc.api import HashEmbed, with_array, with_cpu, uniqued
<del>from thinc.api import Relu, residual, expand_window, FeatureExtractor
<add>from thinc.api import Relu, residual, expand_window
<ide>
<ide> from ...attrs import ID, ORTH, PREFIX, SUFFIX, SHAPE, LOWER
<ide> from ...util import registry
<ide> from ..extract_ngrams import extract_ngrams
<ide> from ..staticvectors import StaticVectors
<add>from ..featureextractor import FeatureExtractor
<ide>
<ide>
<ide> @registry.architectures.register("spacy.TextCatCNN.v1")
<ide><path>spacy/ml/models/tok2vec.py
<ide> from typing import Optional, List
<add>from thinc.types import Floats2d
<ide> from thinc.api import chain, clone, concatenate, with_array, with_padded
<del>from thinc.api import Model, noop, list2ragged, ragged2list
<del>from thinc.api import FeatureExtractor, HashEmbed
<add>from thinc.api import Model, noop, list2ragged, ragged2list, HashEmbed
<ide> from thinc.api import expand_window, residual, Maxout, Mish, PyTorchLSTM
<del>from thinc.types import Floats2d
<ide>
<ide> from ...tokens import Doc
<ide> from ...util import registry
<ide> from ...ml import _character_embed
<ide> from ..staticvectors import StaticVectors
<add>from ..featureextractor import FeatureExtractor
<ide> from ...pipeline.tok2vec import Tok2VecListener
<ide> from ...attrs import ORTH, NORM, PREFIX, SUFFIX, SHAPE
<ide>
<ide><path>website/docs/api/architectures.md
<ide> argument that connects to the shared `tok2vec` component in the pipeline.
<ide> Construct an embedding layer that separately embeds a number of lexical
<ide> attributes using hash embedding, concatenates the results, and passes it through
<ide> a feed-forward subnetwork to build mixed representations. The features used are
<del>the `NORM`, `PREFIX`, `SUFFIX` and `SHAPE`, which can have varying definitions
<del>depending on the `Vocab` of the `Doc` object passed in. Vectors from pretrained
<del>static vectors can also be incorporated into the concatenated representation.
<add>the `NORM`, `PREFIX`, `SUFFIX` and `SHAPE`, and they are extracted with a
<add>[FeatureExtractor](/api/architectures#FeatureExtractor) layer. Vectors from pretrained static
<add>vectors can also be incorporated into the concatenated representation.
<ide>
<ide> | Name | Description |
<ide> | ------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
<ide> on [static vectors](/usage/embeddings-transformers#static-vectors) for details.
<ide> | `key_attr` | Defaults to `"ORTH"`. ~~str~~ |
<ide> | **CREATES** | The model using the architecture. ~~Model[List[Doc], Ragged]~~ |
<ide>
<add>### spacy.FeatureExtractor.v1 {#FeatureExtractor}
<add>
<add>> #### Example config
<add>>
<add>> ```ini
<add>> [model]
<add>> @architectures = "spacy.FeatureExtractor.v1"
<add>> columns = ["NORM", "PREFIX", "SUFFIX", "SHAPE", "ORTH"]
<add>> ```
<add>
<add>Extract arrays of input features from [`Doc`](/api/doc) objects. Expects a list
<add>of feature names to extract, which should refer to token attributes.
<add>
<add>| Name | Description |
<add>| ----------- | ------------------------------------------------------------------------ |
<add>| `columns` | The token attributes to extract. ~~List[Union[int, str]]~~ |
<add>| **CREATES** | The created feature extraction layer. ~~Model[List[Doc], List[Ints2d]]~~ |
<add>
<ide> ## Transformer architectures {#transformers source="github.com/explosion/spacy-transformers/blob/master/spacy_transformers/architectures.py"}
<ide>
<ide> The following architectures are provided by the package
<ide><path>website/docs/usage/embeddings-transformers.md
<ide> vectors, but combines them via summation with a smaller table of learned
<ide> embeddings.
<ide>
<ide> ```python
<del>from thinc.api import add, chain, remap_ids, Embed, FeatureExtractor
<add>from thinc.api import add, chain, remap_ids, Embed
<ide> from spacy.ml.staticvectors import StaticVectors
<add>from spacy.ml.featureextractor import FeatureExtractor
<ide> from spacy.util import registry
<ide>
<ide> @registry.architectures("my_example.MyEmbedding.v1")
| 5
|
Javascript
|
Javascript
|
use css.supports instead of '..' in ....style
|
8bb96db3a0eff104ff0517fa810f0110e54c581a
|
<ide><path>extensions/chromium/contentscript.js
<ide> WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<ide> See the License for the specific language governing permissions and
<ide> limitations under the License.
<ide> */
<del>/* globals chrome */
<add>/* globals chrome, CSS */
<ide>
<ide> 'use strict';
<ide>
<ide> if (typeof Element.prototype.createShadowRoot !== 'undefined') {
<ide>
<ide> // Only observe the document if we can make use of Shadow DOM.
<ide> if (createShadowRoot) {
<del> if ('animation' in document.documentElement.style) {
<add> if (CSS.supports('animation', '0s')) {
<ide> document.addEventListener('animationstart', onAnimationStart, true);
<ide> } else {
<ide> document.addEventListener('webkitAnimationStart', onAnimationStart, true);
| 1
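`CSS.supports(property, value)` checks that the property/value pair is actually supported, rather than only testing that a property name exists on a style object. A generic sketch of the same detection pattern (not pdf.js code; the handler is a stand-in):

```javascript
// Prefer the unprefixed animation event only when CSS animations are supported.
var supportsAnimation = typeof CSS !== 'undefined' && CSS.supports('animation', '0s');
var eventName = supportsAnimation ? 'animationstart' : 'webkitAnimationStart';
document.addEventListener(eventName, function onAnimationStart(event) {
  console.log('animation started on', event.target);
}, true);
```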
|
Ruby
|
Ruby
|
fix url reassign
|
7a75bbd85a6abc9a5828d7d48aa13a27a43432e4
|
<ide><path>Library/Homebrew/download_strategy.rb
<ide> def fetch
<ide> urls = actual_urls
<ide> unless urls.empty?
<ide> ohai "Downloading from #{urls.last}"
<del> @url = urls.last
<ide> if !ENV["HOMEBREW_NO_INSECURE_REDIRECT"].nil? && @url.start_with?("https://") &&
<ide> urls.any? { |u| !u.start_with? "https://" }
<ide> raise "HTTPS to HTTP redirect detected & HOMEBREW_NO_INSECURE_REDIRECT is set."
<ide> end
<add> @url = urls.last
<ide> end
<ide>
<ide> had_incomplete_download = temporary_path.exist?
| 1
|
Javascript
|
Javascript
|
add clock support in node
|
68c57aa9c9d90ef76b0f7896b64476a24c6b484a
|
<ide><path>src/core/Clock.js
<ide> Object.assign( Clock.prototype, {
<ide>
<ide> start: function () {
<ide>
<del> this.startTime = ( performance || Date ).now();
<add> this.startTime = ( typeof performance === 'undefined' ? Date : performance ).now();
<ide>
<ide> this.oldTime = this.startTime;
<ide> this.elapsedTime = 0;
<ide> Object.assign( Clock.prototype, {
<ide>
<ide> if ( this.running ) {
<ide>
<del> var newTime = ( performance || Date ).now();
<add> var newTime = ( typeof performance === 'undefined' ? Date : performance ).now();
<ide>
<ide> diff = ( newTime - this.oldTime ) / 1000;
<ide> this.oldTime = newTime;
| 1
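With the `typeof performance` guard above, `Clock` falls back to `Date.now()` when `performance` is undefined, so it also works outside the browser. A minimal usage sketch (standard `three` package and Clock API; the timing loop is illustrative):

```javascript
// Using THREE.Clock from a Node script after the Date.now() fallback.
var THREE = require('three');

var clock = new THREE.Clock(); // autoStart defaults to true
setInterval(function () {
  var delta = clock.getDelta();         // seconds since the previous getDelta() call
  var elapsed = clock.getElapsedTime(); // seconds since the clock started
  console.log('delta:', delta.toFixed(3), 'elapsed:', elapsed.toFixed(3));
}, 1000);
```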
|
Javascript
|
Javascript
|
add rel attr to search hits
|
e889b599fbabdc62be31d9b42d549d787f7ee91c
|
<ide><path>client/src/components/search/searchPage/SearchPageHits.js
<ide> const AllHits = connectAutoComplete(({ hits, currentRefinement }) => {
<ide> <a
<ide> href={buildUrl(index, result)}
<ide> key={result.objectID}
<add> rel='noopener noreferrer'
<ide> target='_blank'
<ide> >
<ide> <li className='ais-Hits-item dataset-node'>
| 1
|
Python
|
Python
|
fix notimplementederror in data_utils.py
|
eafdffff75a16801860a27281c93267b1d445177
|
<ide><path>keras/utils/data_utils.py
<ide> class SequenceEnqueuer(object):
<ide>
<ide> @abstractmethod
<ide> def is_running(self):
<del> raise NotImplemented
<add> raise NotImplementedError
<ide>
<ide> @abstractmethod
<ide> def start(self, workers=1, max_queue_size=10):
<ide> def start(self, workers=1, max_queue_size=10):
<ide> max_queue_size: queue size
<ide> (when full, threads could block on `put()`).
<ide> """
<del> raise NotImplemented
<add> raise NotImplementedError
<ide>
<ide> @abstractmethod
<ide> def stop(self, timeout=None):
<ide> def stop(self, timeout=None):
<ide> # Arguments
<ide> timeout: maximum time to wait on thread.join()
<ide> """
<del> raise NotImplemented
<add> raise NotImplementedError
<ide>
<ide> @abstractmethod
<ide> def get(self):
<ide> def get(self):
<ide> Generator yielding tuples `(inputs, targets)`
<ide> or `(inputs, targets, sample_weights)`.
<ide> """
<del> raise NotImplemented
<add> raise NotImplementedError
<ide>
<ide>
<ide> class OrderedEnqueuer(SequenceEnqueuer):
| 1
|
Go
|
Go
|
update reapnode interval
|
99f84ff5a7a750af9df8182ac6d2a3f5f5b77afc
|
<ide><path>libnetwork/controller.go
<ide> func (c *controller) NewNetwork(networkType, name string, id string, options ...
<ide> defer func() {
<ide> if err != nil {
<ide> if e := c.deleteFromStore(epCnt); e != nil {
<del> log.Warnf("couldnt rollback from store, epCnt %v on failure (%v): %v", epCnt, err, e)
<add> log.Warnf("could not rollback from store, epCnt %v on failure (%v): %v", epCnt, err, e)
<ide> }
<ide> }
<ide> }()
<ide><path>libnetwork/networkdb/cluster.go
<ide> func (nDB *NetworkDB) reapDeadNode() {
<ide> defer nDB.Unlock()
<ide> for id, n := range nDB.failedNodes {
<ide> if n.reapTime > 0 {
<del> n.reapTime -= reapPeriod
<add> n.reapTime -= nodeReapPeriod
<ide> continue
<ide> }
<ide> logrus.Debugf("Removing failed node %v from gossip cluster", n.Name)
| 2
|
Javascript
|
Javascript
|
handle ngclassodd/even affecting the same classes
|
c9677920d462046710fc72ca422ab7400f551d2e
|
<ide><path>src/ng/directive/ngClass.js
<ide>
<ide> function classDirective(name, selector) {
<ide> name = 'ngClass' + name;
<del> return function() {
<add> return ['$animate', function($animate) {
<ide> return {
<ide> restrict: 'AC',
<ide> link: function(scope, element, attr) {
<ide> function classDirective(name, selector) {
<ide> // jshint bitwise: false
<ide> var mod = $index & 1;
<ide> if (mod !== old$index & 1) {
<del> var classes = flattenClasses(scope.$eval(attr[name]));
<add> var classes = arrayClasses(scope.$eval(attr[name]));
<ide> mod === selector ?
<del> attr.$addClass(classes) :
<del> attr.$removeClass(classes);
<add> addClasses(classes) :
<add> removeClasses(classes);
<ide> }
<ide> });
<ide> }
<ide>
<add> function addClasses(classes) {
<add> var newClasses = digestClassCounts(classes, 1);
<add> attr.$addClass(newClasses);
<add> }
<add>
<add> function removeClasses(classes) {
<add> var newClasses = digestClassCounts(classes, -1);
<add> attr.$removeClass(newClasses);
<add> }
<add>
<add> function digestClassCounts (classes, count) {
<add> var classCounts = element.data('$classCounts') || {};
<add> var classesToUpdate = [];
<add> forEach(classes, function (className) {
<add> if (count > 0 || classCounts[className]) {
<add> classCounts[className] = (classCounts[className] || 0) + count;
<add> if (classCounts[className] === +(count > 0)) {
<add> classesToUpdate.push(className);
<add> }
<add> }
<add> });
<add> element.data('$classCounts', classCounts);
<add> return classesToUpdate.join(' ');
<add> }
<add>
<add> function updateClasses (oldClasses, newClasses) {
<add> var toAdd = arrayDifference(newClasses, oldClasses);
<add> var toRemove = arrayDifference(oldClasses, newClasses);
<add> toRemove = digestClassCounts(toRemove, -1);
<add> toAdd = digestClassCounts(toAdd, 1);
<add>
<add> if (toAdd.length === 0) {
<add> $animate.removeClass(element, toRemove);
<add> } else if (toRemove.length === 0) {
<add> $animate.addClass(element, toAdd);
<add> } else {
<add> $animate.setClass(element, toAdd, toRemove);
<add> }
<add> }
<ide>
<ide> function ngClassWatchAction(newVal) {
<ide> if (selector === true || scope.$index % 2 === selector) {
<del> var newClasses = flattenClasses(newVal || '');
<del> if(!oldVal) {
<del> attr.$addClass(newClasses);
<del> } else if(!equals(newVal,oldVal)) {
<del> attr.$updateClass(newClasses, flattenClasses(oldVal));
<add> var newClasses = arrayClasses(newVal || []);
<add> if (!oldVal) {
<add> addClasses(newClasses);
<add> } else if (!equals(newVal,oldVal)) {
<add> var oldClasses = arrayClasses(oldVal);
<add> updateClasses(oldClasses, newClasses);
<ide> }
<ide> }
<ide> oldVal = copy(newVal);
<ide> }
<add> }
<add> };
<ide>
<add> function arrayDifference(tokens1, tokens2) {
<add> var values = [];
<ide>
<del> function flattenClasses(classVal) {
<del> if(isArray(classVal)) {
<del> return classVal.join(' ');
<del> } else if (isObject(classVal)) {
<del> var classes = [], i = 0;
<del> forEach(classVal, function(v, k) {
<del> if (v) {
<del> classes.push(k);
<del> }
<del> });
<del> return classes.join(' ');
<del> }
<del>
<del> return classVal;
<add> outer:
<add> for(var i = 0; i < tokens1.length; i++) {
<add> var token = tokens1[i];
<add> for(var j = 0; j < tokens2.length; j++) {
<add> if(token == tokens2[j]) continue outer;
<ide> }
<add> values.push(token);
<ide> }
<del> };
<del> };
<add> return values;
<add> }
<add>
<add> function arrayClasses (classVal) {
<add> if (isArray(classVal)) {
<add> return classVal;
<add> } else if (isString(classVal)) {
<add> return classVal.split(' ');
<add> } else if (isObject(classVal)) {
<add> var classes = [], i = 0;
<add> forEach(classVal, function(v, k) {
<add> if (v) {
<add> classes.push(k);
<add> }
<add> });
<add> return classes;
<add> }
<add> return classVal;
<add> }
<add> }];
<ide> }
<ide>
<ide> /**
<ide><path>test/ng/directive/ngClassSpec.js
<ide> describe('ngClass', function() {
<ide> }));
<ide>
<ide>
<add> it("should allow ngClassOdd/Even on the same element with overlapping classes", inject(function($rootScope, $compile, $animate) {
<add> var className;
<add>
<add> element = $compile('<ul><li ng-repeat="i in [0,1,2]" ng-class-odd="\'same odd\'" ng-class-even="\'same even\'"></li><ul>')($rootScope);
<add> $rootScope.$digest();
<add> var e1 = jqLite(element[0].childNodes[1]);
<add> var e2 = jqLite(element[0].childNodes[5]);
<add> expect(e1.hasClass('same')).toBeTruthy();
<add> expect(e1.hasClass('odd')).toBeTruthy();
<add> expect(e2.hasClass('same')).toBeTruthy();
<add> expect(e2.hasClass('odd')).toBeTruthy();
<add> }));
<add>
<ide> it('should allow both ngClass and ngClassOdd/Even with multiple classes', inject(function($rootScope, $compile) {
<ide> element = $compile('<ul>' +
<ide> '<li ng-repeat="i in [0,1]" ng-class="[\'A\', \'B\']" ' +
| 2
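The ngClass patch above resolves conflicts between `ngClassOdd`/`ngClassEven` by reference-counting each CSS class on the element and only touching the DOM when a class's count crosses zero. The directive itself is JavaScript; the sketch below shows the same counting idea in Python, purely to illustrate the technique (the `ClassCounter` name and usage are assumptions, not part of the patch):

```python
from collections import defaultdict

class ClassCounter:
    """Track how many sources requested each CSS class."""

    def __init__(self):
        self.counts = defaultdict(int)

    def add(self, classes):
        # Return only classes whose count went 0 -> 1,
        # i.e. the ones that actually need adding to the element.
        to_apply = []
        for name in classes:
            self.counts[name] += 1
            if self.counts[name] == 1:
                to_apply.append(name)
        return to_apply

    def remove(self, classes):
        # Return only classes whose count dropped back to 0.
        to_strip = []
        for name in classes:
            if self.counts[name] > 0:
                self.counts[name] -= 1
                if self.counts[name] == 0:
                    to_strip.append(name)
        return to_strip

counter = ClassCounter()
print(counter.add(["same", "odd"]))      # ['same', 'odd']
print(counter.add(["same", "even"]))     # ['even'] -- "same" is already applied
print(counter.remove(["same", "even"]))  # ['even'] -- "same" still held by the odd rule
```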
|
Text
|
Text
|
add .model deprecation to release notes
|
8b2052172cf7138203e683731c30bd279c6e722a
|
<ide><path>docs/api-guide/generic-views.md
<ide> The following attributes control the basic view behavior.
<ide> * `lookup_field` - The model field that should be used for performing object lookup of individual model instances. Defaults to `'pk'`. Note that when using hyperlinked APIs you'll need to ensure that *both* the API views *and* the serializer classes set the lookup fields if you need to use a custom value.
<ide> * `lookup_url_kwarg` - The URL keyword argument that should be used for object lookup. The URL conf should include a keyword argument corresponding to this value. If unset this defaults to using the same value as `lookup_field`.
<ide>
<del>**Shortcuts**:
<del>
<del>* `model` - This shortcut may be used instead of setting either (or both) of the `queryset`/`serializer_class` attributes, although using the explicit style is generally preferred. If used instead of `serializer_class`, then `DEFAULT_MODEL_SERIALIZER_CLASS` setting will determine the base serializer class. Note that `model` is only ever used for generating a default queryset or serializer class - the `queryset` and `serializer_class` attributes are always preferred if provided.
<del>
<ide> **Pagination**:
<ide>
<ide> The following attributes are used to control pagination when used with list views.
<ide> The following attributes are used to control pagination when used with list view
<ide>
<ide> * `filter_backends` - A list of filter backend classes that should be used for filtering the queryset. Defaults to the same value as the `DEFAULT_FILTER_BACKENDS` setting.
<ide>
<add>**Deprecated attributes**:
<add>
<add>* `model` - This shortcut may be used instead of setting either (or both) of the `queryset`/`serializer_class` attributes. The explicit style is preferred over the `.model` shortcut, and usage of this attribute is now deprecated.
<add>
<ide> ### Methods
<ide>
<ide> **Base methods**:
<ide><path>docs/api-guide/settings.md
<ide> Default: `'rest_framework.negotiation.DefaultContentNegotiation'`
<ide>
<ide> *The following settings control the behavior of the generic class based views.*
<ide>
<del>#### DEFAULT_MODEL_SERIALIZER_CLASS
<del>
<del>A class that determines the default type of model serializer that should be used by a generic view if `model` is specified, but `serializer_class` is not provided.
<del>
<del>Default: `'rest_framework.serializers.ModelSerializer'`
<del>
<ide> #### DEFAULT_PAGINATION_SERIALIZER_CLASS
<ide>
<ide> A class that determines the default serialization style for paginated responses.
<ide><path>docs/index.md
<ide> Here's our project's root `urls.py` module:
<ide> url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
<ide> ]
<ide>
<del>You can now open the API in your browser at [http://127.0.0.1:8000/](http://127.0.0.1:8000/), and view your new 'users' API. If you use the Login control in the top right corner you'll also be able to add, create and delete users from the system.
<add>You can now open the API in your browser at [http://127.0.0.1:8000/](http://127.0.0.1:8000/), and view your new 'users' API. If you use the login control in the top right corner you'll also be able to add, create and delete users from the system.
<ide>
<ide> ## Quickstart
<ide>
<ide><path>docs/topics/2.4-accouncement.md
<ide> The optional authtoken application now includes support for *both* Django 1.7 sc
<ide>
<ide> **If you are using authtoken, and you want to continue using `south`, you must upgrade your `south` package to version 1.0.**
<ide>
<add>## Deprecation of `.model` view attribute
<add>
<add>The `.model` attribute on view classes is an optional shortcut for either or both of `.serializer_class` and `.queryset`. It's usage results in more implicit, less obvious behavior.
<add>
<add>The documentation has previously stated that usage of the more explict style is prefered, and we're now taking that one step further and deprecating the usage of the `.model` shortcut.
<add>
<add>Doing so will mean that there are cases of API code where you'll now need to include a serializer class where you previously were just using the `.model` shortcut. However we firmly believe that it is the right trade-off to make. Removing the shortcut takes away an unneccessary layer of abstraction, and makes your codebase more explict without any significant extra complexity.
<add>
<add>The `DEFAULT_MODEL_SERIALIZER_CLASS` API setting is now also deprecated.
<add>
<ide> ## Updated test runner
<ide>
<ide> We now have a new test runner for developing against the project, which uses the excellent [py.test](http://pytest.org) library.
<ide><path>docs/topics/release-notes.md
<ide> You can determine your currently installed version using `pip freeze`:
<ide>
<ide> * Added compatibility with Django 1.7's database migration support.
<ide> * New test runner, using `py.test`.
<add>* Deprecated `.model` view attribute in favor of explicit `.queryset` and `.serializer_class` attributes. The `DEFAULT_MODEL_SERIALIZER_CLASS` setting is also deprecated.
<ide> * `@detail_route` and `@list_route` decorators replace `@action` and `@link`.
<ide> * Support customizable view name and description functions, using the `VIEW_NAME_FUNCTION` and `VIEW_DESCRIPTION_FUNCTION` settings.
<ide> * Added `NUM_PROXIES` setting for smarter client IP identification.
| 5
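For readers following the `.model` deprecation above, the replacement is to declare `queryset` and `serializer_class` explicitly on the view. A minimal sketch against a hypothetical `Snippet` model (the model, app, and field names are assumptions, not part of the patch):

```python
from rest_framework import generics, serializers
from myapp.models import Snippet  # hypothetical model


class SnippetSerializer(serializers.ModelSerializer):
    class Meta:
        model = Snippet
        fields = ('id', 'title', 'code')


class SnippetList(generics.ListCreateAPIView):
    # Explicit style: no reliance on the deprecated `model` shortcut
    # or on the DEFAULT_MODEL_SERIALIZER_CLASS setting.
    queryset = Snippet.objects.all()
    serializer_class = SnippetSerializer
```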
|
Python
|
Python
|
improve train cli
|
c52fde40f49780077e92cbe4869caa9ba29cfc06
|
<ide><path>spacy/cli/train.py
<ide> from ..util import prints
<ide> from .. import util
<ide> from .. import displacy
<add>from ..compat import json_dumps
<ide>
<ide>
<ide> @plac.annotations(
<ide> def train(cmd, lang, output_dir, train_data, dev_data, n_iter=20, n_sents=0,
<ide> train_path = util.ensure_path(train_data)
<ide> dev_path = util.ensure_path(dev_data)
<ide> if not output_path.exists():
<del> prints(output_path, title="Output directory not found", exits=1)
<add> output_path.mkdir()
<ide> if not train_path.exists():
<ide> prints(train_path, title="Training data not found", exits=1)
<ide> if dev_path and not dev_path.exists():
<ide> def train(cmd, lang, output_dir, train_data, dev_data, n_iter=20, n_sents=0,
<ide> else:
<ide> nlp = lang_class(pipeline=pipeline)
<ide> corpus = GoldCorpus(train_path, dev_path, limit=n_sents)
<del> n_train_docs = corpus.count_train()
<add> n_train_words = corpus.count_train()
<ide>
<ide> optimizer = nlp.begin_training(lambda: corpus.train_tuples, device=use_gpu)
<ide>
<ide> def train(cmd, lang, output_dir, train_data, dev_data, n_iter=20, n_sents=0,
<ide> for i in range(n_iter):
<ide> if resume:
<ide> i += 20
<del> with tqdm.tqdm(total=corpus.count_train(), leave=False) as pbar:
<add> with tqdm.tqdm(total=n_train_words, leave=False) as pbar:
<ide> train_docs = corpus.train_docs(nlp, projectivize=True,
<ide> gold_preproc=False, max_length=0)
<ide> losses = {}
<ide> for batch in minibatch(train_docs, size=batch_sizes):
<ide> docs, golds = zip(*batch)
<ide> nlp.update(docs, golds, sgd=optimizer,
<ide> drop=next(dropout_rates), losses=losses)
<del> pbar.update(len(docs))
<add> pbar.update(sum(len(doc) for doc in docs))
<ide>
<ide> with nlp.use_params(optimizer.averages):
<ide> util.set_env_log(False)
<ide> def train(cmd, lang, output_dir, train_data, dev_data, n_iter=20, n_sents=0,
<ide> corpus.dev_docs(
<ide> nlp_loaded,
<ide> gold_preproc=False))
<add> acc_loc =(output_path / ('model%d' % i) / 'accuracy.json')
<add> with acc_loc.open('w') as file_:
<add> file_.write(json_dumps(scorer.scores))
<ide> util.set_env_log(True)
<ide> print_progress(i, losses, scorer.scores)
<ide> finally:
| 1
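The spaCy patch above makes the progress bar advance by words rather than documents, so it lines up with the `corpus.count_train()` total, and writes each epoch's scores to an `accuracy.json` file. A small sketch of the same idea, with fake token lists standing in for real `Doc` objects and standard-library `json` standing in for spaCy's compat helper:

```python
import json
import tqdm

# Fake "documents": each is just a list of tokens standing in for a spaCy Doc.
train_docs = [["a"] * 12, ["b"] * 7, ["c"] * 30]
n_train_words = sum(len(doc) for doc in train_docs)

with tqdm.tqdm(total=n_train_words) as pbar:
    for batch in [train_docs[:2], train_docs[2:]]:
        # ... update the model on `batch` here ...
        # Advance by words, not documents, so progress matches the
        # word count passed as `total=`.
        pbar.update(sum(len(doc) for doc in batch))

# Persist evaluation scores per epoch, as the patch does with accuracy.json.
scores = {"uas": 0.0, "las": 0.0}  # placeholder values
with open("accuracy.json", "w") as file_:
    file_.write(json.dumps(scores))
```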
|
Ruby
|
Ruby
|
define duration#== [chuyeow]
|
887870f20c347179aef0545ee2019c02ed9f74d1
|
<ide><path>activesupport/lib/active_support/duration.rb
<ide> module ActiveSupport
<ide> # 1.month.ago # equivalent to Time.now.advance(:months => -1)
<ide> class Duration < BasicObject
<ide> attr_accessor :value, :parts
<del>
<add>
<ide> def initialize(value, parts) #:nodoc:
<ide> @value, @parts = value, parts
<ide> end
<del>
<add>
<ide> # Adds another Duration or a Numeric to this Duration. Numeric values
<ide> # are treated as seconds.
<ide> def +(other)
<ide> def +(other)
<ide> Duration.new(value + other, @parts + [[:seconds, other]])
<ide> end
<ide> end
<del>
<add>
<ide> # Subtracts another Duration or a Numeric from this Duration. Numeric
<ide> # values are treated as seconds.
<ide> def -(other)
<ide> self + (-other)
<ide> end
<del>
<add>
<ide> def -@ #:nodoc:
<ide> Duration.new(-value, parts.map { |type,number| [type, -number] })
<ide> end
<del>
<add>
<ide> def is_a?(klass) #:nodoc:
<ide> klass == Duration || super
<ide> end
<del>
<add>
<add> # Returns true if <tt>other</tt> is also a Duration instance with the
<add> # same <tt>value</tt>, or if <tt>other == value</tt>.
<add> def ==(other)
<add> if Duration === other
<add> other.value == value
<add> else
<add> other == value
<add> end
<add> end
<add>
<ide> def self.===(other) #:nodoc:
<ide> other.is_a?(Duration) rescue super
<ide> end
<del>
<add>
<ide> # Calculates a new Time or Date that is as far in the future
<ide> # as this Duration represents.
<ide> def since(time = ::Time.now)
<ide> sum(1, time)
<ide> end
<ide> alias :from_now :since
<del>
<add>
<ide> # Calculates a new Time or Date that is as far in the past
<ide> # as this Duration represents.
<ide> def ago(time = ::Time.now)
<ide> sum(-1, time)
<ide> end
<ide> alias :until :ago
<del>
<add>
<ide> def inspect #:nodoc:
<ide> consolidated = parts.inject(Hash.new(0)) { |h,part| h[part.first] += part.last; h }
<ide> [:years, :months, :days, :minutes, :seconds].map do |length|
<ide> n = consolidated[length]
<ide> "#{n} #{n == 1 ? length.to_s.singularize : length.to_s}" if n.nonzero?
<ide> end.compact.to_sentence
<ide> end
<del>
<add>
<ide> protected
<del>
<del> def sum(sign, time = ::Time.now) #:nodoc:
<del> parts.inject(time) do |t,(type,number)|
<del> if t.acts_like?(:time) || t.acts_like?(:date)
<del> if type == :seconds
<del> t.since(sign * number)
<add>
<add> def sum(sign, time = ::Time.now) #:nodoc:
<add> parts.inject(time) do |t,(type,number)|
<add> if t.acts_like?(:time) || t.acts_like?(:date)
<add> if type == :seconds
<add> t.since(sign * number)
<add> else
<add> t.advance(type => sign * number)
<add> end
<ide> else
<del> t.advance(type => sign * number)
<add> raise ArgumentError, "expected a time or date, got #{time.inspect}"
<ide> end
<del> else
<del> raise ArgumentError, "expected a time or date, got #{time.inspect}"
<ide> end
<ide> end
<del> end
<del>
<add>
<ide> private
<del>
<del> def method_missing(method, *args, &block) #:nodoc:
<del> value.send(method, *args)
<del> end
<add>
<add> def method_missing(method, *args, &block) #:nodoc:
<add> value.send(method, *args)
<add> end
<ide> end
<ide> end
<ide><path>activesupport/test/core_ext/duration_test.rb
<ide> def test_minus_with_duration_does_not_break_subtraction_of_date_from_date
<ide> assert_nothing_raised { Date.today - Date.today }
<ide> end
<ide>
<del> # FIXME: ruby 1.9
<ide> def test_plus_with_time
<ide> assert_equal 1 + 1.second, 1.second + 1, "Duration + Numeric should == Numeric + Duration"
<ide> end
<ide><path>activesupport/test/core_ext/numeric_ext_test.rb
<ide> def setup
<ide> }
<ide> end
<ide>
<del> # FIXME: ruby 1.9
<ide> def test_units
<ide> @seconds.each do |actual, expected|
<ide> assert_equal expected, actual
| 3
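The Duration patch above defines `==` so a duration compares equal both to another Duration with the same `value` and to a plain number of seconds. The original is Ruby; here is the same wrapper-equality pattern sketched in Python (class and attribute names are illustrative only):

```python
class Duration:
    """Value wrapper whose equality falls through to the wrapped number."""

    def __init__(self, value):
        self.value = value  # total seconds

    def __eq__(self, other):
        if isinstance(other, Duration):
            # Duration == Duration: compare the wrapped values.
            return other.value == self.value
        # Duration == Numeric: compare against the raw value.
        return other == self.value

print(Duration(60) == Duration(60))  # True
print(Duration(60) == 60)            # True
print(Duration(60) == 61)            # False
```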
|
PHP
|
PHP
|
add macro integration into consoleio
|
5dff76d1fae55f9f2d16a0cdbae18af440edd92d
|
<ide><path>src/Console/ConsoleIo.php
<ide> public function setLoggers($enable)
<ide> * Create and render the output for a macro object. If the macro
<ide> * object has not already been loaded, it will be loaded and constructed.
<ide> *
<del> * This method accepts variadic arguments that are
<del> *
<ide> * @param string $name The name of the macro to render
<ide> * @param array $args The arguments for the macro output.
<ide> * @return void
<ide> */
<del> public function macro($name, $args)
<add> public function macro($name, $args = [])
<add> {
<add> $name = ucfirst($name);
<add> $macro = $this->_macros->load($name);
<add> return $macro->output($args);
<add> }
<add>
<add> /**
<add> * Convenience wrapper around macro()
<add> *
<add> * @param string $method The macro to invoke.
<add> * @param array $args The arguments for the macro.
<add> * @return mixed
<add> */
<add> public function __call($method, $args)
<ide> {
<add> return $this->macro($method, $args);
<ide> }
<ide> }
<ide><path>tests/TestCase/Console/ConsoleIoTest.php
<ide> namespace Cake\Test\TestCase\Console;
<ide>
<ide> use Cake\Console\ConsoleIo;
<add>use Cake\Core\Configure;
<ide> use Cake\Log\Log;
<ide> use Cake\TestSuite\TestCase;
<ide>
<ide> class ConsoleIoTest extends TestCase
<ide> public function setUp()
<ide> {
<ide> parent::setUp();
<add> Configure::write('App.namespace', 'TestApp');
<ide>
<ide> $this->out = $this->getMock('Cake\Console\ConsoleOutput', [], [], '', false);
<ide> $this->err = $this->getMock('Cake\Console\ConsoleOutput', [], [], '', false);
<ide> public function testStyles()
<ide> ->with('name', 'props');
<ide> $this->io->styles('name', 'props');
<ide> }
<add>
<add> /**
<add> * Test the macro method.
<add> *
<add> * @return void
<add> */
<add> public function testMacro()
<add> {
<add> $this->out->expects($this->exactly(2))
<add> ->method('write')
<add> ->with('It works!well ish');
<add> $this->io->macro('simple', ['well', 'ish']);
<add> $this->io->simple('well', 'ish');
<add> }
<ide> }
<ide><path>tests/test_app/TestApp/Shell/Macro/SimpleMacro.php
<ide> class SimpleMacro extends Macro
<ide> {
<ide> public function output($args)
<ide> {
<del> $this->_io->out('It works!');
<add> $this->_io->out('It works!' . implode(' ', $args));
<ide> }
<ide> }
| 3
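The ConsoleIo patch above adds a `__call` hook so `$io->simple('well', 'ish')` is forwarded to `macro('simple', ['well', 'ish'])`. PHP's `__call` roughly corresponds to Python's `__getattr__`; the sketch below shows the same delegation pattern in Python with a plain dict standing in for CakePHP's macro registry (all names here are assumptions, not CakePHP APIs):

```python
class ConsoleIo:
    """Forward unknown method calls to a named macro, like PHP's __call."""

    def __init__(self, macros):
        self._macros = macros  # mapping of macro name -> callable

    def macro(self, name, args=()):
        return self._macros[name](*args)

    def __getattr__(self, name):
        # Only reached for attributes not found normally; return a
        # forwarder so io.simple('well', 'ish') becomes macro('simple', ...).
        def forwarder(*args):
            return self.macro(name, args)
        return forwarder

io = ConsoleIo({"simple": lambda *parts: "It works!" + " ".join(parts)})
print(io.macro("simple", ["well", "ish"]))  # It works!well ish
print(io.simple("well", "ish"))             # It works!well ish
```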
|