content_type stringclasses 8 values | main_lang stringclasses 7 values | message stringlengths 1 50 | sha stringlengths 40 40 | patch stringlengths 52 962k | file_count int64 1 300 |
|---|---|---|---|---|---|
Ruby | Ruby | get formula from satisfy | fe117bf79b244c42b7e4049d353c3c003eae4880 | <ide><path>Library/Homebrew/requirement.rb
<ide> def initialize(tags = [])
<ide> @default_formula = self.class.default_formula
<ide> @cask ||= self.class.cask
<ide> @download ||= self.class.download
<add> @formula = nil
<ide> tags.each do |tag|
<ide> next unless tag.is_a? Hash
<ide> @cask ||= tag[:cask]
<ide> def message
<ide> def satisfied?
<ide> result = self.class.satisfy.yielder { |p| instance_eval(&p) }
<ide> @satisfied_result = result
<del> result ? true : false
<add> return false unless result
<add>
<add> if parent = satisfied_result_parent
<add> parent.to_s =~ %r{(#{Regexp.escape(HOMEBREW_CELLAR)}|#{Regexp.escape(HOMEBREW_PREFIX)}/opt)/([\w+-.@]+)}
<add> @formula = $2
<add> end
<add>
<add> true
<ide> end
<ide>
<ide> # Overriding #fatal? is deprecated.
<ide> def default_formula?
<ide> self.class.default_formula || false
<ide> end
<ide>
<add> def satisfied_result_parent
<add> return unless @satisfied_result.is_a?(Pathname)
<add> @satisfied_result.resolved_path.parent
<add> end
<add>
<ide> # Overriding #modify_build_environment is deprecated.
<ide> # Pass a block to the env DSL method instead.
<ide> # Note: #satisfied? should be called before invoking this method
<ide> def modify_build_environment
<ide> # satisfy { which("executable") }
<ide> # work, even under superenv where "executable" wouldn't normally be on the
<ide> # PATH.
<del> # This is undocumented magic and it should be removed, but we need to add
<del> # a way to declare path-based requirements that work with superenv first.
<del> return unless @satisfied_result.is_a?(Pathname)
<del> parent = @satisfied_result.parent
<del>
<add> parent = satisfied_result_parent
<add> return unless parent
<ide> return if ENV["PATH"].split(File::PATH_SEPARATOR).include?(parent.to_s)
<ide> ENV.append_path("PATH", parent)
<ide> end
<ide> def inspect
<ide> "#<#{self.class.name}: #{name.inspect} #{tags.inspect}>"
<ide> end
<ide>
<add> def formula
<add> @formula || self.class.default_formula
<add> end
<add>
<ide> def to_dependency
<del> f = self.class.default_formula
<del> raise "No default formula defined for #{inspect}" if f.nil?
<del> if f =~ HOMEBREW_TAP_FORMULA_REGEX
<del> TapDependency.new(f, tags, method(:modify_build_environment), name)
<del> else
<del> Dependency.new(f, tags, method(:modify_build_environment), name)
<add> if formula =~ HOMEBREW_TAP_FORMULA_REGEX
<add> TapDependency.new(formula, tags, method(:modify_build_environment), name)
<add> elsif formula
<add> Dependency.new(formula, tags, method(:modify_build_environment), name)
<ide> end
<ide> end
<ide> | 1 |
Python | Python | set version to v3.0.0a6 | 0fc13b2f14d044d8b44e86e05f237f4f2b14b77a | <ide><path>spacy/about.py
<ide> # fmt: off
<ide> __title__ = "spacy-nightly"
<del>__version__ = "3.0.0a5"
<add>__version__ = "3.0.0a6"
<ide> __release__ = True
<ide> __download_url__ = "https://github.com/explosion/spacy-models/releases/download"
<ide> __compatibility__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json" | 1 |
Ruby | Ruby | fix inreplace and yajl formula | bae67c110fd3a8ca23f17def7cc9dd2dadd5fbb8 | <ide><path>Library/Homebrew/brewkit.rb
<ide> def remove_from_cflags rx
<ide>
<ide> def inreplace(path, before, after)
<ide> before=Regexp.escape before.to_s
<del> after=Regexp.escape after.to_s
<del> before.gsub! "/", "\\\/"
<del> after.gsub! "/", "\\\/"
<del> before.gsub! "'", '\''
<del> after.gsub! "'", '\''
<add> after.gsub! "\\", "\\\\"
<add> after.gsub! "/", "\\/"
<ide>
<ide> # TODO this sucks
<ide> # either use 'ed', or allow regexp and use a proper ruby function
<del> `perl -pi -e $'s/#{before}/#{after}/g' "#{path}"`
<add> Kernel.system "perl", "-pi", "-e", "s/#{before}/#{after}/g", path
<ide> end | 1 |
Python | Python | use tokenbucket from kombu.utils.limits instead | b01b81c60ae07ad5d67a01cc8b1f192f32aa4a3d | <ide><path>celery/datastructures.py
<ide> def incr(self, key, delta=1):
<ide> newval = int(self.data.pop(key)) + delta
<ide> self[key] = str(newval)
<ide> return newval
<del>
<del>
<del>class TokenBucket(object):
<del> """Token Bucket Algorithm.
<del>
<del> See http://en.wikipedia.org/wiki/Token_Bucket
<del> Most of this code was stolen from an entry in the ASPN Python Cookbook:
<del> http://code.activestate.com/recipes/511490/
<del>
<del> .. admonition:: Thread safety
<del>
<del> This implementation may not be thread safe.
<del>
<del> """
<del>
<del> #: The rate in tokens/second that the bucket will be refilled
<del> fill_rate = None
<del>
<del> #: Maximum number of tokensin the bucket.
<del> capacity = 1
<del>
<del> #: Timestamp of the last time a token was taken out of the bucket.
<del> timestamp = None
<del>
<del> def __init__(self, fill_rate, capacity=1):
<del> self.capacity = float(capacity)
<del> self._tokens = capacity
<del> self.fill_rate = float(fill_rate)
<del> self.timestamp = time.time()
<del>
<del> def can_consume(self, tokens=1):
<del> """Returns :const:`True` if `tokens` number of tokens can be consumed
<del> from the bucket."""
<del> if tokens <= self._get_tokens():
<del> self._tokens -= tokens
<del> return True
<del> return False
<del>
<del> def expected_time(self, tokens=1):
<del> """Returns the expected time in seconds when a new token should be
<del> available.
<del>
<del> .. admonition:: Warning
<del>
<del> This consumes a token from the bucket.
<del>
<del> """
<del> _tokens = self._get_tokens()
<del> tokens = max(tokens, _tokens)
<del> return (tokens - _tokens) / self.fill_rate
<del>
<del> def _get_tokens(self):
<del> if self._tokens < self.capacity:
<del> now = time.time()
<del> delta = self.fill_rate * (now - self.timestamp)
<del> self._tokens = min(self.capacity, self._tokens + delta)
<del> self.timestamp = now
<del> return self._tokens
<ide><path>celery/events/snapshot.py
<ide>
<ide> import atexit
<ide>
<add>from kombu.utils.limits import TokenBucket
<add>
<ide> from .. import platforms
<ide> from ..app import app_or_default
<del>from ..datastructures import TokenBucket
<ide> from ..utils import timer2, instantiate, LOG_LEVELS
<ide> from ..utils.dispatch import Signal
<ide> from ..utils.timeutils import rate
<ide><path>celery/worker/buckets.py
<ide> from time import time, sleep
<ide> from Queue import Queue, Empty
<ide>
<del>from ..datastructures import TokenBucket
<add>from kombu.utils.limits import TokenBucket
<add>
<ide> from ..utils import timeutils
<ide> from ..utils.compat import zip_longest, chain_from_iterable
<ide> | 3 |
Javascript | Javascript | fix initial load error | 426526355cb57d17fb6aad781123fb10966279f3 | <ide><path>client/commonFramework/update-preview.js
<ide> window.common = (function(global) {
<ide> const {
<del> Rx: { Observable },
<add> Rx: { BehaviorSubject, Observable },
<ide> common = { init: [] }
<ide> } = global;
<ide>
<ide> window.common = (function(global) {
<ide> const iFrameScript$ =
<ide> common.getScriptContent$('/js/iFrameScripts.js').shareReplay();
<ide>
<add> // behavior subject allways remembers the last value
<add> // we use this to determine if runPreviewTest$ is defined
<add> // and prime it with false
<add> common.previewReady$ = new BehaviorSubject(false);
<add>
<ide> // runPreviewTests$ should be set up in the preview window
<ide> common.runPreviewTests$ =
<ide> () => Observable.throw({ err: new Error('run preview not enabled') });
<ide> window.common = (function(global) {
<ide> return iFrameScript$
<ide> .map(script => `<script>${script}</script>`)
<ide> .flatMap(script => {
<add> // we make sure to override the last value in the
<add> // subject to false here.
<add> common.previewReady$.onNext(false);
<ide> preview.open();
<ide> preview.write(libraryIncludes + code + '<!-- -->' + script);
<ide> preview.close();
<del> return Observable.fromCallback($(preview).ready, $(preview))()
<del> .first()
<del> // delay is need here for first initial run
<del> .delay(100);
<add> // now we filter false values and wait for the first true
<add> return common.previewReady$
<add> .filter(ready => ready)
<add> .first();
<ide> })
<ide> .map(() => code);
<ide> };
<ide><path>client/iFrameScripts.js
<ide> window.__$(function() {
<ide> .map(tests => ({ ...rest, tests }));
<ide> };
<ide>
<add> // now that the runPreviewTest$ is defined
<add> // we set the subject to true
<add> // this will let the updatePreview
<add> // script now that we are ready.
<add> common.previewReady$.onNext(true);
<ide> }); | 2 |
Ruby | Ruby | remove inaccurate comment | dd3446ded19ea88dc85bb5bff670c4b2a128c207 | <ide><path>Library/Homebrew/global.rb
<ide> def mkpath
<ide> end
<ide>
<ide> HOMEBREW_CACHE = cache
<del>undef cache # we use a function to prevent adding home_cache to the global scope
<add>undef cache
<ide>
<ide> # Where brews installed via URL are cached
<ide> HOMEBREW_CACHE_FORMULA = HOMEBREW_CACHE+"Formula" | 1 |
Ruby | Ruby | fix nullrelation#to_sql to return an actual query | 837c87e8107e7e9a7c2bf78e2fbd8cee4d0504a3 | <ide><path>activerecord/lib/active_record/null_relation.rb
<ide> def many?
<ide> false
<ide> end
<ide>
<del> def to_sql
<del> ""
<del> end
<del>
<ide> def calculate(operation, _column_name)
<ide> case operation
<ide> when :count, :sum
<ide><path>activerecord/test/cases/null_relation_test.rb
<ide> def test_null_relation_content_size_methods
<ide> end
<ide>
<ide> def test_null_relation_metadata_methods
<del> assert_equal "", Developer.none.to_sql
<add> assert_includes Developer.none.to_sql, " WHERE (1=0)"
<ide> assert_equal({}, Developer.none.where_values_hash)
<ide> end
<ide> | 2 |
Python | Python | fix tfdebertav2convlayer in tfdebertav2model | 2f463effb316f6c9e0ac9636327a3d7c13862f8d | <ide><path>src/transformers/models/deberta_v2/modeling_tf_deberta_v2.py
<ide> def call(
<ide> rmask = tf.cast(1 - input_mask, tf.bool)
<ide> out = tf.where(tf.broadcast_to(tf.expand_dims(rmask, -1), shape_list(out)), 0.0, out)
<ide> out = self.dropout(out, training=training)
<del> hidden_states = self.conv_act(out)
<add> out = self.conv_act(out)
<ide>
<ide> layer_norm_input = residual_states + out
<ide> output = self.LayerNorm(layer_norm_input)
<ide> def call(
<ide> else:
<ide> if len(shape_list(input_mask)) != len(shape_list(layer_norm_input)):
<ide> if len(shape_list(input_mask)) == 4:
<del> mask = tf.squeeze(tf.squeeze(input_mask, axis=1), axis=1)
<del> mask = tf.cast(tf.expand_dims(input_mask, axis=2), tf.float32)
<add> input_mask = tf.squeeze(tf.squeeze(input_mask, axis=1), axis=1)
<add> input_mask = tf.cast(tf.expand_dims(input_mask, axis=2), tf.float32)
<ide>
<del> output_states = output * mask
<add> output_states = output * input_mask
<ide>
<ide> return output_states
<ide> | 1 |
PHP | PHP | remove duplicate comment | 47b8f327ea9ef6462103aa802cd48a6f739f76a1 | <ide><path>src/Illuminate/Routing/RouteParameterBinder.php
<ide> public function __construct($route)
<ide> */
<ide> public function parameters($request)
<ide> {
<del> // If the route has a regular expression for the host part of the URI, we will
<del> // compile that and get the parameter matches for this domain. We will then
<del> // merge them into this parameters array so that this array is completed.
<ide> $parameters = $this->bindPathParameters($request);
<ide>
<ide> // If the route has a regular expression for the host part of the URI, we will | 1 |
Text | Text | update documentation concerning themes | d83adc3a27d3b35f3a7cb8a463a11bbdf186d788 | <ide><path>docs/customizing-atom.md
<ide> Atom comes bundled with two themes `atom-dark-*` and `atom-light-*`.
<ide> Because Atom themes are based on CSS, it's possible to have multiple themes
<ide> active at the same time.
<ide>
<del>VERIFY: Is this still true?
<del>
<del>For example, you'll usually select a theme for the UI
<del>and another theme for syntax highlighting. You can select themes by specifying
<del>them in the `core.themes` array in your `config.cson`:
<del>
<del>```coffee-script
<del>core:
<del> themes: ["atom-light-ui", "atom-light-syntax"]
<del> # or, if the sun is going down:
<del> # themes: ["atom-dark-ui", "atom-dark-syntax"]
<del>```
<add>For example, you'll usually select a theme for the UI and another theme for
<add>syntax highlighting. You can change themes from the preferences pane.
<ide>
<ide> You install new themes by placing them in the _~/.atom/themes_ directory. A
<del>theme can be a CSS file or a directory containing multiple CSS files.
<del>
<del>VERIFY: Where did we wind up with themes?
<add>theme can be a single LESS file or a directory containing multiple LESS files.
<ide>
<ide> ## Installing Packages
<ide>
<ide><path>docs/internals/keymaps.md
<ide> Note that the last example describes multiple keystrokes in succession:
<ide> - `p`
<ide> - `2`
<ide> - `ctrl-p`
<del>- `ctrl-alt-meta-p`
<add>- `ctrl-alt-cmd-p`
<ide> - `tab`
<ide> - `escape`
<ide> - `enter`
<ide> - `ctrl-w w`
<ide>
<ide> A semantic event is the name of the custom event that will be triggered on the
<ide> target of the keydown event when a key binding matches. You can use the command
<del>palette (bound to `meta-p`), to get a list of relevant events and their bindings
<add>palette (bound to `cmd-p`), to get a list of relevant events and their bindings
<ide> in any focused context in Atom.
<ide>
<ide> ### Rules for Mapping A Keydown Event to A Semantic Event
<ide>
<del>A keymap's job is to translate a physical keystroke event (like `meta-D`) into a
<add>A keymap's job is to translate a physical keystroke event (like `cmd-D`) into a
<ide> semantic event (like `editor:duplicate-line`). Whenever a keydown event occurs
<ide> on a focused element, it bubbles up the DOM as usual. As soon as an element on
<ide> the bubble path matches a key binding for the keystroke, the binding's semantic | 2 |
PHP | PHP | move comment to line it up with others | 997f789c47d4be159b600bbac82d358a96339821 | <ide><path>app/config/app.php
<ide>
<ide> 'debug' => true,
<ide>
<del>/*
<del>|--------------------------------------------------------------------------
<del>| Application URL
<del>|--------------------------------------------------------------------------
<del>|
<del>| This URL is used by the console to properly generate URLs when using
<del>| the Artisan command line tool. You should set this to the root of
<del>| your application so that it is used when running Artisan tasks.
<del>|
<del>*/
<add> /*
<add> |--------------------------------------------------------------------------
<add> | Application URL
<add> |--------------------------------------------------------------------------
<add> |
<add> | This URL is used by the console to properly generate URLs when using
<add> | the Artisan command line tool. You should set this to the root of
<add> | your application so that it is used when running Artisan tasks.
<add> |
<add> */
<ide>
<ide> 'url' => 'http://localhost',
<ide> | 1 |
Javascript | Javascript | use promise.all() in test-hash-seed | 1a3c7473ec9ca0e1acf0512996e05eecc870f7bf | <ide><path>test/pummel/test-hash-seed.js
<ide> 'use strict';
<ide>
<ide> // Check that spawn child doesn't create duplicated entries
<del>require('../common');
<del>const Countdown = require('../common/countdown');
<del>const REPETITIONS = 2;
<add>const common = require('../common');
<add>const kRepetitions = 2;
<ide> const assert = require('assert');
<ide> const fixtures = require('../common/fixtures');
<del>const { spawn } = require('child_process');
<add>const { promisify, debuglog } = require('util');
<add>const debug = debuglog('test');
<add>
<add>const { execFile } = require('child_process');
<add>const execFilePromise = promisify(execFile);
<ide> const targetScript = fixtures.path('guess-hash-seed.js');
<del>const seeds = [];
<ide>
<del>const requiredCallback = () => {
<del> console.log(`Seeds: ${seeds}`);
<add>const requiredCallback = common.mustCall((results) => {
<add> const seeds = results.map((val) => val.stdout.trim());
<add> debug(`Seeds: ${seeds}`);
<ide> assert.strictEqual(new Set(seeds).size, seeds.length);
<del> assert.strictEqual(seeds.length, REPETITIONS);
<del>};
<del>
<del>const countdown = new Countdown(REPETITIONS, requiredCallback);
<add> assert.strictEqual(seeds.length, kRepetitions);
<add>});
<ide>
<del>for (let i = 0; i < REPETITIONS; ++i) {
<del> let result = '';
<del> const subprocess = spawn(process.execPath, [targetScript]);
<del> subprocess.stdout.setEncoding('utf8');
<del> subprocess.stdout.on('data', (data) => { result += data; });
<add>const generateSeed = () => execFilePromise(process.execPath, [targetScript]);
<add>const subprocesses = [...new Array(kRepetitions)].map(generateSeed);
<ide>
<del> subprocess.on('exit', () => {
<del> seeds.push(result.trim());
<del> countdown.dec();
<del> });
<del>}
<add>Promise.all(subprocesses)
<add> .then(requiredCallback); | 1 |
Python | Python | use get_include instead of get_numpy_include | 7763133ec4c3d51d37b5205a1740e574f20963ae | <ide><path>numpy/doc/pyrex/setup.py
<ide>
<ide> pyx_ext = Extension('numpyx',
<ide> pyx_sources,
<del> include_dirs = [numpy.get_numpy_include()])
<add> include_dirs = [numpy.get_include()])
<ide>
<ide> # Call the routine which does the real work
<ide> setup(name = 'numpyx', | 1 |
Text | Text | add v3.10.0-beta.4 to changelog | ed4fb96f14e8acc360c253e8bdad8ce72b443807 | <ide><path>CHANGELOG.md
<ide> # Ember Changelog
<ide>
<add>### v3.10.0-beta.4 (April 22, 2019)
<add>
<add>- [#17930](https://github.com/emberjs/ember.js/pull/17930) [BUGFIX] Update assertion for events in tagless component to include method names
<add>
<ide> ### v3.10.0-beta.3 (April 15, 2019)
<ide>
<ide> - [#17859](https://github.com/emberjs/ember.js/pull/17859) [BUGFIX] Fixes a regression in the legacy build | 1 |
Text | Text | add note about legacy links | 1cfab85c0ea759fed753dc2c71cdb4b87ad62400 | <ide><path>man/docker-run.1.md
<ide> Create a 3rd container using the new --ipc=container:CONTAINERID option, now it
<ide>
<ide> ## Linking Containers
<ide>
<add>> **Note**: This section describes linking between containers on the
<add>> default (bridge) network, also known as "legacy links". Using `--link`
<add>> on user-defined networks uses the DNS-based discovery, which does not add
<add>> entries to `/etc/hosts`, and does not set environment variables for
<add>> discovery.
<add>
<ide> The link feature allows multiple containers to communicate with each other. For
<ide> example, a container whose Dockerfile has exposed port 80 can be run and named
<ide> as follows: | 1 |
Python | Python | use region names exposed by the provider | e3e171311c5f5dd83edac5eb89069fccea4875ac | <ide><path>libcloud/compute/drivers/elastichosts.py
<ide>
<ide> # API end-points
<ide> API_ENDPOINTS = {
<del> 'uk-1': {
<add> 'lon-p': {
<ide> 'name': 'London Peer 1',
<ide> 'country': 'United Kingdom',
<ide> 'host': 'api-lon-p.elastichosts.com'
<ide> },
<del> 'uk-2': {
<add> 'lon-b': {
<ide> 'name': 'London BlueSquare',
<ide> 'country': 'United Kingdom',
<ide> 'host': 'api-lon-b.elastichosts.com'
<ide> },
<del> 'us-1': {
<add> 'sat-p': {
<ide> 'name': 'San Antonio Peer 1',
<ide> 'country': 'United States',
<ide> 'host': 'api-sat-p.elastichosts.com'
<ide> },
<del> 'us-2': {
<add> 'lax-p': {
<ide> 'name': 'Los Angeles Peer 1',
<ide> 'country': 'United States',
<ide> 'host': 'api-lax-p.elastichosts.com'
<ide> },
<del> 'us-3': {
<add> 'sjc-c': {
<ide> 'name': 'San Jose (Silicon Valley)',
<ide> 'country': 'United States',
<ide> 'host': 'api-sjc-c.elastichosts.com'
<ide> },
<del> 'ca-1': {
<add> 'tor-p': {
<ide> 'name': 'Toronto Peer 1',
<ide> 'country': 'Canada',
<ide> 'host': 'api-tor-p.elastichosts.com'
<ide> },
<del> 'au-1': {
<add> 'syd-y': {
<ide> 'name': 'Sydney',
<ide> 'country': 'Australia',
<ide> 'host': 'api-syd-v.elastichosts.com'
<ide> }
<ide>
<ide> # Default API end-point for the base connection class.
<del>DEFAULT_REGION = 'us-1'
<add>DEFAULT_REGION = 'sat-p'
<ide>
<ide> # Retrieved from http://www.elastichosts.com/cloud-hosting/api
<ide> STANDARD_DRIVES = {
<ide> def _ex_connection_class_kwargs(self):
<ide> """
<ide> Return the host value based on the user supplied region.
<ide> """
<del> if self._host_argument_set:
<del> return {}
<del> else:
<del> return {'host': API_ENDPOINTS[self.region]['host']}
<add> kwargs = {}
<add> if not self._host_argument_set:
<add> kwargs['host'] = API_ENDPOINTS[self.region]['host']
<add>
<add> return kwargs
<ide>
<ide>
<ide> class ElasticHostsUK1NodeDriver(ElasticHostsNodeDriver):
<ide> """
<ide> ElasticHosts node driver for the London Peer 1 end-point
<ide> """
<del> _region = 'uk-1'
<add> _region = 'lon-p'
<ide>
<ide>
<ide> class ElasticHostsUK2NodeDriver(ElasticHostsNodeDriver):
<ide> """
<ide> ElasticHosts node driver for the London Bluesquare end-point
<ide> """
<del> _region = 'uk-2'
<add> _region = 'lon-b'
<ide>
<ide>
<ide> class ElasticHostsUS1NodeDriver(ElasticHostsNodeDriver):
<ide> """
<ide> ElasticHosts node driver for the San Antonio Peer 1 end-point
<ide> """
<del> _region = 'us-1'
<add> _region = 'sat-p'
<ide>
<ide>
<ide> class ElasticHostsUS2NodeDriver(ElasticHostsNodeDriver):
<ide> """
<ide> ElasticHosts node driver for the Los Angeles Peer 1 end-point
<ide> """
<del> _region = 'us-2'
<add> _region = 'lax-p'
<ide>
<ide>
<ide> class ElasticHostsUS3NodeDriver(ElasticHostsNodeDriver):
<ide> """
<ide> ElasticHosts node driver for the San Jose (Silicon Valley) end-point
<ide> """
<del> _region = 'us-3'
<add> _region = 'sjc-c'
<ide>
<ide>
<ide> class ElasticHostsCA1NodeDriver(ElasticHostsNodeDriver):
<ide> """
<ide> ElasticHosts node driver for the Toronto Peer 1 end-point
<ide> """
<del> _region = 'ca-1'
<add> _region = 'tor-p'
<ide>
<ide>
<ide> class ElasticHostsAU1NodeDriver(ElasticHostsNodeDriver):
<ide> """
<ide> ElasticHosts node driver for the Sydney end-point
<ide> """
<del> _region = 'au-1'
<add> _region = 'syd-y'
<ide>
<ide>
<ide> class ElasticHostsCN1NodeDriver(ElasticHostsNodeDriver): | 1 |
Ruby | Ruby | clarify cellar comment | 41e8245534854bb273dedc49aeda4af7b60c1aa4 | <ide><path>Library/Homebrew/global.rb
<ide> HOMEBREW_PREFIX = Pathname.new(HOMEBREW_BREW_FILE).dirname.parent # Where we link under
<ide> HOMEBREW_REPOSITORY = Pathname.new(HOMEBREW_BREW_FILE).realpath.dirname.parent # Where .git is found
<ide>
<del># Where should be build to?
<del># If /usr/local/Cellar exists, as a symlink or real folder, use that.
<del># Otherwise, build into a Cellar in the Repo. folder.
<add># Where we store built products; /usr/local/Cellar if it exists,
<add># otherwise a Cellar relative to the Repository.
<ide> if (HOMEBREW_PREFIX+'Cellar').exist?
<ide> HOMEBREW_CELLAR = HOMEBREW_PREFIX+'Cellar'
<ide> else | 1 |
Ruby | Ruby | restore rails 4.2’s value | 22288603645c95751f64245fc89fc259a27ad94c | <ide><path>actionpack/lib/action_controller/metal/strong_parameters.rb
<ide> class Parameters
<ide> cattr_accessor :permit_all_parameters, instance_accessor: false
<ide> cattr_accessor :action_on_unpermitted_parameters, instance_accessor: false
<ide>
<del> delegate :keys, :key?, :has_key?, :empty?, :include?, :inspect, to: :@parameters
<add> delegate :keys, :key?, :has_key?, :empty?, :include?, :inspect,
<add> :as_json, to: :@parameters
<ide>
<ide> # By default, never raise an UnpermittedParameters exception if these
<ide> # params are present. The default includes both 'controller' and 'action'
<ide><path>actionpack/test/controller/parameters/accessors_test.rb
<ide> class ParametersAccessorsTest < ActiveSupport::TestCase
<ide> assert_not @params[:person][:name].permitted?
<ide> end
<ide>
<add> test "as_json returns the JSON representation of the parameters hash" do
<add> assert_not @params.as_json.key? "parameters"
<add> assert_not @params.as_json.key? "permitted"
<add> assert @params.as_json.key? "person"
<add> end
<add>
<ide> test "each carries permitted status" do
<ide> @params.permit!
<ide> @params.each { |key, value| assert(value.permitted?) if key == "person" } | 2 |
Go | Go | remove double deprecation warning | 9260c06f7a7cb172205dc45af96870ec0d02ebcd | <ide><path>api/client/commands.go
<ide> func (cli *DockerCli) CmdCommit(args ...string) error {
<ide> env engine.Env
<ide> )
<ide> if *flConfig != "" {
<del> fmt.Fprintf(cli.err, "WARNING: 'commit --run' is deprecated and will be removed in a future version, in favor of inline Dockerfile-compatible commands.\n")
<ide> config = &runconfig.Config{}
<ide> if err := json.Unmarshal([]byte(*flConfig), config); err != nil {
<ide> return err | 1 |
Text | Text | fix anchor link on ubuntu installation page | 5dd28c1f9d5933c81c24c9d5fd478239438437f1 | <ide><path>docs/installation/ubuntulinux.md
<ide> To upgrade your kernel and install the additional packages, do the following:
<ide>
<ide> $ sudo reboot
<ide>
<del>5. After your system reboots, go ahead and [install Docker](#installing-docker-on-ubuntu).
<add>5. After your system reboots, go ahead and [install Docker](#installation).
<ide>
<ide> ## Installation
<ide> | 1 |
PHP | PHP | fix casing issue with guarded | 1b70bef5fd7cc5da74abcdf79e283f830fa3b0a4 | <ide><path>src/Illuminate/Database/Eloquent/Concerns/GuardsAttributes.php
<ide> public function isFillable($key)
<ide> */
<ide> public function isGuarded($key)
<ide> {
<del> return in_array($key, $this->getGuarded()) || $this->getGuarded() == ['*'];
<add> return $this->getGuarded() == ['*'] || ! empty(preg_grep('/^'.preg_quote($key).'$/i', $this->getGuarded()));
<ide> }
<ide>
<ide> /**
<ide><path>tests/Integration/Database/EloquentModelTest.php
<ide> protected function setUp(): void
<ide> });
<ide> }
<ide>
<add> public function testCantUpdateGuardedAttributesUsingDifferentCasing()
<add> {
<add> $model = new TestModel2;
<add>
<add> $model->fill(['ID' => 123]);
<add>
<add> $this->assertNull($model->ID);
<add> }
<add>
<ide> public function testUserCanUpdateNullableDate()
<ide> {
<ide> $user = TestModel1::create([ | 2 |
Javascript | Javascript | add test for socket.end callback | 628f95503f1964b727ede62c731ce46f1e116af6 | <ide><path>test/parallel/test-net-socket-end-callback.js
<add>'use strict';
<add>
<add>const common = require('../common');
<add>const net = require('net');
<add>
<add>const server = net.createServer((socket) => {
<add> socket.resume();
<add>}).unref();
<add>
<add>server.listen(common.mustCall(() => {
<add> const connect = (...args) => {
<add> const socket = net.createConnection(server.address().port, () => {
<add> socket.end(...args);
<add> });
<add> };
<add>
<add> const cb = common.mustCall(() => {}, 3);
<add>
<add> connect(cb);
<add> connect('foo', cb);
<add> connect('foo', 'utf8', cb);
<add>})); | 1 |
Java | Java | allow spel reserved words in type package names | edce2e7bcab12cb6ffac60f9a952a76889f987aa | <ide><path>spring-expression/src/main/java/org/springframework/expression/spel/standard/InternalSpelExpressionParser.java
<ide> package org.springframework.expression.spel.standard;
<ide>
<ide> import java.util.ArrayList;
<add>import java.util.LinkedList;
<ide> import java.util.List;
<ide> import java.util.Stack;
<add>import java.util.regex.Pattern;
<ide>
<ide> import org.springframework.expression.ParseException;
<ide> import org.springframework.expression.ParserContext;
<ide> import org.springframework.expression.spel.SpelParserConfiguration;
<ide> import org.springframework.expression.spel.ast.*;
<ide> import org.springframework.util.Assert;
<add>import org.springframework.util.StringUtils;
<ide>
<ide> /**
<ide> * Hand written SpEL parser. Instances are reusable but are not thread safe.
<ide> */
<ide> class InternalSpelExpressionParser extends TemplateAwareExpressionParser {
<ide>
<add> private static final Pattern VALID_QUALIFIED_ID_PATTERN = Pattern.compile("[\\p{L}\\p{N}_$]+");
<add>
<ide> // The expression being parsed
<ide> private String expressionString;
<ide>
<ide> private boolean maybeEatSelection(boolean nullSafeNavigation) {
<ide> * TODO AndyC Could create complete identifiers (a.b.c) here rather than a sequence of them? (a, b, c)
<ide> */
<ide> private SpelNodeImpl eatPossiblyQualifiedId() {
<del> List<SpelNodeImpl> qualifiedIdPieces = new ArrayList<SpelNodeImpl>();
<del> Token startnode = eatToken(TokenKind.IDENTIFIER);
<del> qualifiedIdPieces.add(new Identifier(startnode.stringValue(),toPos(startnode)));
<del> while (peekToken(TokenKind.DOT,true)) {
<del> Token node = eatToken(TokenKind.IDENTIFIER);
<del> qualifiedIdPieces.add(new Identifier(node.stringValue(),toPos(node)));
<del> }
<del> return new QualifiedIdentifier(toPos(startnode.startpos,qualifiedIdPieces.get(qualifiedIdPieces.size()-1).getEndPosition()),qualifiedIdPieces.toArray(new SpelNodeImpl[qualifiedIdPieces.size()]));
<add> LinkedList<SpelNodeImpl> qualifiedIdPieces = new LinkedList<SpelNodeImpl>();
<add> Token node = peekToken();
<add> while (isValidQualifiedId(node)) {
<add> nextToken();
<add> if(node.kind != TokenKind.DOT) {
<add> qualifiedIdPieces.add(new Identifier(node.stringValue(),toPos(node)));
<add> }
<add> node = peekToken();
<add> }
<add> if(qualifiedIdPieces.isEmpty()) {
<add> if(node == null) {
<add> raiseInternalException( expressionString.length(), SpelMessage.OOD);
<add> }
<add> raiseInternalException(node.startpos, SpelMessage.NOT_EXPECTED_TOKEN,
<add> "qualified ID", node.getKind().toString().toLowerCase());
<add> }
<add> int pos = toPos(qualifiedIdPieces.getFirst().getStartPosition(), qualifiedIdPieces.getLast().getEndPosition());
<add> return new QualifiedIdentifier(pos, qualifiedIdPieces.toArray(new SpelNodeImpl[qualifiedIdPieces.size()]));
<add> }
<add>
<add> private boolean isValidQualifiedId(Token node) {
<add> if(node == null || node.kind == TokenKind.LITERAL_STRING) {
<add> return false;
<add> }
<add> if(node.kind == TokenKind.DOT || node.kind == TokenKind.IDENTIFIER) {
<add> return true;
<add> }
<add> String value = node.stringValue();
<add> return StringUtils.hasLength(value) && VALID_QUALIFIED_ID_PATTERN.matcher(value).matches();
<ide> }
<ide>
<ide> // This is complicated due to the support for dollars in identifiers. Dollars are normally separate tokens but
<ide><path>spring-expression/src/test/java/org/springframework/expression/spel/ParserErrorMessagesTests.java
<ide> /*
<del> * Copyright 2002-2009 the original author or authors.
<add> * Copyright 2002-2012 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide>
<ide> /**
<ide> * Tests the messages and exceptions that come out for badly formed expressions
<del> *
<add> *
<ide> * @author Andy Clement
<ide> */
<ide> public class ParserErrorMessagesTests extends ExpressionTestCase {
<ide> public void testBrokenExpression07() {
<ide> // T() can only take an identifier (possibly qualified), not a literal
<ide> // message ought to say identifier rather than ID
<ide> parseAndCheckError("null instanceof T('a')", SpelMessage.NOT_EXPECTED_TOKEN, 18,
<del> "identifier","literal_string");
<add> "qualified ID","literal_string");
<ide> }
<ide>
<ide> }
<add><path>spring-expression/src/test/java/org/springframework/expression/spel/SpelReproTests.java
<del><path>spring-expression/src/test/java/org/springframework/expression/spel/SpringEL300Tests.java
<ide>
<ide> package org.springframework.expression.spel;
<ide>
<add>import java.lang.reflect.Field;
<add>import java.lang.reflect.Method;
<add>import java.util.ArrayList;
<add>import java.util.HashMap;
<add>import java.util.LinkedHashMap;
<add>import java.util.List;
<add>import java.util.Map;
<add>import java.util.Properties;
<add>
<ide> import junit.framework.Assert;
<add>
<ide> import org.junit.Ignore;
<ide> import org.junit.Test;
<add>
<ide> import org.springframework.core.convert.TypeDescriptor;
<del>import org.springframework.expression.*;
<add>import org.springframework.expression.AccessException;
<add>import org.springframework.expression.BeanResolver;
<add>import org.springframework.expression.EvaluationContext;
<add>import org.springframework.expression.EvaluationException;
<add>import org.springframework.expression.Expression;
<add>import org.springframework.expression.ExpressionParser;
<add>import org.springframework.expression.MethodExecutor;
<add>import org.springframework.expression.MethodResolver;
<add>import org.springframework.expression.ParserContext;
<add>import org.springframework.expression.PropertyAccessor;
<add>import org.springframework.expression.TypedValue;
<ide> import org.springframework.expression.spel.standard.SpelExpression;
<ide> import org.springframework.expression.spel.standard.SpelExpressionParser;
<ide> import org.springframework.expression.spel.support.ReflectiveMethodResolver;
<ide> import org.springframework.expression.spel.support.ReflectivePropertyAccessor;
<ide> import org.springframework.expression.spel.support.StandardEvaluationContext;
<ide> import org.springframework.expression.spel.support.StandardTypeLocator;
<add>import org.springframework.expression.spel.testresources.le.div.mod.reserved.Reserver;
<ide>
<del>import java.lang.reflect.Field;
<del>import java.lang.reflect.Method;
<del>import java.util.*;
<del>
<del>import static org.junit.Assert.assertEquals;
<del>import static org.junit.Assert.fail;
<add>import static org.junit.Assert.*;
<ide>
<ide> /**
<del> * Tests based on Jiras up to the release of Spring 3.0.0
<add> * Reproduction tests cornering various SpEL JIRA issues.
<ide> *
<ide> * @author Andy Clement
<ide> * @author Clark Duplichien
<ide> */
<del>public class SpringEL300Tests extends ExpressionTestCase {
<add>public class SpelReproTests extends ExpressionTestCase {
<ide>
<ide> @Test
<ide> public void testNPE_SPR5661() {
<ide> public void testSPR5905_InnerTypeReferences() throws Exception {
<ide> Expression expr = new SpelExpressionParser().parseRaw("T(java.util.Map$Entry)");
<ide> Assert.assertEquals(Map.Entry.class,expr.getValue(eContext));
<ide>
<del> expr = new SpelExpressionParser().parseRaw("T(org.springframework.expression.spel.SpringEL300Tests$Outer$Inner).run()");
<add> expr = new SpelExpressionParser().parseRaw("T(org.springframework.expression.spel.SpelReproTests$Outer$Inner).run()");
<ide> Assert.assertEquals(12,expr.getValue(eContext));
<ide>
<del> expr = new SpelExpressionParser().parseRaw("new org.springframework.expression.spel.SpringEL300Tests$Outer$Inner().run2()");
<add> expr = new SpelExpressionParser().parseRaw("new org.springframework.expression.spel.SpelReproTests$Outer$Inner().run2()");
<ide> Assert.assertEquals(13,expr.getValue(eContext));
<del>}
<add> }
<ide>
<ide> static class Outer {
<ide> static class Inner {
<ide> public Reserver getReserver() {
<ide> Assert.assertEquals("abc",exp.getValue(ctx));
<ide> }
<ide>
<add> @Test
<add> public void testReservedWordProperties_9862() throws Exception {
<add> StandardEvaluationContext ctx = new StandardEvaluationContext();
<add> SpelExpressionParser parser = new SpelExpressionParser();
<add> SpelExpression expression = parser.parseRaw("T(org.springframework.expression.spel.testresources.le.div.mod.reserved.Reserver).CONST");
<add> Object value = expression.getValue(ctx);
<add> assertEquals(value, Reserver.CONST);
<add> }
<add>
<ide> /**
<ide> * We add property accessors in the order:
<ide> * First, Second, Third, Fourth.
<ide><path>spring-expression/src/test/java/org/springframework/expression/spel/testresources/le/div/mod/reserved/Reserver.java
<add>/*
<add> * Copyright 2002-2012 the original author or authors.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License");
<add> * you may not use this file except in compliance with the License.
<add> * You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software
<add> * distributed under the License is distributed on an "AS IS" BASIS,
<add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add> * See the License for the specific language governing permissions and
<add> * limitations under the License.
<add> */
<add>
<add>package org.springframework.expression.spel.testresources.le.div.mod.reserved;
<add>
<add>/**
<add> * For use when testing that the SpEL expression parser can accommodate SpEL's own
<add> * reserved words being used in package names.
<add> *
<add> * @author Phillip Webb
<add> */
<add>public class Reserver {
<add>
<add> public static final String CONST = "Const";
<add>
<add>} | 4 |
Mixed | Text | add docs and changelog entry for 73aab03 [ci skip] | 6bd2573869eda8b1e4eaa9df2966f814fd9c5d5c | <ide><path>activerecord/CHANGELOG.md
<ide>
<ide> *Ryuta Kamizono*
<ide>
<add>* Allow proc defaults to be passed to the attributes API. See documentation
<add> for examples.
<add>
<add> *Sean Griffin, Kir Shatrov*
<add>
<ide> * SQLite: `:collation` support for string and text columns.
<ide>
<ide> Example:
<ide><path>activerecord/lib/active_record/attributes.rb
<ide> module ClassMethods
<ide> #
<ide> # StoreListing.new.my_string # => "new default"
<ide> #
<add> # class Product < ActiveRecord::Base
<add> # attribute :my_default_proc, :datetime, default: -> { Time.now }
<add> # end
<add> #
<add> # Product.new.my_default_proc # => 2015-05-30 11:04:48 -0600
<add> # sleep 1
<add> # Product.new.my_default_proc # => 2015-05-30 11:04:49 -0600
<add> #
<ide> # Attributes do not need to be backed by a database column.
<ide> #
<ide> # class MyModel < ActiveRecord::Base
<ide> def attribute(name, cast_type, **options)
<ide> #
<ide> # +default+ The default value to use when no value is provided. If this option
<ide> # is not passed, the previous default value (if any) will be used.
<del> # Otherwise, the default will be +nil+.
<add> # Otherwise, the default will be +nil+. A proc can also be passed, and
<add> # will be called once each time a new value is needed.
<ide> #
<ide> # +user_provided_default+ Whether the default value should be cast using
<ide> # +cast+ or +deserialize+. | 2 |
Text | Text | add v16 changelog link to iojs changelog | 7904331c1f18a381ea5130fc269901178652a609 | <ide><path>doc/changelogs/CHANGELOG_IOJS.md
<ide>
<ide> * Other Versions
<ide> * [17.x](CHANGELOG_V17.md)
<add> * [16.x](CHANGELOG_V16.md)
<ide> * [15.x](CHANGELOG_V15.md)
<ide> * [14.x](CHANGELOG_V14.md)
<ide> * [13.x](CHANGELOG_V13.md) | 1 |
Javascript | Javascript | fix typos and simplify emberarray definition | 4ac76231331f444c78ecba99461f9ded016d3a6c | <ide><path>packages/@ember/-internals/runtime/lib/mixins/array.js
<ide> function mapBy(key) {
<ide> concrete implementation, but it can be used up by other classes that want
<ide> to appear like arrays.
<ide>
<del> For example, ArrayProxy is a concrete classes that can
<del> be instantiated to implement array-like behavior. Both of these classes use
<del> the Array Mixin by way of the MutableArray mixin, which allows observable
<del> changes to be made to the underlying array.
<add> For example, ArrayProxy is a concrete class that can be instantiated to
<add> implement array-like behavior. This class uses the Array Mixin by way of
<add> the MutableArray mixin, which allows observable changes to be made to the
<add> underlying array.
<ide>
<ide> This mixin defines methods specifically for collections that provide
<ide> index-ordered access to their contents. When you are designing code that
<ide> function mapBy(key) {
<ide> as controllers and collections.
<ide>
<ide> You can use the methods defined in this module to access and modify array
<del> contents in a KVO-friendly way. You can also be notified whenever the
<del> membership of an array changes by using `.observes('myArray.[]')`.
<add> contents in an observable-friendly way. You can also be notified whenever
<add> the membership of an array changes by using `.observes('myArray.[]')`.
<ide>
<ide> To support `EmberArray` in your own class, you must override two
<ide> primitives to use it: `length()` and `objectAt()`. | 1 |
Python | Python | fix tpu convergence bug introduced by pr | 24107c2c83e79d195826f18f66892feab6b000e9 | <ide><path>src/transformers/trainer.py
<ide> def train(self, model_path: Optional[str] = None):
<ide>
<ide> if is_torch_tpu_available():
<ide> xm.optimizer_step(self.optimizer)
<del> if self.args.fp16 and _use_native_amp:
<add> elif self.args.fp16 and _use_native_amp:
<ide> self.scaler.step(self.optimizer)
<ide> self.scaler.update()
<ide> else: | 1 |
Go | Go | fix error removing diff path | 276b44608b04f08bdf46ce7c816b1f744bf24b7d | <ide><path>daemon/graphdriver/aufs/aufs.go
<ide> func Init(root string, options []string, uidMaps, gidMaps []idtools.IDMap) (grap
<ide> }
<ide> }
<ide>
<add> for _, path := range []string{"mnt", "diff"} {
<add> p := filepath.Join(root, path)
<add> dirs, err := ioutil.ReadDir(p)
<add> if err != nil {
<add> logrus.WithError(err).WithField("dir", p).Error("error reading dir entries")
<add> continue
<add> }
<add> for _, dir := range dirs {
<add> if strings.HasSuffix(dir.Name(), "-removing") {
<add> logrus.WithField("dir", dir.Name()).Debug("Cleaning up stale layer dir")
<add> if err := system.EnsureRemoveAll(filepath.Join(p, dir.Name())); err != nil {
<add> logrus.WithField("dir", dir.Name()).WithError(err).Error("Error removing stale layer dir")
<add> }
<add> }
<add> }
<add> }
<add>
<ide> a.naiveDiff = graphdriver.NewNaiveDiffDriver(a, uidMaps, gidMaps)
<ide> return a, nil
<ide> }
<ide> func (a *Driver) Remove(id string) error {
<ide> retries++
<ide> logger.Warnf("unmount failed due to EBUSY: retry count: %d", retries)
<ide> time.Sleep(100 * time.Millisecond)
<del> continue
<ide> }
<ide>
<del> // Atomically remove each directory in turn by first moving it out of the
<del> // way (so that docker doesn't find it anymore) before doing removal of
<del> // the whole tree.
<del> tmpMntPath := path.Join(a.mntPath(), fmt.Sprintf("%s-removing", id))
<del> if err := os.Rename(mountpoint, tmpMntPath); err != nil && !os.IsNotExist(err) {
<del> if err == unix.EBUSY {
<del> logger.WithField("dir", mountpoint).WithError(err).Warn("os.Rename err due to EBUSY")
<del> }
<del> return errors.Wrapf(err, "error preparing atomic delete of aufs mountpoint for id: %s", id)
<del> }
<del> if err := system.EnsureRemoveAll(tmpMntPath); err != nil {
<del> return errors.Wrapf(err, "error removing aufs layer %s", id)
<add> // Remove the layers file for the id
<add> if err := os.Remove(path.Join(a.rootPath(), "layers", id)); err != nil && !os.IsNotExist(err) {
<add> return errors.Wrapf(err, "error removing layers dir for %s", id)
<ide> }
<ide>
<del> tmpDiffpath := path.Join(a.diffPath(), fmt.Sprintf("%s-removing", id))
<del> if err := os.Rename(a.getDiffPath(id), tmpDiffpath); err != nil && !os.IsNotExist(err) {
<del> return errors.Wrapf(err, "error preparing atomic delete of aufs diff dir for id: %s", id)
<add> if err := atomicRemove(a.getDiffPath(id)); err != nil {
<add> return errors.Wrapf(err, "could not remove diff path for id %s", id)
<ide> }
<ide>
<del> // Remove the layers file for the id
<del> if err := os.Remove(path.Join(a.rootPath(), "layers", id)); err != nil && !os.IsNotExist(err) {
<del> return errors.Wrapf(err, "error removing layers dir for %s", id)
<add> // Atomically remove each directory in turn by first moving it out of the
<add> // way (so that docker doesn't find it anymore) before doing removal of
<add> // the whole tree.
<add> if err := atomicRemove(mountpoint); err != nil {
<add> if errors.Cause(err) == unix.EBUSY {
<add> logger.WithField("dir", mountpoint).WithError(err).Warn("error performing atomic remove due to EBUSY")
<add> }
<add> return errors.Wrapf(err, "could not remove mountpoint for id %s", id)
<ide> }
<ide>
<ide> a.pathCacheLock.Lock()
<ide> func (a *Driver) Remove(id string) error {
<ide> return nil
<ide> }
<ide>
<add>func atomicRemove(source string) error {
<add> target := source + "-removing"
<add>
<add> err := os.Rename(source, target)
<add> switch {
<add> case err == nil, os.IsNotExist(err):
<add> case os.IsExist(err):
<add> // Got error saying the target dir already exists, maybe the source doesn't exist due to a previous (failed) remove
<add> if _, e := os.Stat(source); !os.IsNotExist(e) {
<add> return errors.Wrapf(err, "target rename dir '%s' exists but should not, this needs to be manually cleaned up")
<add> }
<add> default:
<add> return errors.Wrapf(err, "error preparing atomic delete")
<add> }
<add>
<add> return system.EnsureRemoveAll(target)
<add>}
<add>
<ide> // Get returns the rootfs path for the id.
<ide> // This will mount the dir at its given path
<ide> func (a *Driver) Get(id, mountLabel string) (string, error) {
<ide><path>daemon/graphdriver/aufs/aufs_test.go
<ide> import (
<ide> "sync"
<ide> "testing"
<ide>
<add> "path/filepath"
<add>
<ide> "github.com/docker/docker/daemon/graphdriver"
<ide> "github.com/docker/docker/pkg/archive"
<ide> "github.com/docker/docker/pkg/reexec"
<ide> func TestRemoveImage(t *testing.T) {
<ide>
<ide> for _, p := range paths {
<ide> if _, err := os.Stat(path.Join(tmp, p, "1")); err == nil {
<del> t.Fatalf("Error should not be nil because dirs with id 1 should be delted: %s", p)
<add> t.Fatalf("Error should not be nil because dirs with id 1 should be deleted: %s", p)
<add> }
<add> if _, err := os.Stat(path.Join(tmp, p, "1-removing")); err == nil {
<add> t.Fatalf("Error should not be nil because dirs with id 1-removing should be deleted: %s", p)
<ide> }
<ide> }
<ide> }
<ide> func BenchmarkConcurrentAccess(b *testing.B) {
<ide> }
<ide> }
<ide> }
<add>
<add>func TestInitStaleCleanup(t *testing.T) {
<add> if err := os.MkdirAll(tmp, 0755); err != nil {
<add> t.Fatal(err)
<add> }
<add> defer os.RemoveAll(tmp)
<add>
<add> for _, d := range []string{"diff", "mnt"} {
<add> if err := os.MkdirAll(filepath.Join(tmp, d, "123-removing"), 0755); err != nil {
<add> t.Fatal(err)
<add> }
<add> }
<add>
<add> testInit(tmp, t)
<add> for _, d := range []string{"diff", "mnt"} {
<add> if _, err := os.Stat(filepath.Join(tmp, d, "123-removing")); err == nil {
<add> t.Fatal("cleanup failed")
<add> }
<add> }
<add>} | 2 |
Javascript | Javascript | rebuild main chunk in fewer cases | 66b095566d56ff870f50bba29760c8700074a946 | <ide><path>lib/MainTemplate.js
<ide> function MainTemplate(outputOptions) {
<ide> var chunkFilename = this.outputOptions.chunkFilename || "";
<ide> if(Template.REGEXP_HASH.test(publicPath) || Template.REGEXP_CHUNKHASH.test(publicPath) || Template.REGEXP_NAME.test(publicPath))
<ide> return true;
<del> if(Template.REGEXP_HASH.test(filename) || Template.REGEXP_CHUNKHASH.test(filename) || Template.REGEXP_NAME.test(filename))
<add> if(Template.REGEXP_HASH.test(filename))
<ide> return true;
<ide> if(Template.REGEXP_HASH.test(chunkFilename) || Template.REGEXP_CHUNKHASH.test(chunkFilename) || Template.REGEXP_NAME.test(chunkFilename))
<ide> return true; | 1 |
Text | Text | add example code for fs.existssync() | d4549e75d47f9824d080b9eac9b1a9e696634449 | <ide><path>doc/api/fs.md
<ide> this API: [`fs.exists()`][].
<ide> parameter to `fs.exists()` accepts parameters that are inconsistent with other
<ide> Node.js callbacks. `fs.existsSync()` does not use a callback.
<ide>
<add>```js
<add>if (fs.existsSync('/etc/passwd')) {
<add> console.log('The file exists.');
<add>}
<add>```
<ide>
<ide> ## fs.fchmod(fd, mode, callback)
<ide> <!-- YAML | 1 |
Mixed | Ruby | safebuffer strings as yaml | debe7aedda3665702d1f99a3ffb4a123a6c44e9c | <ide><path>activesupport/CHANGELOG.md
<add>* Fixed a roundtrip problem with AS::SafeBuffer where primitive-like strings
<add> will be dumped as primitives:
<add>
<add> Before:
<add>
<add> YAML.load ActiveSupport::SafeBuffer.new("Hello").to_yaml # => "Hello"
<add> YAML.load ActiveSupport::SafeBuffer.new("true").to_yaml # => true
<add> YAML.load ActiveSupport::SafeBuffer.new("false").to_yaml # => false
<add> YAML.load ActiveSupport::SafeBuffer.new("1").to_yaml # => 1
<add> YAML.load ActiveSupport::SafeBuffer.new("1.1").to_yaml # => 1.1
<add>
<add> After:
<add>
<add> YAML.load ActiveSupport::SafeBuffer.new("Hello").to_yaml # => "Hello"
<add> YAML.load ActiveSupport::SafeBuffer.new("true").to_yaml # => "true"
<add> YAML.load ActiveSupport::SafeBuffer.new("false").to_yaml # => "false"
<add> YAML.load ActiveSupport::SafeBuffer.new("1").to_yaml # => "1"
<add> YAML.load ActiveSupport::SafeBuffer.new("1.1").to_yaml # => "1.1"
<add>
<add> *Godfrey Chan*
<add>
<ide> * Enable number_to_percentage to keep the number's precision by allowing :precision to be nil
<ide>
<ide> *Jack Xu*
<ide><path>activesupport/lib/active_support/core_ext/string/output_safety.rb
<ide> def to_param
<ide> end
<ide>
<ide> def encode_with(coder)
<del> coder.represent_scalar nil, to_str
<add> coder.represent_object nil, to_str
<ide> end
<ide>
<ide> UNSAFE_STRING_METHODS.each do |unsafe_method|
<ide><path>activesupport/test/safe_buffer_test.rb
<ide> def test_titleize
<ide> assert_equal({'str' => str}, YAML.load(yaml))
<ide> end
<ide>
<add> test "Should work with primitive-like-strings in to_yaml conversion" do
<add> assert_equal 'true', YAML.load(ActiveSupport::SafeBuffer.new('true').to_yaml)
<add> assert_equal 'false', YAML.load(ActiveSupport::SafeBuffer.new('false').to_yaml)
<add> assert_equal '1', YAML.load(ActiveSupport::SafeBuffer.new('1').to_yaml)
<add> assert_equal '1.1', YAML.load(ActiveSupport::SafeBuffer.new('1.1').to_yaml)
<add> end
<add>
<ide> test "Should work with underscore" do
<ide> str = "MyTest".html_safe.underscore
<ide> assert_equal "my_test", str | 3 |
Python | Python | fix output of best model | c83fccfe2ae6c37f8953c96b70e30129ff39dcc2 | <ide><path>spacy/cli/train.py
<ide> def _collate_best_model(meta, output_path, components):
<ide> shutil.copytree(output_path / 'model-final', best_dest)
<ide> for component, best_component_src in bests.items():
<ide> shutil.rmtree(best_dest / component)
<del> shutil.copytree(best_component_src, best_dest / component)
<add> shutil.copytree(best_component_src / component, best_dest / component)
<ide> with (best_component_src / 'accuracy.json').open() as file_:
<ide> accs = json.load(file_)
<ide> for metric in _get_metrics(component): | 1 |
Python | Python | change doctest line | 7a8696cd6d5611a7e2f929ca13f54e684126b6f2 | <ide><path>blockchain/chinese_remainder_theorem.py
<ide> def chinese_remainder_theorem2(n1, r1, n2, r2):
<ide> return (n % m + m) % m
<ide>
<ide>
<del># import testmod for testing our function
<del>from doctest import testmod
<del>
<ide> if __name__ == "__main__":
<add> from doctest import testmod
<add>
<ide> testmod(name="chinese_remainder_theorem", verbose=True)
<ide> testmod(name="chinese_remainder_theorem2", verbose=True)
<ide> testmod(name="invert_modulo", verbose=True) | 1 |
Ruby | Ruby | reword documentation for update_all | 91c0c277698ab6ca4132a580f4212aa913492a8c | <ide><path>activerecord/lib/active_record/relation.rb
<ide> def scoping
<ide> klass.current_scope = previous
<ide> end
<ide>
<del> # Updates all records in the current scope (respecting the <tt>default_scope</tt>, <tt>where</tt>,
<del> # <tt>limit</tt> and <tt>order</tt> specified) with details given. This method constructs a single SQL update_all
<add> # Updates all records in the current relation with details given. This method constructs a single SQL UPDATE
<ide> # statement and sends it straight to the database. It does not instantiate the involved models and it does not
<ide> # trigger Active Record callbacks or validations. Values passed to `update_all` will not go through
<ide> # ActiveRecord's type-casting behavior. It should receive only values that can be passed as-is to the SQL
<ide> def scoping
<ide> # # Update all customers with the given attributes
<ide> # Customer.update_all wants_email: true
<ide> #
<del> # # Update all active accounts with the given attributes
<del> # class Account < ActiveRecord::Base
<del> # default_scope -> { where active: true }
<del> # end
<del> # Account.update_all(failed_logins: 0)
<del> #
<ide> # # Update all books with 'Rails' in their title
<ide> # Book.where('title LIKE ?', '%Rails%').update_all(author: 'David')
<ide> # | 1 |
Mixed | Javascript | promote _getcursorpos to public api | a68729cf3d02798ec88fffa20575074b6332ade8 | <ide><path>doc/api/readline.md
<ide> reading input from a TTY stream. The position of cursor determines the
<ide> portion of the input string that will be modified as input is processed,
<ide> as well as the column where the terminal caret will be rendered.
<ide>
<add>### rl.getCursorPos()
<add><!-- YAML
<add>added: REPLACEME
<add>-->
<add>
<add>* Returns: {Object}
<add> * `rows` {number} the row of the prompt the cursor currently lands on
<add> * `cols` {number} the screen column the cursor currently lands on
<add>
<add>Returns the real position of the cursor in relation to the input
<add>prompt + string. Long input (wrapping) strings, as well as multiple
<add>line prompts are included in the calculations.
<add>
<ide> ## readline.clearLine(stream, dir\[, callback\])
<ide> <!-- YAML
<ide> added: v0.7.7
<ide><path>lib/readline.js
<ide> Interface.prototype._refreshLine = function() {
<ide> const lineRows = dispPos.rows;
<ide>
<ide> // cursor position
<del> const cursorPos = this._getCursorPos();
<add> const cursorPos = this.getCursorPos();
<ide>
<ide> // First move to the bottom of the current line, based on cursor pos
<ide> const prevRows = this.prevRows || 0;
<ide> Interface.prototype._insertString = function(c) {
<ide> this.line += c;
<ide> this.cursor += c.length;
<ide>
<del> if (this._getCursorPos().cols === 0) {
<add> if (this.getCursorPos().cols === 0) {
<ide> this._refreshLine();
<ide> } else {
<ide> this._writeToOutput(c);
<ide> Interface.prototype._getDisplayPos = function(str) {
<ide>
<ide>
<ide> // Returns current cursor's position and line
<del>Interface.prototype._getCursorPos = function() {
<add>Interface.prototype.getCursorPos = function() {
<ide> const columns = this.columns;
<ide> const strBeforeCursor = this._prompt + this.line.substring(0, this.cursor);
<ide> const dispPos = this._getDisplayPos(
<ide> Interface.prototype._getCursorPos = function() {
<ide> }
<ide> return { cols: cols, rows: rows };
<ide> };
<add>Interface.prototype._getCursorPos = Interface.prototype.getCursorPos;
<ide>
<ide>
<ide> // This function moves cursor dx places to the right
<ide> // (-dx for left) and refreshes the line if it is needed
<ide> Interface.prototype._moveCursor = function(dx) {
<ide> const oldcursor = this.cursor;
<del> const oldPos = this._getCursorPos();
<add> const oldPos = this.getCursorPos();
<ide> this.cursor += dx;
<ide>
<ide> // bounds check
<ide> if (this.cursor < 0) this.cursor = 0;
<ide> else if (this.cursor > this.line.length) this.cursor = this.line.length;
<ide>
<del> const newPos = this._getCursorPos();
<add> const newPos = this.getCursorPos();
<ide>
<ide> // Check if cursors are in the same line
<ide> if (oldPos.rows === newPos.rows) {
<ide><path>test/parallel/test-readline-interface.js
<ide> function isWarned(emitter) {
<ide> rli.question(expectedLines[0], function() {
<ide> rli.close();
<ide> });
<del> const cursorPos = rli._getCursorPos();
<add> const cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, expectedLines[0].length);
<ide> rli.close();
<ide> function isWarned(emitter) {
<ide> rli.question(expectedLines.join('\n'), function() {
<ide> rli.close();
<ide> });
<del> const cursorPos = rli._getCursorPos();
<add> const cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, expectedLines.length - 1);
<ide> assert.strictEqual(cursorPos.cols, expectedLines.slice(-1)[0].length);
<ide> rli.close();
<ide> function isWarned(emitter) {
<ide> });
<ide> fi.emit('data', 'the quick brown fox');
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'a' });
<del> let cursorPos = rli._getCursorPos();
<add> let cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 0);
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'e' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 19);
<ide> rli.close();
<ide> function isWarned(emitter) {
<ide> terminal: terminal
<ide> });
<ide> fi.emit('data', 'the quick brown fox');
<del> let cursorPos = rli._getCursorPos();
<add> let cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 19);
<ide>
<ide> // Back one character
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'b' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 18);
<ide> // Back one character
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'b' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 17);
<ide> // Forward one character
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'f' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 18);
<ide> // Forward one character
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'f' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 19);
<ide> rli.close();
<ide> function isWarned(emitter) {
<ide>
<ide> // Move left one character/code point
<ide> fi.emit('keypress', '.', { name: 'left' });
<del> let cursorPos = rli._getCursorPos();
<add> let cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 0);
<ide>
<ide> // Move right one character/code point
<ide> fi.emit('keypress', '.', { name: 'right' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> if (common.hasIntl) {
<ide> assert.strictEqual(cursorPos.cols, 2);
<ide> function isWarned(emitter) {
<ide>
<ide> // Move left one character/code point
<ide> fi.emit('keypress', '.', { name: 'left' });
<del> let cursorPos = rli._getCursorPos();
<add> let cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 0);
<ide>
<ide> fi.emit('data', '🐕');
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide>
<ide> if (common.hasIntl) {
<ide> function isWarned(emitter) {
<ide>
<ide> // Move left one character/code point
<ide> fi.emit('keypress', '.', { name: 'right' });
<del> let cursorPos = rli._getCursorPos();
<add> let cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> if (common.hasIntl) {
<ide> assert.strictEqual(cursorPos.cols, 2);
<ide> function isWarned(emitter) {
<ide> }
<ide>
<ide> fi.emit('data', '🐕');
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> if (common.hasIntl) {
<ide> assert.strictEqual(cursorPos.cols, 4);
<ide> function isWarned(emitter) {
<ide> });
<ide> fi.emit('data', 'the quick brown fox');
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'left' });
<del> let cursorPos = rli._getCursorPos();
<add> let cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 16);
<ide> fi.emit('keypress', '.', { meta: true, name: 'b' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 10);
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'right' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 16);
<ide> fi.emit('keypress', '.', { meta: true, name: 'f' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 19);
<ide> rli.close();
<ide> function isWarned(emitter) {
<ide> terminal: terminal
<ide> });
<ide> fi.emit('data', 'the quick brown fox');
<del> let cursorPos = rli._getCursorPos();
<add> let cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 19);
<ide>
<ide> // Delete left character
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'h' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 18);
<ide> rli.on('line', common.mustCall((line) => {
<ide> function isWarned(emitter) {
<ide> terminal: terminal
<ide> });
<ide> fi.emit('data', '💻');
<del> let cursorPos = rli._getCursorPos();
<add> let cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> if (common.hasIntl) {
<ide> assert.strictEqual(cursorPos.cols, 2);
<ide> function isWarned(emitter) {
<ide> }
<ide> // Delete left character
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'h' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 0);
<ide> rli.on('line', common.mustCall((line) => {
<ide> function isWarned(emitter) {
<ide>
<ide> // Go to the start of the line
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'a' });
<del> let cursorPos = rli._getCursorPos();
<add> let cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 0);
<ide>
<ide> // Delete right character
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'd' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 0);
<ide> rli.on('line', common.mustCall((line) => {
<ide> function isWarned(emitter) {
<ide>
<ide> // Go to the start of the line
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'a' });
<del> let cursorPos = rli._getCursorPos();
<add> let cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 0);
<ide>
<ide> // Delete right character
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'd' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 0);
<ide> rli.on('line', common.mustCall((line) => {
<ide> function isWarned(emitter) {
<ide> terminal: terminal
<ide> });
<ide> fi.emit('data', 'the quick brown fox');
<del> let cursorPos = rli._getCursorPos();
<add> let cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 19);
<ide>
<ide> // Delete from current to start of line
<ide> fi.emit('keypress', '.', { ctrl: true, shift: true, name: 'backspace' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 0);
<ide> rli.on('line', common.mustCall((line) => {
<ide> function isWarned(emitter) {
<ide>
<ide> // Go to the start of the line
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'a' });
<del> let cursorPos = rli._getCursorPos();
<add> let cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 0);
<ide>
<ide> // Delete from current to end of line
<ide> fi.emit('keypress', '.', { ctrl: true, shift: true, name: 'delete' });
<del> cursorPos = rli._getCursorPos();
<add> cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 0);
<ide> rli.on('line', common.mustCall((line) => {
<ide> function isWarned(emitter) {
<ide> });
<ide> fi.columns = 10;
<ide> fi.emit('data', 'multi-line text');
<del> const cursorPos = rli._getCursorPos();
<add> const cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 1);
<ide> assert.strictEqual(cursorPos.cols, 5);
<ide> rli.close();
<ide> function isWarned(emitter) {
<ide> });
<ide> fi.columns = 10;
<ide> fi.emit('data', 't');
<del> const cursorPos = rli._getCursorPos();
<add> const cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 4);
<ide> assert.strictEqual(cursorPos.cols, 3);
<ide> rli.close();
<ide> function isWarned(emitter) {
<ide> const lines = ['line 1', 'line 2', 'line 3'];
<ide> fi.emit('data', lines.join('\n'));
<ide> fi.emit('keypress', '.', { ctrl: true, name: 'l' });
<del> const cursorPos = rli._getCursorPos();
<add> const cursorPos = rli.getCursorPos();
<ide> assert.strictEqual(cursorPos.rows, 0);
<ide> assert.strictEqual(cursorPos.cols, 6);
<ide> rli.on('line', common.mustCall((line) => {
<ide><path>test/parallel/test-readline-position.js
<ide> const ctrlU = { ctrl: true, name: 'u' };
<ide>
<ide> for (const [cursor, string] of tests) {
<ide> rl.write(string);
<del> assert.strictEqual(rl._getCursorPos().cols, cursor);
<add> assert.strictEqual(rl.getCursorPos().cols, cursor);
<ide> rl.write(null, ctrlU);
<ide> }
<ide> } | 4 |
Ruby | Ruby | move mysql2 adapter into core | 188855501b454d6732fb1fd1d76cc26d6119f2eb | <ide><path>activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb
<add># encoding: utf-8
<add>
<add>require 'mysql2' unless defined? Mysql2
<add>
<add>module ActiveRecord
<add> class Base
<add> def self.mysql2_connection(config)
<add> config[:username] = 'root' if config[:username].nil?
<add> client = Mysql2::Client.new(config.symbolize_keys)
<add> options = [config[:host], config[:username], config[:password], config[:database], config[:port], config[:socket], 0]
<add> ConnectionAdapters::Mysql2Adapter.new(client, logger, options, config)
<add> end
<add> end
<add>
<add> module ConnectionAdapters
<add> class Mysql2Column < Column
<add> BOOL = "tinyint(1)"
<add> def extract_default(default)
<add> if sql_type =~ /blob/i || type == :text
<add> if default.blank?
<add> return null ? nil : ''
<add> else
<add> raise ArgumentError, "#{type} columns cannot have a default value: #{default.inspect}"
<add> end
<add> elsif missing_default_forged_as_empty_string?(default)
<add> nil
<add> else
<add> super
<add> end
<add> end
<add>
<add> def has_default?
<add> return false if sql_type =~ /blob/i || type == :text #mysql forbids defaults on blob and text columns
<add> super
<add> end
<add>
<add> # Returns the Ruby class that corresponds to the abstract data type.
<add> def klass
<add> case type
<add> when :integer then Fixnum
<add> when :float then Float
<add> when :decimal then BigDecimal
<add> when :datetime then Time
<add> when :date then Date
<add> when :timestamp then Time
<add> when :time then Time
<add> when :text, :string then String
<add> when :binary then String
<add> when :boolean then Object
<add> end
<add> end
<add>
<add> def type_cast(value)
<add> return nil if value.nil?
<add> case type
<add> when :string then value
<add> when :text then value
<add> when :integer then value.to_i rescue value ? 1 : 0
<add> when :float then value.to_f # returns self if it's already a Float
<add> when :decimal then self.class.value_to_decimal(value)
<add> when :datetime, :timestamp then value.class == Time ? value : self.class.string_to_time(value)
<add> when :time then value.class == Time ? value : self.class.string_to_dummy_time(value)
<add> when :date then value.class == Date ? value : self.class.string_to_date(value)
<add> when :binary then value
<add> when :boolean then self.class.value_to_boolean(value)
<add> else value
<add> end
<add> end
<add>
<add> def type_cast_code(var_name)
<add> case type
<add> when :string then nil
<add> when :text then nil
<add> when :integer then "#{var_name}.to_i rescue #{var_name} ? 1 : 0"
<add> when :float then "#{var_name}.to_f"
<add> when :decimal then "#{self.class.name}.value_to_decimal(#{var_name})"
<add> when :datetime, :timestamp then "#{var_name}.class == Time ? #{var_name} : #{self.class.name}.string_to_time(#{var_name})"
<add> when :time then "#{var_name}.class == Time ? #{var_name} : #{self.class.name}.string_to_dummy_time(#{var_name})"
<add> when :date then "#{var_name}.class == Date ? #{var_name} : #{self.class.name}.string_to_date(#{var_name})"
<add> when :binary then nil
<add> when :boolean then "#{self.class.name}.value_to_boolean(#{var_name})"
<add> else nil
<add> end
<add> end
<add>
<add> private
<add> def simplified_type(field_type)
<add> return :boolean if Mysql2Adapter.emulate_booleans && field_type.downcase.index(BOOL)
<add> return :string if field_type =~ /enum/i or field_type =~ /set/i
<add> return :integer if field_type =~ /year/i
<add> return :binary if field_type =~ /bit/i
<add> super
<add> end
<add>
<add> def extract_limit(sql_type)
<add> case sql_type
<add> when /blob|text/i
<add> case sql_type
<add> when /tiny/i
<add> 255
<add> when /medium/i
<add> 16777215
<add> when /long/i
<add> 2147483647 # mysql only allows 2^31-1, not 2^32-1, somewhat inconsistently with the tiny/medium/normal cases
<add> else
<add> super # we could return 65535 here, but we leave it undecorated by default
<add> end
<add> when /^bigint/i; 8
<add> when /^int/i; 4
<add> when /^mediumint/i; 3
<add> when /^smallint/i; 2
<add> when /^tinyint/i; 1
<add> else
<add> super
<add> end
<add> end
<add>
<add> # MySQL misreports NOT NULL column default when none is given.
<add> # We can't detect this for columns which may have a legitimate ''
<add> # default (string) but we can for others (integer, datetime, boolean,
<add> # and the rest).
<add> #
<add> # Test whether the column has default '', is not null, and is not
<add> # a type allowing default ''.
<add> def missing_default_forged_as_empty_string?(default)
<add> type != :string && !null && default == ''
<add> end
<add> end
<add>
<add> class Mysql2Adapter < AbstractAdapter
<add> cattr_accessor :emulate_booleans
<add> self.emulate_booleans = true
<add>
<add> ADAPTER_NAME = 'Mysql2'
<add> PRIMARY = "PRIMARY"
<add>
<add> LOST_CONNECTION_ERROR_MESSAGES = [
<add> "Server shutdown in progress",
<add> "Broken pipe",
<add> "Lost connection to MySQL server during query",
<add> "MySQL server has gone away" ]
<add>
<add> QUOTED_TRUE, QUOTED_FALSE = '1', '0'
<add>
<add> NATIVE_DATABASE_TYPES = {
<add> :primary_key => "int(11) DEFAULT NULL auto_increment PRIMARY KEY",
<add> :string => { :name => "varchar", :limit => 255 },
<add> :text => { :name => "text" },
<add> :integer => { :name => "int", :limit => 4 },
<add> :float => { :name => "float" },
<add> :decimal => { :name => "decimal" },
<add> :datetime => { :name => "datetime" },
<add> :timestamp => { :name => "datetime" },
<add> :time => { :name => "time" },
<add> :date => { :name => "date" },
<add> :binary => { :name => "blob" },
<add> :boolean => { :name => "tinyint", :limit => 1 }
<add> }
<add>
<add> def initialize(connection, logger, connection_options, config)
<add> super(connection, logger)
<add> @connection_options, @config = connection_options, config
<add> @quoted_column_names, @quoted_table_names = {}, {}
<add> configure_connection
<add> end
<add>
<add> def adapter_name
<add> ADAPTER_NAME
<add> end
<add>
<add> def supports_migrations?
<add> true
<add> end
<add>
<add> def supports_primary_key?
<add> true
<add> end
<add>
<add> def supports_savepoints?
<add> true
<add> end
<add>
<add> def native_database_types
<add> NATIVE_DATABASE_TYPES
<add> end
<add>
<add> # QUOTING ==================================================
<add>
<add> def quote(value, column = nil)
<add> if value.kind_of?(String) && column && column.type == :binary && column.class.respond_to?(:string_to_binary)
<add> s = column.class.string_to_binary(value).unpack("H*")[0]
<add> "x'#{s}'"
<add> elsif value.kind_of?(BigDecimal)
<add> value.to_s("F")
<add> else
<add> super
<add> end
<add> end
<add>
<add> def quote_column_name(name) #:nodoc:
<add> @quoted_column_names[name] ||= "`#{name}`"
<add> end
<add>
<add> def quote_table_name(name) #:nodoc:
<add> @quoted_table_names[name] ||= quote_column_name(name).gsub('.', '`.`')
<add> end
<add>
<add> def quote_string(string)
<add> @connection.escape(string)
<add> end
<add>
<add> def quoted_true
<add> QUOTED_TRUE
<add> end
<add>
<add> def quoted_false
<add> QUOTED_FALSE
<add> end
<add>
<add> # REFERENTIAL INTEGRITY ====================================
<add>
<add> def disable_referential_integrity(&block) #:nodoc:
<add> old = select_value("SELECT @@FOREIGN_KEY_CHECKS")
<add>
<add> begin
<add> update("SET FOREIGN_KEY_CHECKS = 0")
<add> yield
<add> ensure
<add> update("SET FOREIGN_KEY_CHECKS = #{old}")
<add> end
<add> end
<add>
<add> # CONNECTION MANAGEMENT ====================================
<add>
<add> def active?
<add> return false unless @connection
<add> @connection.query 'select 1'
<add> true
<add> rescue Mysql2::Error
<add> false
<add> end
<add>
<add> def reconnect!
<add> disconnect!
<add> connect
<add> end
<add>
<add> # this is set to true in 2.3, but we don't want it to be
<add> def requires_reloading?
<add> false
<add> end
<add>
<add> def disconnect!
<add> unless @connection.nil?
<add> @connection.close
<add> @connection = nil
<add> end
<add> end
<add>
<add> def reset!
<add> disconnect!
<add> connect
<add> end
<add>
<add> # DATABASE STATEMENTS ======================================
<add>
<add> # FIXME: re-enable the following once a "better" query_cache solution is in core
<add> #
<add> # The overrides below perform much better than the originals in AbstractAdapter
<add> # because we're able to take advantage of mysql2's lazy-loading capabilities
<add> #
<add> # # Returns a record hash with the column names as keys and column values
<add> # # as values.
<add> # def select_one(sql, name = nil)
<add> # result = execute(sql, name)
<add> # result.each(:as => :hash) do |r|
<add> # return r
<add> # end
<add> # end
<add> #
<add> # # Returns a single value from a record
<add> # def select_value(sql, name = nil)
<add> # result = execute(sql, name)
<add> # if first = result.first
<add> # first.first
<add> # end
<add> # end
<add> #
<add> # # Returns an array of the values of the first column in a select:
<add> # # select_values("SELECT id FROM companies LIMIT 3") => [1,2,3]
<add> # def select_values(sql, name = nil)
<add> # execute(sql, name).map { |row| row.first }
<add> # end
<add>
<add> # Returns an array of arrays containing the field values.
<add> # Order is the same as that returned by +columns+.
<add> def select_rows(sql, name = nil)
<add> execute(sql, name).to_a
<add> end
<add>
<add> # Executes the SQL statement in the context of this connection.
<add> def execute(sql, name = nil)
<add> # make sure we carry over any changes to ActiveRecord::Base.default_timezone that have been
<add> # made since we established the connection
<add> @connection.query_options[:database_timezone] = ActiveRecord::Base.default_timezone
<add> if name == :skip_logging
<add> @connection.query(sql)
<add> else
<add> log(sql, name) { @connection.query(sql) }
<add> end
<add> rescue ActiveRecord::StatementInvalid => exception
<add> if exception.message.split(":").first =~ /Packets out of order/
<add> raise ActiveRecord::StatementInvalid, "'Packets out of order' error was received from the database. Please update your mysql bindings (gem install mysql) and read http://dev.mysql.com/doc/mysql/en/password-hashing.html for more information. If you're on Windows, use the Instant Rails installer to get the updated mysql bindings."
<add> else
<add> raise
<add> end
<add> end
<add>
<add> def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
<add> super
<add> id_value || @connection.last_id
<add> end
<add> alias :create :insert_sql
<add>
<add> def update_sql(sql, name = nil)
<add> super
<add> @connection.affected_rows
<add> end
<add>
<add> def begin_db_transaction
<add> execute "BEGIN"
<add> rescue Exception
<add> # Transactions aren't supported
<add> end
<add>
<add> def commit_db_transaction
<add> execute "COMMIT"
<add> rescue Exception
<add> # Transactions aren't supported
<add> end
<add>
<add> def rollback_db_transaction
<add> execute "ROLLBACK"
<add> rescue Exception
<add> # Transactions aren't supported
<add> end
<add>
<add> def create_savepoint
<add> execute("SAVEPOINT #{current_savepoint_name}")
<add> end
<add>
<add> def rollback_to_savepoint
<add> execute("ROLLBACK TO SAVEPOINT #{current_savepoint_name}")
<add> end
<add>
<add> def release_savepoint
<add> execute("RELEASE SAVEPOINT #{current_savepoint_name}")
<add> end
<add>
<add> def add_limit_offset!(sql, options)
<add> limit, offset = options[:limit], options[:offset]
<add> if limit && offset
<add> sql << " LIMIT #{offset.to_i}, #{sanitize_limit(limit)}"
<add> elsif limit
<add> sql << " LIMIT #{sanitize_limit(limit)}"
<add> elsif offset
<add> sql << " OFFSET #{offset.to_i}"
<add> end
<add> sql
<add> end
<add>
<add> # SCHEMA STATEMENTS ========================================
<add>
<add> def structure_dump
<add> if supports_views?
<add> sql = "SHOW FULL TABLES WHERE Table_type = 'BASE TABLE'"
<add> else
<add> sql = "SHOW TABLES"
<add> end
<add>
<add> select_all(sql).inject("") do |structure, table|
<add> table.delete('Table_type')
<add> structure += select_one("SHOW CREATE TABLE #{quote_table_name(table.to_a.first.last)}")["Create Table"] + ";\n\n"
<add> end
<add> end
<add>
<add> def recreate_database(name, options = {})
<add> drop_database(name)
<add> create_database(name, options)
<add> end
<add>
<add> # Create a new MySQL database with optional <tt>:charset</tt> and <tt>:collation</tt>.
<add> # Charset defaults to utf8.
<add> #
<add> # Example:
<add> # create_database 'charset_test', :charset => 'latin1', :collation => 'latin1_bin'
<add> # create_database 'matt_development'
<add> # create_database 'matt_development', :charset => :big5
<add> def create_database(name, options = {})
<add> if options[:collation]
<add> execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}` COLLATE `#{options[:collation]}`"
<add> else
<add> execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}`"
<add> end
<add> end
<add>
<add> def drop_database(name) #:nodoc:
<add> execute "DROP DATABASE IF EXISTS `#{name}`"
<add> end
<add>
<add> def current_database
<add> select_value 'SELECT DATABASE() as db'
<add> end
<add>
<add> # Returns the database character set.
<add> def charset
<add> show_variable 'character_set_database'
<add> end
<add>
<add> # Returns the database collation strategy.
<add> def collation
<add> show_variable 'collation_database'
<add> end
<add>
<add> def tables(name = nil)
<add> tables = []
<add> execute("SHOW TABLES", name).each do |field|
<add> tables << field.first
<add> end
<add> tables
<add> end
<add>
<add> def drop_table(table_name, options = {})
<add> super(table_name, options)
<add> end
<add>
<add> def indexes(table_name, name = nil)
<add> indexes = []
<add> current_index = nil
<add> result = execute("SHOW KEYS FROM #{quote_table_name(table_name)}", name)
<add> result.each(:symbolize_keys => true, :as => :hash) do |row|
<add> if current_index != row[:Key_name]
<add> next if row[:Key_name] == PRIMARY # skip the primary key
<add> current_index = row[:Key_name]
<add> indexes << IndexDefinition.new(row[:Table], row[:Key_name], row[:Non_unique] == 0, [])
<add> end
<add>
<add> indexes.last.columns << row[:Column_name]
<add> end
<add> indexes
<add> end
<add>
<add> def columns(table_name, name = nil)
<add> sql = "SHOW FIELDS FROM #{quote_table_name(table_name)}"
<add> columns = []
<add> result = execute(sql, :skip_logging)
<add> result.each(:symbolize_keys => true, :as => :hash) { |field|
<add> columns << Mysql2Column.new(field[:Field], field[:Default], field[:Type], field[:Null] == "YES")
<add> }
<add> columns
<add> end
<add>
<add> def create_table(table_name, options = {})
<add> super(table_name, options.reverse_merge(:options => "ENGINE=InnoDB"))
<add> end
<add>
<add> def rename_table(table_name, new_name)
<add> execute "RENAME TABLE #{quote_table_name(table_name)} TO #{quote_table_name(new_name)}"
<add> end
<add>
<add> def add_column(table_name, column_name, type, options = {})
<add> add_column_sql = "ALTER TABLE #{quote_table_name(table_name)} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
<add> add_column_options!(add_column_sql, options)
<add> add_column_position!(add_column_sql, options)
<add> execute(add_column_sql)
<add> end
<add>
<add> def change_column_default(table_name, column_name, default)
<add> column = column_for(table_name, column_name)
<add> change_column table_name, column_name, column.sql_type, :default => default
<add> end
<add>
<add> def change_column_null(table_name, column_name, null, default = nil)
<add> column = column_for(table_name, column_name)
<add>
<add> unless null || default.nil?
<add> execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
<add> end
<add>
<add> change_column table_name, column_name, column.sql_type, :null => null
<add> end
<add>
<add> def change_column(table_name, column_name, type, options = {})
<add> column = column_for(table_name, column_name)
<add>
<add> unless options_include_default?(options)
<add> options[:default] = column.default
<add> end
<add>
<add> unless options.has_key?(:null)
<add> options[:null] = column.null
<add> end
<add>
<add> change_column_sql = "ALTER TABLE #{quote_table_name(table_name)} CHANGE #{quote_column_name(column_name)} #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
<add> add_column_options!(change_column_sql, options)
<add> add_column_position!(change_column_sql, options)
<add> execute(change_column_sql)
<add> end
<add>
<add> def rename_column(table_name, column_name, new_column_name)
<add> options = {}
<add> if column = columns(table_name).find { |c| c.name == column_name.to_s }
<add> options[:default] = column.default
<add> options[:null] = column.null
<add> else
<add> raise ActiveRecordError, "No such column: #{table_name}.#{column_name}"
<add> end
<add> current_type = select_one("SHOW COLUMNS FROM #{quote_table_name(table_name)} LIKE '#{column_name}'")["Type"]
<add> rename_column_sql = "ALTER TABLE #{quote_table_name(table_name)} CHANGE #{quote_column_name(column_name)} #{quote_column_name(new_column_name)} #{current_type}"
<add> add_column_options!(rename_column_sql, options)
<add> execute(rename_column_sql)
<add> end
<add>
<add> # Maps logical Rails types to MySQL-specific data types.
<add> def type_to_sql(type, limit = nil, precision = nil, scale = nil)
<add> return super unless type.to_s == 'integer'
<add>
<add> case limit
<add> when 1; 'tinyint'
<add> when 2; 'smallint'
<add> when 3; 'mediumint'
<add> when nil, 4, 11; 'int(11)' # compatibility with MySQL default
<add> when 5..8; 'bigint'
<add> else raise(ActiveRecordError, "No integer type has byte size #{limit}")
<add> end
<add> end
<add>
<add> def add_column_position!(sql, options)
<add> if options[:first]
<add> sql << " FIRST"
<add> elsif options[:after]
<add> sql << " AFTER #{quote_column_name(options[:after])}"
<add> end
<add> end
<add>
<add> def show_variable(name)
<add> variables = select_all("SHOW VARIABLES LIKE '#{name}'")
<add> variables.first['Value'] unless variables.empty?
<add> end
<add>
<add> def pk_and_sequence_for(table)
<add> keys = []
<add> result = execute("describe #{quote_table_name(table)}")
<add> result.each(:symbolize_keys => true, :as => :hash) do |row|
<add> keys << row[:Field] if row[:Key] == "PRI"
<add> end
<add> keys.length == 1 ? [keys.first, nil] : nil
<add> end
<add>
<add> # Returns just a table's primary key
<add> def primary_key(table)
<add> pk_and_sequence = pk_and_sequence_for(table)
<add> pk_and_sequence && pk_and_sequence.first
<add> end
<add>
<add> def case_sensitive_equality_operator
<add> "= BINARY"
<add> end
<add>
<add> def limited_update_conditions(where_sql, quoted_table_name, quoted_primary_key)
<add> where_sql
<add> end
<add>
<add> protected
<add> def quoted_columns_for_index(column_names, options = {})
<add> length = options[:length] if options.is_a?(Hash)
<add>
<add> quoted_column_names = case length
<add> when Hash
<add> column_names.map {|name| length[name] ? "#{quote_column_name(name)}(#{length[name]})" : quote_column_name(name) }
<add> when Fixnum
<add> column_names.map {|name| "#{quote_column_name(name)}(#{length})"}
<add> else
<add> column_names.map {|name| quote_column_name(name) }
<add> end
<add> end
<add>
<add> def translate_exception(exception, message)
<add> return super unless exception.respond_to?(:error_number)
<add>
<add> case exception.error_number
<add> when 1062
<add> RecordNotUnique.new(message, exception)
<add> when 1452
<add> InvalidForeignKey.new(message, exception)
<add> else
<add> super
<add> end
<add> end
<add>
<add> private
<add> def connect
<add> @connection = Mysql2::Client.new(@config)
<add> configure_connection
<add> end
<add>
<add> def configure_connection
<add> @connection.query_options.merge!(:as => :array)
<add> encoding = @config[:encoding]
<add> execute("SET NAMES '#{encoding}'", :skip_logging) if encoding
<add>
<add> # By default, MySQL 'where id is null' selects the last inserted id.
<add> # Turn this off. http://dev.rubyonrails.org/ticket/6778
<add> execute("SET SQL_AUTO_IS_NULL=0", :skip_logging)
<add> end
<add>
<add> # Returns an array of record hashes with the column names as keys and
<add> # column values as values.
<add> def select(sql, name = nil)
<add> execute(sql, name).each(:as => :hash)
<add> end
<add>
<add> def supports_views?
<add> version[0] >= 5
<add> end
<add>
<add> def version
<add> @version ||= @connection.info[:version].scan(/^(\d+)\.(\d+)\.(\d+)/).flatten.map { |v| v.to_i }
<add> end
<add>
<add> def column_for(table_name, column_name)
<add> unless column = columns(table_name).find { |c| c.name == column_name.to_s }
<add> raise "No such column: #{table_name}.#{column_name}"
<add> end
<add> column
<add> end
<add> end
<add> end
<add>end | 1 |
Text | Text | improve wording in getting started guide | b62eed173f924af075540d87e57b79ef90db8316 | <ide><path>guides/source/getting_started.md
<ide> For more installation methods for most Operating Systems take a look at
<ide> [ruby-lang.org](https://www.ruby-lang.org/en/documentation/installation/).
<ide>
<ide> If you are working on Windows, you should also install the
<del>[Ruby Installer Development Kit](http://rubyinstaller.org/downloads/).
<add>[Ruby Installer Development Kit](https://rubyinstaller.org/downloads/).
<ide>
<ide> You will also need an installation of the SQLite3 database.
<ide> Many popular UNIX-like OSes ship with an acceptable version of SQLite3.
<ide> of the files and folders that Rails created by default:
<ide> | File/Folder | Purpose |
<ide> | ----------- | ------- |
<ide> |app/|Contains the controllers, models, views, helpers, mailers, channels, jobs and assets for your application. You'll focus on this folder for the remainder of this guide.|
<del>|bin/|Contains the rails script that starts your app and can contain other scripts you use to setup, update, deploy or run your application.|
<add>|bin/|Contains the rails script that starts your app and can contain other scripts you use to setup, update, deploy, or run your application.|
<ide> |config/|Configure your application's routes, database, and more. This is covered in more detail in [Configuring Rails Applications](configuring.html).|
<ide> |config.ru|Rack configuration for Rack based servers used to start the application. For more information about Rack, see the [Rack website](https://rack.github.io/).|
<ide> |db/|Contains your current database schema, as well as the database migrations.|
<ide> Now that you've seen how to create a controller, an action and a view, let's
<ide> create something with a bit more substance.
<ide>
<ide> In the Blog application, you will now create a new _resource_. A resource is the
<del>term used for a collection of similar objects, such as articles, people or
<add>term used for a collection of similar objects, such as articles, people, or
<ide> animals.
<del>You can create, read, update and destroy items for a resource and these
<add>You can create, read, update, and destroy items for a resource and these
<ide> operations are referred to as _CRUD_ operations.
<ide>
<ide> Rails provides a `resources` method which can be used to declare a standard REST
<ide> write this content in it:
<ide> ```
<ide>
<ide> When you refresh <http://localhost:3000/articles/new> you'll now see that the
<del>page has a title. The route, controller, action and view are now working
<add>page has a title. The route, controller, action, and view are now working
<ide> harmoniously! It's time to create the form for a new article.
<ide>
<ide> ### The first form
<ide> that otherwise `@article` would be `nil` in our view, and calling
<ide> `@article.errors.any?` would throw an error.
<ide>
<ide> TIP: Rails automatically wraps fields that contain an error with a div
<del>with class `field_with_errors`. You can define a css rule to make them
<add>with class `field_with_errors`. You can define a CSS rule to make them
<ide> standout.
<ide>
<ide> Now you'll get a nice error message when saving an article without title when
<ide> it look as follows:
<ide> This time we point the form to the `update` action, which is not defined yet
<ide> but will be very soon.
<ide>
<del>Passing the article object to the method, will automagically create url for submitting the edited article form.
<del>This option tells Rails that we want this form to be submitted
<del>via the `PATCH` HTTP method which is the HTTP method you're expected to use to
<del>**update** resources according to the REST protocol.
<add>Passing the article object to the method will automatically set the URL for
<add>submitting the edited article form. This option tells Rails that we want this
<add>form to be submitted via the `PATCH` HTTP method, which is the HTTP method you're
<add>expected to use to **update** resources according to the REST protocol.
<ide>
<ide> The arguments to `form_with` could be model objects, say, `model: @article` which would
<ide> cause the helper to fill in the form with the fields of the object. Passing in a
<ide> appear.
<ide> TIP: Learn more about Unobtrusive JavaScript on
<ide> [Working With JavaScript in Rails](working_with_javascript_in_rails.html) guide.
<ide>
<del>Congratulations, you can now create, show, list, update and destroy
<add>Congratulations, you can now create, show, list, update, and destroy
<ide> articles.
<ide>
<ide> TIP: In general, Rails encourages using resources objects instead of
<ide> getting up and running with Rails, feel free to consult these support
<ide> resources:
<ide>
<ide> * The [Ruby on Rails Guides](index.html)
<del>* The [Ruby on Rails Tutorial](http://railstutorial.org/book)
<del>* The [Ruby on Rails mailing list](http://groups.google.com/group/rubyonrails-talk)
<add>* The [Ruby on Rails Tutorial](https://www.railstutorial.org/book)
<add>* The [Ruby on Rails mailing list](https://groups.google.com/group/rubyonrails-talk)
<ide> * The [#rubyonrails](irc://irc.freenode.net/#rubyonrails) channel on irc.freenode.net
<ide>
<ide> | 1 |
Ruby | Ruby | fix primary_abstract_class in lazy loaded envs | 00bad6102cdfa352c02de909258167ba07554dea | <ide><path>activerecord/lib/active_record/core.rb
<ide> def self.application_record_class? # :nodoc:
<ide> self == Base.application_record_class
<ide> else
<ide> if defined?(ApplicationRecord) && self == ApplicationRecord
<del> Base.application_record_class = self
<ide> true
<ide> end
<ide> end
<ide><path>activerecord/test/cases/primary_class_test.rb
<ide> def test_primary_abstract_class_is_used_over_application_record_if_set
<ide> Object.send(:remove_const, :ApplicationRecord)
<ide> end
<ide>
<add> def test_setting_primary_abstract_class_explicitly_wins_over_application_record_set_implicitly
<add> Object.const_set(:ApplicationRecord, ApplicationRecord)
<add>
<add> assert_predicate ApplicationRecord, :primary_class?
<add> assert_predicate ApplicationRecord, :application_record_class?
<add> assert_predicate ApplicationRecord, :abstract_class?
<add>
<add> PrimaryClassTest::PrimaryAppRecord.primary_abstract_class
<add>
<add> assert_predicate PrimaryClassTest::PrimaryAppRecord, :primary_class?
<add> assert_predicate PrimaryClassTest::PrimaryAppRecord, :application_record_class?
<add> assert_predicate PrimaryClassTest::PrimaryAppRecord, :abstract_class?
<add>
<add> assert_not_predicate ApplicationRecord, :primary_class?
<add> assert_not_predicate ApplicationRecord, :application_record_class?
<add> assert_predicate ApplicationRecord, :abstract_class?
<add> ensure
<add> ActiveRecord::Base.application_record_class = nil
<add> Object.send(:remove_const, :ApplicationRecord)
<add> end
<add>
<ide> unless in_memory_db?
<ide> def test_application_record_shares_a_connection_with_active_record_by_default
<ide> Object.const_set(:ApplicationRecord, ApplicationRecord) | 2 |
Text | Text | add docs for volume plugin ls/get | 5d1dd8069bd8217f946da531c2575280a68bc085 | <ide><path>docs/extend/plugins_volume.md
<ide> containers.
<ide> ### /VolumeDriver.Create
<ide>
<ide> **Request**:
<del>```
<add>```json
<ide> {
<ide> "Name": "volume_name",
<ide> "Opts": {}
<ide> volume on the filesystem yet (until Mount is called).
<ide> Opts is a map of driver specific options passed through from the user request.
<ide>
<ide> **Response**:
<del>```
<add>```json
<ide> {
<ide> "Err": ""
<ide> }
<ide> Respond with a string error if an error occurred.
<ide> ### /VolumeDriver.Remove
<ide>
<ide> **Request**:
<del>```
<add>```json
<ide> {
<ide> "Name": "volume_name"
<ide> }
<ide> Respond with a string error if an error occurred.
<ide> Delete the specified volume from disk. This request is issued when a user invokes `docker rm -v` to remove volumes associated with a container.
<ide>
<ide> **Response**:
<del>```
<add>```json
<ide> {
<ide> "Err": ""
<ide> }
<ide> Respond with a string error if an error occurred.
<ide> ### /VolumeDriver.Mount
<ide>
<ide> **Request**:
<del>```
<add>```json
<ide> {
<ide> "Name": "volume_name"
<ide> }
<ide> more than once, the plugin may need to keep track of each new mount request and
<ide> at the first mount request and deprovision at the last corresponding unmount request.
<ide>
<ide> **Response**:
<del>```
<add>```json
<ide> {
<ide> "Mountpoint": "/path/to/directory/on/host",
<ide> "Err": ""
<ide> available, and/or a string error if an error occurred.
<ide> ### /VolumeDriver.Path
<ide>
<ide> **Request**:
<del>```
<add>```json
<ide> {
<ide> "Name": "volume_name"
<ide> }
<ide> available, and/or a string error if an error occurred.
<ide> Docker needs reminding of the path to the volume on the host.
<ide>
<ide> **Response**:
<del>```
<add>```json
<ide> {
<ide> "Mountpoint": "/path/to/directory/on/host",
<ide> "Err": ""
<ide> available, and/or a string error if an error occurred.
<ide> ### /VolumeDriver.Unmount
<ide>
<ide> **Request**:
<del>```
<add>```json
<ide> {
<ide> "Name": "volume_name"
<ide> }
<ide> per container stop. Plugin may deduce that it is safe to deprovision it at
<ide> this point.
<ide>
<ide> **Response**:
<del>```
<add>```json
<ide> {
<ide> "Err": ""
<ide> }
<ide> ```
<ide>
<ide> Respond with a string error if an error occurred.
<ide>
<add>
<add>### /VolumeDriver.Get
<add>
<add>**Request**:
<add>```json
<add>{
<add> "Name": "volume_name"
<add>}
<add>```
<add>
<add>Get the volume info.
<add>
<add>
<add>**Response**:
<add>```json
<add>{
<add> "Volume": {
<add> "Name": "volume_name",
<add> "Mountpoint": "/path/to/directory/on/host",
<add> },
<add> "Err": ""
<add>}
<add>```
<add>
<add>Respond with a string error if an error occurred.
<add>
<add>
<add>### /VolumeDriver.List
<add>
<add>**Request**:
<add>```json
<add>{}
<add>```
<add>
<add>Get the list of volumes registered with the plugin.
<add>
<add>**Response**:
<add>```json
<add>{
<add> "Volumes": [
<add> {
<add> "Name": "volume_name",
<add> "Mountpoint": "/path/to/directory/on/host"
<add> }
<add> ],
<add> "Err": ""
<add>}
<add>```
<add>
<add>Respond with a string error if an error occurred.
<add> | 1 |
Javascript | Javascript | fix references to reactcomponent / reactelement | 421e7880d98cf18575a0429bff8e632a5e94bfc5 | <ide><path>Libraries/Experimental/WindowedListView.js
<ide> const infoLog = require('infoLog');
<ide> const invariant = require('fbjs/lib/invariant');
<ide> const nullthrows = require('fbjs/lib/nullthrows');
<ide>
<del>import type ReactComponent from 'ReactComponent';
<del>
<ide> const DEBUG = false;
<ide>
<ide> /**
<ide> class WindowedListView extends React.Component {
<ide> _willComputeRowsToRender: boolean = false;
<ide> _viewableRows: Array<number> = [];
<ide> _cellsInProgress: Set<string> = new Set();
<del> _scrollRef: ?Object;
<add> _scrollRef: ?ScrollView;
<ide>
<ide> static defaultProps = {
<ide> initialNumToRender: 10,
<ide> class WindowedListView extends React.Component {
<ide> lastRow: Math.min(this.props.data.length, this.props.initialNumToRender) - 1,
<ide> };
<ide> }
<del> getScrollResponder(): ?ReactComponent {
<add> getScrollResponder(): ?ScrollView {
<ide> return this._scrollRef &&
<ide> this._scrollRef.getScrollResponder &&
<ide> this._scrollRef.getScrollResponder(); | 1 |
Python | Python | fix small typos in partition docstring | 9e9bdc5b42b549af1955d1a5c9e59478b679e56f | <ide><path>numpy/core/fromnumeric.py
<ide> def partition(a, kth, axis=-1, kind='introselect', order=None):
<ide> """
<ide> Return a partitioned copy of an array.
<ide>
<del> Creates a copy of the array with its elements rearranges in such a way that
<add> Creates a copy of the array with its elements rearranged in such a way that
<ide> the value of the element in kth position is in the position it would be in
<ide> a sorted array. All elements smaller than the kth element are moved before
<ide> this element and all equal or greater are moved behind it. The ordering of
<ide> def argpartition(a, kth, axis=-1, kind='introselect', order=None):
<ide> Returns
<ide> -------
<ide> index_array : ndarray, int
<del> Array of indices that partition`a` along the specified axis.
<add> Array of indices that partition `a` along the specified axis.
<ide> In other words, ``a[index_array]`` yields a sorted `a`.
<ide>
<ide> See Also | 1 |
Javascript | Javascript | avoid multiple conversions to string | 34c9fc2e4e12fa9aa58d94a3283193444302a3ab | <ide><path>lib/fs.js
<ide> function encodeRealpathResult(result, options) {
<ide> fs.realpathSync = function realpathSync(p, options) {
<ide> options = getOptions(options, {});
<ide> handleError((p = getPathFromURL(p)));
<add> if (typeof p !== 'string')
<add> p += '';
<ide> nullCheck(p);
<del>
<del> p = p.toString('utf8');
<ide> p = pathModule.resolve(p);
<ide>
<del> const seenLinks = {};
<del> const knownHard = {};
<ide> const cache = options[internalFS.realpathCacheKey];
<del> const original = p;
<del>
<ide> const maybeCachedResult = cache && cache.get(p);
<ide> if (maybeCachedResult) {
<ide> return maybeCachedResult;
<ide> }
<ide>
<add> const seenLinks = {};
<add> const knownHard = {};
<add> const original = p;
<add>
<ide> // current character position in p
<ide> var pos;
<ide> // the partial path so far, including a trailing slash if any
<ide> fs.realpath = function realpath(p, options, callback) {
<ide> options = getOptions(options, {});
<ide> if (handleError((p = getPathFromURL(p)), callback))
<ide> return;
<add> if (typeof p !== 'string')
<add> p += '';
<ide> if (!nullCheck(p, callback))
<ide> return;
<del>
<del> p = p.toString('utf8');
<ide> p = pathModule.resolve(p);
<ide>
<ide> const seenLinks = {}; | 1 |
Text | Text | define bridge networks | 1e74df3637c1497f7adc3bd7aebd95baaa4c8e22 | <ide><path>docs/reference/glossary.md
<ide> An image that has no parent is a **base image**.
<ide> [boot2docker](http://boot2docker.io/) is a lightweight Linux distribution made
<ide> specifically to run Docker containers. The boot2docker management tool for Mac and Windows was deprecated and replaced by [`docker-machine`](#machine) which you can install with the Docker Toolbox.
<ide>
<add>## bridge
<add>
<add>In terms of generic networking, a bridge is a Link Layer device which forwards
<add>traffic between network segments. A bridge can be a hardware device or a
<add>software device running within a host machine's kernel.
<add>
<add>In terms of Docker, a bridge network uses a software bridge which allows
<add>containers connected to the same bridge network to communicate, while providing
<add>isolation from containers which are not connected to that bridge network.
<add>The Docker bridge driver automatically installs rules in the host machine so
<add>that containers on different bridge networks cannot communicate directly with
<add>each other.
<add>
<add>The default bridge network, which is also named `bridge`, behaves differently
<add>from user-defined bridge networks. Containers connected to the default `bridge`
<add>network can communicate with each other across the bridge by IP address but
<add>cannot resolve each other's container name to an IP address unless they are
<add>explicitly linked using the `--link` flag to `docker run`.
<add>
<add>For more information about Docker networking, see
<add>[Understand container communication](https://docs.docker.com/engine/userguide/networking/default_network/container-communication/).
<add>
<ide> ## btrfs
<ide>
<ide> btrfs (B-tree file system) is a Linux [filesystem](#filesystem) that Docker | 1 |
Python | Python | update insert sort | 43f92490fe2edd92d09887eb732eaf6ac5ef698e | <ide><path>sorts/i_sort.py
<del>def insertionSort(arr):
<del> """
<del> >>> a = arr[:]
<del> >>> insertionSort(a)
<del> >>> a == sorted(a)
<del> True
<del> """
<del> for i in range(1, len(arr)):
<del> key = arr[i]
<del> j = i - 1
<del> while j >= 0 and key < arr[j]:
<del> arr[j + 1] = arr[j]
<del> j -= 1
<del> arr[j + 1] = key
<del>
<del>
<del>arr = [12, 11, 13, 5, 6]
<del>insertionSort(arr)
<del>print("Sorted array is:")
<del>for i in range(len(arr)):
<del> print("%d" % arr[i])
<ide><path>sorts/insertion_sort.py
<ide> def insertion_sort(collection: list) -> list:
<ide> Examples:
<ide> >>> insertion_sort([0, 5, 3, 2, 2])
<ide> [0, 2, 2, 3, 5]
<del>
<del> >>> insertion_sort([])
<del> []
<del>
<del> >>> insertion_sort([-2, -5, -45])
<del> [-45, -5, -2]
<add> >>> insertion_sort([]) == sorted([])
<add> True
<add> >>> insertion_sort([-2, -5, -45]) == sorted([-2, -5, -45])
<add> True
<add> >>> insertion_sort(['d', 'a', 'b', 'e', 'c']) == sorted(['d', 'a', 'b', 'e', 'c'])
<add> True
<add> >>> import random
<add> >>> collection = random.sample(range(-50, 50), 100)
<add> >>> insertion_sort(collection) == sorted(collection)
<add> True
<add> >>> import string
<add> >>> collection = random.choices(string.ascii_letters + string.digits, k=100)
<add> >>> insertion_sort(collection) == sorted(collection)
<add> True
<ide> """
<ide>
<del> for loop_index in range(1, len(collection)):
<del> insertion_index = loop_index
<del> while (
<del> insertion_index > 0
<del> and collection[insertion_index - 1] > collection[insertion_index]
<del> ):
<del> collection[insertion_index], collection[insertion_index - 1] = (
<del> collection[insertion_index - 1],
<del> collection[insertion_index],
<del> )
<del> insertion_index -= 1
<del>
<add> for insert_index, insert_value in enumerate(collection[1:]):
<add> temp_index = insert_index
<add> while insert_index >= 0 and insert_value < collection[insert_index]:
<add> collection[insert_index + 1] = collection[insert_index]
<add> insert_index -= 1
<add> if insert_index != temp_index:
<add> collection[insert_index + 1] = insert_value
<ide> return collection
<ide>
<ide>
<ide> if __name__ == "__main__":
<add> from doctest import testmod
<add>
<add> testmod()
<add>
<ide> user_input = input("Enter numbers separated by a comma:\n").strip()
<ide> unsorted = [int(item) for item in user_input.split(",")]
<ide> print(f"{insertion_sort(unsorted) = }") | 2 |
Ruby | Ruby | remove side effects in `normalize_defaults` | a4f780947299cc35f14c9e1825278155ae88ee44 | <ide><path>actionpack/lib/action_dispatch/routing/mapper.rb
<ide> def initialize(set, path, defaults, as, controller, default_action, modyoule, to
<ide>
<ide> @requirements = formats[:requirements].merge Hash[requirements]
<ide> @conditions = Hash[conditions]
<del> @defaults = formats[:defaults].merge @defaults
<add> @defaults = formats[:defaults].merge(@defaults).merge(normalize_defaults(options))
<ide>
<ide> @conditions[:required_defaults] = (split_options[:required_defaults] || []).map(&:first)
<ide> @conditions[:path_info] = path
<ide> @conditions[:parsed_path_info] = ast
<ide>
<ide> add_request_method(via, @conditions)
<del> normalize_defaults!(options)
<ide> end
<ide>
<ide> def to_route
<ide> def verify_regexp_requirements(requirements)
<ide> end
<ide> end
<ide>
<del> def normalize_defaults!(options)
<del> options.each_pair do |key, default|
<del> unless Regexp === default
<del> @defaults[key] = default
<del> end
<del> end
<add> def normalize_defaults(options)
<add> Hash[options.reject { |_, default| Regexp === default }]
<ide> end
<ide>
<ide> def add_request_method(via, conditions) | 1 |
PHP | PHP | use real classname for seeders | da88b26a56bcd35e1e62f362c5920e023be6a9aa | <ide><path>src/Illuminate/Database/Seeder.php
<ide> public function call($class, $silent = false)
<ide> $classes = Arr::wrap($class);
<ide>
<ide> foreach ($classes as $class) {
<add> $seeder = $this->resolve($class);
<add>
<ide> if ($silent === false && isset($this->command)) {
<del> $this->command->getOutput()->writeln("<info>Seeding:</info> $class");
<add> $this->command->getOutput()->writeln('<info>Seeding:</info> '.get_class($seeder));
<ide> }
<ide>
<del> $this->resolve($class)->__invoke();
<add> $seeder->__invoke();
<ide> }
<ide>
<ide> return $this; | 1 |
Text | Text | create tar file | d073722880833f755ef486e6d0b0e7d44f79f321 | <ide><path>guide/english/bash/bash-tar/index.md
<add>---
<add>title: Bash tar
<add>---
<add>
<add>## Bash command: tar
<add>
<add>**GNU 'tar' saves many files together into a single tape or disk archive, and can restore individual files from the archive.**
<add>
<add>Examples :
<add>
<add>_Create archive.tar from files file1 and file2_
<add>```
<add>tar -cf archive.tar file1 file2
<add>```
<add>_Extract all files in current directory_
<add>```
<add>tar -xf archive.tar
<add>``` | 1 |
Javascript | Javascript | remove usage of internal stylesheet type | d89517d60a8a6cabc9013b603fa3f63a1face6a2 | <ide><path>Libraries/StyleSheet/StyleSheet.js
<ide> const StyleSheetValidation = require('StyleSheetValidation');
<ide> const flatten = require('flattenStyle');
<ide>
<ide> import type {
<del> StyleSheetStyle as _StyleSheetStyle,
<add> ____StyleSheetInternalStyleIdentifier_Internal as StyleSheetInternalStyleIdentifier,
<ide> Styles as _Styles,
<ide> ____StyleObj_Internal,
<ide> ____ViewStyleProp_Internal,
<ide> export type TextStyleProp = ____TextStyleProp_Internal;
<ide> export type ImageStyleProp = ____ImageStyleProp_Internal;
<ide>
<ide> export type Styles = _Styles;
<del>export type StyleSheetStyle = _StyleSheetStyle;
<del>type StyleSheet<+S: Styles> = $ObjMap<S, (Object) => StyleSheetStyle>;
<ide>
<ide> let hairlineWidth = PixelRatio.roundToNearestPixel(0.4);
<ide> if (hairlineWidth === 0) {
<ide> const absoluteFillObject: LayoutStyle = {
<ide> top: 0,
<ide> bottom: 0,
<ide> };
<del>const absoluteFill: StyleSheetStyle =
<add>const absoluteFill: StyleSheetInternalStyleIdentifier =
<ide> ReactNativePropRegistry.register(absoluteFillObject); // This also freezes it
<ide>
<ide> /**
<ide> module.exports = {
<ide> /**
<ide> * Creates a StyleSheet style reference from the given object.
<ide> */
<del> create<S: Styles>(obj: S): StyleSheet<S> {
<add> create<+S: Styles>(obj: S): $ObjMap<S, (Object) => StyleSheetInternalStyleIdentifier> {
<ide> const result = {};
<ide> for (const key in obj) {
<ide> StyleSheetValidation.validateStyle(key, obj);
<ide><path>Libraries/StyleSheet/StyleSheetTypes.js
<ide>
<ide> import AnimatedNode from 'AnimatedNode';
<ide>
<del>export opaque type StyleSheetStyle: number = number;
<add>export opaque type ____StyleSheetInternalStyleIdentifier_Internal: number = number;
<ide>
<ide> export type ColorValue = null | string;
<ide> export type DimensionValue = null | number | string | AnimatedNode;
<ide> type GenericStyleProp<+T> =
<ide> | null
<ide> | void
<ide> | T
<del> | StyleSheetStyle
<add> | ____StyleSheetInternalStyleIdentifier_Internal
<ide> | number
<ide> | false
<ide> | '' | 2 |
Python | Python | remove hack for user | 67400063d9dcf7a89b80b27896d5410e3fee70f3 | <ide><path>airflow/default_login.py
<ide> login_manager.login_message = None
<ide>
<ide>
<del>class User(models.BaseUser):
<add>class DefaultUser(object):
<add> def __init__(self, user):
<add> self.user = user
<ide>
<ide> def is_active(self):
<ide> '''Required by flask_login'''
<ide> def is_superuser(self):
<ide> '''Access all the things'''
<ide> return True
<ide>
<del>models.User = User # hack!
<del>del User
<add>#models.User = User # hack!
<add>#del User
<ide>
<ide>
<ide> @login_manager.user_loader
<ide> def load_user(userid):
<ide> session.expunge_all()
<ide> session.commit()
<ide> session.close()
<del> return user
<add> return DefaultUser(user)
<ide>
<ide>
<ide> def login(self, request):
<ide> def login(self, request):
<ide> is_superuser=True)
<ide> session.merge(user)
<ide> session.commit()
<del> flask_login.login_user(user)
<add> flask_login.login_user(DefaultUser(user))
<ide> session.commit()
<ide> session.close()
<ide> return redirect(request.args.get("next") or url_for("index"))
<ide><path>airflow/models.py
<ide> def paused_dags(self):
<ide> return dag_ids
<ide>
<ide>
<del>class BaseUser(Base):
<add>class User(Base):
<ide> __tablename__ = "user"
<ide>
<ide> id = Column(Integer, primary_key=True)
<ide> username = Column(String(ID_LEN), unique=True)
<ide> email = Column(String(500))
<add> superuser = False
<ide>
<ide> def __repr__(self):
<ide> return self.username
<ide>
<ide> def get_id(self):
<ide> return str(self.id)
<ide>
<add> def is_superuser(self):
<add> return self.superuser
<add>
<ide>
<ide> class Connection(Base):
<ide> """ | 2 |
Text | Text | add v3.16.10 to changelog.md | 7328532bbea63778fae478722b6a4972372f4ec3 | <ide><path>CHANGELOG.md
<ide> - [#18694](https://github.com/emberjs/ember.js/pull/18694) [BUGFIX] Ensure tag updates are buffered, remove error message
<ide> - [#18709](https://github.com/emberjs/ember.js/pull/18709) [BUGFIX] Fix `this` in `@tracked` initializer
<ide>
<add>### v3.16.10 (August 5, 2020)
<add>
<add>- [#19028](https://github.com/emberjs/ember.js/pull/19028) [BUGFIX] Ensure setter CP's with dependent keys on curly components can be two way bound
<add>
<ide> ### v3.16.9 (July 29, 2020)
<ide>
<ide> - [#19001](https://github.com/emberjs/ember.js/pull/19001) [BUGFIX] Invoke methods correctly in `TextSupport` `sendAction` | 1 |
Javascript | Javascript | avoid accidental removal of last keyframe. | 61bbbfa3d1165b17a40167f57c0ea9c9e4b36a70 | <ide><path>src/animation/KeyframeTrackPrototype.js
<ide> KeyframeTrackPrototype = {
<ide> values = this.values,
<ide> stride = this.getValueSize(),
<ide>
<del> writeIndex = 1;
<add> smoothInterpolation = this.getInterpolation() === InterpolateSmooth,
<ide>
<del> for( var i = 1, n = times.length - 1; i <= n; ++ i ) {
<add> writeIndex = 1,
<add> lastIndex = times.length - 1;
<add>
<add> for( var i = 1; i < lastIndex; ++ i ) {
<ide>
<ide> var keep = false;
<ide>
<ide> KeyframeTrackPrototype = {
<ide>
<ide> if ( time !== timeNext && ( i !== 1 || time !== time[ 0 ] ) ) {
<ide>
<del> // remove unnecessary keyframes same as their neighbors
<del> var offset = i * stride,
<del> offsetP = offset - stride,
<del> offsetN = offset + stride;
<add> if ( ! smoothInterpolation ) {
<add>
<add> // remove unnecessary keyframes same as their neighbors
<ide>
<del> for ( var j = 0; j !== stride; ++ j ) {
<add> var offset = i * stride,
<add> offsetP = offset - stride,
<add> offsetN = offset + stride;
<ide>
<del> var value = values[ offset + j ];
<add> for ( var j = 0; j !== stride; ++ j ) {
<ide>
<del> if ( value !== values[ offsetP + j ] ||
<del> value !== values[ offsetN + j ] ) {
<add> var value = values[ offset + j ];
<ide>
<del> keep = true;
<del> break;
<add> if ( value !== values[ offsetP + j ] ||
<add> value !== values[ offsetN + j ] ) {
<add>
<add> keep = true;
<add> break;
<add>
<add> }
<ide>
<ide> }
<ide>
<del> }
<add> } else keep = true;
<ide>
<ide> }
<ide>
<ide> KeyframeTrackPrototype = {
<ide> var readOffset = i * stride,
<ide> writeOffset = writeIndex * stride;
<ide>
<del> for ( var j = 0; j !== stride; ++ j ) {
<add> for ( var j = 0; j !== stride; ++ j )
<ide>
<ide> values[ writeOffset + j ] = values[ readOffset + j ];
<ide>
<del> }
<del>
<del>
<ide> }
<ide>
<ide> ++ writeIndex;
<ide> KeyframeTrackPrototype = {
<ide>
<ide> }
<ide>
<add> // flush last keyframe (compaction looks ahead)
<add>
<add> times[ writeIndex ++ ] = times[ lastIndex ];
<add>
<add> for ( var readOffset = lastIndex * stride, j = 0; j !== stride; ++ j )
<add>
<add> values[ writeOffset + j ] = values[ readOffset + j ];
<add>
<add>
<ide> if ( writeIndex !== times.length ) {
<ide>
<ide> this.times = AnimationUtils.arraySlice( times, 0, writeIndex );
<ide> KeyframeTrackPrototype = {
<ide>
<ide> }
<ide>
<del>export { KeyframeTrackPrototype };
<ide>\ No newline at end of file
<add>export { KeyframeTrackPrototype }; | 1 |
Text | Text | fix a typo | 6fdc33203ab71b0b6716780f18aee910538c057c | <ide><path>website/docs/usage/models.md
<ide> best-matching package compatible with your spaCy installation.
<ide> >
<ide> > ```diff
<ide> > - python -m spacy download en
<del>> + python -m spacy dowmload en_core_web_sm
<add>> + python -m spacy download en_core_web_sm
<ide> > ```
<ide> >
<ide> > ```diff | 1 |
Javascript | Javascript | add fade between cascades | 52ef30f65d0ab028c0b24ff60ef3d1ada7b54668 | <ide><path>examples/jsm/csm/Shader.js
<ide> IncidentLight directLight;
<ide> DirectionalLightShadow directionalLightShadow;
<ide> #endif
<ide>
<del> #pragma unroll_loop
<ide> for ( int i = 0; i < NUM_DIR_LIGHTS; i ++ ) {
<ide>
<ide> directionalLight = directionalLights[ i ];
<ide> getDirectionalDirectLightIrradiance( directionalLight, geometry, directLight );
<del> #if defined( USE_SHADOWMAP ) && ( UNROLLED_LOOP_INDEX < NUM_DIR_LIGHT_SHADOWS )
<ide>
<del> directionalLightShadow = directionalLightShadows[ i ];
<del> if(linearDepth >= CSM_cascades[UNROLLED_LOOP_INDEX].x && linearDepth < CSM_cascades[UNROLLED_LOOP_INDEX].y) directLight.color *= all( bvec2( directLight.visible, receiveShadow ) ) ? getShadow( directionalShadowMap[ i ], directionalLightShadow.shadowMapSize, directionalLightShadow.shadowBias, directionalLightShadow.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;
<add> float margin = ( 0.25 * ( pow( linearDepth, 2.0 ) ) );
<add> float csmx = CSM_cascades[ i ].x - margin;
<add> float csmy = CSM_cascades[ i ].y + margin;
<add> if(i < NUM_DIR_LIGHT_SHADOWS && linearDepth >=csmx - margin && linearDepth < csmy + margin ) {
<ide>
<del> #endif
<add> float dist = min( linearDepth -csmx, csmy - linearDepth );
<add> dist = min( dist / margin, 1.0 );
<add>
<add> directionalLightShadow = directionalLightShadows[ i ];
<add> float mult = all( bvec2( directLight.visible, receiveShadow ) ) ? getShadow( directionalShadowMap[ i ], directionalLightShadow.shadowMapSize, directionalLightShadow.shadowBias, directionalLightShadow.shadowRadius, vDirectionalShadowCoord[ i ] ) : 1.0;
<ide>
<del> if(linearDepth >= CSM_cascades[UNROLLED_LOOP_INDEX].x && (linearDepth < CSM_cascades[UNROLLED_LOOP_INDEX].y || UNROLLED_LOOP_INDEX == CSM_CASCADES - 1)) RE_Direct( directLight, geometry, material, reflectedLight );
<add> directLight.color = mix( directLight.color, directLight.color * mult, dist );
<ide>
<add> }
<add> if(linearDepth >= CSM_cascades[ i ].x && (linearDepth < CSM_cascades[ i ].y || i == CSM_CASCADES - 1)) {
<add>
<add> RE_Direct( directLight, geometry, material, reflectedLight );
<add>
<add> }
<ide>
<ide>
<ide> } | 1 |
Text | Text | add extra material | e02ace2b1d0a97d4ba169a7ade959d1f00493d14 | <ide><path>guide/chinese/bsd-os/index.md
<ide> localeTitle: BSD操作系统
<ide>
<ide> ### 概观
<ide>
<add>BSD (Berkeley Software Distribution,伯克利软件套件)是Unix的衍生系统,在1977至1995年间由加州大学伯克利分校开发和发布的。历史上, BSD曾经被认为是UNIX的一支——"BSD UNIX", 因为它和AT&T UNIX操作系统共享基础代码和设计。在20世纪80年代,衍生出了许多变形的UNIX授权软件。比较著名的如DEC的Ultrix及Sun公司的SunOS。1990年代,BSD很大程度上被System V4.x版以及OSF/1系统所取代,晚期BSD版本为几个开源软件开发提供了平台并且一直沿用至今。今天,“BSD”并不特指任何一个BSD衍生版本,而是类UNIX操作系统中的一个分支的总称。
<add>
<ide> Berkeley Software Distribution(BSD)是一组相关的开源类Unix操作系统(OS),起源于贝尔实验室的早期版本的研究Unix。 FreeBSD是最受欢迎的成员。
<ide>
<ide> BSD配置为在一个系统上进行互联网托管,虚拟主机和托管许多服务器。 这是第一个添加了互联网协议的操作系统。 BSD操作系统具有非常强大的时间共享系统,这使得它们在进程之间共享资源的情况下最有用。 作为一种比较,众所周知,Linux操作系统更适用于单任务流程,如超级计算机和台式机。 分时有效的BSD多任务强制中断部分妨碍了专用的单个进程。
<ide> BSD包含一个“Jails”系统,它有点类似于Linux容器 - 除了在实
<ide>
<ide> [NetBSD(Wikipedia.org)](https://en.wikipedia.org/wiki/NetBSD)
<ide>
<del>[BSD操作系统的比较(Wikipedia.org)](https://en.wikipedia.org/wiki/Comparison_of_BSD_operating_systems)
<ide>\ No newline at end of file
<add>[BSD操作系统的比较(Wikipedia.org)](https://en.wikipedia.org/wiki/Comparison_of_BSD_operating_systems) | 1 |
Go | Go | use cgo for apparmor profile switch | f0f833c6d795fc8b3fb4bb379f9916745f5c7ac9 | <ide><path>pkg/libcontainer/apparmor/apparmor.go
<ide> package apparmor
<ide>
<add>// #cgo LDFLAGS: -lapparmor
<add>// #include <sys/apparmor.h>
<add>// #include <stdlib.h>
<add>import "C"
<ide> import (
<del> "fmt"
<ide> "io/ioutil"
<del> "os"
<add> "unsafe"
<ide> )
<ide>
<ide> func IsEnabled() bool {
<ide> func ApplyProfile(pid int, name string) error {
<ide> return nil
<ide> }
<ide>
<del> f, err := os.OpenFile(fmt.Sprintf("/proc/%d/attr/current", pid), os.O_WRONLY, 0)
<del> if err != nil {
<del> return err
<del> }
<del> defer f.Close()
<add> cName := C.CString(name)
<add> defer C.free(unsafe.Pointer(cName))
<ide>
<del> if _, err := fmt.Fprintf(f, "changeprofile %s", name); err != nil {
<add> if _, err := C.aa_change_onexec(cName); err != nil {
<ide> return err
<ide> }
<ide> return nil
<ide><path>pkg/libcontainer/nsinit/init.go
<ide> func (ns *linuxNs) Init(container *libcontainer.Container, uncleanRootfs, consol
<ide> return fmt.Errorf("setup mount namespace %s", err)
<ide> }
<ide>
<del> if err := apparmor.ApplyProfile(os.Getpid(), container.Context["apparmor_profile"]); err != nil {
<del> return err
<del> }
<del>
<ide> if err := setupNetwork(container, context); err != nil {
<ide> return fmt.Errorf("setup networking %s", err)
<ide> }
<ide> func (ns *linuxNs) Init(container *libcontainer.Container, uncleanRootfs, consol
<ide> return fmt.Errorf("finalize namespace %s", err)
<ide> }
<ide>
<add> if err := apparmor.ApplyProfile(os.Getpid(), container.Context["apparmor_profile"]); err != nil {
<add> return err
<add> }
<ide> return system.Execv(args[0], args[0:], container.Env)
<ide> }
<ide> | 2 |
Python | Python | make use of log_smooth_count | 3c210f45fadd1134f1efa3eaabf9e48620212040 | <ide><path>bin/init_model.py
<ide> def _read_freqs(loc, max_length=100, min_doc_freq=0, min_freq=200):
<ide> word = key
<ide> smooth_count = counts.smoother(int(freq))
<ide> log_smooth_count = math.log(smooth_count)
<del> probs[word] = math.log(smooth_count) - log_total
<add> probs[word] = log_smooth_count - log_total
<ide> oov_prob = math.log(counts.smoother(0)) - log_total
<ide> return probs, oov_prob
<ide> | 1 |
Go | Go | remove argsasstring because its a util function | 9bd7d09871d8495d06af94a8d866404569d75b8e | <ide><path>runtime/container.go
<ide> func populateCommand(c *Container, env []string) {
<ide> c.command.Env = env
<ide> }
<ide>
<del>func (container *Container) ArgsAsString() string {
<del> var args []string
<del> for _, arg := range container.Args {
<del> if strings.Contains(arg, " ") {
<del> args = append(args, fmt.Sprintf("'%s'", arg))
<del> } else {
<del> args = append(args, arg)
<del> }
<del> }
<del> return strings.Join(args, " ")
<del>}
<del>
<ide> func (container *Container) Start() (err error) {
<ide> container.Lock()
<ide> defer container.Unlock()
<ide><path>server/server.go
<ide> func (srv *Server) Containers(job *engine.Job) engine.Status {
<ide> out.SetList("Names", names[container.ID])
<ide> out.Set("Image", srv.runtime.Repositories().ImageName(container.Image))
<ide> if len(container.Args) > 0 {
<del> out.Set("Command", fmt.Sprintf("\"%s %s\"", container.Path, container.ArgsAsString()))
<add> args := []string{}
<add> for _, arg := range container.Args {
<add> if strings.Contains(arg, " ") {
<add> args = append(args, fmt.Sprintf("'%s'", arg))
<add> } else {
<add> args = append(args, arg)
<add> }
<add> }
<add> argsAsString := strings.Join(args, " ")
<add>
<add> out.Set("Command", fmt.Sprintf("\"%s %s\"", container.Path, argsAsString))
<ide> } else {
<ide> out.Set("Command", fmt.Sprintf("\"%s\"", container.Path))
<ide> } | 2 |
Javascript | Javascript | use optional chaining in `web/pdf_history.js` | 322b1072afa14c243ea443bda5044981747dede0 | <ide><path>web/pdf_history.js
<ide> class PDFHistory {
<ide> return;
<ide> }
<ide>
<del> if (this._destination && this._destination.page === pageNumber) {
<add> if (this._destination?.page === pageNumber) {
<ide> // When the new page is identical to the one in `this._destination`, we
<ide> // don't want to add a potential duplicate entry in the browser history.
<ide> return;
<ide> class PDFHistory {
<ide> if (
<ide> typeof PDFJSDev !== "undefined" &&
<ide> PDFJSDev.test("CHROME") &&
<del> window.history.state &&
<del> window.history.state.chromecomState
<add> window.history.state?.chromecomState
<ide> ) {
<ide> // history.state.chromecomState is managed by chromecom.js.
<ide> newState.chromecomState = window.history.state.chromecomState;
<ide> }
<ide> this._updateInternalState(destination, newState.uid);
<ide>
<ide> let newUrl;
<del> if (this._updateUrl && destination && destination.hash) {
<add> if (this._updateUrl && destination?.hash) {
<ide> const baseUrl = document.location.href.split("#")[0];
<ide> // Prevent errors in Firefox.
<ide> if (!baseUrl.startsWith("file://")) {
<ide> class PDFHistory {
<ide> return false;
<ide> }
<ide> const [perfEntry] = performance.getEntriesByType("navigation");
<del> if (!perfEntry || perfEntry.type !== "reload") {
<add> if (perfEntry?.type !== "reload") {
<ide> return false;
<ide> }
<ide> } else {
<ide> class PDFHistory {
<ide> clearTimeout(this._updateViewareaTimeout);
<ide> this._updateViewareaTimeout = null;
<ide> }
<del> if (removeTemporary && destination && destination.temporary) {
<add> if (removeTemporary && destination?.temporary) {
<ide> // When the `destination` comes from the browser history,
<ide> // we no longer treat it as a *temporary* position.
<ide> delete destination.temporary;
<ide> class PDFHistory {
<ide> if (
<ide> (typeof PDFJSDev !== "undefined" &&
<ide> PDFJSDev.test("CHROME") &&
<del> state &&
<del> state.chromecomState &&
<add> state?.chromecomState &&
<ide> !this._isValidState(state)) ||
<ide> !state
<ide> ) { | 1 |
Javascript | Javascript | fix route#render with slash notation | a60bd5fd92a7c3b4a5c407dbab52ae8d3030089b | <ide><path>packages/ember-routing/lib/system/route.js
<ide> Ember.Route = Ember.Object.extend({
<ide> name = this.templateName;
<ide> }
<ide>
<del> name = name || this.templateName;
<add> name = name ? name.replace(/\//g, '.') : this.templateName;
<ide>
<ide> var container = this.container,
<ide> view = container.lookup('view:' + name),
<ide><path>packages/ember/tests/routing/basic_test.js
<ide> test("The Homepage with explicit template name in renderTemplate", function() {
<ide> equal(Ember.$('h3:contains(Megatroll) + p:contains(YES I AM HOME)', '#qunit-fixture').length, 1, "The homepage template was rendered");
<ide> });
<ide>
<add>test("Renders correct view with slash notation", function() {
<add> Ember.TEMPLATES['home/page'] = compile("<p>{{view.name}}</p>");
<add>
<add> Router.map(function(match) {
<add> this.route("home", { path: "/" });
<add> });
<add>
<add> App.HomeRoute = Ember.Route.extend({
<add> renderTemplate: function() {
<add> this.render('home/page');
<add> }
<add> });
<add>
<add> App.HomePageView = Ember.View.extend({
<add> name: "Home/Page"
<add> });
<add>
<add> bootApplication();
<add>
<add> Ember.run(function() {
<add> router.handleURL("/");
<add> });
<add>
<add> equal(Ember.$('p:contains(Home/Page)', '#qunit-fixture').length, 1, "The homepage template was rendered");
<add>});
<add>
<ide> test('render does not replace templateName if user provided', function() {
<ide> Router.map(function(match) {
<ide> this.route("home", { path: "/" }); | 2 |
PHP | PHP | fix style ci | 4c3b4664c1d860425d6d9d06d1bdd1eb009cdc73 | <ide><path>tests/Integration/Http/ResourceTest.php
<ide> public function work()
<ide> 'id' => 1,
<ide> 'title' => 'Test Title 1',
<ide> ]));
<del>
<add>
<ide> return $this->filter([
<ide> new MergeValue($postResource),
<ide> 'user' => 'test user', | 1 |
Text | Text | update courses flight manual | e9aaff5fd163a318d143fb2cc4271f5774e7f242 | <ide><path>docs/courses-vscode-extension.md
<ide> This details the maintenance guidelines for the [freeCodeCamp/courses-vscode-ext
<ide>
<ide> ## Publishing the Extension
<ide>
<del>A GitHub Action automagically publishes the extension to the Visual Studio Marketplace, on pushes to the `prod` branch.
<add>A GitHub Action automagically publishes the extension to the Visual Studio Marketplace, on the release of a new GitHub Release.
<ide>
<del>Ensure the `main` branch is checked out.
<add>1. Package a new version of the extension:
<ide>
<ide> ```bash
<del>git checkout main
<add>npm run pack -- <tag_type>
<ide> ```
<ide>
<del>Update the local repository with `upstream`, and reset `main`.
<add>Where `<tag_type>` is one of: `major`, `minor`, `patch`.
<ide>
<del>```bash
<del>git fetch upstream
<del>git reset --hard upstream/main
<del>```
<del>
<del>Checkout the `prod` branch.
<add>2. Push the new version to `main`:
<ide>
<ide> ```bash
<del>git checkout prod
<add>git commit -am "<tag_type>(<version>): <description>"
<add>git push
<ide> ```
<ide>
<del>Merge the commits wanted for deployment into `prod`.
<del>
<del>```bash
<del>git merge main
<del>```
<add>Optionally, you can push directly to `upstream/main`, but opening a new PR is recommended for a sanity check.
<ide>
<del>Push the local branch to `upstream`.
<add>3. Create a new GitHub Release using the GitHub UI:
<ide>
<del>```bash
<del>git push upstream
<del>```
<add>- Correctly increment the version number, when creating a new tag.
<add>- Upload the `.vsix` file with the release.
<add>- Publish the release, and confirm the action succeeded.
<ide>
<ide> > [!NOTE]
<del>> Pushing to `upstream` requires write access to the `freeCodeCamp/courses-vscode-extension` repository.
<add>> Creating a release requires write access to the `freeCodeCamp/courses-vscode-extension` repository.
<ide>
<ide> ## Manually Publishing the Extension
<ide> | 1 |
Java | Java | fix merge bug | f9a972272ab9cf5265c652efa8e8eda9d465008d | <ide><path>rxjava-core/src/main/java/rx/internal/operators/OperatorMerge.java
<ide> public Subscriber<Observable<? extends T>> call(final Subscriber<? super T> chil
<ide> private int wip;
<ide> private boolean completed;
<ide>
<del> private SubscriptionIndexedRingBuffer<InnerSubscriber<T>> childrenSubscribers;
<add> private volatile SubscriptionIndexedRingBuffer<InnerSubscriber<T>> childrenSubscribers;
<ide>
<ide> private RxRingBuffer scalarValueQueue = null;
<ide>
<ide> private boolean drainQueuesIfNeeded() {
<ide> emitted = drainScalarValueQueue();
<ide> drainChildrenQueues();
<ide> } finally {
<del> if (!releaseEmitLock()) {
<del> return true;
<del> }
<add> boolean moreToDrain = releaseEmitLock();
<ide> // request outside of lock
<ide> request(emitted);
<add> if (!moreToDrain) {
<add> return true;
<add> }
<ide> // otherwise we'll loop and get whatever was added
<ide> }
<ide> } else {
<ide> public void onCompleted() {
<ide> }
<ide> if (c) {
<ide> // complete outside of lock
<del> actual.onCompleted();
<add> drainAndComplete();
<ide> }
<ide> }
<ide>
<ide> void completeInner(InnerSubscriber<T> s) {
<del> try {
<del> boolean sendOnComplete = false;
<del> synchronized (this) {
<del> wip--;
<del> if (wip == 0 && completed) {
<del> sendOnComplete = true;
<del> }
<del> }
<del> if (sendOnComplete) {
<del> actual.onCompleted();
<add> boolean sendOnComplete = false;
<add> synchronized (this) {
<add> wip--;
<add> if (wip == 0 && completed) {
<add> sendOnComplete = true;
<ide> }
<del> } finally {
<del> childrenSubscribers.remove(s.sindex);
<add> }
<add> childrenSubscribers.remove(s.sindex);
<add> if (sendOnComplete) {
<add> drainAndComplete();
<ide> }
<ide> }
<ide>
<add> private void drainAndComplete() {
<add> drainQueuesIfNeeded(); // TODO need to confirm whether this is needed or not
<add> actual.onCompleted();
<add> }
<add>
<ide> }
<ide>
<ide> private static final class MergeProducer<T> implements Producer {
<ide> public void request(long n) {
<ide> final MergeSubscriber<T> parentSubscriber;
<ide> final MergeProducer<T> producer;
<ide> /** Make sure the inner termination events are delivered only once. */
<del> volatile int once;
<add> volatile int terminated;
<ide> @SuppressWarnings("rawtypes")
<del> static final AtomicIntegerFieldUpdater<InnerSubscriber> ONCE_UPDATER = AtomicIntegerFieldUpdater.newUpdater(InnerSubscriber.class, "once");
<add> static final AtomicIntegerFieldUpdater<InnerSubscriber> ONCE_TERMINATED = AtomicIntegerFieldUpdater.newUpdater(InnerSubscriber.class, "terminated");
<add>
<ide> private final RxRingBuffer q = RxRingBuffer.getSpmcInstance();
<ide> /* protected by emitLock */
<ide> int emitted = 0;
<ide> public void onNext(T t) {
<ide> @Override
<ide> public void onError(Throwable e) {
<ide> // it doesn't go through queues, it immediately onErrors and tears everything down
<del> if (ONCE_UPDATER.compareAndSet(this, 0, 1)) {
<add> if (ONCE_TERMINATED.compareAndSet(this, 0, 1)) {
<ide> parentSubscriber.onError(e);
<ide> }
<ide> }
<ide>
<ide> @Override
<ide> public void onCompleted() {
<del> if (ONCE_UPDATER.compareAndSet(this, 0, 1)) {
<add> if (ONCE_TERMINATED.compareAndSet(this, 0, 1)) {
<ide> emit(null, true);
<ide> }
<ide> }
<ide><path>rxjava-core/src/main/java/rx/internal/util/IndexedRingBuffer.java
<ide> public int add(E e) {
<ide> }
<ide>
<ide> public E remove(int index) {
<del> try {
<del> E e;
<del> if (index < SIZE) {
<del> // fast-path when we are in the first section
<del> e = elements.array.getAndSet(index, null);
<del> } else {
<del> int sectionIndex = index % SIZE;
<del> e = getElementSection(index).array.getAndSet(sectionIndex, null);
<del> }
<del> pushRemovedIndex(index);
<del> return e;
<del> } catch (NullPointerException ne) {
<del> ne.printStackTrace();
<del> throw ne;
<add> E e;
<add> if (index < SIZE) {
<add> // fast-path when we are in the first section
<add> e = elements.array.getAndSet(index, null);
<add> } else {
<add> int sectionIndex = index % SIZE;
<add> e = getElementSection(index).array.getAndSet(sectionIndex, null);
<ide> }
<add> pushRemovedIndex(index);
<add> return e;
<ide> }
<ide>
<ide> private IndexSection getIndexSection(int index) {
<ide><path>rxjava-core/src/main/java/rx/internal/util/RxRingBuffer.java
<ide> public int count() {
<ide> }
<ide> return queue.size();
<ide> }
<add>
<add> public boolean isEmpty() {
<add> if (queue == null) {
<add> return true;
<add> }
<add> return queue.isEmpty();
<add> }
<ide>
<ide> public Object poll() {
<ide> if (queue == null) {
<ide><path>rxjava-core/src/main/java/rx/internal/util/SubscriptionList.java
<ide> public void unsubscribe() {
<ide> }
<ide> // we will only get here once
<ide> unsubscribeFromAll(subscriptions);
<add> subscriptions = null;
<ide> }
<ide>
<ide> private static void unsubscribeFromAll(Collection<Subscription> subscriptions) { | 4 |
Go | Go | export noraw=1 to disable raw mode in the client | 8c142c612737f45fb97d9ad949b1c3bd42d38c71 | <ide><path>docker/docker.go
<ide> func Fatal(err error) {
<ide>
<ide> func main() {
<ide> var err error
<del> if IsTerminal(0) {
<add> if IsTerminal(0) && os.Getenv("NORAW") == "" {
<ide> oldState, err = MakeRaw(0)
<ide> if err != nil {
<ide> panic(err)
<ide> func main() {
<ide> if err := <-receive_stdout; err != nil {
<ide> Fatal(err)
<ide> }
<del> if IsTerminal(0) {
<add> if IsTerminal(0) && os.Getenv("NORAW") == "" {
<ide> Restore(0, oldState)
<ide> } else {
<ide> if err := <-send_stdin; err != nil { | 1 |
Python | Python | use native theano bn | f3e7245910eabd1bcaddc0d3e6bfd0f3eabd4eb1 | <ide><path>keras/backend/theano_backend.py
<ide> def normalize_batch_in_training(x, gamma, beta,
<ide> def batch_normalization(x, mean, std, beta, gamma, epsilon=0.0001):
<ide> '''Apply batch normalization on x given mean, std, beta and gamma.
<ide> '''
<del> normed = (x - mean) * (gamma * T.inv(std + epsilon)) + beta
<add> normed = T.nnet.bn.batch_normalization(x, gamma, beta, mean, std + epsilon,
<add> mode='high_mem')
<ide> return normed
<ide>
<ide> | 1 |
Java | Java | improve javadoc on reactive classes | 3e096ce810d6d7c19529a975f1b68eb0e91e4d47 | <ide><path>spring-core/src/main/java/org/springframework/core/codec/AbstractDecoder.java
<ide> import org.springframework.util.MimeType;
<ide>
<ide> /**
<add> * Abstract base class for {@link Decoder} implementations.
<add> *
<ide> * @author Sebastien Deleuze
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide>
<ide> private List<MimeType> decodableMimeTypes = Collections.emptyList();
<ide>
<add>
<ide> protected AbstractDecoder(MimeType... supportedMimeTypes) {
<ide> this.decodableMimeTypes = Arrays.asList(supportedMimeTypes);
<ide> }
<ide> public boolean canDecode(ResolvableType elementType, MimeType mimeType, Object..
<ide> if (mimeType == null) {
<ide> return true;
<ide> }
<del> return this.decodableMimeTypes.stream().
<del> anyMatch(mt -> mt.isCompatibleWith(mimeType));
<add> return this.decodableMimeTypes.stream().anyMatch(m -> m.isCompatibleWith(mimeType));
<ide> }
<ide>
<ide> @Override
<del> public Mono<T> decodeToMono(Publisher<DataBuffer> inputStream, ResolvableType elementType, MimeType mimeType, Object... hints) {
<add> public Mono<T> decodeToMono(Publisher<DataBuffer> inputStream, ResolvableType elementType,
<add> MimeType mimeType, Object... hints) {
<add>
<ide> throw new UnsupportedOperationException();
<ide> }
<add>
<ide> }
<ide><path>spring-core/src/main/java/org/springframework/core/codec/AbstractEncoder.java
<ide> import org.springframework.util.MimeType;
<ide>
<ide> /**
<add> * Abstract base class for {@link Decoder} implementations.
<add> *
<ide> * @author Sebastien Deleuze
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide>
<ide> private List<MimeType> encodableMimeTypes = Collections.emptyList();
<ide>
<add>
<ide> protected AbstractEncoder(MimeType... supportedMimeTypes) {
<ide> this.encodableMimeTypes = Arrays.asList(supportedMimeTypes);
<ide> }
<ide> public boolean canEncode(ResolvableType elementType, MimeType mimeType, Object..
<ide> if (mimeType == null) {
<ide> return true;
<ide> }
<del> return this.encodableMimeTypes.stream().
<del> anyMatch(mt -> mt.isCompatibleWith(mimeType));
<add> return this.encodableMimeTypes.stream().anyMatch(m -> m.isCompatibleWith(mimeType));
<ide> }
<ide>
<ide> }
<ide><path>spring-core/src/main/java/org/springframework/core/codec/AbstractSingleValueEncoder.java
<ide> import org.springframework.util.MimeType;
<ide>
<ide> /**
<del> * Abstract base class for {@link org.springframework.core.codec.Encoder} classes that
<del> * can only deal with a single value.
<add> * Abstract base class for {@link org.springframework.core.codec.Encoder}
<add> * classes that can only deal with a single value.
<add> *
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide> */
<ide> public abstract class AbstractSingleValueEncoder<T> extends AbstractEncoder<T> {
<ide>
<add>
<ide> public AbstractSingleValueEncoder(MimeType... supportedMimeTypes) {
<ide> super(supportedMimeTypes);
<ide> }
<ide>
<add>
<ide> @Override
<ide> public final Flux<DataBuffer> encode(Publisher<? extends T> inputStream,
<ide> DataBufferFactory bufferFactory, ResolvableType elementType, MimeType mimeType,
<ide> Object... hints) {
<add>
<ide> return Flux.from(inputStream).
<ide> take(1).
<ide> concatMap(t -> {
<ide> public final Flux<DataBuffer> encode(Publisher<? extends T> inputStream,
<ide> }
<ide>
<ide> /**
<del> * Encodes {@code T} to an output {@link DataBuffer} stream.
<add> * Encode {@code T} to an output {@link DataBuffer} stream.
<ide> * @param t the value to process
<ide> * @param dataBufferFactory a buffer factory used to create the output
<ide> * @param type the stream element type to process
<ide> public final Flux<DataBuffer> encode(Publisher<? extends T> inputStream,
<ide> protected abstract Flux<DataBuffer> encode(T t, DataBufferFactory dataBufferFactory,
<ide> ResolvableType type, MimeType mimeType, Object... hints) throws Exception;
<ide>
<del>
<ide> }
<ide><path>spring-core/src/main/java/org/springframework/core/codec/ByteBufferDecoder.java
<ide> import org.springframework.util.MimeTypeUtils;
<ide>
<ide> /**
<add> * Decoder for {@link ByteBuffer}s.
<add> *
<ide> * @author Sebastien Deleuze
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide> public boolean canDecode(ResolvableType elementType, MimeType mimeType, Object..
<ide> @Override
<ide> public Flux<ByteBuffer> decode(Publisher<DataBuffer> inputStream, ResolvableType elementType,
<ide> MimeType mimeType, Object... hints) {
<add>
<ide> return Flux.from(inputStream).map((dataBuffer) -> {
<ide> ByteBuffer copy = ByteBuffer.allocate(dataBuffer.readableByteCount());
<ide> copy.put(dataBuffer.asByteBuffer());
<ide><path>spring-core/src/main/java/org/springframework/core/codec/ByteBufferEncoder.java
<ide> import org.springframework.util.MimeTypeUtils;
<ide>
<ide> /**
<add> * Encoder for {@link ByteBuffer}s.
<add> *
<ide> * @author Sebastien Deleuze
<ide> * @since 5.0
<ide> */
<ide> public class ByteBufferEncoder extends AbstractEncoder<ByteBuffer> {
<ide>
<add>
<ide> public ByteBufferEncoder() {
<ide> super(MimeTypeUtils.ALL);
<ide> }
<ide><path>spring-core/src/main/java/org/springframework/core/codec/ResourceDecoder.java
<ide> import org.springframework.util.MimeTypeUtils;
<ide>
<ide> /**
<del> * A decoder for {@link Resource}s.
<add> * Decoder for {@link Resource}s.
<ide> *
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide> */
<ide> public class ResourceDecoder extends AbstractDecoder<Resource> {
<ide>
<add>
<ide> public ResourceDecoder() {
<ide> super(MimeTypeUtils.ALL);
<ide> }
<ide>
<add>
<ide> @Override
<ide> public boolean canDecode(ResolvableType elementType, MimeType mimeType, Object... hints) {
<ide> Class<?> clazz = elementType.getRawClass();
<ide> public boolean canDecode(ResolvableType elementType, MimeType mimeType, Object..
<ide> @Override
<ide> public Flux<Resource> decode(Publisher<DataBuffer> inputStream, ResolvableType elementType,
<ide> MimeType mimeType, Object... hints) {
<add>
<ide> Class<?> clazz = elementType.getRawClass();
<ide>
<ide> Mono<byte[]> byteArray = Flux.from(inputStream).
<ide> public Flux<Resource> decode(Publisher<DataBuffer> inputStream, ResolvableType e
<ide>
<ide>
<ide> if (InputStreamResource.class.equals(clazz)) {
<del> return Flux.from(byteArray.
<del> map(ByteArrayInputStream::new).
<del> map(InputStreamResource::new));
<add> return Flux.from(byteArray.map(ByteArrayInputStream::new).map(InputStreamResource::new));
<ide> }
<ide> else if (clazz.isAssignableFrom(ByteArrayResource.class)) {
<del> return Flux.from(byteArray.
<del> map(ByteArrayResource::new));
<add> return Flux.from(byteArray.map(ByteArrayResource::new));
<ide> }
<ide> else {
<del> return Flux.error(new IllegalStateException(
<del> "Unsupported resource class: " + clazz));
<add> return Flux.error(new IllegalStateException("Unsupported resource class: " + clazz));
<ide> }
<ide> }
<ide> }
<ide><path>spring-core/src/main/java/org/springframework/core/codec/ResourceEncoder.java
<ide> import org.springframework.util.StreamUtils;
<ide>
<ide> /**
<del> * An encoder for {@link Resource}s.
<add> * Encoder for {@link Resource}s.
<add> *
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide> */
<ide> public class ResourceEncoder extends AbstractSingleValueEncoder<Resource> {
<ide>
<ide> public static final int DEFAULT_BUFFER_SIZE = StreamUtils.BUFFER_SIZE;
<ide>
<add>
<ide> private final int bufferSize;
<ide>
<add>
<ide> public ResourceEncoder() {
<ide> this(DEFAULT_BUFFER_SIZE);
<ide> }
<ide> public ResourceEncoder(int bufferSize) {
<ide> this.bufferSize = bufferSize;
<ide> }
<ide>
<add>
<ide> @Override
<ide> public boolean canEncode(ResolvableType elementType, MimeType mimeType, Object... hints) {
<ide> Class<?> clazz = elementType.getRawClass();
<del> return (super.canEncode(elementType, mimeType, hints) &&
<del> Resource.class.isAssignableFrom(clazz));
<add> return (super.canEncode(elementType, mimeType, hints) && Resource.class.isAssignableFrom(clazz));
<ide> }
<ide>
<ide> @Override
<del> protected Flux<DataBuffer> encode(Resource resource,
<del> DataBufferFactory dataBufferFactory,
<add> protected Flux<DataBuffer> encode(Resource resource, DataBufferFactory dataBufferFactory,
<ide> ResolvableType type, MimeType mimeType, Object... hints) throws IOException {
<add>
<ide> InputStream is = resource.getInputStream();
<ide> return DataBufferUtils.read(is, dataBufferFactory, bufferSize);
<ide> }
<ide><path>spring-core/src/main/java/org/springframework/core/codec/StringEncoder.java
<ide> public class StringEncoder extends AbstractEncoder<String> {
<ide>
<ide> public static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8;
<ide>
<add>
<ide> public StringEncoder() {
<ide> super(new MimeType("text", "plain", DEFAULT_CHARSET));
<ide> }
<ide> public boolean canEncode(ResolvableType elementType, MimeType mimeType, Object..
<ide>
<ide> @Override
<ide> public Flux<DataBuffer> encode(Publisher<? extends String> inputStream,
<del> DataBufferFactory bufferFactory, ResolvableType elementType, MimeType mimeType,
<del> Object... hints) {
<add> DataBufferFactory bufferFactory, ResolvableType elementType,
<add> MimeType mimeType, Object... hints) {
<add>
<ide> Charset charset;
<ide> if (mimeType != null && mimeType.getCharset() != null) {
<ide> charset = mimeType.getCharset();
<ide><path>spring-core/src/main/java/org/springframework/core/convert/support/MonoToCompletableFutureConverter.java
<ide> import org.springframework.core.convert.converter.GenericConverter;
<ide>
<ide> /**
<add> * Converter to adapt {@link CompletableFuture} to Reactive Streams and
<add> * Reactor {@link Mono}.
<add> *
<ide> * @author Sebastien Deleuze
<ide> * @since 5.0
<ide> */
<ide> public class MonoToCompletableFutureConverter implements GenericConverter {
<ide>
<add>
<ide> @Override
<ide> public Set<GenericConverter.ConvertiblePair> getConvertibleTypes() {
<ide> Set<GenericConverter.ConvertiblePair> pairs = new LinkedHashSet<>(2);
<ide> public Set<GenericConverter.ConvertiblePair> getConvertibleTypes() {
<ide> return pairs;
<ide> }
<ide>
<add>
<ide> @Override
<ide> public Object convert(Object source, TypeDescriptor sourceType, TypeDescriptor targetType) {
<ide> if (source == null) {
<ide><path>spring-core/src/main/java/org/springframework/core/convert/support/ReactorToRxJava1Converter.java
<ide> import org.springframework.core.convert.converter.GenericConverter;
<ide>
<ide> /**
<add> * Converter to adapt RxJava1 {@link Observable}, {@link Single}, and
<add> * {@link Completable} to Reactive Streams and Reactor types.
<add> *
<ide> * @author Stephane Maldini
<ide> * @author Sebastien Deleuze
<ide> * @since 5.0
<ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/DataBufferFactory.java
<ide> import java.nio.ByteBuffer;
<ide>
<ide> /**
<del> * A factory for {@link DataBuffer}s, allowing for allocation and wrapping of data
<del> * buffers.
<add> * A factory for {@link DataBuffer}s,allowing for allocation and wrapping of
<add> * data buffers.
<ide> *
<ide> * @author Arjen Poutsma
<ide> * @see DataBuffer
<ide> public interface DataBufferFactory {
<ide>
<ide> /**
<del> * Allocates a data buffer of a default initial capacity. Depending on the underlying
<del> * implementation and its configuration, this will be heap-based or direct buffer.
<add> * Allocate a data buffer of a default initial capacity. Depending on the
<add> * underlying implementation and its configuration, this will be heap-based
<add> * or direct buffer.
<ide> * @return the allocated buffer
<ide> */
<ide> DataBuffer allocateBuffer();
<ide>
<ide> /**
<del> * Allocates a data buffer of the given initial capacity. Depending on the underlying
<del> * implementation and its configuration, this will be heap-based or direct buffer.
<del> * @param initialCapacity the initial capacity of the buffer to allocateBuffer
<add> * Allocate a data buffer of the given initial capacity. Depending on the
<add> * underlying implementation and its configuration, this will be heap-based
<add> * or direct buffer.
<add> * @param initialCapacity the initial capacity of the buffer to allocate
<ide> * @return the allocated buffer
<ide> */
<ide> DataBuffer allocateBuffer(int initialCapacity);
<ide>
<ide> /**
<del> * Wraps the given {@link ByteBuffer} in a {@code DataBuffer}.
<add> * Wrap the given {@link ByteBuffer} in a {@code DataBuffer}.
<ide> * @param byteBuffer the NIO byte buffer to wrap
<ide> * @return the wrapped buffer
<ide> */
<ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/DefaultDataBuffer.java
<ide> public class DefaultDataBuffer implements DataBuffer {
<ide> private int writePosition;
<ide>
<ide> /**
<del> * Creates a new {@code DefaultDataBuffer} based on the given {@code ByteBuffer}. Both
<del> * reading and writing position of this buffer are based on the current {@linkplain
<add> * Create a new {@code DefaultDataBuffer} based on the given
<add> * {@code ByteBuffer}. Both reading and writing position of this buffer are
<add> * based on the current {@linkplain
<ide> * ByteBuffer#position() position} of the given buffer.
<ide> * @param byteBuffer the buffer to base this buffer on
<ide> */
<ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/DefaultDataBufferFactory.java
<ide>
<ide> /**
<ide> * Default implementation of the {@code DataBufferFactory} interface. Allows for
<del> * specification of the default initial capacity at construction time, as well as whether
<del> * heap-based or direct buffers are to be preferred.
<add> * specification of the default initial capacity at construction time, as well
<add> * as whether heap-based or direct buffers are to be preferred.
<ide> *
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide> public class DefaultDataBufferFactory implements DataBufferFactory {
<ide>
<ide> private final int defaultInitialCapacity;
<ide>
<add>
<ide> /**
<ide> * Creates a new {@code DefaultDataBufferFactory} with default settings.
<ide> */
<ide> public DefaultDataBufferFactory() {
<ide> }
<ide>
<ide> /**
<del> * Creates a new {@code DefaultDataBufferFactory}, indicating whether direct buffers
<del> * should be created by {@link #allocateBuffer()} and {@link #allocateBuffer(int)}.
<del> * @param preferDirect {@code true} if direct buffers are to be preferred; {@code
<del> * false} otherwise
<add> * Creates a new {@code DefaultDataBufferFactory}, indicating whether direct
<add> * buffers should be created by {@link #allocateBuffer()} and
<add> * {@link #allocateBuffer(int)}.
<add> * @param preferDirect {@code true} if direct buffers are to be preferred;
<add> * {@code false} otherwise
<ide> */
<ide> public DefaultDataBufferFactory(boolean preferDirect) {
<ide> this(preferDirect, DEFAULT_INITIAL_CAPACITY);
<ide> }
<ide>
<ide> /**
<del> * Creates a new {@code DefaultDataBufferFactory}, indicating whether direct buffers
<del> * should be created by {@link #allocateBuffer()} and {@link #allocateBuffer(int)},
<del> * and what the capacity is to be used for {@link #allocateBuffer()}.
<del> * @param preferDirect {@code true} if direct buffers are to be preferred; {@code
<del> * false} otherwise
<add> * Creates a new {@code DefaultDataBufferFactory}, indicating whether direct
<add> * buffers should be created by {@link #allocateBuffer()} and
<add> * {@link #allocateBuffer(int)}, and what the capacity is to be used for
<add> * {@link #allocateBuffer()}.
<add> * @param preferDirect {@code true} if direct buffers are to be preferred;
<add> * {@code false} otherwise
<ide> */
<ide> public DefaultDataBufferFactory(boolean preferDirect, int defaultInitialCapacity) {
<ide> Assert.isTrue(defaultInitialCapacity > 0,
<ide> public DefaultDataBufferFactory(boolean preferDirect, int defaultInitialCapacity
<ide> this.defaultInitialCapacity = defaultInitialCapacity;
<ide> }
<ide>
<add>
<ide> @Override
<ide> public DefaultDataBuffer allocateBuffer() {
<ide> return allocateBuffer(this.defaultInitialCapacity);
<ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/FlushingDataBuffer.java
<ide> import java.util.function.IntPredicate;
<ide>
<ide> /**
<del> * Empty {@link DataBuffer} that indicates to the file or the socket writing it that
<del> * previously buffered data should be flushed.
<add> * Empty {@link DataBuffer} that indicates to the file or the socket writing it
<add> * that previously buffered data should be flushed.
<ide> *
<ide> * @author Sebastien Deleuze
<ide> * @since 5.0
<ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/NettyDataBuffer.java
<ide> import org.springframework.util.ObjectUtils;
<ide>
<ide> /**
<del> * Implementation of the {@code DataBuffer} interface that wraps a Netty {@link ByteBuf}.
<del> * Typically constructed using the {@link NettyDataBufferFactory}.
<add> * Implementation of the {@code DataBuffer} interface that wraps a Netty
<add> * {@link ByteBuf}. Typically constructed with {@link NettyDataBufferFactory}.
<ide> *
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/NettyDataBufferFactory.java
<ide> public class NettyDataBufferFactory implements DataBufferFactory {
<ide>
<ide> private final ByteBufAllocator byteBufAllocator;
<ide>
<add>
<ide> /**
<ide> * Creates a new {@code NettyDataBufferFactory} based on the given factory.
<ide> * @param byteBufAllocator the factory to use
<ide> public NettyDataBufferFactory(ByteBufAllocator byteBufAllocator) {
<ide> this.byteBufAllocator = byteBufAllocator;
<ide> }
<ide>
<add>
<ide> @Override
<ide> public NettyDataBuffer allocateBuffer() {
<ide> ByteBuf byteBuf = this.byteBufAllocator.buffer();
<ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/PooledDataBuffer.java
<ide> package org.springframework.core.io.buffer;
<ide>
<ide> /**
<del> * Extension of {@link DataBuffer} that allows for buffer that share a memory pool.
<del> * Introduces methods for reference counting.
<add> * Extension of {@link DataBuffer} that allows for buffer that share a memory
<add> * pool. Introduces methods for reference counting.
<ide> *
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide> public interface PooledDataBuffer extends DataBuffer {
<ide> PooledDataBuffer retain();
<ide>
<ide> /**
<del> * Decreases the reference count for this buffer by one, and releases it once the
<del> * count reaches zero.
<add> * Decreases the reference count for this buffer by one, and releases it
<add> * once the count reaches zero.
<ide> * @return {@code true} if the buffer was released; {@code false} otherwise.
<ide> */
<ide> boolean release();
<ide><path>spring-core/src/main/java/org/springframework/core/io/buffer/support/DataBufferUtils.java
<ide> public static Flux<DataBuffer> read(ReadableByteChannel channel,
<ide>
<ide> /**
<ide> * Relays buffers from the given {@link Publisher} until the total
<del> * {@linkplain DataBuffer#readableByteCount() byte count} reaches the given maximum
<del> * byte count, or until the publisher is complete.
<add> * {@linkplain DataBuffer#readableByteCount() byte count} reaches the given
<add> * maximum byte count, or until the publisher is complete.
<ide> * @param publisher the publisher to filter
<ide> * @param maxByteCount the maximum byte count
<ide> * @return a flux whose maximum byte count is {@code maxByteCount}
<ide><path>spring-web-reactive/src/main/java/org/springframework/web/reactive/accept/MappingContentTypeResolver.java
<ide> import org.springframework.http.MediaType;
<ide>
<ide> /**
<del> * An extension of {@link RequestedContentTypeResolver} that maintains a mapping between
<del> * keys (e.g. file extension, query parameter) and media types.
<add> * An extension of {@link RequestedContentTypeResolver} that maintains a mapping
<add> * between keys (e.g. file extension, query parameter) and media types.
<ide> *
<ide> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide><path>spring-web-reactive/src/main/java/org/springframework/web/reactive/accept/ParameterContentTypeResolver.java
<ide> import org.springframework.web.server.ServerWebExchange;
<ide>
<ide> /**
<del> * A {@link RequestedContentTypeResolver} that extracts the media type lookup key from a
<del> * known query parameter named "format" by default.
<add> * A {@link RequestedContentTypeResolver} that extracts the media type lookup
<add> * key from a known query parameter named "format" by default.
<ide> *
<ide> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide><path>spring-web-reactive/src/main/java/org/springframework/web/reactive/accept/PathExtensionContentTypeResolver.java
<ide> import org.springframework.web.util.WebUtils;
<ide>
<ide> /**
<del> * A {@link RequestedContentTypeResolver} that extracts the file extension from the
<del> * request path and uses that as the media type lookup key.
<add> * A {@link RequestedContentTypeResolver} that extracts the file extension from
<add> * the request path and uses that as the media type lookup key.
<ide> *
<ide> * <p>If the file extension is not found in the explicit registrations provided
<ide> * to the constructor, the Java Activation Framework (JAF) is used as a fallback
<ide><path>spring-web-reactive/src/main/java/org/springframework/web/reactive/accept/RequestedContentTypeResolver.java
<ide> import org.springframework.web.server.ServerWebExchange;
<ide>
<ide> /**
<del> * Strategy for resolving the requested media types for a {@code ServerWebExchange}.
<add> * Strategy for resolving the requested media types for a
<add> * {@code ServerWebExchange}.
<ide> *
<ide> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide><path>spring-web-reactive/src/main/java/org/springframework/web/reactive/accept/RequestedContentTypeResolverBuilder.java
<ide>
<ide> /**
<ide> * Factory to create a {@link CompositeContentTypeResolver} and configure it with
<del> * one or more {@link RequestedContentTypeResolver} instances with build style methods.
<del> * The following table shows methods, resulting strategy instances, and if in
<del> * use by default:
<add> * one or more {@link RequestedContentTypeResolver} instances with build style
<add> * methods. The following table shows methods, resulting strategy instances, and
<add> * if in use by default:
<ide> *
<ide> * <table>
<ide> * <tr>
<ide> * </tr>
<ide> * </table>
<ide> *
<del> * <p>The order in which resolvers are configured is fixed. Config methods may only
<del> * turn individual resolvers on or off. If you need a custom order for any
<del> * reason simply instantiate {@code {@link CompositeContentTypeResolver}} directly.
<add> * <p>The order in which resolvers are configured is fixed. Config methods may
<add> * only turn individual resolvers on or off. If you need a custom order for any
<add> * reason simply instantiate {@code {@link CompositeContentTypeResolver}}
<add> * directly.
<ide> *
<ide> * <p>For the path extension and parameter resolvers you may explicitly add
<ide> * {@link #mediaTypes(Map)}. This will be used to resolve path extensions or a
<ide><path>spring-web-reactive/src/main/java/org/springframework/web/reactive/result/method/InvocableHandlerMethod.java
<ide>
<ide>
<ide> /**
<add> * Extension of HandlerMethod that can invoke the target method after resolving
<add> * its method arguments.
<add> *
<ide> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide> */
<ide><path>spring-web-reactive/src/main/java/org/springframework/web/reactive/result/view/AbstractView.java
<ide> import org.springframework.web.server.ServerWebExchange;
<ide>
<ide> /**
<add> * Base class for {@link View} implementations.
<ide> *
<ide> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/AbstractClientHttpRequest.java
<ide> public abstract class AbstractClientHttpRequest implements ClientHttpRequest {
<ide>
<ide> private final List<Supplier<? extends Mono<Void>>> beforeCommitActions = new ArrayList<>(4);
<ide>
<add>
<ide> public AbstractClientHttpRequest() {
<ide> this(new HttpHeaders());
<ide> }
<ide> public AbstractClientHttpRequest(HttpHeaders headers) {
<ide> this.cookies = new LinkedMultiValueMap<>();
<ide> }
<ide>
<add>
<ide> @Override
<ide> public HttpHeaders getHeaders() {
<ide> if (State.COMITTED.equals(this.state.get())) {
<ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/ClientHttpConnector.java
<ide> import org.springframework.http.HttpMethod;
<ide>
<ide> /**
<del> * Client abstraction for HTTP client runtimes.
<del> * {@link ClientHttpConnector} drives the underlying HTTP client implementation
<del> * so as to connect to the origin server and provide all the necessary infrastructure
<del> * to send the actual {@link ClientHttpRequest} and receive the {@link ClientHttpResponse}
<add> * Abstraction over HTTP clients driving the underlying HTTP client to connect
<add> * to the origin server and provide all necessary infrastructure to send a
<add> * {@link ClientHttpRequest} and receive a {@link ClientHttpResponse}.
<ide> *
<ide> * @author Brian Clozel
<ide> * @since 5.0
<ide> */
<ide> public interface ClientHttpConnector {
<ide>
<ide> /**
<del> * Connect to the origin server using the given {@code HttpMethod} and {@code URI},
<del> * then apply the given {@code requestCallback} on the {@link ClientHttpRequest}
<del> * once the connection has been established.
<add> * Connect to the origin server using the given {@code HttpMethod} and
<add> * {@code URI}, then apply the given {@code requestCallback} on the
<add> * {@link ClientHttpRequest} once the connection has been established.
<ide> * <p>Return a publisher of the {@link ClientHttpResponse}.
<ide> *
<ide> * @param method the HTTP request method
<ide> * @param uri the HTTP request URI
<ide> * @param requestCallback a function that prepares and writes the request,
<del> * returning a publisher that signals when it's done interacting with the request.
<del> * Implementations should return a {@code Mono<Void>} by calling
<add> * returning a publisher that signals when it's done interacting with the
<add> * request. Implementations should return a {@code Mono<Void>} by calling
<ide> * {@link ClientHttpRequest#writeWith} or {@link ClientHttpRequest#setComplete}.
<ide> * @return a publisher of the {@link ClientHttpResponse}
<ide> */
<ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/ClientHttpRequest.java
<ide> import org.springframework.util.MultiValueMap;
<ide>
<ide> /**
<del> * Represents a reactive client-side HTTP request.
<add> * Represents a client-side reactive HTTP request.
<ide> *
<ide> * @author Arjen Poutsma
<ide> * @author Brian Clozel
<ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/ClientHttpResponse.java
<ide> import org.springframework.util.MultiValueMap;
<ide>
<ide> /**
<del> * Represents a reactive client-side HTTP response.
<add> * Represents a client-side reactive HTTP response.
<ide> *
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/ReactorClientHttpConnector.java
<ide> public Mono<ClientHttpResponse> connect(HttpMethod method, URI uri,
<ide> .otherwise(HttpException.class, exc -> Mono.just(exc.getChannel()))
<ide> .map(httpInbound -> new ReactorClientHttpResponse(httpInbound));
<ide> }
<add>
<ide> }
<ide>\ No newline at end of file
<ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/ReactorClientHttpRequest.java
<ide> import org.springframework.http.HttpMethod;
<ide>
<ide> /**
<del> * {@link ClientHttpRequest} implementation for the Reactor-Netty HTTP client
<add> * {@link ClientHttpRequest} implementation for the Reactor-Netty HTTP client.
<ide> *
<ide> * @author Brian Clozel
<ide> * @since 5.0
<ide> public class ReactorClientHttpRequest extends AbstractClientHttpRequest {
<ide>
<ide> private final NettyDataBufferFactory bufferFactory;
<ide>
<add>
<ide> public ReactorClientHttpRequest(HttpMethod httpMethod, URI uri, HttpClientRequest httpRequest) {
<ide> this.httpMethod = httpMethod;
<ide> this.uri = uri;
<ide> this.httpRequest = httpRequest;
<ide> this.bufferFactory = new NettyDataBufferFactory(httpRequest.delegate().alloc());
<ide> }
<ide>
<add>
<ide> @Override
<ide> public DataBufferFactory bufferFactory() {
<ide> return this.bufferFactory;
<ide><path>spring-web/src/main/java/org/springframework/http/client/reactive/ReactorClientHttpResponse.java
<ide> import org.springframework.util.MultiValueMap;
<ide>
<ide> /**
<del> * {@link ClientHttpResponse} implementation for the Reactor-Netty HTTP client
<add> * {@link ClientHttpResponse} implementation for the Reactor-Netty HTTP client.
<ide> *
<ide> * @author Brian Clozel
<ide> * @see reactor.io.netty.http.HttpClient
<ide> public class ReactorClientHttpResponse implements ClientHttpResponse {
<ide>
<ide> private final HttpInbound response;
<ide>
<add>
<ide> public ReactorClientHttpResponse(HttpInbound response) {
<ide> this.response = response;
<ide> this.dataBufferFactory = new NettyDataBufferFactory(response.delegate().alloc());
<ide> }
<ide>
<add>
<ide> @Override
<ide> public Flux<DataBuffer> getBody() {
<ide> return response.receive()
<ide><path>spring-web/src/main/java/org/springframework/http/codec/SseEvent.java
<ide> import org.springframework.http.codec.SseEventEncoder;
<ide>
<ide> /**
<del> * Represent a Server-Sent Event.
<del> *
<del> * <p>{@code Flux<SseEvent>} is Spring Web Reactive equivalent to Spring MVC
<del> * {@code SseEmitter} type. It allows to send Server-Sent Events in a reactive way.
<add> * Representation for a Server-Sent Event for use with Spring's reactive Web
<add> * support. {@code Flux<SseEvent>} or {@code Observable<SseEvent>} is the
<add> * reactive equivalent to Spring MVC's {@code SseEmitter}.
<ide> *
<ide> * @author Sebastien Deleuze
<ide> * @since 5.0
<ide> public class SseEvent {
<ide>
<ide> private String comment;
<ide>
<add>
<ide> /**
<ide> * Create an empty instance.
<ide> */
<ide> public SseEvent(Object data, MediaType mediaType) {
<ide> this.mediaType = mediaType;
<ide> }
<ide>
<add>
<ide> /**
<ide> * Set the {@code id} SSE field
<ide> */
<ide><path>spring-web/src/main/java/org/springframework/http/codec/SseEventEncoder.java
<ide> import org.springframework.util.MimeType;
<ide>
<ide> /**
<del> * An encoder for {@link SseEvent}s that also supports any other kind of {@link Object}
<del> * (in that case, the object will be the data of the {@link SseEvent}).
<add> * Encoder that supports a stream of {@link SseEvent}s and also plain
<add> * {@link Object}s which is the same as an {@link SseEvent} with data
<add> * only.
<add> *
<ide> * @author Sebastien Deleuze
<ide> * @since 5.0
<ide> */
<ide><path>spring-web/src/main/java/org/springframework/http/codec/xml/Jaxb2Decoder.java
<ide> import org.springframework.util.xml.StaxUtils;
<ide>
<ide> /**
<del> * Decode from a bytes stream of XML elements to a stream of {@code Object} (POJO).
<add> * Decode from a bytes stream containing XML elements to a stream of
<add> * {@code Object}s (POJOs).
<ide> *
<ide> * @author Sebastien Deleuze
<ide> * @author Arjen Poutsma
<ide> public class Jaxb2Decoder extends AbstractDecoder<Object> {
<ide>
<ide> private final JaxbContextContainer jaxbContexts = new JaxbContextContainer();
<ide>
<add>
<ide> public Jaxb2Decoder() {
<ide> super(MimeTypeUtils.APPLICATION_XML, MimeTypeUtils.TEXT_XML);
<ide> }
<ide>
<add>
<ide> @Override
<ide> public boolean canDecode(ResolvableType elementType, MimeType mimeType, Object... hints) {
<ide> if (super.canDecode(elementType, mimeType, hints)) {
<ide> public boolean canDecode(ResolvableType elementType, MimeType mimeType, Object..
<ide> @Override
<ide> public Flux<Object> decode(Publisher<DataBuffer> inputStream, ResolvableType elementType,
<ide> MimeType mimeType, Object... hints) {
<add>
<ide> Class<?> outputClass = elementType.getRawClass();
<ide> Flux<XMLEvent> xmlEventFlux =
<ide> this.xmlEventDecoder.decode(inputStream, null, mimeType);
<ide><path>spring-web/src/main/java/org/springframework/http/codec/xml/Jaxb2Encoder.java
<ide> import org.springframework.util.MimeTypeUtils;
<ide>
<ide> /**
<del> * Encode from an {@code Object} stream to a byte stream of XML elements.
<add> * Encode from {@code Object} stream to a byte stream containing XML elements.
<ide> *
<ide> * @author Sebastien Deleuze
<ide> * @author Arjen Poutsma
<ide><path>spring-web/src/main/java/org/springframework/http/codec/xml/JaxbContextContainer.java
<ide> */
<ide> final class JaxbContextContainer {
<ide>
<add>
<ide> private final ConcurrentMap<Class<?>, JAXBContext> jaxbContexts =
<ide> new ConcurrentHashMap<>(64);
<ide>
<add>
<ide> public Marshaller createMarshaller(Class<?> clazz) throws JAXBException {
<ide> JAXBContext jaxbContext = getJaxbContext(clazz);
<ide> return jaxbContext.createMarshaller();
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/AbstractRequestBodyPublisher.java
<ide>
<ide> /**
<ide> * Abstract base class for {@code Publisher} implementations that bridge between
<del> * event-listener APIs and Reactive Streams. Specifically, base class for the Servlet 3.1
<del> * and Undertow support.
<add> * event-listener APIs and Reactive Streams. Specifically, base class for the
<add> * Servlet 3.1 and Undertow support.
<ide> *
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/AbstractResponseBodyProcessor.java
<ide>
<ide> /**
<ide> * Abstract base class for {@code Subscriber} implementations that bridge between
<del> * event-listener APIs and Reactive Streams. Specifically, base class for the Servlet 3.1
<del> * and Undertow support.
<add> * event-listener APIs and Reactive Streams. Specifically, base class for the
<add> * Servlet 3.1 and Undertow support.
<add> *
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide> * @see ServletServerHttpRequest
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/ReactorHttpHandlerAdapter.java
<ide> import org.springframework.util.Assert;
<ide>
<ide> /**
<add> * Adapt {@link HttpHandler} to the Reactor Netty {@link ChannelHandler}.
<add> *
<ide> * @author Stephane Maldini
<ide> * @since 5.0
<ide> */
<del>public class ReactorHttpHandlerAdapter
<del> implements ChannelHandler<ByteBuf, ByteBuf, HttpChannel> {
<add>public class ReactorHttpHandlerAdapter implements ChannelHandler<ByteBuf, ByteBuf, HttpChannel> {
<ide>
<ide> private final HttpHandler httpHandler;
<ide>
<add>
<ide> public ReactorHttpHandlerAdapter(HttpHandler httpHandler) {
<ide> Assert.notNull(httpHandler, "'httpHandler' is required.");
<ide> this.httpHandler = httpHandler;
<ide> }
<ide>
<add>
<ide> @Override
<ide> public Mono<Void> apply(HttpChannel channel) {
<del> NettyDataBufferFactory dataBufferFactory =
<del> new NettyDataBufferFactory(channel.delegate().alloc());
<del>
<del> ReactorServerHttpRequest adaptedRequest =
<del> new ReactorServerHttpRequest(channel, dataBufferFactory);
<del> ReactorServerHttpResponse adaptedResponse =
<del> new ReactorServerHttpResponse(channel, dataBufferFactory);
<add> NettyDataBufferFactory bufferFactory = new NettyDataBufferFactory(channel.delegate().alloc());
<add> ReactorServerHttpRequest adaptedRequest = new ReactorServerHttpRequest(channel, bufferFactory);
<add> ReactorServerHttpResponse adaptedResponse = new ReactorServerHttpResponse(channel, bufferFactory);
<ide> return this.httpHandler.handle(adaptedRequest, adaptedResponse);
<ide> }
<ide>
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/ResponseBodyWriteResultPublisher.java
<ide>
<ide> /**
<ide> * Publisher returned from {@link ServerHttpResponse#writeWith(Publisher)}.
<add> *
<ide> * @author Arjen Poutsma
<ide> * @since 5.0
<ide> */
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/RxNettyHttpHandlerAdapter.java
<ide> import org.springframework.util.Assert;
<ide>
<ide> /**
<add> * Adapt {@link HttpHandler} to the RxNetty {@link RequestHandler}.
<add> *
<ide> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide> */
<ide> public class RxNettyHttpHandlerAdapter implements RequestHandler<ByteBuf, ByteBuf> {
<ide>
<ide> private final HttpHandler httpHandler;
<ide>
<add>
<ide> public RxNettyHttpHandlerAdapter(HttpHandler httpHandler) {
<ide> Assert.notNull(httpHandler, "'httpHandler' is required");
<ide> this.httpHandler = httpHandler;
<ide> }
<ide>
<add>
<ide> @Override
<ide> public Observable<Void> handle(HttpServerRequest<ByteBuf> request, HttpServerResponse<ByteBuf> response) {
<del> NettyDataBufferFactory dataBufferFactory =
<del> new NettyDataBufferFactory(response.unsafeNettyChannel().alloc());
<del>
<del> RxNettyServerHttpRequest adaptedRequest =
<del> new RxNettyServerHttpRequest(request, dataBufferFactory);
<del> RxNettyServerHttpResponse adaptedResponse =
<del> new RxNettyServerHttpResponse(response, dataBufferFactory);
<add> NettyDataBufferFactory bufferFactory = new NettyDataBufferFactory(response.unsafeNettyChannel().alloc());
<add> RxNettyServerHttpRequest adaptedRequest = new RxNettyServerHttpRequest(request, bufferFactory);
<add> RxNettyServerHttpResponse adaptedResponse = new RxNettyServerHttpResponse(response, bufferFactory);
<ide> Publisher<Void> result = this.httpHandler.handle(adaptedRequest, adaptedResponse);
<ide> return RxJava1Adapter.publisherToObservable(result);
<ide> }
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/ServletHttpHandlerAdapter.java
<ide> import org.springframework.util.Assert;
<ide>
<ide> /**
<add> * Adapt {@link HttpHandler} to an {@link HttpServlet} using Servlet Async
<add> * support and Servlet 3.1 Non-blocking I/O.
<add> *
<ide> * @author Arjen Poutsma
<ide> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide> public class ServletHttpHandlerAdapter extends HttpServlet {
<ide>
<ide> private int bufferSize = DEFAULT_BUFFER_SIZE;
<ide>
<add>
<ide> public void setHandler(HttpHandler handler) {
<ide> Assert.notNull(handler, "'handler' must not be null");
<ide> this.handler = handler;
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/ServletServerHttpRequest.java
<ide>
<ide> /**
<ide> * Adapt {@link ServerHttpRequest} to the Servlet {@link HttpServletRequest}.
<add> *
<ide> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide> */
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/ServletServerHttpResponse.java
<ide>
<ide> /**
<ide> * Adapt {@link ServerHttpResponse} to the Servlet {@link HttpServletResponse}.
<add> *
<ide> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide> */
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/UndertowHttpHandlerAdapter.java
<ide> import org.springframework.util.Assert;
<ide>
<ide> /**
<add> * Adapt {@link HttpHandler} to the Undertow {@link io.undertow.server.HttpHandler}.
<add> *
<ide> * @author Marek Hawrylczak
<ide> * @author Rossen Stoyanchev
<ide> * @author Arjen Poutsma
<ide> public class UndertowHttpHandlerAdapter implements io.undertow.server.HttpHandle
<ide>
<ide> private final DataBufferFactory dataBufferFactory;
<ide>
<del> public UndertowHttpHandlerAdapter(HttpHandler delegate,
<del> DataBufferFactory dataBufferFactory) {
<add>
<add> public UndertowHttpHandlerAdapter(HttpHandler delegate, DataBufferFactory dataBufferFactory) {
<ide> Assert.notNull(delegate, "'delegate' is required");
<ide> Assert.notNull(dataBufferFactory, "'dataBufferFactory' must not be null");
<ide> this.delegate = delegate;
<ide> this.dataBufferFactory = dataBufferFactory;
<ide> }
<ide>
<add>
<ide> @Override
<ide> public void handleRequest(HttpServerExchange exchange) throws Exception {
<ide>
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/UndertowServerHttpRequest.java
<ide> import org.springframework.util.MultiValueMap;
<ide>
<ide> /**
<del> * Adapt {@link ServerHttpRequest} to the Underow {@link HttpServerExchange}.
<add> * Adapt {@link ServerHttpRequest} to the Undertow {@link HttpServerExchange}.
<ide> *
<ide> * @author Marek Hawrylczak
<ide> * @author Rossen Stoyanchev
<ide><path>spring-web/src/main/java/org/springframework/http/server/reactive/UndertowServerHttpResponse.java
<ide>
<ide> /**
<ide> * Adapt {@link ServerHttpResponse} to the Undertow {@link HttpServerExchange}.
<add> *
<ide> * @author Marek Hawrylczak
<ide> * @author Rossen Stoyanchev
<ide> * @author Arjen Poutsma
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/BodyExtractor.java
<ide> import org.springframework.http.converter.reactive.HttpMessageConverter;
<ide>
<ide> /**
<del> * A {@code BodyExtractor} extracts the content of a raw {@link ClientHttpResponse},
<del> * decoding the response body and using a target composition API.
<add> * Contract to extract the content of a raw {@link ClientHttpResponse} decoding
<add> * the response body and using a target composition API.
<ide> *
<del> * <p>See static factory methods in {@link ResponseExtractors}
<del> * and {@link org.springframework.web.client.reactive.support.RxJava1ResponseExtractors}.
<add> * <p>See static factory methods in {@link ResponseExtractors} and
<add> * {@link org.springframework.web.client.reactive.support.RxJava1ResponseExtractors}.
<ide> *
<ide> * @author Brian Clozel
<ide> * @since 5.0
<ide> * @return the relevant content
<ide> */
<ide> T extract(ClientHttpResponse clientResponse, List<HttpMessageConverter<?>> messageConverters);
<add>
<ide> }
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/ClientWebRequest.java
<ide> import org.springframework.util.MultiValueMap;
<ide>
<ide> /**
<del> * Holds all the application information required to build an actual HTTP client request.
<del> * <p>The request body is materialized by a {@code Publisher} of Objects and their type
<del> * by a {@code ResolvableType} instance; it should be later converted to a
<del> * {@code Publisher<DataBuffer>} to be written to the actual HTTP client request.
<add> * Simple container for application-level information required to perform an
<add> * HTTP client request.
<add> *
<add> * <p>The request body is provided through a {@code Publisher<Object>} where the
<add> * type of each Object is indicated through a {@link ResolvableType} which
<add> * subsequently is used to correctly serialize into the
<add> * {@code Publisher<DataBuffer>} actually written to the request body.
<ide> *
<ide> * @author Brian Clozel
<ide> * @since 5.0
<ide> */
<del>public class ClientWebRequest {
<add>public class ClientWebRequest {
<ide>
<ide> protected final HttpMethod httpMethod;
<ide>
<ide> public ClientWebRequest(HttpMethod httpMethod, URI url) {
<ide> this.url = url;
<ide> }
<ide>
<add>
<ide> public HttpMethod getMethod() {
<ide> return httpMethod;
<ide> }
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/ClientWebRequestBuilder.java
<ide> package org.springframework.web.client.reactive;
<ide>
<ide> /**
<del> * Build {@link ClientWebRequest}s
<add> * Build {@link ClientWebRequest}s.
<ide> *
<ide> * @author Brian Clozel
<ide> * @since 5.0
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/ClientWebRequestBuilders.java
<ide> import org.springframework.http.HttpMethod;
<ide>
<ide> /**
<del> * Static factory methods for {@link DefaultClientWebRequestBuilder ClientWebRequestBuilders}
<add> * Static factory methods for {@link DefaultClientWebRequestBuilder
<add> * ClientWebRequestBuilders}.
<ide> *
<ide> * @author Brian Clozel
<ide> * @since 5.0
<ide> public static DefaultClientWebRequestBuilder post(String urlTemplate, Object...
<ide> return new DefaultClientWebRequestBuilder(HttpMethod.POST, urlTemplate, urlVariables);
<ide> }
<ide>
<del>
<ide> /**
<ide> * Create a {@link DefaultClientWebRequestBuilder} for a PUT request.
<ide> *
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/ClientWebRequestPostProcessor.java
<ide> package org.springframework.web.client.reactive;
<ide>
<ide> /**
<del> * Allow post processing and/or wrapping the {@link ClientWebRequest} before
<del> * it's sent to the origin server.
<add> * Contract to post-process the {@link ClientWebRequest} after it is created
<add> * and initialized in order to modify or even wrap it. This may be used for
<add> * example to pre-package specific modifications to the request.
<ide> *
<ide> * @author Rob Winch
<ide> * @author Brian Clozel
<ide> public interface ClientWebRequestPostProcessor {
<ide>
<ide> /**
<del> * Implementations can modify and/or wrap the {@link ClientWebRequest} passed in
<del> * and return it
<del> *
<add> * Implementations can modify and/or wrap the {@link ClientWebRequest}
<add> * passed in and return it
<ide> * @param request the {@link ClientWebRequest} to be modified and/or wrapped.
<ide> */
<ide> ClientWebRequest postProcess(ClientWebRequest request);
<add>
<ide> }
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/DefaultClientWebRequestBuilder.java
<ide> * Builds a {@link ClientHttpRequest} using a {@link Publisher}
<ide> * as request body.
<ide> *
<del> * <p>See static factory methods in {@link ClientWebRequestBuilders}
<add> * <p>See static factory methods in {@link ClientWebRequestBuilders}.
<ide> *
<ide> * @author Brian Clozel
<ide> * @since 5.0
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/ResponseErrorHandler.java
<ide> import org.springframework.http.converter.reactive.HttpMessageConverter;
<ide>
<ide> /**
<del> * Strategy interface used by the {@link WebClient} to handle
<del> * errors in {@link ClientHttpResponse}s if needed.
<add> * Strategy interface used by the {@link WebClient} to handle errors in
<add> * {@link ClientHttpResponse}s if needed.
<ide> *
<ide> * @author Brian Clozel
<ide> * @see DefaultResponseErrorHandler
<ide> public interface ResponseErrorHandler {
<ide>
<ide> /**
<ide> * Handle the error in the given response.
<del> * Implementations will typically inspect the {@link ClientHttpResponse#getStatusCode() HttpStatus}
<del> * of the response and throw {@link WebClientException}s in case of errors.
<add> * Implementations will typically inspect the
<add> * {@link ClientHttpResponse#getStatusCode() HttpStatus} of the response and
<add> * throw {@link WebClientException}s in case of errors.
<ide> */
<ide> void handleError(ClientHttpResponse response, List<HttpMessageConverter<?>> messageConverters);
<ide>
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/WebClient.java
<ide> * <p>Here is a simple example of a GET request:
<ide> *
<ide> * <pre class="code">
<add> * static imports: ClientWebRequestBuilder.*, ResponseExtractors.*
<add> *
<ide> * // should be shared between HTTP calls
<ide> * WebClient client = new WebClient(new ReactorHttpClient());
<ide> *
<ide> * Mono<String> result = client
<del> * .perform(ClientWebRequestBuilders.get("http://example.org/resource")
<del> * .accept(MediaType.TEXT_PLAIN))
<del> * .extract(ResponseExtractors.body(String.class));
<add> * .perform(get("http://example.org/resource").accept(MediaType.TEXT_PLAIN))
<add> * .extract(body(String.class));
<ide> * </pre>
<ide> *
<del> * <p>This Web client relies on
<add> * <p>This Web client relies on the following:
<ide> * <ul>
<del> * <li>an {@link ClientHttpConnector} implementation that drives the underlying library (e.g. Reactor-Netty)</li>
<del> * <li>a {@link ClientWebRequestBuilder} which creates a Web request with a builder API (see
<del> * {@link ClientWebRequestBuilders})</li>
<del> * <li>an {@link ResponseExtractor} which extracts the relevant part of the server
<add> * <li>{@link ClientHttpConnector} implementation to drive the underlying
<add> * library (e.g. Reactor-Netty)</li>
<add> * <li>{@link ClientWebRequestBuilder} to create a Web request with a builder
<add> * API (see {@link ClientWebRequestBuilders})</li>
<add> * <li>{@link ResponseExtractor} to extract the relevant part of the server
<ide> * response with the composition API of choice (see {@link ResponseExtractors}</li>
<ide> * </ul>
<ide> *
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/WebClientErrorException.java
<ide>
<ide> import java.util.List;
<ide>
<del>import org.springframework.http.HttpStatus;
<ide> import org.springframework.http.client.reactive.ClientHttpResponse;
<ide> import org.springframework.http.converter.reactive.HttpMessageConverter;
<ide>
<ide> @SuppressWarnings("serial")
<ide> public class WebClientErrorException extends WebClientResponseException {
<ide>
<add>
<ide> /**
<del> * Construct a new instance of {@code HttpClientErrorException} based on a {@link ClientHttpResponse}
<del> * and {@link HttpMessageConverter}s to optionally help decoding the response body
<del> * @param clientResponse the HTTP response
<del> * @param messageConverters the message converters that may decode the HTTP response body
<add> * Construct a new instance of {@code HttpClientErrorException} based on a
<add> * {@link ClientHttpResponse} and {@link HttpMessageConverter}s to optionally
<add> * help decoding the response body
<add> *
<add> * @param response the HTTP response
<add> * @param converters the message converters that may decode the HTTP response body
<ide> */
<del> public WebClientErrorException(ClientHttpResponse clientResponse,
<del> List<HttpMessageConverter<?>> messageConverters) {
<del> super(clientResponse.getStatusCode().value() + " " + clientResponse.getStatusCode().getReasonPhrase(),
<del> clientResponse, messageConverters);
<add> public WebClientErrorException(ClientHttpResponse response, List<HttpMessageConverter<?>> converters) {
<add> super(initMessage(response), response, converters);
<add> }
<add>
<add> private static String initMessage(ClientHttpResponse response) {
<add> return response.getStatusCode().value() + " " + response.getStatusCode().getReasonPhrase();
<ide> }
<add>
<ide> }
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/WebClientException.java
<ide> import org.springframework.core.NestedRuntimeException;
<ide>
<ide> /**
<del> * Base class for exceptions thrown by {@link WebClient} whenever
<del> * it encounters errors.
<add> * Base class for exceptions thrown by {@link WebClient}.
<ide> *
<ide> * @author Brian Clozel
<ide> * @since 5.0
<ide> public WebClientException(String msg) {
<ide> public WebClientException(String msg, Throwable cause) {
<ide> super(msg, cause);
<ide> }
<add>
<ide> }
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/WebClientResponseException.java
<ide> import org.springframework.http.converter.reactive.HttpMessageConverter;
<ide>
<ide> /**
<del> * Base class for exceptions associated with specific HTTP client response status codes.
<add> * Base class for exceptions associated with specific HTTP client response
<add> * status codes.
<ide> *
<ide> * @author Brian Clozel
<ide> * @since 5.0
<ide> public class WebClientResponseException extends WebClientException {
<ide>
<ide> private final List<HttpMessageConverter<?>> messageConverters;
<ide>
<add>
<ide> /**
<ide> * Construct a new instance of {@code WebClientResponseException} with the given response data
<ide> * @param message the given error message
<ide> public WebClientResponseException(String message, ClientHttpResponse clientRespo
<ide> this.messageConverters = messageConverters;
<ide> }
<ide>
<add>
<ide> /**
<ide> * Return the HTTP status
<ide> */
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/WebResponseActions.java
<ide> public interface WebResponseActions {
<ide> * </pre>
<ide> */
<ide> <T> T extract(ResponseExtractor<T> extractor);
<add>
<ide> }
<ide><path>spring-web/src/main/java/org/springframework/web/client/reactive/WebServerErrorException.java
<ide> public class WebServerErrorException extends WebClientResponseException {
<ide>
<ide> /**
<del> * Construct a new instance of {@code HttpServerErrorException} based on a {@link ClientHttpResponse}
<del> * and {@link HttpMessageConverter}s to optionally help decoding the response body
<del> * @param clientResponse the HTTP response
<del> * @param messageConverters the message converters that may decode the HTTP response body
<add> * Construct a new instance of {@code HttpServerErrorException} based on a
<add> * {@link ClientHttpResponse} and {@link HttpMessageConverter}s to optionally
<add> * help decoding the response body
<add> * @param response the HTTP response
<add> * @param converters the message converters that may decode the HTTP response body
<ide> */
<del> public WebServerErrorException(ClientHttpResponse clientResponse, List<HttpMessageConverter<?>> messageConverters) {
<del> super(clientResponse.getStatusCode().value() + " " + clientResponse.getStatusCode().getReasonPhrase(),
<del> clientResponse, messageConverters);
<add> public WebServerErrorException(ClientHttpResponse response, List<HttpMessageConverter<?>> converters) {
<add> super(initMessage(response), response, converters);
<ide> }
<add>
<add> private static String initMessage(ClientHttpResponse response) {
<add> return response.getStatusCode().value() + " " + response.getStatusCode().getReasonPhrase();
<add> }
<add>
<ide> }
<ide><path>spring-web/src/main/java/org/springframework/web/server/session/DefaultWebSession.java
<ide> import org.springframework.util.Assert;
<ide>
<ide> /**
<add> * Default implementation of {@link org.springframework.web.server.WebSession}.
<add> *
<ide> * @author Rossen Stoyanchev
<ide> * @since 5.0
<ide> */ | 62 |
Text | Text | add changelog entry for | e1154a694d861abbcc1f8f1c8d557d2e349ba9c2 | <ide><path>activerecord/CHANGELOG.md
<add>* Make possible to run SQLite rake tasks without the `Rails` constant defined.
<add>
<add> *Damien Mathieu*
<add>
<ide> * Allow Relation#from to accept other relations with bind values.
<ide>
<ide> *Ryan Wallace* | 1 |
Ruby | Ruby | fix line number off-by-one | fe1bf57a1547bec01c80f42c75ee2e3d9afa0bb1 | <ide><path>Library/Homebrew/cmd/audit.rb
<ide> def audit
<ide> audit_conflicts
<ide> audit_patches
<ide> audit_text
<del> text.split("\n").each_with_index { |line, lineno| audit_line(line, lineno) }
<add> text.split("\n").each_with_index { |line, lineno| audit_line(line+1, lineno) }
<ide> audit_installed
<ide> end
<ide> | 1 |
PHP | PHP | move last page logic into paginator class | acc279989cfc2872abd92586caa4f98ba07d30cb | <ide><path>system/paginator.php
<ide> class Paginator {
<ide> */
<ide> public function __construct($results, $total, $per_page)
<ide> {
<add> $this->page = static::page($total, $per_page);
<add>
<ide> $this->per_page = $per_page;
<ide> $this->results = $results;
<ide> $this->total = $total;
<del>
<del> $this->page = static::page($this->last_page());
<ide> }
<ide>
<ide> /**
<ide> public function __construct($results, $total, $per_page)
<ide> * The page will be validated and adjusted if it is less than 1 or
<ide> * greater than the last page number.
<ide> *
<del> * @param int $last_page
<add> * @param int $total
<add> * @param int $per_page
<ide> * @return int
<ide> */
<del> public static function page($last_page)
<add> public static function page($total, $per_page)
<ide> {
<add> $last_page = ceil($total / $per_page);
<add>
<ide> $page = Input::get('page', 1);
<ide>
<ide> if (is_numeric($page) and $page > $last_page) | 1 |
Text | Text | remove networkdriver from readme.md in daemon | ce5bbed8582227482f7afa1c647d52eab9fb2de3 | <ide><path>daemon/README.md
<ide> This directory contains code pertaining to running containers and storing images
<ide> Code pertaining to running containers:
<ide>
<ide> - execdriver
<del> - networkdriver
<ide>
<ide> Code pertaining to storing images:
<ide> | 1 |
Ruby | Ruby | put conditional back | a4e0e16e05fa49fc84b92a5ffd42a9f728ee1f89 | <ide><path>lib/active_storage/log_subscriber.rb
<ide> class ActiveStorage::LogSubscriber < ActiveSupport::LogSubscriber
<ide> def service_upload(event)
<ide> message = "Uploaded file to key: #{key_in(event)}"
<del> message << " (checksum: #{event.payload[:checksum]})"
<del> info event, color(message, GREEN) if event.payload[:checksum]
<add> message << " (checksum: #{event.payload[:checksum]})" if event.payload[:checksum]
<add> info event, color(message, GREEN)
<ide> end
<ide>
<ide> def service_download(event) | 1 |
Ruby | Ruby | call string#gsub with hash directly | cc986db54665c4155f1b8da5d3f2ebf4a55ef23f | <ide><path>activesupport/lib/active_support/core_ext/string/output_safety.rb
<ide> def html_escape(s)
<ide> # html_escape_once('<< Accept & Checkout')
<ide> # # => "<< Accept & Checkout"
<ide> def html_escape_once(s)
<del> result = s.to_s.gsub(HTML_ESCAPE_ONCE_REGEXP) { |special| HTML_ESCAPE[special] }
<add> result = s.to_s.gsub(HTML_ESCAPE_ONCE_REGEXP, HTML_ESCAPE)
<ide> s.html_safe? ? result.html_safe : result
<ide> end
<ide>
<ide> def html_escape_once(s)
<ide> # json_escape('{"name":"john","created_at":"2010-04-28T01:39:31Z","id":1}')
<ide> # # => {name:john,created_at:2010-04-28T01:39:31Z,id:1}
<ide> def json_escape(s)
<del> result = s.to_s.gsub(JSON_ESCAPE_REGEXP) { |special| JSON_ESCAPE[special] }
<add> result = s.to_s.gsub(JSON_ESCAPE_REGEXP, JSON_ESCAPE)
<ide> s.html_safe? ? result.html_safe : result
<ide> end
<ide> | 1 |
Go | Go | remove some unused funcs and variables | 5ee8652a21756b1b4719e3477bd871d13b237c12 | <ide><path>daemon/container_operations_unix.go
<ide> func (daemon *Daemon) DisconnectFromNetwork(container *container.Container, n li
<ide> return nil
<ide> }
<ide>
<del>// called from the libcontainer pre-start hook to set the network
<del>// namespace configuration linkage to the libnetwork "sandbox" entity
<del>func (daemon *Daemon) setNetworkNamespaceKey(containerID string, pid int) error {
<del> path := fmt.Sprintf("/proc/%d/ns/net", pid)
<del> var sandbox libnetwork.Sandbox
<del> search := libnetwork.SandboxContainerWalker(&sandbox, containerID)
<del> daemon.netController.WalkSandboxes(search)
<del> if sandbox == nil {
<del> return fmt.Errorf("error locating sandbox id %s: no sandbox found", containerID)
<del> }
<del>
<del> return sandbox.SetKey(path)
<del>}
<del>
<ide> func (daemon *Daemon) getIpcContainer(container *container.Container) (*container.Container, error) {
<ide> containerID := container.HostConfig.IpcMode.Container()
<ide> c, err := daemon.GetContainer(containerID)
<ide> func getDevicesFromPath(deviceMapping containertypes.DeviceMapping) (devs []spec
<ide> return devs, devPermissions, fmt.Errorf("error gathering device information while adding custom device %q: %s", deviceMapping.PathOnHost, err)
<ide> }
<ide>
<del>func mergeDevices(defaultDevices, userDevices []*configs.Device) []*configs.Device {
<del> if len(userDevices) == 0 {
<del> return defaultDevices
<del> }
<del>
<del> paths := map[string]*configs.Device{}
<del> for _, d := range userDevices {
<del> paths[d.Path] = d
<del> }
<del>
<del> var devs []*configs.Device
<del> for _, d := range defaultDevices {
<del> if _, defined := paths[d.Path]; !defined {
<del> devs = append(devs, d)
<del> }
<del> }
<del> return append(devs, userDevices...)
<del>}
<del>
<ide> func detachMounted(path string) error {
<ide> return syscall.Unmount(path, syscall.MNT_DETACH)
<ide> }
<ide><path>daemon/container_operations_windows.go
<ide> func (daemon *Daemon) getSize(container *container.Container) (int64, int64) {
<ide> return 0, 0
<ide> }
<ide>
<del>// setNetworkNamespaceKey is a no-op on Windows.
<del>func (daemon *Daemon) setNetworkNamespaceKey(containerID string, pid int) error {
<del> return nil
<del>}
<del>
<ide> func (daemon *Daemon) setupIpcDirs(container *container.Container) error {
<ide> return nil
<ide> }
<ide><path>distribution/xfer/upload_test.go
<ide> import (
<ide> "time"
<ide>
<ide> "github.com/docker/distribution"
<del> "github.com/docker/distribution/digest"
<ide> "github.com/docker/docker/layer"
<ide> "github.com/docker/docker/pkg/progress"
<ide> "golang.org/x/net/context"
<ide> func uploadDescriptors(currentUploads *int32) []UploadDescriptor {
<ide> }
<ide> }
<ide>
<del>var expectedDigests = map[layer.DiffID]digest.Digest{
<del> layer.DiffID("sha256:cbbf2f9a99b47fc460d422812b6a5adff7dfee951d8fa2e4a98caa0382cfbdbf"): digest.Digest("sha256:c5095d6cf7ee42b7b064371dcc1dc3fb4af197f04d01a60009d484bd432724fc"),
<del> layer.DiffID("sha256:1515325234325236634634608943609283523908626098235490238423902343"): digest.Digest("sha256:968cbfe2ff5269ea1729b3804767a1f57ffbc442d3bc86f47edbf7e688a4f36e"),
<del> layer.DiffID("sha256:6929356290463485374960346430698374523437683470934634534953453453"): digest.Digest("sha256:8a5e56ab4b477a400470a7d5d4c1ca0c91235fd723ab19cc862636a06f3a735d"),
<del> layer.DiffID("sha256:8159352387436803946235346346368745389534789534897538734598734987"): digest.Digest("sha256:5e733e5cd3688512fc240bd5c178e72671c9915947d17bb8451750d827944cb2"),
<del> layer.DiffID("sha256:4637863963478346897346987346987346789346789364879364897364987346"): digest.Digest("sha256:ec4bb98d15e554a9f66c3ef9296cf46772c0ded3b1592bd8324d96e2f60f460c"),
<del>}
<del>
<ide> func TestSuccessfulUpload(t *testing.T) {
<ide> lum := NewLayerUploadManager(maxUploadConcurrency)
<ide>
<ide><path>oci/defaults_linux.go
<ide> import (
<ide> )
<ide>
<ide> func sPtr(s string) *string { return &s }
<del>func rPtr(r rune) *rune { return &r }
<ide> func iPtr(i int64) *int64 { return &i }
<ide> func u32Ptr(i int64) *uint32 { u := uint32(i); return &u }
<ide> func fmPtr(i int64) *os.FileMode { fm := os.FileMode(i); return &fm } | 4 |
PHP | PHP | add some missing phpdocs for facades | 7f8ac4f4abee41215b7f421f0dc491c844aea7b9 | <ide><path>src/Illuminate/Support/Facades/Log.php
<ide> * @method static void log($level, string $message, array $context = [])
<ide> * @method static void notice(string $message, array $context = [])
<ide> * @method static void warning(string $message, array $context = [])
<add> * @method static void write(string $level, string $message, array $context = [])
<add> * @method static void listen(\Closure $callback)
<ide> *
<ide> * @see \Illuminate\Log\Logger
<ide> */
<ide><path>src/Illuminate/Support/Facades/Mail.php
<ide> * @method static bool hasQueued(string $mailable)
<ide> * @method static bool hasSent(string $mailable)
<ide> * @method static mixed later(\DateTimeInterface|\DateInterval|int $delay, \Illuminate\Contracts\Mail\Mailable|string|array $view, string $queue = null)
<add> * @method static mixed laterOn(string $queue, \DateTimeInterface|\DateInterval|int $delay, \Illuminate\Contracts\Mail\Mailable|string|array $view)
<ide> * @method static mixed queue(\Illuminate\Contracts\Mail\Mailable|string|array $view, string $queue = null)
<add> * @method static mixed queueOn(string $queue, \Illuminate\Contracts\Mail\Mailable|string|array $view)
<ide> * @method static void assertNotQueued(string $mailable, callable $callback = null)
<ide> * @method static void assertNotSent(string $mailable, callable|int $callback = null)
<ide> * @method static void assertNothingQueued()
<ide> * @method static void assertNothingSent()
<ide> * @method static void assertQueued(string $mailable, callable|int $callback = null)
<ide> * @method static void assertSent(string $mailable, callable|int $callback = null)
<ide> * @method static void raw(string $text, $callback)
<add> * @method static void plain(string $view, array $data, $callback)
<add> * @method static void html(string $html, $callback)
<ide> * @method static void send(\Illuminate\Contracts\Mail\Mailable|string|array $view, array $data = [], \Closure|string $callback = null)
<ide> *
<ide> * @see \Illuminate\Mail\Mailer
<ide><path>src/Illuminate/Support/Facades/Redirect.php
<ide> * @method static \Illuminate\Http\RedirectResponse to(string $path, int $status = 302, array $headers = [], bool $secure = null)
<ide> * @method static \Illuminate\Routing\UrlGenerator getUrlGenerator()
<ide> * @method static void setSession(\Illuminate\Session\Store $session)
<add> * @method static void setIntendedUrl(string $url)
<ide> *
<ide> * @see \Illuminate\Routing\Redirector
<ide> */
<ide><path>src/Illuminate/Support/Facades/Route.php
<ide> * @method static void pattern(string $key, string $pattern)
<ide> * @method static void resources(array $resources)
<ide> * @method static void substituteImplicitBindings(\Illuminate\Support\Facades\Route $route)
<add> * @method static boolean uses(...$patterns)
<add> * @method static boolean is(...$patterns)
<add> * @method static boolean has(string $name)
<add> * @method static mixed input(string $key, string|null $default = null)
<ide> *
<ide> * @see \Illuminate\Routing\Router
<ide> */
<ide><path>src/Illuminate/Support/Facades/URL.php
<ide> * @method static bool hasValidSignature(\Illuminate\Http\Request $request, bool $absolute = true)
<ide> * @method static string action(string $action, $parameters = [], bool $absolute = true)
<ide> * @method static string asset(string $path, bool $secure = null)
<add> * @method static string secureAsset(string $path)
<ide> * @method static string current()
<ide> * @method static string full()
<ide> * @method static string previous($fallback = false)
<ide> * @method static string to(string $path, $extra = [], bool $secure = null)
<ide> * @method static void defaults(array $defaults)
<ide> * @method static void forceScheme(string $scheme)
<add> * @method static bool isValidUrl(string $path)
<ide> *
<ide> * @see \Illuminate\Routing\UrlGenerator
<ide> */
<ide><path>src/Illuminate/Support/Facades/Validator.php
<ide> * @method static void extend(string $rule, \Closure|string $extension, string $message = null)
<ide> * @method static void extendImplicit(string $rule, \Closure|string $extension, string $message = null)
<ide> * @method static void replacer(string $rule, \Closure|string $replacer)
<add> * @method static array validate(array $data, array $rules, array $messages = [], array $customAttributes = [])
<ide> *
<ide> * @see \Illuminate\Validation\Factory
<ide> */ | 6 |
Python | Python | use positional arguments due to inconsistent api | e1844d9a45c6232ba7fac77c6c7b3f5326e72929 | <ide><path>src/transformers/modeling_utils.py
<ide> def top_k_top_p_filtering(logits, top_k=0, top_p=1.0, filter_value=-float("Inf")
<ide> sorted_indices_to_remove[..., 0] = 0
<ide>
<ide> # scatter sorted tensors to original indexing
<del> indices_to_remove = sorted_indices_to_remove.scatter(dim=1, index=sorted_indices, src=sorted_indices_to_remove)
<add> indices_to_remove = sorted_indices_to_remove.scatter(1, sorted_indices, sorted_indices_to_remove)
<ide> logits[indices_to_remove] = filter_value
<ide> return logits
<ide> | 1 |
Python | Python | fix examples in m2m100 docstrings | a5d2967bd8a5ed2456c593fa9eb5d9c0d726ae7a | <ide><path>src/transformers/models/m2m_100/modeling_m2m_100.py
<ide> def _init_weights(self, module):
<ide> >>> model_inputs = tokenizer(text_to_translate, return_tensors='pt')
<ide>
<ide> >>> # translate to French
<del> >>> gen_tokens = model.generate( **model_inputs, forced_bos_token_id=tok.get_lang_id("fr"))
<add> >>> gen_tokens = model.generate( **model_inputs, forced_bos_token_id=tokenizer.get_lang_id("fr"))
<ide> >>> print(tokenizer.batch_decode(gen_tokens, skip_special_tokens=True))
<ide> """
<ide>
<ide> def forward(
<ide> >>> model_inputs = tokenizer(text_to_translate, return_tensors='pt')
<ide>
<ide> >>> # translate to French
<del> >>> gen_tokens = model.generate( **model_inputs, forced_bos_token_id=tok.get_lang_id("fr"))
<add> >>> gen_tokens = model.generate( **model_inputs, forced_bos_token_id=tokenizer.get_lang_id("fr"))
<ide> >>> print(tokenizer.batch_decode(gen_tokens, skip_special_tokens=True))
<ide> """
<ide> return_dict = return_dict if return_dict is not None else self.config.use_return_dict | 1 |
Python | Python | fix indeterminacy of icu_locales value | 5e9274a3e7803e9713a87ebae69019b9ee6468de | <ide><path>configure.py
<ide> def icu_download(path):
<ide> o['variables']['icu_small'] = b(True)
<ide> locs = set(options.with_icu_locales.split(','))
<ide> locs.add('root') # must have root
<del> o['variables']['icu_locales'] = ','.join(str(loc) for loc in locs)
<add> o['variables']['icu_locales'] = ','.join(str(loc) for loc in sorted(locs))
<ide> # We will check a bit later if we can use the canned deps/icu-small
<ide> o['variables']['icu_default_data'] = options.with_icu_default_data_dir or ''
<ide> elif with_intl == 'full-icu': | 1 |
Ruby | Ruby | convert date extension modules to class reopens | c43dec888a4823b36ee4369be0558b94d475d734 | <ide><path>activesupport/lib/active_support/core_ext/date.rb
<ide> require 'active_support/core_ext/util'
<ide> require 'date'
<del>ActiveSupport.core_ext Date, %w(behavior calculations conversions)
<add>require 'active_support/core_ext/date/acts_like'
<add>require 'active_support/core_ext/date/freeze'
<add>ActiveSupport.core_ext Date, %w(calculations conversions)
<ide><path>activesupport/lib/active_support/core_ext/date/acts_like.rb
<add>require 'date'
<add>
<add>class Date
<add> # Enable more predictable duck-typing on Date-like classes. See
<add> # Object#acts_like?.
<add> def acts_like_date?
<add> true
<add> end
<add>end
<ide><path>activesupport/lib/active_support/core_ext/date/behavior.rb
<del>require 'date'
<del>
<del>module ActiveSupport #:nodoc:
<del> module CoreExtensions #:nodoc:
<del> module Date #:nodoc:
<del> module Behavior
<del> # Enable more predictable duck-typing on Date-like classes. See
<del> # Object#acts_like?.
<del> def acts_like_date?
<del> true
<del> end
<del>
<del> # Date memoizes some instance methods using metaprogramming to wrap
<del> # the methods with one that caches the result in an instance variable.
<del> #
<del> # If a Date is frozen but the memoized method hasn't been called, the
<del> # first call will result in a frozen object error since the memo
<del> # instance variable is uninitialized.
<del> #
<del> # Work around by eagerly memoizing before freezing.
<del> #
<del> # Ruby 1.9 uses a preinitialized instance variable so it's unaffected.
<del> # This hack is as close as we can get to feature detection:
<del> begin
<del> ::Date.today.freeze.jd
<del> rescue => frozen_object_error
<del> if frozen_object_error.message =~ /frozen/
<del> def freeze #:nodoc:
<del> self.class.private_instance_methods(false).each do |m|
<del> if m.to_s =~ /\A__\d+__\Z/
<del> instance_variable_set(:"@#{m}", [send(m)])
<del> end
<del> end
<del>
<del> super
<del> end
<del> end
<del> end
<del> end
<del> end
<del> end
<del>end
<ide><path>activesupport/lib/active_support/core_ext/date/freeze.rb
<add># Date memoizes some instance methods using metaprogramming to wrap
<add># the methods with one that caches the result in an instance variable.
<add>#
<add># If a Date is frozen but the memoized method hasn't been called, the
<add># first call will result in a frozen object error since the memo
<add># instance variable is uninitialized.
<add>#
<add># Work around by eagerly memoizing before freezing.
<add>#
<add># Ruby 1.9 uses a preinitialized instance variable so it's unaffected.
<add># This hack is as close as we can get to feature detection:
<add>if RUBY_VERSION < '1.9'
<add> require 'date'
<add> begin
<add> ::Date.today.freeze.jd
<add> rescue => frozen_object_error
<add> if frozen_object_error.message =~ /frozen/
<add> class Date #:nodoc:
<add> def freeze
<add> self.class.private_instance_methods(false).each do |m|
<add> if m.to_s =~ /\A__\d+__\Z/
<add> instance_variable_set(:"@#{m}", [send(m)])
<add> end
<add> end
<add>
<add> super
<add> end
<add> end
<add> end
<add> end
<add>end | 4 |
Ruby | Ruby | improve sms_to api documentation [ci-skip] | 5b8fee1be1e66941b7792f545724a8f02925de3c | <ide><path>actionview/lib/action_view/helpers/url_helper.rb
<ide> def current_page?(options = nil, check_parameters: false, **options_as_kwargs)
<ide> end
<ide> end
<ide>
<del> # Creates an SMS anchor link tag to the specified +phone_number+, which is
<del> # also used as the name of the link unless +name+ is specified. Additional
<del> # HTML attributes for the link can be passed in +html_options+.
<add> # Creates an SMS anchor link tag to the specified +phone_number+. When the
<add> # link is clicked, the default SMS messaging app is opened ready to send a
<add> # message to the linked phone number. If the +body+ option is specified,
<add> # the contents of the message will be preset to +body+.
<ide> #
<del> # When clicked, an SMS message is prepopulated with the passed phone number
<del> # and optional +body+ value.
<add> # If +name+ is not specified, +phone_number+ will be used as the name of
<add> # the link.
<ide> #
<del> # +sms_to+ has a +body+ option for customizing the SMS message itself by
<del> # passing special keys to +html_options+.
<add> # Additional HTML attributes for the link can be passed via +html_options+.
<ide> #
<ide> # ==== Options
<ide> # * <tt>:body</tt> - Preset the body of the message. | 1 |
Go | Go | fix flakey testauthzpluginalloweventstream | 4cf9b725f2117581ac4ba3e6f1db23da090e1732 | <ide><path>integration-cli/docker_cli_authz_unix_test.go
<ide> func (s *DockerAuthzSuite) TestAuthZPluginAllowEventStream(c *check.C) {
<ide> // Create a container and wait for the creation events
<ide> out, err := s.d.Cmd("run", "-d", "busybox", "top")
<ide> c.Assert(err, check.IsNil, check.Commentf(out))
<del>
<ide> containerID := strings.TrimSpace(out)
<add> c.Assert(s.d.waitRun(containerID), checker.IsNil)
<ide>
<ide> events := map[string]chan bool{
<ide> "create": make(chan bool), | 1 |
Python | Python | allow hashing of errordetail to fix | 2ebd4797595fb86504cf093fe8ed94c59a061acb | <ide><path>rest_framework/exceptions.py
<ide> def __repr__(self):
<ide> self.code,
<ide> ))
<ide>
<add> def __hash__(self):
<add> return hash(str(self))
<add>
<ide>
<ide> class APIException(Exception):
<ide> """
<ide><path>tests/test_exceptions.py
<ide> def test_str(self):
<ide> assert str(ErrorDetail('msg1')) == 'msg1'
<ide> assert str(ErrorDetail('msg1', 'code')) == 'msg1'
<ide>
<add> def test_hash(self):
<add> assert hash(ErrorDetail('msg')) == hash('msg')
<add> assert hash(ErrorDetail('msg', 'code')) == hash('msg')
<add>
<ide>
<ide> class TranslationTests(TestCase):
<ide> | 2 |
Javascript | Javascript | add extra profiling app | 162f11c0c0541a4d7c92fc4a53decd9ac9e6747d | <ide><path>test/integration/bundle-size-profiling/next.config.js
<add>module.exports = {
<add> experimental: {
<add> polyfillsOptimization: true,
<add> },
<add> webpack(config, options) {
<add> if (!options.isServer) {
<add> config.profile = true
<add> const { BundleAnalyzerPlugin } = require('webpack-bundle-analyzer')
<add> config.plugins.push(
<add> new BundleAnalyzerPlugin({
<add> generateStatsFile: true,
<add> analyzerMode: 'static',
<add> reportFilename: options.isServer
<add> ? '../analyze/server.html'
<add> : './analyze/client.html',
<add> })
<add> )
<add> }
<add>
<add> return config
<add> },
<add>}
<ide><path>test/integration/bundle-size-profiling/pages/index.js
<add>export default () => 'Hello World' | 2 |
Javascript | Javascript | add assert.notdeepstrictequal() tests | 181324e192aa75e923c918af84d9902de4ee3b24 | <ide><path>test/parallel/test-assert.js
<ide> assert.doesNotThrow(makeBlock(a.deepEqual, new Boolean(true), {}),
<ide> assert.throws(makeBlock(a.deepEqual, {a: 1}, {b: 1}), a.AssertionError);
<ide>
<ide> //deepStrictEqual
<del>assert.doesNotThrow(makeBlock(a.deepStrictEqual, new Date(2000, 3, 14),
<del> new Date(2000, 3, 14)),
<del> 'deepStrictEqual(new Date(2000, 3, 14),\
<del> new Date(2000, 3, 14))');
<add>assert.doesNotThrow(
<add> makeBlock(a.deepStrictEqual, new Date(2000, 3, 14), new Date(2000, 3, 14)),
<add> 'deepStrictEqual(new Date(2000, 3, 14), new Date(2000, 3, 14))'
<add>);
<ide>
<del>assert.throws(makeBlock(a.deepStrictEqual, new Date(), new Date(2000, 3, 14)),
<del> a.AssertionError,
<del> 'deepStrictEqual(new Date(), new Date(2000, 3, 14))');
<add>assert.throws(
<add> makeBlock(a.deepStrictEqual, new Date(), new Date(2000, 3, 14)),
<add> a.AssertionError,
<add> 'deepStrictEqual(new Date(), new Date(2000, 3, 14))'
<add>);
<add>
<add>assert.throws(
<add> makeBlock(a.notDeepStrictEqual, new Date(2000, 3, 14), new Date(2000, 3, 14)),
<add> a.AssertionError,
<add> 'notDeepStrictEqual(new Date(2000, 3, 14), new Date(2000, 3, 14))'
<add>);
<add>
<add>assert.doesNotThrow(
<add> makeBlock(a.notDeepStrictEqual, new Date(), new Date(2000, 3, 14)),
<add> 'notDeepStrictEqual(new Date(), new Date(2000, 3, 14))'
<add>);
<ide>
<ide> // 7.3 - strict
<ide> assert.doesNotThrow(makeBlock(a.deepStrictEqual, /a/, /a/)); | 1 |
Javascript | Javascript | add support for case sensitive | 6022476f036b4829a9b91c02ee988f2bf2f94bd2 | <ide><path>web/viewer.js
<ide> var PDFFindController = {
<ide> // TODO: Handle the other find options here as well.
<ide>
<ide> var query = this.state.query;
<add> var caseSensitive = this.state.caseSensitive;
<ide> var queryLen = query.length;
<ide>
<ide> if (queryLen === 0)
<ide> return [];
<ide>
<add> if (!caseSensitive) {
<add> pageContent = pageContent.toLowerCase();
<add> query = query.toLowerCase();
<add> }
<add>
<ide> var matches = [];
<ide>
<ide> var matchIdx = -queryLen; | 1 |
Python | Python | fix mypy errors in leveldb | c4b369410155dfc461d2b95ee66cb1927f8e4230 | <ide><path>airflow/providers/google/leveldb/hooks/leveldb.py
<ide> from airflow.exceptions import AirflowException
<ide> from airflow.hooks.base import BaseHook
<ide>
<add>DB_NOT_INITIALIZED_BEFORE = "The `get_conn` method should be called before!"
<add>
<ide>
<ide> class LevelDBHookException(AirflowException):
<ide> """Exception specific for LevelDB"""
<ide> def __init__(self, leveldb_conn_id: str = default_conn_name):
<ide> super().__init__()
<ide> self.leveldb_conn_id = leveldb_conn_id
<ide> self.connection = self.get_connection(leveldb_conn_id)
<del> self.db = None
<add> self.db: Optional[plyvel.DB] = None
<ide>
<ide> def get_conn(self, name: str = '/tmp/testdb/', create_if_missing: bool = False, **kwargs) -> DB:
<ide> """
<ide> def run(
<ide> self,
<ide> command: str,
<ide> key: bytes,
<del> value: bytes = None,
<del> keys: List[bytes] = None,
<del> values: List[bytes] = None,
<add> value: Optional[bytes] = None,
<add> keys: Optional[List[bytes]] = None,
<add> values: Optional[List[bytes]] = None,
<ide> ) -> Optional[bytes]:
<ide> """
<ide> Execute operation with leveldb
<ide> def run(
<ide> :param key: key for command(put,get,delete) execution(, e.g. ``b'key'``, ``b'another-key'``)
<ide> :type key: bytes
<ide> :param value: value for command(put) execution(bytes, e.g. ``b'value'``, ``b'another-value'``)
<del> :type value: bytes
<add> :type value: Optional[bytes]
<ide> :param keys: keys for command(write_batch) execution(List[bytes], e.g. ``[b'key', b'another-key'])``
<del> :type keys: List[bytes]
<add> :type keys: Optional[List[bytes]]
<ide> :param values: values for command(write_batch) execution e.g. ``[b'value'``, ``b'another-value']``
<del> :type values: List[bytes]
<add> :type values: Optional[List[bytes]]
<ide> :returns: value from get or None
<ide> :rtype: Optional[bytes]
<ide> """
<ide> if command == 'put':
<add> if not value:
<add> raise Exception("Please provide `value`!")
<ide> return self.put(key, value)
<ide> elif command == 'get':
<ide> return self.get(key)
<ide> elif command == 'delete':
<ide> return self.delete(key)
<ide> elif command == 'write_batch':
<add> if not keys:
<add> raise Exception("Please provide `keys`!")
<add> if not values:
<add> raise Exception("Please provide `values`!")
<ide> return self.write_batch(keys, values)
<ide> else:
<ide> raise LevelDBHookException("Unknown command for LevelDB hook")
<ide> def put(self, key: bytes, value: bytes):
<ide> :param value: value for put execution e.g. ``b'value'``, ``b'another-value'``
<ide> :type value: bytes
<ide> """
<add> if not self.db:
<add> raise Exception(DB_NOT_INITIALIZED_BEFORE)
<ide> self.db.put(key, value)
<ide>
<ide> def get(self, key: bytes) -> bytes:
<ide> def get(self, key: bytes) -> bytes:
<ide> :returns: value of key from db.get
<ide> :rtype: bytes
<ide> """
<add> if not self.db:
<add> raise Exception(DB_NOT_INITIALIZED_BEFORE)
<ide> return self.db.get(key)
<ide>
<ide> def delete(self, key: bytes):
<ide> def delete(self, key: bytes):
<ide> :param key: key for delete execution, e.g. ``b'key'``, ``b'another-key'``
<ide> :type key: bytes
<ide> """
<add> if not self.db:
<add> raise Exception(DB_NOT_INITIALIZED_BEFORE)
<ide> self.db.delete(key)
<ide>
<ide> def write_batch(self, keys: List[bytes], values: List[bytes]):
<ide> def write_batch(self, keys: List[bytes], values: List[bytes]):
<ide> :param values: values for write_batch execution e.g. ``[b'value', b'another-value']``
<ide> :type values: List[bytes]
<ide> """
<add> if not self.db:
<add> raise Exception(DB_NOT_INITIALIZED_BEFORE)
<ide> with self.db.write_batch() as batch:
<ide> for i, key in enumerate(keys):
<ide> batch.put(key, values[i])
<ide><path>airflow/providers/google/leveldb/operators/leveldb.py
<ide> class LevelDBOperator(BaseOperator):
<ide> :param key: key for command(put,get,delete) execution(, e.g. ``b'key'``, ``b'another-key'``)
<ide> :type key: bytes
<ide> :param value: value for command(put) execution(bytes, e.g. ``b'value'``, ``b'another-value'``)
<del> :type value: bytes
<add> :type value: Optional[bytes]
<ide> :param keys: keys for command(write_batch) execution(List[bytes], e.g. ``[b'key', b'another-key'])``
<del> :type keys: List[bytes]
<add> :type keys: Optional[List[bytes]]
<ide> :param values: values for command(write_batch) execution e.g. ``[b'value'``, ``b'another-value']``
<del> :type values: List[bytes]
<add> :type values: Optional[List[bytes]]
<ide> :param leveldb_conn_id:
<ide> :type leveldb_conn_id: str
<ide> :param create_if_missing: whether a new database should be created if needed
<ide> def __init__(
<ide> *,
<ide> command: str,
<ide> key: bytes,
<del> value: bytes = None,
<del> keys: List[bytes] = None,
<del> values: List[bytes] = None,
<add> value: Optional[bytes] = None,
<add> keys: Optional[List[bytes]] = None,
<add> values: Optional[List[bytes]] = None,
<ide> leveldb_conn_id: str = 'leveldb_default',
<ide> name: str = '/tmp/testdb/',
<ide> create_if_missing: bool = True,
<ide> def execute(self, context) -> Optional[str]:
<ide> )
<ide> self.log.info("Done. Returned value was: %s", str(value))
<ide> leveldb_hook.close_conn()
<del> value = value if value is None else value.decode()
<del> return value
<add> str_value = value if value is None else value.decode()
<add> return str_value | 2 |
Text | Text | fix typo in inception/readme.md (#49) | 148a15fb043dacdd1595eb4c5267705fbd362c6a | <ide><path>inception/README.md
<ide> To train this model, you simply need to specify the following:
<ide> bazel build inception/imagenet_train
<ide>
<ide> # run it
<del>bazel-bin/inception/imagenet_train.py --num_gpus=1 --batch_size=32 --train_dir=/tmp/imagenet_train --data_dir=/tmp/imagenet_data
<add>bazel-bin/inception/imagenet_train --num_gpus=1 --batch_size=32 --train_dir=/tmp/imagenet_train --data_dir=/tmp/imagenet_data
<ide> ```
<ide>
<ide> The model reads in the ImageNet training data from `--data_dir`. If you followed | 1 |
Python | Python | replace float ops with faster integer ops | 0c41cefc883f1a56994a0a0eba2648dc643be9ca | <ide><path>keras/engine/training.py
<ide> def _make_batches(size, batch_size):
<ide> # Returns
<ide> A list of tuples of array indices.
<ide> """
<del> num_batches = int(np.ceil(size / float(batch_size)))
<add> num_batches = (size + batch_size - 1) // batch_size # round up
<ide> return [(i * batch_size, min(size, (i + 1) * batch_size))
<ide> for i in range(num_batches)]
<ide>
<ide><path>keras/preprocessing/image.py
<ide> def __getitem__(self, idx):
<ide> return self._get_batches_of_transformed_samples(index_array)
<ide>
<ide> def __len__(self):
<del> return int(np.ceil(self.n / float(self.batch_size)))
<add> return (self.n + self.batch_size - 1) // self.batch_size # round up
<ide>
<ide> def on_epoch_end(self):
<ide> self._set_index_array() | 2 |
Python | Python | fix race condition with new fft cache | b0efcbadd5944d44f56c106dc5bc4571ebcab03e | <ide><path>numpy/fft/fftpack.py
<ide> __all__ = ['fft', 'ifft', 'rfft', 'irfft', 'hfft', 'ihfft', 'rfftn',
<ide> 'irfftn', 'rfft2', 'irfft2', 'fft2', 'ifft2', 'fftn', 'ifftn']
<ide>
<add>import threading
<add>
<ide> from numpy.core import (array, asarray, zeros, swapaxes, shape, conjugate,
<ide> take, sqrt)
<ide> from . import fftpack_lite as fftpack
<ide> from .helper import _FFTCache
<ide>
<ide> _fft_cache = _FFTCache(max_size_in_mb=100, max_item_count=32)
<ide> _real_fft_cache = _FFTCache(max_size_in_mb=100, max_item_count=32)
<add>_cache_lock = threading.Lock()
<ide>
<ide>
<ide> def _raw_fft(a, n=None, axis=-1, init_function=fftpack.cffti,
<ide> def _raw_fft(a, n=None, axis=-1, init_function=fftpack.cffti,
<ide> % n)
<ide>
<ide> try:
<del> # Thread-safety note: We rely on list.pop() here to atomically
<del> # retrieve-and-remove a wsave from the cache. This ensures that no
<del> # other thread can get the same wsave while we're using it.
<del> wsave = fft_cache.setdefault(n, []).pop()
<add> # We have to ensure that only a single thread can access a wsave array
<add> # at any given time. Thus we remove it from the cache and insert it
<add> # again after it has been used. Multiple threads might create multiple
<add> # copies of the wsave array. This is intentional and a limitation of
<add> # the current C code.
<add> with _cache_lock:
<add> wsave = fft_cache.setdefault(n, []).pop()
<ide> except (IndexError):
<ide> wsave = init_function(n)
<ide>
<ide> def _raw_fft(a, n=None, axis=-1, init_function=fftpack.cffti,
<ide> # As soon as we put wsave back into the cache, another thread could pick it
<ide> # up and start using it, so we must not do this until after we're
<ide> # completely done using it ourselves.
<del> fft_cache[n].append(wsave)
<add> with _cache_lock:
<add> fft_cache[n].append(wsave)
<ide>
<ide> return r
<ide>
<ide><path>numpy/fft/helper.py
<ide> def _prune_dict(self):
<ide> self._dict.popitem(last=False)
<ide>
<ide> def _check_size(self):
<del> item_sizes = [_i[0].nbytes for _i in self._dict.values() if _i]
<add> item_sizes = [sum(_j.nbytes for _j in _i)
<add> for _i in self._dict.values() if _i]
<ide> max_size = max(self._max_size_in_bytes, 1.5 * max(item_sizes))
<ide> return sum(item_sizes) > max_size | 2 |
Python | Python | sync the callback api with new tf.keras api | 1336cdb14ff03de754aec6899794742ca91057b2 | <ide><path>keras/callbacks.py
<ide> from collections import deque
<ide> from collections import OrderedDict
<ide> from collections import Iterable
<add>from collections import defaultdict
<ide> from .utils.generic_utils import Progbar
<ide> from . import backend as K
<ide> from .engine.training_utils import standardize_input_data
<ide> requests = None
<ide>
<ide>
<add>_TRAIN = 'train'
<add>_TEST = 'test'
<add>_PREDICT = 'predict'
<add>
<add>
<ide> class CallbackList(object):
<ide> """Container abstracting a list of callbacks.
<ide>
<ide> def __init__(self, callbacks=None, queue_length=10):
<ide> callbacks = callbacks or []
<ide> self.callbacks = [c for c in callbacks]
<ide> self.queue_length = queue_length
<add> self.params = {}
<add> self.model = None
<add> self._reset_batch_timing()
<add>
<add> def _reset_batch_timing(self):
<add> self._delta_t_batch = 0.
<add> self._delta_ts = defaultdict(lambda: deque([], maxlen=self.queue_length))
<ide>
<ide> def append(self, callback):
<ide> self.callbacks.append(callback)
<ide>
<ide> def set_params(self, params):
<add> self.params = params
<ide> for callback in self.callbacks:
<ide> callback.set_params(params)
<ide>
<ide> def set_model(self, model):
<add> self.model = model
<ide> for callback in self.callbacks:
<ide> callback.set_model(model)
<ide>
<add> def _call_batch_hook(self, mode, hook, batch, logs=None):
<add> """Helper function for all batch_{begin | end} methods."""
<add> if not self.callbacks:
<add> return
<add> hook_name = 'on_{mode}_batch_{hook}'.format(mode=mode, hook=hook)
<add> if hook == 'end':
<add> if not hasattr(self, '_t_enter_batch'):
<add> self._t_enter_batch = time.time()
<add> # Batch is ending, calculate batch time
<add> self._delta_t_batch = time.time() - self._t_enter_batch
<add>
<add> logs = logs or {}
<add> t_before_callbacks = time.time()
<add> for callback in self.callbacks:
<add> batch_hook = getattr(callback, hook_name)
<add> batch_hook(batch, logs)
<add> self._delta_ts[hook_name].append(time.time() - t_before_callbacks)
<add>
<add> delta_t_median = np.median(self._delta_ts[hook_name])
<add> if (self._delta_t_batch > 0. and
<add> delta_t_median > 0.95 * self._delta_t_batch and
<add> delta_t_median > 0.1):
<add> warnings.warn(
<add> 'Method (%s) is slow compared '
<add> 'to the batch update (%f). Check your callbacks.', hook_name,
<add> delta_t_median)
<add> if hook == 'begin':
<add> self._t_enter_batch = time.time()
<add>
<add> def _call_begin_hook(self, mode):
<add> """Helper function for on_{train|test|predict}_begin methods."""
<add> if mode == _TRAIN:
<add> self.on_train_begin()
<add> elif mode == _TEST:
<add> self.on_test_begin()
<add> else:
<add> self.on_predict_begin()
<add>
<add> def _call_end_hook(self, mode):
<add> """Helper function for on_{train|test|predict}_end methods."""
<add> if mode == _TRAIN:
<add> self.on_train_end()
<add> elif mode == _TEST:
<add> self.on_test_end()
<add> else:
<add> self.on_predict_end()
<add>
<add> def on_batch_begin(self, batch, logs=None):
<add> self._call_batch_hook(_TRAIN, 'begin', batch, logs=logs)
<add>
<add> def on_batch_end(self, batch, logs=None):
<add> self._call_batch_hook(_TRAIN, 'end', batch, logs=logs)
<add>
<ide> def on_epoch_begin(self, epoch, logs=None):
<del> """Called at the start of an epoch.
<add> """Calls the `on_epoch_begin` methods of its callbacks.
<add>
<add> This function should only be called during train mode.
<ide>
<ide> # Arguments
<ide> epoch: integer, index of epoch.
<del> logs: dictionary of logs.
<add> logs: dict, Currently no data is passed to this argument for this method
<add> but that may change in the future.
<ide> """
<ide> logs = logs or {}
<ide> for callback in self.callbacks:
<ide> callback.on_epoch_begin(epoch, logs)
<del> self._delta_t_batch = 0.
<del> self._delta_ts_batch_begin = deque([], maxlen=self.queue_length)
<del> self._delta_ts_batch_end = deque([], maxlen=self.queue_length)
<add> self._reset_batch_timing()
<ide>
<ide> def on_epoch_end(self, epoch, logs=None):
<del> """Called at the end of an epoch.
<add> """Calls the `on_epoch_end` methods of its callbacks.
<add>
<add> This function should only be called during train mode.
<ide>
<ide> # Arguments
<ide> epoch: integer, index of epoch.
<del> logs: dictionary of logs.
<add> logs: dict, metric results for this training epoch, and for the
<add> validation epoch if validation is performed. Validation result keys
<add> are prefixed with `val_`.
<ide> """
<ide> logs = logs or {}
<ide> for callback in self.callbacks:
<ide> callback.on_epoch_end(epoch, logs)
<ide>
<del> def on_batch_begin(self, batch, logs=None):
<del> """Called right before processing a batch.
<add> def on_train_batch_begin(self, batch, logs=None):
<add> """Calls the `on_train_batch_begin` methods of its callbacks.
<ide>
<ide> # Arguments
<ide> batch: integer, index of batch within the current epoch.
<del> logs: dictionary of logs.
<add> logs: dict, has keys `batch` and `size` representing the current
<add> batch number and the size of the batch.
<ide> """
<del> logs = logs or {}
<del> t_before_callbacks = time.time()
<del> for callback in self.callbacks:
<del> callback.on_batch_begin(batch, logs)
<del> self._delta_ts_batch_begin.append(time.time() - t_before_callbacks)
<del> delta_t_median = np.median(self._delta_ts_batch_begin)
<del> if (self._delta_t_batch > 0. and
<del> delta_t_median > 0.95 * self._delta_t_batch and
<del> delta_t_median > 0.1):
<del> warnings.warn('Method on_batch_begin() is slow compared '
<del> 'to the batch update (%f). Check your callbacks.'
<del> % delta_t_median)
<del> self._t_enter_batch = time.time()
<add> self._call_batch_hook(_TRAIN, 'begin', batch, logs=logs)
<ide>
<del> def on_batch_end(self, batch, logs=None):
<del> """Called at the end of a batch.
<add> def on_train_batch_end(self, batch, logs=None):
<add> """Calls the `on_train_batch_end` methods of its callbacks.
<ide>
<ide> # Arguments
<ide> batch: integer, index of batch within the current epoch.
<del> logs: dictionary of logs.
<add> logs: dict, metric results for this batch.
<ide> """
<del> logs = logs or {}
<del> if not hasattr(self, '_t_enter_batch'):
<del> self._t_enter_batch = time.time()
<del> self._delta_t_batch = time.time() - self._t_enter_batch
<del> t_before_callbacks = time.time()
<del> for callback in self.callbacks:
<del> callback.on_batch_end(batch, logs)
<del> self._delta_ts_batch_end.append(time.time() - t_before_callbacks)
<del> delta_t_median = np.median(self._delta_ts_batch_end)
<del> if (self._delta_t_batch > 0. and
<del> (delta_t_median > 0.95 * self._delta_t_batch and delta_t_median > 0.1)):
<del> warnings.warn('In your callbacks, method `on_batch_end()` '
<del> 'is slow compared to a model step '
<del> '(%f vs %f). Check your callbacks.'
<del> % (delta_t_median, self._delta_t_batch))
<add> self._call_batch_hook(_TRAIN, 'end', batch, logs=logs)
<add>
<add> def on_test_batch_begin(self, batch, logs=None):
<add> """Calls the `on_test_batch_begin` methods of its callbacks.
<add>
<add> # Arguments
<add> batch: integer, index of batch within the current epoch.
<add> logs: dict, has keys `batch` and `size` representing the current
<add> batch number and the size of the batch.
<add> """
<add> self._call_batch_hook(_TEST, 'begin', batch, logs=logs)
<add>
<add> def on_test_batch_end(self, batch, logs=None):
<add> """Calls the `on_test_batch_end` methods of its callbacks.
<add>
<add> # Arguments
<add> batch: integer, index of batch within the current epoch.
<add> logs: dict, metric results for this batch.
<add> """
<add> self._call_batch_hook(_TEST, 'end', batch, logs=logs)
<add>
<add> def on_predict_batch_begin(self, batch, logs=None):
<add> """Calls the `on_predict_batch_begin` methods of its callbacks.
<add>
<add> # Arguments
<add> batch: integer, index of batch within the current epoch.
<add> logs: dict, has keys `batch` and `size` representing the current
<add> batch number and the size of the batch.
<add> """
<add> self._call_batch_hook(_PREDICT, 'begin', batch, logs=logs)
<add>
<add> def on_predict_batch_end(self, batch, logs=None):
<add> """Calls the `on_predict_batch_end` methods of its callbacks.
<add>
<add> # Argument
<add> batch: integer, index of batch within the current epoch.
<add> logs: dict, metric results for this batch.
<add> """
<add> self._call_batch_hook(_PREDICT, 'end', batch, logs=logs)
<ide>
<ide> def on_train_begin(self, logs=None):
<del> """Called at the beginning of training.
<add> """Calls the `on_train_begin` methods of its callbacks.
<ide>
<ide> # Arguments
<del> logs: dictionary of logs.
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<ide> """
<del> logs = logs or {}
<ide> for callback in self.callbacks:
<ide> callback.on_train_begin(logs)
<ide>
<ide> def on_train_end(self, logs=None):
<del> """Called at the end of training.
<add> """Calls the `on_train_end` methods of its callbacks.
<ide>
<ide> # Arguments
<del> logs: dictionary of logs.
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<ide> """
<del> logs = logs or {}
<ide> for callback in self.callbacks:
<ide> callback.on_train_end(logs)
<ide>
<add> def on_test_begin(self, logs=None):
<add> """Calls the `on_test_begin` methods of its callbacks.
<add>
<add> # Arguments
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<add> """
<add> for callback in self.callbacks:
<add> callback.on_test_begin(logs)
<add>
<add> def on_test_end(self, logs=None):
<add> """Calls the `on_test_end` methods of its callbacks.
<add>
<add> # Arguments
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<add> """
<add> for callback in self.callbacks:
<add> callback.on_test_end(logs)
<add>
<add> def on_predict_begin(self, logs=None):
<add> """Calls the `on_predict_begin` methods of its callbacks.
<add>
<add> # Arguments
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<add> """
<add> for callback in self.callbacks:
<add> callback.on_predict_begin(logs)
<add>
<add> def on_predict_end(self, logs=None):
<add> """Calls the `on_predict_end` methods of its callbacks.
<add>
<add> # Arguments
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<add> """
<add> for callback in self.callbacks:
<add> callback.on_predict_end(logs)
<add>
<ide> def __iter__(self):
<ide> return iter(self.callbacks)
<ide>
<ide> def set_params(self, params):
<ide> def set_model(self, model):
<ide> self.model = model
<ide>
<add> def on_batch_begin(self, batch, logs=None):
<add> """A backwards compatibility alias for `on_train_batch_begin`."""
<add>
<add> def on_batch_end(self, batch, logs=None):
<add> """A backwards compatibility alias for `on_train_batch_end`."""
<add>
<ide> def on_epoch_begin(self, epoch, logs=None):
<del> pass
<add> """Called at the start of an epoch.
<add>
<add> Subclasses should override for any actions to run. This function should only
<add> be called during train mode.
<add>
<add> # Arguments
<add> epoch: integer, index of epoch.
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<add> """
<ide>
<ide> def on_epoch_end(self, epoch, logs=None):
<del> pass
<add> """Called at the end of an epoch.
<ide>
<del> def on_batch_begin(self, batch, logs=None):
<del> pass
<add> Subclasses should override for any actions to run. This function should only
<add> be called during train mode.
<ide>
<del> def on_batch_end(self, batch, logs=None):
<del> pass
<add> # Arguments
<add> epoch: integer, index of epoch.
<add> logs: dict, metric results for this training epoch, and for the
<add> validation epoch if validation is performed. Validation result keys
<add> are prefixed with `val_`.
<add> """
<add>
<add> def on_train_batch_begin(self, batch, logs=None):
<add> """Called at the beginning of a training batch in `fit` methods.
<add>
<add> Subclasses should override for any actions to run.
<add>
<add> # Arguments
<add> batch: integer, index of batch within the current epoch.
<add> logs: dict, has keys `batch` and `size` representing the current
<add> batch number and the size of the batch.
<add> """
<add> # For backwards compatibility
<add> self.on_batch_begin(batch, logs=logs)
<add>
<add> def on_train_batch_end(self, batch, logs=None):
<add> """Called at the end of a training batch in `fit` methods.
<add>
<add> Subclasses should override for any actions to run.
<add>
<add> # Arguments
<add> batch: integer, index of batch within the current epoch.
<add> logs: dict, metric results for this batch.
<add> """
<add> # For backwards compatibility
<add> self.on_batch_end(batch, logs=logs)
<add>
<add> def on_test_batch_begin(self, batch, logs=None):
<add> """Called at the beginning of a batch in `evaluate` methods.
<add>
<add> Also called at the beginning of a validation batch in the `fit` methods,
<add> if validation data is provided.
<add>
<add> Subclasses should override for any actions to run.
<add>
<add> # Arguments
<add> batch: integer, index of batch within the current epoch.
<add> logs: dict, has keys `batch` and `size` representing the current
<add> batch number and the size of the batch.
<add> """
<add>
<add> def on_test_batch_end(self, batch, logs=None):
<add> """Called at the end of a batch in `evaluate` methods.
<add>
<add> Also called at the end of a validation batch in the `fit` methods,
<add> if validation data is provided.
<add>
<add> Subclasses should override for any actions to run.
<add>
<add> # Arguments
<add> batch: integer, index of batch within the current epoch.
<add> logs: dict, metric results for this batch.
<add> """
<add>
<add> def on_predict_batch_begin(self, batch, logs=None):
<add> """Called at the beginning of a batch in `predict` methods.
<add>
<add> Subclasses should override for any actions to run.
<add>
<add> # Arguments
<add> batch: integer, index of batch within the current epoch.
<add> logs: dict, has keys `batch` and `size` representing the current
<add> batch number and the size of the batch.
<add> """
<add>
<add> def on_predict_batch_end(self, batch, logs=None):
<add> """Called at the end of a batch in `predict` methods.
<add>
<add> Subclasses should override for any actions to run.
<add>
<add> # Arguments
<add> batch: integer, index of batch within the current epoch.
<add> logs: dict, metric results for this batch.
<add> """
<ide>
<ide> def on_train_begin(self, logs=None):
<del> pass
<add> """Called at the beginning of training.
<add>
<add> Subclasses should override for any actions to run.
<add>
<add> # Arguments
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<add> """
<ide>
<ide> def on_train_end(self, logs=None):
<del> pass
<add> """Called at the end of training.
<add>
<add> Subclasses should override for any actions to run.
<add>
<add> # Arguments
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<add> """
<add>
<add> def on_test_begin(self, logs=None):
<add> """Called at the beginning of evaluation or validation.
<add>
<add> Subclasses should override for any actions to run.
<add>
<add> # Arguments
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<add> """
<add>
<add> def on_test_end(self, logs=None):
<add> """Called at the end of evaluation or validation.
<add>
<add> Subclasses should override for any actions to run.
<add>
<add> # Arguments
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<add> """
<add>
<add> def on_predict_begin(self, logs=None):
<add> """Called at the beginning of prediction.
<add>
<add> Subclasses should override for any actions to run.
<add>
<add> # Arguments
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<add> """
<add>
<add> def on_predict_end(self, logs=None):
<add> """Called at the end of prediction.
<add>
<add> Subclasses should override for any actions to run.
<add>
<add> # Arguments
<add> logs: dict, currently no data is passed to this argument for this method
<add> but that may change in the future.
<add> """
<ide>
<ide>
<ide> class BaseLogger(Callback):
<ide><path>keras/engine/training.py
<ide> def _standardize_user_data(self, x,
<ide> str(x[0].shape[0]) + ' samples')
<ide> return x, y, sample_weights
<ide>
<add> def _get_callback_model(self):
<add> """Returns the Callback Model for this Model."""
<add> if hasattr(self, 'callback_model') and self.callback_model:
<add> return self.callback_model
<add> return self
<add>
<ide> def fit(self,
<ide> x=None,
<ide> y=None,
<ide> def fit(self,
<ide> verbose: Integer. 0, 1, or 2. Verbosity mode.
<ide> 0 = silent, 1 = progress bar, 2 = one line per epoch.
<ide> callbacks: List of `keras.callbacks.Callback` instances.
<del> List of callbacks to apply during training.
<add> List of callbacks to apply during training and validation
<add> (if ).
<ide> See [callbacks](/callbacks).
<ide> validation_split: Float between 0 and 1.
<ide> Fraction of the training data to be used as validation data.
<ide> def evaluate(self, x=None, y=None,
<ide> batch_size=None,
<ide> verbose=1,
<ide> sample_weight=None,
<del> steps=None):
<add> steps=None,
<add> callbacks=None):
<ide> """Returns the loss value & metrics values for the model in test mode.
<ide>
<ide> Computation is done in batches.
<ide> def evaluate(self, x=None, y=None,
<ide> Total number of steps (batches of samples)
<ide> before declaring the evaluation round finished.
<ide> Ignored with the default value of `None`.
<add> callbacks: List of `keras.callbacks.Callback` instances.
<add> List of callbacks to apply during evaluation.
<add> See [callbacks](/callbacks).
<ide>
<ide> # Returns
<ide> Scalar test loss (if the model has a single output and no metrics)
<ide> def evaluate(self, x=None, y=None,
<ide> return training_arrays.test_loop(self, f, ins,
<ide> batch_size=batch_size,
<ide> verbose=verbose,
<del> steps=steps)
<add> steps=steps,
<add> callbacks=callbacks)
<ide>
<ide> def predict(self, x,
<ide> batch_size=None,
<ide> verbose=0,
<del> steps=None):
<add> steps=None,
<add> callbacks=None):
<ide> """Generates output predictions for the input samples.
<ide>
<ide> Computation is done in batches.
<ide> def predict(self, x,
<ide> steps: Total number of steps (batches of samples)
<ide> before declaring the prediction round finished.
<ide> Ignored with the default value of `None`.
<add> callbacks: List of `keras.callbacks.Callback` instances.
<add> List of callbacks to apply during prediction.
<add> See [callbacks](/callbacks).
<ide>
<ide> # Returns
<ide> Numpy array(s) of predictions.
<ide> def predict(self, x,
<ide> return training_arrays.predict_loop(self, f, ins,
<ide> batch_size=batch_size,
<ide> verbose=verbose,
<del> steps=steps)
<add> steps=steps,
<add> callbacks=callbacks)
<ide>
<ide> def train_on_batch(self, x, y,
<ide> sample_weight=None,
<ide> def generate_arrays_from_file(path):
<ide> @interfaces.legacy_generator_methods_support
<ide> def evaluate_generator(self, generator,
<ide> steps=None,
<add> callbacks=None,
<ide> max_queue_size=10,
<ide> workers=1,
<ide> use_multiprocessing=False,
<ide> def evaluate_generator(self, generator,
<ide> to yield from `generator` before stopping.
<ide> Optional for `Sequence`: if unspecified, will use
<ide> the `len(generator)` as a number of steps.
<add> callbacks: List of `keras.callbacks.Callback` instances.
<add> List of callbacks to apply during training.
<add> See [callbacks](/callbacks).
<ide> max_queue_size: maximum size for the generator queue
<ide> workers: Integer. Maximum number of processes to spin up
<ide> when using process based threading.
<ide> def evaluate_generator(self, generator,
<ide> return training_generator.evaluate_generator(
<ide> self, generator,
<ide> steps=steps,
<add> callbacks=callbacks,
<ide> max_queue_size=max_queue_size,
<ide> workers=workers,
<ide> use_multiprocessing=use_multiprocessing,
<ide> def evaluate_generator(self, generator,
<ide> @interfaces.legacy_generator_methods_support
<ide> def predict_generator(self, generator,
<ide> steps=None,
<add> callbacks=None,
<ide> max_queue_size=10,
<ide> workers=1,
<ide> use_multiprocessing=False,
<ide> def predict_generator(self, generator,
<ide> to yield from `generator` before stopping.
<ide> Optional for `Sequence`: if unspecified, will use
<ide> the `len(generator)` as a number of steps.
<add> callbacks: List of `keras.callbacks.Callback` instances.
<add> List of callbacks to apply during training.
<add> See [callbacks](/callbacks).
<ide> max_queue_size: Maximum size for the generator queue.
<ide> workers: Integer. Maximum number of processes to spin up
<ide> when using process based threading.
<ide> def predict_generator(self, generator,
<ide> return training_generator.predict_generator(
<ide> self, generator,
<ide> steps=steps,
<add> callbacks=callbacks,
<ide> max_queue_size=max_queue_size,
<ide> workers=workers,
<ide> use_multiprocessing=use_multiprocessing,
<ide><path>keras/engine/training_arrays.py
<ide> def fit_loop(model, fit_function, fit_inputs,
<ide> batch_size: Integer batch size or None if unknown.
<ide> epochs: Number of times to iterate over the data
<ide> verbose: Verbosity mode, 0, 1 or 2
<del> callbacks: List of callbacks to be called during training
<add> callbacks: List of callbacks to be called during training and validation
<add> (if `val_function` and `val_inputs` are not `None`).
<ide> val_function: Keras function to call for validation
<ide> val_inputs: List of tensors to be fed to `val_function`
<ide> shuffle: Whether to shuffle the data at the beginning of each epoch
<ide> def fit_loop(model, fit_function, fit_inputs,
<ide>
<ide> # it's possible to callback a different model than itself
<ide> # (used by Sequential models)
<del> if hasattr(model, 'callback_model') and model.callback_model:
<del> callback_model = model.callback_model
<del> else:
<del> callback_model = model
<add> callback_model = model._get_callback_model()
<ide>
<ide> callbacks.set_model(callback_model)
<ide> callbacks.set_params({
<ide> def fit_loop(model, fit_function, fit_inputs,
<ide> 'do_validation': do_validation,
<ide> 'metrics': callback_metrics or [],
<ide> })
<del> callbacks.on_train_begin()
<del> callback_model.stop_training = False
<add> callbacks._call_begin_hook('train')
<add> callbacks.model.stop_training = False
<ide> for cbk in callbacks:
<ide> cbk.validation_data = val_inputs
<ide>
<ide> def fit_loop(model, fit_function, fit_inputs,
<ide> epoch_logs = {}
<ide> if steps_per_epoch is not None:
<ide> for step_index in range(steps_per_epoch):
<del> batch_logs = {}
<del> batch_logs['batch'] = step_index
<del> batch_logs['size'] = 1
<del> callbacks.on_batch_begin(step_index, batch_logs)
<add> batch_logs = {'batch': step_index, 'size': 1}
<add> callbacks._call_batch_hook('train', 'begin', step_index, batch_logs)
<ide> outs = fit_function(fit_inputs)
<ide>
<ide> outs = to_list(outs)
<ide> for l, o in zip(out_labels, outs):
<ide> batch_logs[l] = o
<ide>
<del> callbacks.on_batch_end(step_index, batch_logs)
<add> callbacks._call_batch_hook('train', 'end', step_index, batch_logs)
<ide> if callback_model.stop_training:
<ide> break
<ide>
<ide> if do_validation:
<ide> val_outs = test_loop(model, val_function, val_inputs,
<ide> steps=validation_steps,
<add> callbacks=callbacks,
<ide> verbose=0)
<ide> val_outs = to_list(val_outs)
<ide> # Same labels assumed.
<ide> def fit_loop(model, fit_function, fit_inputs,
<ide> raise TypeError('TypeError while preparing batch. '
<ide> 'If using HDF5 input data, '
<ide> 'pass shuffle="batch".')
<del> batch_logs = {}
<del> batch_logs['batch'] = batch_index
<del> batch_logs['size'] = len(batch_ids)
<del> callbacks.on_batch_begin(batch_index, batch_logs)
<add> batch_logs = {'batch': batch_index, 'size': len(batch_ids)}
<add> callbacks._call_batch_hook('train', 'begin', batch_index, batch_logs)
<ide> for i in indices_for_conversion_to_dense:
<ide> ins_batch[i] = ins_batch[i].toarray()
<ide>
<ide> def fit_loop(model, fit_function, fit_inputs,
<ide> for l, o in zip(out_labels, outs):
<ide> batch_logs[l] = o
<ide>
<del> callbacks.on_batch_end(batch_index, batch_logs)
<del> if callback_model.stop_training:
<add> callbacks._call_batch_hook('train', 'end', batch_index, batch_logs)
<add> if callbacks.model.stop_training:
<ide> break
<ide>
<ide> if batch_index == len(batches) - 1: # Last batch.
<ide> if do_validation:
<ide> val_outs = test_loop(model, val_function, val_inputs,
<ide> batch_size=batch_size,
<add> callbacks=callbacks,
<ide> verbose=0)
<ide> val_outs = to_list(val_outs)
<ide> # Same labels assumed.
<ide> for l, o in zip(out_labels, val_outs):
<ide> epoch_logs['val_' + l] = o
<ide> callbacks.on_epoch_end(epoch, epoch_logs)
<del> if callback_model.stop_training:
<add> if callbacks.model.stop_training:
<ide> break
<del> callbacks.on_train_end()
<add> callbacks._call_end_hook('train')
<ide> return model.history
<ide>
<ide>
<del>def predict_loop(model, f, ins, batch_size=32, verbose=0, steps=None):
<add>def predict_loop(model, f, ins,
<add> batch_size=32,
<add> verbose=0,
<add> steps=None,
<add> callbacks=None):
<ide> """Abstract method to loop over some data in batches.
<ide>
<ide> # Arguments
<ide> def predict_loop(model, f, ins, batch_size=32, verbose=0, steps=None):
<ide> steps: Total number of steps (batches of samples)
<ide> before declaring `predict_loop` finished.
<ide> Ignored with the default value of `None`.
<add> callbacks: List of callbacks or an instance of
<add> `keras.callbacks.CallbackList` to be called during prediction.
<ide>
<ide> # Returns
<ide> Array of predictions (if the model has a single output)
<ide> def predict_loop(model, f, ins, batch_size=32, verbose=0, steps=None):
<ide> batch_size=batch_size,
<ide> steps=steps,
<ide> steps_name='steps')
<add>
<add> # Check if callbacks have not been already configured
<add> if not isinstance(callbacks, cbks.CallbackList):
<add> callbacks = cbks.CallbackList(callbacks)
<add> callback_model = model._get_callback_model()
<add> callbacks.set_model(callback_model)
<add> callback_params = {
<add> 'batch_size': batch_size,
<add> 'steps': steps,
<add> 'samples': num_samples,
<add> 'verbose': verbose,
<add> }
<add> callbacks.set_params(callback_params)
<add>
<ide> if verbose == 1:
<ide> if steps is not None:
<ide> progbar = Progbar(target=steps)
<ide> def predict_loop(model, f, ins, batch_size=32, verbose=0, steps=None):
<ide> if issparse(ins[i]) and not K.is_sparse(model._feed_inputs[i]):
<ide> indices_for_conversion_to_dense.append(i)
<ide>
<add> callbacks.model.stop_training = False
<add> callbacks._call_begin_hook('predict')
<add>
<ide> if steps is not None:
<ide> # Step-based predictions.
<ide> # Since we do not know how many samples
<ide> def predict_loop(model, f, ins, batch_size=32, verbose=0, steps=None):
<ide> # and concatenate them upon returning.
<ide> unconcatenated_outs = []
<ide> for step in range(steps):
<add> batch_logs = {'batch': step, 'size': 1}
<add> callbacks._call_batch_hook('predict', 'begin', step, batch_logs)
<ide> batch_outs = f(ins)
<ide> batch_outs = to_list(batch_outs)
<ide> if step == 0:
<ide> for batch_out in batch_outs:
<ide> unconcatenated_outs.append([])
<ide> for i, batch_out in enumerate(batch_outs):
<ide> unconcatenated_outs[i].append(batch_out)
<add>
<add> batch_logs['outputs'] = batch_outs
<add> callbacks._call_batch_hook('predict', 'end', step, batch_logs)
<ide> if verbose == 1:
<ide> progbar.update(step + 1)
<add> callbacks.on_predict_end()
<ide> if len(unconcatenated_outs) == 1:
<ide> return np.concatenate(unconcatenated_outs[0], axis=0)
<ide> return [np.concatenate(unconcatenated_outs[i], axis=0)
<ide> def predict_loop(model, f, ins, batch_size=32, verbose=0, steps=None):
<ide> for i in indices_for_conversion_to_dense:
<ide> ins_batch[i] = ins_batch[i].toarray()
<ide>
<add> batch_logs = {'batch': batch_index, 'size': len(batch_ids)}
<add> callbacks._call_batch_hook('predict', 'begin', batch_index, batch_logs)
<ide> batch_outs = f(ins_batch)
<ide> batch_outs = to_list(batch_outs)
<ide> if batch_index == 0:
<ide> def predict_loop(model, f, ins, batch_size=32, verbose=0, steps=None):
<ide> outs.append(np.zeros(shape, dtype=batch_out.dtype))
<ide> for i, batch_out in enumerate(batch_outs):
<ide> outs[i][batch_start:batch_end] = batch_out
<add>
<add> batch_logs['outputs'] = batch_outs
<add> callbacks._call_batch_hook('predict', 'end', batch_index, batch_logs)
<ide> if verbose == 1:
<ide> progbar.update(batch_end)
<add> callbacks._call_end_hook('predict')
<ide> return unpack_singleton(outs)
<ide>
<ide>
<del>def test_loop(model, f, ins, batch_size=None, verbose=0, steps=None):
<add>def test_loop(model, f, ins,
<add> batch_size=None,
<add> verbose=0,
<add> steps=None,
<add> callbacks=None):
<ide> """Abstract method to loop over some data in batches.
<ide>
<ide> # Arguments
<ide> def test_loop(model, f, ins, batch_size=None, verbose=0, steps=None):
<ide> steps: Total number of steps (batches of samples)
<ide> before declaring predictions finished.
<ide> Ignored with the default value of `None`.
<add> callbacks: List of callbacks or an instance of
<add> `keras.callbacks.CallbackList` to be called during evaluation.
<ide>
<ide> # Returns
<ide> Scalar loss (if the model has a single output and no metrics)
<ide> def test_loop(model, f, ins, batch_size=None, verbose=0, steps=None):
<ide> batch_size=batch_size,
<ide> steps=steps,
<ide> steps_name='steps')
<add>
<add> # Check if callbacks have not been already configured
<add> if not isinstance(callbacks, cbks.CallbackList):
<add> callbacks = cbks.CallbackList(callbacks)
<add> callback_model = model._get_callback_model()
<add> callbacks.set_model(callback_model)
<add> callback_metrics = []
<add> if hasattr(model, 'metrics_names'):
<add> callback_metrics = list(model.metrics_names)
<add> callback_params = {
<add> 'batch_size': batch_size,
<add> 'steps': steps,
<add> 'samples': num_samples,
<add> 'verbose': verbose,
<add> 'metrics': callback_metrics,
<add> }
<add> callbacks.set_params(callback_params)
<add>
<ide> outs = []
<ide> if verbose == 1:
<ide> if steps is not None:
<ide> def test_loop(model, f, ins, batch_size=None, verbose=0, steps=None):
<ide> if issparse(ins[i]) and not K.is_sparse(feed[i]):
<ide> indices_for_conversion_to_dense.append(i)
<ide>
<add> callbacks.model.stop_training = False
<add> callbacks._call_begin_hook('test')
<add>
<ide> if steps is not None:
<ide> for step in range(steps):
<add> batch_logs = {'batch': step, 'size': 1}
<add> callbacks._call_batch_hook('test', 'begin', step, batch_logs)
<ide> batch_outs = f(ins)
<ide> if isinstance(batch_outs, list):
<ide> if step == 0:
<ide> def test_loop(model, f, ins, batch_size=None, verbose=0, steps=None):
<ide> if step == 0:
<ide> outs.append(0.)
<ide> outs[0] += batch_outs
<add>
<add> if hasattr(model, 'metrics_names'):
<add> for l, o in zip(model.metrics_names, batch_outs):
<add> batch_logs[l] = o
<add> callbacks._call_batch_hook('test', 'end', step, batch_logs)
<add>
<ide> if verbose == 1:
<ide> progbar.update(step + 1)
<ide> for i in range(len(outs)):
<ide> def test_loop(model, f, ins, batch_size=None, verbose=0, steps=None):
<ide> for i in indices_for_conversion_to_dense:
<ide> ins_batch[i] = ins_batch[i].toarray()
<ide>
<add> batch_logs = {'batch': batch_index, 'size': len(batch_ids)}
<add> callbacks._call_batch_hook('test', 'begin', batch_index, batch_logs)
<ide> batch_outs = f(ins_batch)
<ide> if isinstance(batch_outs, list):
<ide> if batch_index == 0:
<ide> def test_loop(model, f, ins, batch_size=None, verbose=0, steps=None):
<ide> outs.append(0.)
<ide> outs[0] += batch_outs * len(batch_ids)
<ide>
<add> if hasattr(model, 'metrics_names'):
<add> for l, o in zip(model.metrics_names, batch_outs):
<add> batch_logs[l] = o
<add> callbacks._call_batch_hook('test', 'end', batch_index, batch_logs)
<add>
<ide> if verbose == 1:
<ide> progbar.update(batch_end)
<ide> for i in range(len(outs)):
<ide> if i not in stateful_metric_indices:
<ide> outs[i] /= num_samples
<add> callbacks._call_end_hook('test')
<ide> return unpack_singleton(outs)
<ide><path>keras/engine/training_generator.py
<ide> def fit_generator(model,
<ide> callbacks = cbks.CallbackList(_callbacks)
<ide>
<ide> # it's possible to callback a different model than self:
<del> if hasattr(model, 'callback_model') and model.callback_model:
<del> callback_model = model.callback_model
<del> else:
<del> callback_model = model
<add> callback_model = model._get_callback_model()
<add>
<ide> callbacks.set_model(callback_model)
<ide> callbacks.set_params({
<ide> 'epochs': epochs,
<ide> def fit_generator(model,
<ide> 'do_validation': do_validation,
<ide> 'metrics': callback_metrics,
<ide> })
<del> callbacks.on_train_begin()
<add> callbacks._call_begin_hook('train')
<ide>
<ide> enqueuer = None
<ide> val_enqueuer = None
<ide> def fit_generator(model,
<ide> else:
<ide> output_generator = generator
<ide>
<del> callback_model.stop_training = False
<add> callbacks.model.stop_training = False
<ide> # Construct epoch logs.
<ide> epoch_logs = {}
<ide> while epoch < epochs:
<ide> def fit_generator(model,
<ide> 'a tuple `(x, y, sample_weight)` '
<ide> 'or `(x, y)`. Found: ' +
<ide> str(generator_output))
<del> # build batch logs
<del> batch_logs = {}
<ide> if x is None or len(x) == 0:
<ide> # Handle data tensors support when no input given
<ide> # step-size = 1 for data tensors
<ide> def fit_generator(model,
<ide> batch_size = list(x.values())[0].shape[0]
<ide> else:
<ide> batch_size = x.shape[0]
<del> batch_logs['batch'] = batch_index
<del> batch_logs['size'] = batch_size
<add> # build batch logs
<add> batch_logs = {'batch': batch_index, 'size': batch_size}
<ide> callbacks.on_batch_begin(batch_index, batch_logs)
<ide>
<ide> outs = model.train_on_batch(x, y,
<ide> def fit_generator(model,
<ide> for l, o in zip(out_labels, outs):
<ide> batch_logs[l] = o
<ide>
<del> callbacks.on_batch_end(batch_index, batch_logs)
<add> callbacks._call_batch_hook('train', 'end', batch_index, batch_logs)
<ide>
<ide> batch_index += 1
<ide> steps_done += 1
<ide>
<ide> # Epoch finished.
<ide> if steps_done >= steps_per_epoch and do_validation:
<add> # Note that `callbacks` here is an instance of
<add> # `keras.callbacks.CallbackList`
<ide> if val_gen:
<ide> val_outs = model.evaluate_generator(
<ide> val_enqueuer_gen,
<ide> validation_steps,
<add> callbacks=callbacks,
<ide> workers=0)
<ide> else:
<ide> # No need for try/except because
<ide> def fit_generator(model,
<ide> val_x, val_y,
<ide> batch_size=batch_size,
<ide> sample_weight=val_sample_weights,
<add> callbacks=callbacks,
<ide> verbose=0)
<ide> val_outs = to_list(val_outs)
<ide> # Same labels assumed.
<ide> for l, o in zip(out_labels, val_outs):
<ide> epoch_logs['val_' + l] = o
<ide>
<del> if callback_model.stop_training:
<add> if callbacks.model.stop_training:
<ide> break
<ide>
<ide> callbacks.on_epoch_end(epoch, epoch_logs)
<ide> epoch += 1
<del> if callback_model.stop_training:
<add> if callbacks.model.stop_training:
<ide> break
<ide>
<ide> finally:
<ide> def fit_generator(model,
<ide> if val_enqueuer is not None:
<ide> val_enqueuer.stop()
<ide>
<del> callbacks.on_train_end()
<add> callbacks._call_end_hook('train')
<ide> return model.history
<ide>
<ide>
<ide> def evaluate_generator(model, generator,
<ide> steps=None,
<add> callbacks=None,
<ide> max_queue_size=10,
<ide> workers=1,
<ide> use_multiprocessing=False,
<ide> def evaluate_generator(model, generator,
<ide> ' `keras.utils.Sequence` class.')
<ide> enqueuer = None
<ide>
<add> # Check if callbacks have not been already configured
<add> if not isinstance(callbacks, cbks.CallbackList):
<add> callbacks = cbks.CallbackList(callbacks)
<add> callback_model = model._get_callback_model()
<add> callbacks.set_model(callback_model)
<add> callback_metrics = []
<add> if hasattr(model, 'metrics_names'):
<add> callback_metrics = list(model.metrics_names)
<add> callback_params = {
<add> 'steps': steps,
<add> 'verbose': verbose,
<add> 'metrics': callback_metrics,
<add> }
<add> callbacks.set_params(callback_params)
<add>
<add> callbacks.model.stop_training = False
<add> callbacks._call_begin_hook('test')
<add>
<ide> try:
<ide> if workers > 0:
<ide> if use_sequence_api:
<ide> def evaluate_generator(model, generator,
<ide> '(x, y, sample_weight) '
<ide> 'or (x, y). Found: ' +
<ide> str(generator_output))
<del> outs = model.test_on_batch(x, y, sample_weight=sample_weight)
<del> outs = to_list(outs)
<del> outs_per_batch.append(outs)
<ide>
<ide> if x is None or len(x) == 0:
<ide> # Handle data tensors support when no input given
<ide> def evaluate_generator(model, generator,
<ide> raise ValueError('Received an empty batch. '
<ide> 'Batches should contain '
<ide> 'at least one item.')
<add>
<add> batch_logs = {'batch': steps_done, 'size': batch_size}
<add> callbacks._call_batch_hook('test', 'begin', steps_done, batch_logs)
<add> outs = model.test_on_batch(x, y, sample_weight=sample_weight)
<add> outs = to_list(outs)
<add> outs_per_batch.append(outs)
<add>
<add> if hasattr(model, 'metrics_names'):
<add> for l, o in zip(model.metrics_names, outs):
<add> batch_logs[l] = o
<add> callbacks._call_batch_hook('test', 'end', steps_done, batch_logs)
<add>
<ide> steps_done += 1
<ide> batch_sizes.append(batch_size)
<add>
<ide> if verbose == 1:
<ide> progbar.update(steps_done)
<add> callbacks._call_end_hook('test')
<ide>
<ide> finally:
<ide> if enqueuer is not None:
<ide> def evaluate_generator(model, generator,
<ide>
<ide> def predict_generator(model, generator,
<ide> steps=None,
<add> callbacks=None,
<ide> max_queue_size=10,
<ide> workers=1,
<ide> use_multiprocessing=False,
<ide> def predict_generator(model, generator,
<ide> ' `keras.utils.Sequence` class.')
<ide> enqueuer = None
<ide>
<add> # Check if callbacks have not been already configured
<add> if not isinstance(callbacks, cbks.CallbackList):
<add> callbacks = cbks.CallbackList(callbacks)
<add> callback_model = model._get_callback_model()
<add> callbacks.set_model(callback_model)
<add> callback_params = {
<add> 'steps': steps,
<add> 'verbose': verbose,
<add> }
<add> callbacks.set_params(callback_params)
<add>
<add> callbacks.model.stop_training = False
<add> callbacks._call_begin_hook('predict')
<add>
<ide> try:
<ide> if workers > 0:
<ide> if use_sequence_api:
<ide> def predict_generator(model, generator,
<ide> # yields inputs (not targets and sample weights).
<ide> x = generator_output
<ide>
<add> if x is None or len(x) == 0:
<add> # Handle data tensors support when no input given
<add> # step-size = 1 for data tensors
<add> batch_size = 1
<add> elif isinstance(x, list):
<add> batch_size = x[0].shape[0]
<add> elif isinstance(x, dict):
<add> batch_size = list(x.values())[0].shape[0]
<add> else:
<add> batch_size = x.shape[0]
<add> if batch_size == 0:
<add> raise ValueError('Received an empty batch. '
<add> 'Batches should contain '
<add> 'at least one item.')
<add>
<add> batch_logs = {'batch': steps_done, 'size': batch_size}
<add> callbacks._call_batch_hook('predict', 'begin', steps_done, batch_logs)
<add>
<ide> outs = model.predict_on_batch(x)
<ide> outs = to_list(outs)
<ide>
<ide> def predict_generator(model, generator,
<ide>
<ide> for i, out in enumerate(outs):
<ide> all_outs[i].append(out)
<add>
<add> batch_logs['outputs'] = outs
<add> callbacks._call_batch_hook('predict', 'end', steps_done, batch_logs)
<add>
<ide> steps_done += 1
<ide> if verbose == 1:
<ide> progbar.update(steps_done)
<del>
<add> callbacks._call_end_hook('predict')
<ide> finally:
<ide> if enqueuer is not None:
<ide> enqueuer.stop()
<ide><path>tests/keras/test_callbacks.py
<ide> from csv import reader
<ide> from csv import Sniffer
<ide> import shutil
<add>from collections import defaultdict
<ide> from keras import optimizers
<ide> from keras import initializers
<ide> from keras import callbacks
<ide> def get_data_callbacks(num_train=train_samples,
<ide> num_classes=num_classes)
<ide>
<ide>
<add>class Counter(callbacks.Callback):
<add> """Counts the number of times each callback method was run.
<add>
<add> # Arguments
<add> method_counts: dict, contains the counts of time each callback method was
<add> run.
<add> """
<add>
<add> def __init__(self):
<add> self.method_counts = defaultdict(int)
<add> methods_to_count = [
<add> 'on_batch_begin', 'on_batch_end', 'on_epoch_begin', 'on_epoch_end',
<add> 'on_train_batch_begin', 'on_train_batch_end',
<add> 'on_test_batch_begin', 'on_test_batch_end',
<add> 'on_predict_batch_begin', 'on_predict_batch_end',
<add> 'on_train_begin', 'on_train_end', 'on_predict_begin', 'on_predict_end',
<add> 'on_test_begin', 'on_test_end',
<add> ]
<add> for method_name in methods_to_count:
<add> setattr(self, method_name,
<add> self.wrap_with_counts(method_name, getattr(self, method_name)))
<add>
<add> def wrap_with_counts(self, method_name, method):
<add>
<add> def _call_and_count(*args, **kwargs):
<add> self.method_counts[method_name] += 1
<add> return method(*args, **kwargs)
<add>
<add> return _call_and_count
<add>
<add>
<add>class TestCallbackCounts(object):
<add>
<add> def _check_counts(self, counter, expected_counts):
<add> """Checks that the counts registered by `counter` are those expected."""
<add> for method_name, expected_count in expected_counts.items():
<add> count = counter.method_counts[method_name]
<add> assert count == expected_count, \
<add> 'For method {}: expected {}, got: {}'.format(
<add> method_name, expected_count, count)
<add>
<add> def _get_model(self):
<add> layers = [
<add> Dense(10, activation='relu', input_dim=input_dim),
<add> Dense(num_classes, activation='softmax')
<add> ]
<add> model = Sequential(layers=layers)
<add> model.compile(optimizer='adam', loss='binary_crossentropy')
<add> return model
<add>
<add> def test_callback_hooks_are_called_in_fit(self):
<add> np.random.seed(1337)
<add> (X_train, y_train), (X_test, y_test) = get_data_callbacks(num_train=10,
<add> num_test=4)
<add> y_train = np_utils.to_categorical(y_train)
<add> y_test = np_utils.to_categorical(y_test)
<add>
<add> model = self._get_model()
<add> counter = Counter()
<add> model.fit(X_train, y_train, validation_data=(X_test, y_test),
<add> batch_size=2, epochs=5, callbacks=[counter])
<add>
<add> self._check_counts(
<add> counter, {
<add> 'on_batch_begin': 25,
<add> 'on_batch_end': 25,
<add> 'on_epoch_begin': 5,
<add> 'on_epoch_end': 5,
<add> 'on_predict_batch_begin': 0,
<add> 'on_predict_batch_end': 0,
<add> 'on_predict_begin': 0,
<add> 'on_predict_end': 0,
<add> 'on_test_batch_begin': 10,
<add> 'on_test_batch_end': 10,
<add> 'on_test_begin': 5,
<add> 'on_test_end': 5,
<add> 'on_train_batch_begin': 25,
<add> 'on_train_batch_end': 25,
<add> 'on_train_begin': 1,
<add> 'on_train_end': 1,
<add> })
<add>
<add> def test_callback_hooks_are_called_in_evaluate(self):
<add> np.random.seed(1337)
<add> (_, _), (X_test, y_test) = get_data_callbacks(num_test=10)
<add>
<add> y_test = np_utils.to_categorical(y_test)
<add>
<add> model = self._get_model()
<add> counter = Counter()
<add> model.evaluate(X_test, y_test, batch_size=2, callbacks=[counter])
<add> self._check_counts(
<add> counter, {
<add> 'on_test_batch_begin': 5,
<add> 'on_test_batch_end': 5,
<add> 'on_test_begin': 1,
<add> 'on_test_end': 1,
<add> 'on_batch_begin': 0,
<add> 'on_batch_end': 0,
<add> 'on_epoch_begin': 0,
<add> 'on_epoch_end': 0,
<add> 'on_predict_batch_begin': 0,
<add> 'on_predict_batch_end': 0,
<add> 'on_predict_begin': 0,
<add> 'on_predict_end': 0,
<add> 'on_train_batch_begin': 0,
<add> 'on_train_batch_end': 0,
<add> 'on_train_begin': 0,
<add> 'on_train_end': 0,
<add> })
<add>
<add> def test_callback_hooks_are_called_in_predict(self):
<add> np.random.seed(1337)
<add> (_, _), (X_test, _) = get_data_callbacks(num_test=10)
<add>
<add> model = self._get_model()
<add> counter = Counter()
<add> model.predict(X_test, batch_size=2, callbacks=[counter])
<add> self._check_counts(
<add> counter, {
<add> 'on_predict_batch_begin': 5,
<add> 'on_predict_batch_end': 5,
<add> 'on_predict_begin': 1,
<add> 'on_predict_end': 1,
<add> 'on_batch_begin': 0,
<add> 'on_batch_end': 0,
<add> 'on_epoch_begin': 0,
<add> 'on_epoch_end': 0,
<add> 'on_test_batch_begin': 0,
<add> 'on_test_batch_end': 0,
<add> 'on_test_begin': 0,
<add> 'on_test_end': 0,
<add> 'on_train_batch_begin': 0,
<add> 'on_train_batch_end': 0,
<add> 'on_train_begin': 0,
<add> 'on_train_end': 0,
<add> })
<add>
<add> def test_callback_hooks_are_called_in_fit_generator(self):
<add> np.random.seed(1337)
<add> (X_train, y_train), (X_test, y_test) = get_data_callbacks(num_train=10,
<add> num_test=4)
<add> y_train = np_utils.to_categorical(y_train)
<add> y_test = np_utils.to_categorical(y_test)
<add> train_generator = data_generator(X_train, y_train, batch_size=2)
<add> validation_generator = data_generator(X_test, y_test, batch_size=2)
<add>
<add> model = self._get_model()
<add> counter = Counter()
<add> model.fit_generator(train_generator, steps_per_epoch=len(X_train) // 2,
<add> epochs=5, validation_data=validation_generator,
<add> validation_steps=len(X_test) // 2, callbacks=[counter])
<add>
<add> self._check_counts(
<add> counter, {
<add> 'on_batch_begin': 25,
<add> 'on_batch_end': 25,
<add> 'on_epoch_begin': 5,
<add> 'on_epoch_end': 5,
<add> 'on_predict_batch_begin': 0,
<add> 'on_predict_batch_end': 0,
<add> 'on_predict_begin': 0,
<add> 'on_predict_end': 0,
<add> 'on_test_batch_begin': 10,
<add> 'on_test_batch_end': 10,
<add> 'on_test_begin': 5,
<add> 'on_test_end': 5,
<add> 'on_train_batch_begin': 25,
<add> 'on_train_batch_end': 25,
<add> 'on_train_begin': 1,
<add> 'on_train_end': 1,
<add> })
<add>
<add> def test_callback_hooks_are_called_in_evaluate_generator(self):
<add> np.random.seed(1337)
<add> (_, _), (X_test, y_test) = get_data_callbacks(num_test=10)
<add> y_test = np_utils.to_categorical(y_test)
<add>
<add> model = self._get_model()
<add> counter = Counter()
<add> model.evaluate_generator(data_generator(X_test, y_test, batch_size=2),
<add> steps=len(X_test) // 2, callbacks=[counter])
<add> self._check_counts(
<add> counter, {
<add> 'on_test_batch_begin': 5,
<add> 'on_test_batch_end': 5,
<add> 'on_test_begin': 1,
<add> 'on_test_end': 1,
<add> 'on_batch_begin': 0,
<add> 'on_batch_end': 0,
<add> 'on_epoch_begin': 0,
<add> 'on_epoch_end': 0,
<add> 'on_predict_batch_begin': 0,
<add> 'on_predict_batch_end': 0,
<add> 'on_predict_begin': 0,
<add> 'on_predict_end': 0,
<add> 'on_train_batch_begin': 0,
<add> 'on_train_batch_end': 0,
<add> 'on_train_begin': 0,
<add> 'on_train_end': 0,
<add> })
<add>
<add> def test_callback_hooks_are_called_in_predict_generator(self):
<add> np.random.seed(1337)
<add> (_, _), (X_test, _) = get_data_callbacks(num_test=10)
<add>
<add> def data_generator(x, batch_size):
<add> x = to_list(x)
<add> max_batch_index = len(x[0]) // batch_size
<add> i = 0
<add> while 1:
<add> x_batch = [
<add> array[i * batch_size: (i + 1) * batch_size] for array in x]
<add> x_batch = unpack_singleton(x_batch)
<add>
<add> yield x_batch
<add> i += 1
<add> i = i % max_batch_index
<add>
<add> model = self._get_model()
<add> counter = Counter()
<add> model.predict_generator(data_generator(X_test, batch_size=2),
<add> steps=len(X_test) // 2, callbacks=[counter])
<add> self._check_counts(
<add> counter, {
<add> 'on_predict_batch_begin': 5,
<add> 'on_predict_batch_end': 5,
<add> 'on_predict_begin': 1,
<add> 'on_predict_end': 1,
<add> 'on_batch_begin': 0,
<add> 'on_batch_end': 0,
<add> 'on_epoch_begin': 0,
<add> 'on_epoch_end': 0,
<add> 'on_test_batch_begin': 0,
<add> 'on_test_batch_end': 0,
<add> 'on_test_begin': 0,
<add> 'on_test_end': 0,
<add> 'on_train_batch_begin': 0,
<add> 'on_train_batch_end': 0,
<add> 'on_train_begin': 0,
<add> 'on_train_end': 0,
<add> })
<add>
<add> def test_callback_list_methods(self):
<add> counter = Counter()
<add> callback_list = callbacks.CallbackList([counter])
<add>
<add> batch = 0
<add> callback_list.on_test_batch_begin(batch)
<add> callback_list.on_test_batch_end(batch)
<add> callback_list.on_predict_batch_begin(batch)
<add> callback_list.on_predict_batch_end(batch)
<add>
<add> self._check_counts(
<add> counter, {
<add> 'on_test_batch_begin': 1,
<add> 'on_test_batch_end': 1,
<add> 'on_predict_batch_begin': 1,
<add> 'on_predict_batch_end': 1,
<add> 'on_predict_begin': 0,
<add> 'on_predict_end': 0,
<add> 'on_batch_begin': 0,
<add> 'on_batch_end': 0,
<add> 'on_epoch_begin': 0,
<add> 'on_epoch_end': 0,
<add> 'on_test_begin': 0,
<add> 'on_test_end': 0,
<add> 'on_train_batch_begin': 0,
<add> 'on_train_batch_end': 0,
<add> 'on_train_begin': 0,
<add> 'on_train_end': 0,
<add> })
<add>
<add>
<ide> def test_TerminateOnNaN():
<ide> np.random.seed(1337)
<ide> (X_train, y_train), (X_test, y_test) = get_data_callbacks() | 5 |
Python | Python | remove last references to `imports85` | d041e56954bde8eadfdddd6c2a697601bc27affb | <ide><path>samples/cookbook/regression/automobile_data.py
<ide> # See the License for the specific language governing permissions and
<ide> # limitations under the License.
<ide> # ==============================================================================
<del>"""A dataset loader for imports85.data."""
<add>"""Utility functions for loading the automobile data set."""
<ide>
<ide> from __future__ import absolute_import
<ide> from __future__ import division
<ide> import collections
<ide>
<ide> import numpy as np
<add>import pandas as pd
<ide> import tensorflow as tf
<ide>
<del>try:
<del> import pandas as pd # pylint: disable=g-import-not-at-top
<del>except ImportError:
<del> pass
<del>
<del>
<ide> URL = "https://archive.ics.uci.edu/ml/machine-learning-databases/autos/imports-85.data"
<ide>
<ide> # Order is important for the csv-readers, so we use an OrderedDict here.
<ide>
<ide>
<ide> def raw_dataframe():
<del> """Load the imports85 data as a pd.DataFrame."""
<add> """Load the automobile data set as a pd.DataFrame."""
<ide> # Download and cache the data
<ide> path = tf.keras.utils.get_file(URL.split("/")[-1], URL)
<ide>
<del> # Load it into a pandas dataframe
<add> # Load it into a pandas DataFrame
<ide> df = pd.read_csv(path, names=COLUMN_TYPES.keys(),
<ide> dtype=COLUMN_TYPES, na_values="?")
<ide>
<ide> return df
<ide>
<ide>
<ide> def load_data(y_name="price", train_fraction=0.7, seed=None):
<del> """Get the imports85 data set.
<add> """Load the automobile data set and split it train/test and features/label.
<ide>
<ide> A description of the data is available at:
<ide> https://archive.ics.uci.edu/ml/datasets/automobile
<ide> def load_data(y_name="price", train_fraction=0.7, seed=None):
<ide>
<ide> Args:
<ide> y_name: the column to return as the label.
<del> train_fraction: the fraction of the dataset to use for training.
<add> train_fraction: the fraction of the data set to use for training.
<ide> seed: The random seed to use when shuffling the data. `None` generates a
<ide> unique shuffle every run.
<ide> Returns:
<ide> a pair of pairs where the first pair is the training data, and the second
<ide> is the test data:
<del> `(x_train, y_train), (x_test, y_test) = get_imports85_dataset(...)`
<add> `(x_train, y_train), (x_test, y_test) = load_data(...)`
<ide> `x` contains a pandas DataFrame of features, while `y` contains the label
<ide> array.
<ide> """
<ide> def load_data(y_name="price", train_fraction=0.7, seed=None):
<ide> x_train = data.sample(frac=train_fraction, random_state=seed)
<ide> x_test = data.drop(x_train.index)
<ide>
<del> # Extract the label from the features dataframe.
<add> # Extract the label from the features DataFrame.
<ide> y_train = x_train.pop(y_name)
<ide> y_test = x_test.pop(y_name)
<ide>
<ide> return (x_train, y_train), (x_test, y_test)
<ide>
<ide> def make_dataset(x, y=None):
<del> """Create a slice dataset from a pandas DataFrame and labels"""
<add> """Create a slice Dataset from a pandas DataFrame and labels"""
<ide> # TODO(markdaooust): simplify this after the 1.4 cut.
<ide> # Convert the DataFrame to a dict
<ide> x = dict(x) | 1 |
Java | Java | measure time to create reactinstancemanager | 6224ef5301d67266b28c77e5e46816f319122f38 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/ReactNativeHost.java
<ide>
<ide> import com.facebook.infer.annotation.Assertions;
<ide> import com.facebook.react.bridge.JavaScriptExecutorFactory;
<add>import com.facebook.react.bridge.ReactMarker;
<add>import com.facebook.react.bridge.ReactMarkerConstants;
<ide> import com.facebook.react.common.LifecycleState;
<ide> import com.facebook.react.devsupport.RedBoxHandler;
<ide> import com.facebook.react.uimanager.UIImplementationProvider;
<ide> protected ReactNativeHost(Application application) {
<ide> */
<ide> public ReactInstanceManager getReactInstanceManager() {
<ide> if (mReactInstanceManager == null) {
<add> ReactMarker.logMarker(ReactMarkerConstants.GET_REACT_INSTANCE_MANAGER_START);
<ide> mReactInstanceManager = createReactInstanceManager();
<add> ReactMarker.logMarker(ReactMarkerConstants.GET_REACT_INSTANCE_MANAGER_END);
<ide> }
<ide> return mReactInstanceManager;
<ide> }
<ide> public void clear() {
<ide> }
<ide>
<ide> protected ReactInstanceManager createReactInstanceManager() {
<add> ReactMarker.logMarker(ReactMarkerConstants.BUILD_REACT_INSTANCE_MANAGER_START);
<ide> ReactInstanceManagerBuilder builder = ReactInstanceManager.builder()
<ide> .setApplication(mApplication)
<ide> .setJSMainModulePath(getJSMainModuleName())
<ide> protected ReactInstanceManager createReactInstanceManager() {
<ide> } else {
<ide> builder.setBundleAssetName(Assertions.assertNotNull(getBundleAssetName()));
<ide> }
<del> return builder.build();
<add> ReactInstanceManager reactInstanceManager = builder.build();
<add> ReactMarker.logMarker(ReactMarkerConstants.BUILD_REACT_INSTANCE_MANAGER_END);
<add> return reactInstanceManager;
<ide> }
<ide>
<ide> /** | 1 |
Ruby | Ruby | explain method invertible_for [ci skip] | d4dad040c39495f30b097f7178d13c40c2e19847 | <ide><path>activerecord/lib/active_record/associations/association.rb
<ide> def inverse_reflection_for(record)
<ide> reflection.inverse_of
<ide> end
<ide>
<del> # Is this association invertible? Can be redefined by subclasses.
<add> # Returns true if inverse association on the given record needs to be set.
<add> # This method is redefined by subclasses.
<ide> def invertible_for?(record)
<ide> inverse_reflection_for(record)
<ide> end | 1 |
PHP | PHP | add comment to many to many | 4b43571517918f58d76a64476c7ddbdc3995f849 | <ide><path>src/Illuminate/Database/Eloquent/Model.php
<ide> public function morphMany($related, $name, $type = null, $id = null)
<ide> {
<ide> $instance = new $related;
<ide>
<add> // Here we will gather up the morph type and ID for the relationship so that we
<add> // can properly query the intermediate table of a relation. Finally, we will
<add> // get the table and create the relationship instances for the developers.
<ide> list($type, $id) = $this->getMorphs($name, $type, $id);
<ide>
<ide> $table = $instance->getTable(); | 1 |
Python | Python | parametrize pep3118 scalar tests | 53bb95d65c874e4d5f9d7735460a0d7a07024de5 | <ide><path>numpy/core/tests/test_scalarbuffer.py
<ide> (np.cdouble, 'Zd'),
<ide> (np.clongdouble, 'Zg'),
<ide> ]
<add>scalars_only, codes_only = zip(*scalars_and_codes)
<ide>
<ide>
<ide> @pytest.mark.skipif(sys.version_info.major < 3,
<ide> reason="Python 2 scalars lack a buffer interface")
<ide> class TestScalarPEP3118(object):
<ide>
<del> def test_scalar_match_array(self):
<del> for scalar, _ in scalars_and_codes:
<del> x = scalar()
<del> a = np.array([], dtype=np.dtype(scalar))
<del> mv_x = memoryview(x)
<del> mv_a = memoryview(a)
<del> assert_equal(mv_x.format, mv_a.format)
<add> @pytest.mark.parametrize('scalar', scalars_only, ids=codes_only)
<add> def test_scalar_match_array(self, scalar):
<add> x = scalar()
<add> a = np.array([], dtype=np.dtype(scalar))
<add> mv_x = memoryview(x)
<add> mv_a = memoryview(a)
<add> assert_equal(mv_x.format, mv_a.format)
<ide>
<del> def test_scalar_dim(self):
<del> for scalar, _ in scalars_and_codes:
<del> x = scalar()
<del> mv_x = memoryview(x)
<del> assert_equal(mv_x.itemsize, np.dtype(scalar).itemsize)
<del> assert_equal(mv_x.ndim, 0)
<del> assert_equal(mv_x.shape, ())
<del> assert_equal(mv_x.strides, ())
<del> assert_equal(mv_x.suboffsets, ())
<add> @pytest.mark.parametrize('scalar', scalars_only, ids=codes_only)
<add> def test_scalar_dim(self, scalar):
<add> x = scalar()
<add> mv_x = memoryview(x)
<add> assert_equal(mv_x.itemsize, np.dtype(scalar).itemsize)
<add> assert_equal(mv_x.ndim, 0)
<add> assert_equal(mv_x.shape, ())
<add> assert_equal(mv_x.strides, ())
<add> assert_equal(mv_x.suboffsets, ())
<ide>
<del> def test_scalar_known_code(self):
<del> for scalar, code in scalars_and_codes:
<del> x = scalar()
<del> mv_x = memoryview(x)
<del> assert_equal(mv_x.format, code)
<add> @pytest.mark.parametrize('scalar, code', scalars_and_codes, ids=codes_only)
<add> def test_scalar_known_code(self, scalar, code):
<add> x = scalar()
<add> mv_x = memoryview(x)
<add> assert_equal(mv_x.format, code)
<ide>
<ide> def test_void_scalar_structured_data(self):
<ide> dt = np.dtype([('name', np.unicode_, 16), ('grades', np.float64, (2,))]) | 1 |
Javascript | Javascript | touch instrumentation interface | 3fb11eed9a166ebab23b778662f54c449fcffad0 | <ide><path>packages/react-native-renderer/src/legacy-events/ResponderTouchHistoryStore.js
<ide> function printTouchBank(): string {
<ide> return printed;
<ide> }
<ide>
<add>let instrumentationCallback: ?(string, TouchEvent) => void;
<add>
<ide> const ResponderTouchHistoryStore = {
<add> /**
<add> * Registers a listener which can be used to instrument every touch event.
<add> */
<add> instrument(callback: (string, TouchEvent) => void): void {
<add> instrumentationCallback = callback;
<add> },
<add>
<ide> recordTouchTrack(topLevelType: string, nativeEvent: TouchEvent): void {
<add> if (instrumentationCallback != null) {
<add> instrumentationCallback(topLevelType, nativeEvent);
<add> }
<add>
<ide> if (isMoveish(topLevelType)) {
<ide> nativeEvent.changedTouches.forEach(recordTouchMove);
<ide> } else if (isStartish(topLevelType)) { | 1 |
Javascript | Javascript | add tests for default source map extensions | a2cc0313abfb6f960a1a955e08b1777d50037b4b | <ide><path>test/ConfigTestCases.test.js
<ide> describe("ConfigTestCases", () => {
<ide> });
<ide> let testConfig = {
<ide> findBundle: function(i, options) {
<add> const ext = path.extname(options.output.filename);
<ide> if (
<ide> fs.existsSync(
<del> path.join(options.output.path, "bundle" + i + ".js")
<add> path.join(options.output.path, "bundle" + i + ext)
<ide> )
<ide> ) {
<del> return "./bundle" + i + ".js";
<add> return "./bundle" + i + ext;
<ide> }
<ide> },
<ide> timeout: 30000
<ide><path>test/configCases/source-map/default-filename-extensions-css/index.js
<add>it("creates source maps for .css output files by default", function() {
<add> var fs = require("fs");
<add> var source = fs.readFileSync(__filename, "utf-8");
<add> var match = /sourceMappingURL\s*=\s*(.*)\*\//.exec(source);
<add> expect(match[1]).toBe("bundle0.css.map");
<add>});
<ide>\ No newline at end of file
<ide><path>test/configCases/source-map/default-filename-extensions-css/test.js
<add>var foo = {};
<add>
<add>module.exports = foo;
<ide>\ No newline at end of file
<ide><path>test/configCases/source-map/default-filename-extensions-css/webpack.config.js
<add>module.exports = {
<add> mode: "development",
<add> output: {
<add> filename: "bundle0.css"
<add> },
<add> node: {
<add> __dirname: false,
<add> __filename: false
<add> },
<add> devtool: "source-map"
<add>};
<add>
<ide><path>test/configCases/source-map/default-filename-extensions-js/index.js
<add>it("creates source maps for .js output files by default", function() {
<add> var fs = require("fs");
<add> var source = fs.readFileSync(__filename, "utf-8");
<add> var match = /sourceMappingURL\s*=\s*(.*)/.exec(source);
<add> expect(match[1]).toBe("bundle0.js.map");
<add>});
<ide>\ No newline at end of file
<ide><path>test/configCases/source-map/default-filename-extensions-js/test.js
<add>var foo = {};
<add>
<add>module.exports = foo;
<ide>\ No newline at end of file
<ide><path>test/configCases/source-map/default-filename-extensions-js/webpack.config.js
<add>module.exports = {
<add> mode: "development",
<add> output: {
<add> filename: "bundle0.js"
<add> },
<add> node: {
<add> __dirname: false,
<add> __filename: false
<add> },
<add> devtool: "source-map"
<add>};
<add>
<ide><path>test/configCases/source-map/default-filename-extensions-mjs/index.js
<add>it("creates source maps for .mjs output files by default", function() {
<add> var fs = require("fs");
<add> var source = fs.readFileSync(__filename, "utf-8");
<add> var match = /sourceMappingURL\s*=\s*(.*)/.exec(source);
<add> expect(match[1]).toBe("bundle0.mjs.map");
<add>});
<ide><path>test/configCases/source-map/default-filename-extensions-mjs/test.js
<add>var foo = {};
<add>
<add>module.exports = foo;
<ide>\ No newline at end of file
<ide><path>test/configCases/source-map/default-filename-extensions-mjs/webpack.config.js
<add>module.exports = {
<add> mode: "development",
<add> output: {
<add> filename: "bundle0.mjs"
<add> },
<add> node: {
<add> __dirname: false,
<add> __filename: false
<add> },
<add> devtool: "source-map"
<add>};
<add> | 10 |
Javascript | Javascript | add degreestoradians / radianstodegrees to math | b11e805560f6f10de27ee46c21505ca22fb31f0c | <ide><path>src/math/Math.js
<ide> THREE.Math = {
<ide>
<ide> }
<ide>
<add> degreesToRadians: function( degrees ) {
<add>
<add> return degrees * THREE.Math.__d2r;
<add>
<add> };
<add>
<add> radiansToDegrees: function( radians ) {
<add>
<add> return radians * THREE.Math.__r2d;
<add>
<add> };
<add>
<add>
<ide> };
<add>
<add>THREE.Math.__d2r = Math.PI / 180;
<add>THREE.Math.__r2d = 180 / Math.PI; | 1 |
Ruby | Ruby | use symbols for spec iteration | 675e80e9aedb104c9a1814d8c1450d8debb3147b | <ide><path>Library/Homebrew/formula_auditor.rb
<ide> def audit_homepage
<ide>
<ide> return unless DevelopmentTools.curl_handles_most_https_certificates?
<ide>
<del> use_homebrew_curl = false
<del> %w[Stable HEAD].each do |name|
<del> spec_name = name.downcase.to_sym
<del> next unless (spec = formula.send(spec_name))
<add> use_homebrew_curl = [:stable, :head].any? do |spec_name|
<add> next false unless (spec = formula.send(spec_name))
<ide>
<del> use_homebrew_curl = spec.using == :homebrew_curl
<del> break if use_homebrew_curl
<add> spec.using == :homebrew_curl
<ide> end
<ide>
<ide> if (http_content_problem = curl_check_http_content(homepage, | 1 |
PHP | PHP | fix example code | 9e9320976fe16412c3bda142775715ec2d6c131e | <ide><path>src/Routing/Router.php
<ide> public static function reload()
<ide> *
<ide> * ```
<ide> * Router::addUrlFilter(function ($params, $request) {
<del> * if (isset($request->params['lang']) && !isset($params['lang']) {
<add> * if (isset($request->params['lang']) && !isset($params['lang'])) {
<ide> * $params['lang'] = $request->params['lang'];
<ide> * }
<ide> * return $params; | 1 |
Ruby | Ruby | remove redundant dep for meson | f3e15955b3cc698b532b4cbd0c9cd92da49ab2b6 | <ide><path>Library/Homebrew/formula_creator.rb
<ide> class #{Formulary.class_s(name)} < Formula
<ide> <% end %>
<ide> sha256 "#{sha256}"
<ide> <% end %>
<add>
<ide> <% if mode == :cmake %>
<ide> depends_on "cmake" => :build
<ide> <% elsif mode == :meson %>
<ide> depends_on "meson" => :build
<ide> depends_on "ninja" => :build
<del> depends_on "python" => :build
<ide> <% elsif mode.nil? %>
<ide> # depends_on "cmake" => :build
<ide> <% end %> | 1 |
Java | Java | fix error messages to match commit #1a9556 | b651c10e83e6876bffa54d057ec15bc2f630c017 | <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/mvc/support/DefaultHandlerExceptionResolverTests.java
<ide> /*
<del> * Copyright 2002-2019 the original author or authors.
<add> * Copyright 2002-2021 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> public void handleMissingPathVariable() throws NoSuchMethodException {
<ide> assertThat(mav).as("No ModelAndView returned").isNotNull();
<ide> assertThat(mav.isEmpty()).as("No Empty ModelAndView returned").isTrue();
<ide> assertThat(response.getStatus()).as("Invalid status code").isEqualTo(500);
<del> assertThat(response.getErrorMessage()).isEqualTo("Missing URI template variable 'foo' for method parameter of type String");
<add> assertThat(response.getErrorMessage())
<add> .isEqualTo("Required URI template variable 'foo' for method parameter type String is not present");
<ide> }
<ide>
<ide> @Test
<ide> public void handleMissingServletRequestParameter() {
<ide> assertThat(mav).as("No ModelAndView returned").isNotNull();
<ide> assertThat(mav.isEmpty()).as("No Empty ModelAndView returned").isTrue();
<ide> assertThat(response.getStatus()).as("Invalid status code").isEqualTo(400);
<del> assertThat(response.getErrorMessage()).isEqualTo("Required bar parameter 'foo' is not present");
<add> assertThat(response.getErrorMessage()).isEqualTo(
<add> "Required request parameter 'foo' for method parameter type bar is not present");
<ide> }
<ide>
<ide> @Test | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.