| content_type (stringclasses, 8 values) | main_lang (stringclasses, 7 values) | message (stringlengths 1-50) | sha (stringlengths 40-40) | patch (stringlengths 52-962k) | file_count (int64, 1-300) |
|---|---|---|---|---|---|
| Javascript | Javascript | match the ordering of the gitrepository spec | e867b68eab9b691f91cecb401f320cf811458c1d |
<ide><path>spec/git-repository-async-spec.js
<ide> describe('GitRepositoryAsync', () => {
<ide> })
<ide> })
<ide>
<del> describe('.getPathStatus(path)', () => {
<del> let filePath
<del>
<del> beforeEach(() => {
<del> const workingDirectory = copyRepository()
<del> repo = GitRepositoryAsync.open(workingDirectory)
<del> filePath = path.join(workingDirectory, 'file.txt')
<del> })
<del>
<del> it('trigger a status-changed event when the new status differs from the last cached one', async () => {
<del> const statusHandler = jasmine.createSpy('statusHandler')
<del> repo.onDidChangeStatus(statusHandler)
<del> fs.writeFileSync(filePath, '')
<del>
<del> await repo.getPathStatus(filePath)
<del>
<del> expect(statusHandler.callCount).toBe(1)
<del> const status = Git.Status.STATUS.WT_MODIFIED
<del> expect(statusHandler.argsForCall[0][0]).toEqual({path: filePath, pathStatus: status})
<del> fs.writeFileSync(filePath, 'abc')
<del>
<del> await repo.getPathStatus(filePath)
<del> expect(statusHandler.callCount).toBe(1)
<del> })
<del> })
<del>
<ide> describe('.checkoutHeadForEditor(editor)', () => {
<ide> let filePath
<ide> let editor
<ide> describe('GitRepositoryAsync', () => {
<ide> })
<ide> })
<ide>
<add> describe('.getPathStatus(path)', () => {
<add> let filePath
<add>
<add> beforeEach(() => {
<add> const workingDirectory = copyRepository()
<add> repo = GitRepositoryAsync.open(workingDirectory)
<add> filePath = path.join(workingDirectory, 'file.txt')
<add> })
<add>
<add> it('trigger a status-changed event when the new status differs from the last cached one', async () => {
<add> const statusHandler = jasmine.createSpy('statusHandler')
<add> repo.onDidChangeStatus(statusHandler)
<add> fs.writeFileSync(filePath, '')
<add>
<add> await repo.getPathStatus(filePath)
<add>
<add> expect(statusHandler.callCount).toBe(1)
<add> const status = Git.Status.STATUS.WT_MODIFIED
<add> expect(statusHandler.argsForCall[0][0]).toEqual({path: filePath, pathStatus: status})
<add> fs.writeFileSync(filePath, 'abc')
<add>
<add> await repo.getPathStatus(filePath)
<add> expect(statusHandler.callCount).toBe(1)
<add> })
<add> })
<add>
<ide> describe('.getDirectoryStatus(path)', () => {
<ide> let directoryPath, filePath
<ide>
| 1 |
| Ruby | Ruby | add doctor check for xattr | caf7df5840d9507ff99fcf8385ffa1f42ada6e2d |
<ide><path>Library/Homebrew/cask/cmd/doctor.rb
<ide> def summary_header
<ide>
<ide> def run
<ide> check_software_versions
<add> check_xattr
<ide> check_quarantine_support
<ide> check_install_location
<ide> check_staging_location
<ide> def check_environment_variables
<ide> (locale_variables + environment_variables).sort.each(&method(:render_env_var))
<ide> end
<ide>
<add> def check_xattr
<add> ohai "xattr issues"
<add> result = system_command "/usr/bin/xattr"
<add>
<add> if result.status.success?
<add> puts none_string
<add> elsif result.stderr.match? "ImportError: No module named pkg_resources"
<add> result = system_command "/usr/bin/python", "--version"
<add>
<add> if result.stdout.match? "Python 2.7"
<add> add_error "Your Python installation has a broken version of setuptools."
<add> add_error "To fix, reinstall macOS or run 'sudo /usr/bin/python -m pip install -I setuptools'."
<add> else
<add> add_error "The system Python version is wrong."
<add> add_error "To fix, run 'defaults write com.apple.versioner.python Version 2.7'."
<add> end
<add> elsif result.stderr.match? "pkg_resources.DistributionNotFound"
<add> add_error "Your Python installation is unable to find xattr."
<add> else
<add> add_error "unknown xattr error: #{result.stderr.first}"
<add> end
<add> end
<add>
<ide> def check_quarantine_support
<ide> ohai "Gatekeeper support"
<ide>
| 1 |
| Python | Python | ensure order of project urls | 5c8110de25f08bf20e9fda6611403dc5c59ec849 |
<ide><path>setup.py
<ide> # -*- coding: utf-8 -*-
<ide> import io
<ide> import re
<add>from collections import OrderedDict
<add>
<ide> from setuptools import setup
<ide>
<ide> with io.open('README.rst', 'rt', encoding='utf8') as f:
<ide> name='Flask',
<ide> version=version,
<ide> url='https://www.palletsprojects.com/p/flask/',
<add> project_urls=OrderedDict((
<add> ('Documentation', 'http://flask.pocoo.org/docs/'),
<add> ('Code', 'https://github.com/pallets/pallets-sphinx-themes'),
<add> ('Issue tracker', 'https://github.com/pallets/pallets-sphinx-themes/issues'),
<add> )),
<ide> license='BSD',
<ide> author='Armin Ronacher',
<ide> author_email='armin.ronacher@active-4.com',
<ide> include_package_data=True,
<ide> zip_safe=False,
<ide> platforms='any',
<del> project_urls={
<del> 'Bug Tracker': 'https://github.com/pallets/flask/issues',
<del> 'Documentation': 'http://flask.pocoo.org/',
<del> 'Source Code': 'https://github.com/pallets/flask',
<del> },
<ide> install_requires=[
<ide> 'Werkzeug>=0.14',
<ide> 'Jinja2>=2.10',
| 1 |
| Ruby | Ruby | add strict mode, argument | 24f183dd12064b9dfd729fda58dee0938fa078f6 |
<ide><path>Library/Homebrew/cmd/audit.rb
<ide> def audit
<ide> ARGV.formulae
<ide> end
<ide>
<add> strict = ARGV.include? "--strict"
<add>
<ide> ff.each do |f|
<del> fa = FormulaAuditor.new f
<add> fa = FormulaAuditor.new(f, :strict => strict)
<ide> fa.audit
<ide>
<ide> unless fa.problems.empty?
<ide> class FormulaAuditor
<ide> swig
<ide> ]
<ide>
<del> def initialize(formula)
<add> def initialize(formula, options={})
<ide> @formula = formula
<add> @strict = !!options[:strict]
<ide> @problems = []
<ide> @text = FormulaText.new(formula.path)
<ide> @specs = %w{stable devel head}.map { |s| formula.send(s) }.compact
| 1 |
| Text | Text | clarify section titles [ci skip] | 12c7f8042d5945450d05164a3f333b9a5bea74e6 |
<ide><path>guides/source/routing.md
<ide> NOTE: Because the router uses the HTTP verb and URL to match inbound requests, f
<ide>
<ide> NOTE: Rails routes are matched in the order they are specified, so if you have a `resources :photos` above a `get 'photos/poll'` the `show` action's route for the `resources` line will be matched before the `get` line. To fix this, move the `get` line **above** the `resources` line so that it is matched first.
<ide>
<del>### Paths and URLs
<add>### Path and URL Helpers
<ide>
<ide> Creating a resourceful route will also expose a number of helpers to the controllers in your application. In the case of `resources :photos`:
<ide>
<ide> Inspecting and Testing Routes
<ide>
<ide> Rails offers facilities for inspecting and testing your routes.
<ide>
<del>### Seeing Existing Routes
<add>### Listing Existing Routes
<ide>
<ide> To get a complete list of the available routes in your application, visit `http://localhost:3000/rails/info/routes` in your browser while your server is running in the **development** environment. You can also execute the `rake routes` command in your terminal to produce the same output.
<ide>
| 1 |
| Ruby | Ruby | remove `setuptools` audit | bc67c0552876407980c3467f402a8239c90cc3f3 |
<ide><path>Library/Homebrew/rubocops/text.rb
<ide> def audit_formula(node, _class_node, _parent_class_node, body_node)
<ide> problem "Formulae in homebrew/core should use OpenBLAS as the default serial linear algebra library."
<ide> end
<ide>
<del> if method_called_ever?(body_node, :virtualenv_create) ||
<del> method_called_ever?(body_node, :virtualenv_install_with_resources)
<del> find_method_with_args(body_node, :resource, "setuptools") do
<del> problem "Formulae using virtualenvs do not need a `setuptools` resource."
<del> end
<del> end
<del>
<ide> unless method_called_ever?(body_node, :go_resource)
<ide> # processed_source.ast is passed instead of body_node because `require` would be outside body_node
<ide> find_method_with_args(processed_source.ast, :require, "language/go") do
<ide><path>Library/Homebrew/test/rubocops/text_spec.rb
<ide> def install
<ide> RUBY
<ide> end
<ide>
<del> it "reports an offense if formula uses virtualenv and also `setuptools` resource" do
<del> expect_offense(<<~RUBY)
<del> class Foo < Formula
<del> url "https://brew.sh/foo-1.0.tgz"
<del> homepage "https://brew.sh"
<del>
<del> resource "setuptools" do
<del> ^^^^^^^^^^^^^^^^^^^^^ Formulae using virtualenvs do not need a `setuptools` resource.
<del> url "https://foo.com/foo.tar.gz"
<del> sha256 "db0904a28253cfe53e7dedc765c71596f3c53bb8a866ae50123320ec1a7b73fd"
<del> end
<del>
<del> def install
<del> virtualenv_create(libexec)
<del> end
<del> end
<del> RUBY
<del> end
<del>
<ide> it "reports an offense if `Formula.factory(name)` is present" do
<ide> expect_offense(<<~RUBY)
<ide> class Foo < Formula
| 2 |
| Ruby | Ruby | add ability to checkout a branch or tag | ef02031d7cd1ef048c53cf940b208a6070451b8d |
<ide><path>Library/Homebrew/download_strategy.rb
<ide> # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
<ide> #
<ide> class AbstractDownloadStrategy
<del> def initialize url, name, version
<add> def initialize url, name, version, specs
<ide> @url=url
<add> case specs
<add> when Hash
<add> @spec = specs.keys.first # only use first spec
<add> @ref = specs.values.first
<add> else
<add> spec = nil
<add> end
<ide> @unique_token="#{name}-#{version}" unless name.to_s.empty? or name == '__UNKNOWN__'
<ide> end
<ide> end
<ide> def fetch
<ide> end
<ide> end
<ide> def stage
<del> dst=Dir.getwd
<add> dst = Dir.getwd
<ide> Dir.chdir @clone do
<add> if @spec and @ref
<add> ohai "Checking out #{@spec} #{@ref}"
<add> case @spec
<add> when :branch
<add> safe_system 'git', 'checkout', '-b', @ref, "origin/#{@ref}"
<add> when :tag
<add> safe_system 'git', 'checkout', @ref
<add> end
<add> end
<ide> # http://stackoverflow.com/questions/160608/how-to-do-a-git-export-like-svn-export
<ide> safe_system 'git', 'checkout-index', '-af', "--prefix=#{dst}/"
<ide> end
<ide> def fetch
<ide> def stage
<ide> dst=Dir.getwd
<ide> Dir.chdir @clone do
<del> safe_system 'hg', 'archive', '-y', '-t', 'files', dst
<add> if @spec and @ref
<add> ohai "Checking out #{@spec} #{@ref}"
<add> Dir.chdir @clone do
<add> safe_system 'hg', 'archive', '-y', '-r', @ref, '-t', 'files', dst
<add> end
<add> else
<add> safe_system 'hg', 'archive', '-y', '-t', 'files', dst
<add> end
<ide> end
<ide> end
<ide> end
<ide><path>Library/Homebrew/formula.rb
<ide> class Formula
<ide> def initialize name='__UNKNOWN__'
<ide> set_instance_variable 'url'
<ide> set_instance_variable 'head'
<add> set_instance_variable 'specs'
<ide>
<ide> if @head and (not @url or ARGV.flag? '--HEAD')
<ide> @url=@head
<ide> def path
<ide> self.class.path name
<ide> end
<ide>
<del> attr_reader :url, :version, :homepage, :name
<add> attr_reader :url, :version, :homepage, :name, :specs
<ide>
<ide> def bin; prefix+'bin' end
<ide> def sbin; prefix+'sbin' end
<ide> def verify_download_integrity fn
<ide> end
<ide>
<ide> def stage
<del> ds=download_strategy.new url, name, version
<add> ds=download_strategy.new url, name, version, specs
<ide> HOMEBREW_CACHE.mkpath
<ide> dl=ds.fetch
<ide> verify_download_integrity dl if dl.kind_of? Pathname
<ide> def #{attr}(val=nil)
<ide> end
<ide> end
<ide>
<del> attr_rw :url, :version, :homepage, :head, :deps, *CHECKSUM_TYPES
<add> attr_rw :url, :version, :homepage, :specs, :deps, *CHECKSUM_TYPES
<add>
<add> def head val=nil, specs=nil
<add> if specs
<add> @specs = specs
<add> end
<add> val.nil? ? @head : @head = val
<add> end
<ide>
<ide> def depends_on name, *args
<ide> @deps ||= []
| 2 |
| PHP | PHP | fix non-existing property | ed70b724bd1d10230465bf5c8ae5453f5be40949 |
<ide><path>types/Database/Eloquent/Collection.php
<ide> assertType('User', $user);
<ide> assertType('int', $int);
<ide>
<del> return $user->id;
<add> return $user->primaryKey;
<ide> }));
<ide> assertType('Illuminate\Database\Eloquent\Collection<int, User>', $collection->unique('string'));
<ide>
<ide><path>types/Support/Collection.php
<ide> assertType('User', $user);
<ide> assertType('int', $int);
<ide>
<del> return $user->id;
<add> return $user->primaryKey;
<ide> }));
<ide> assertType('Illuminate\Support\Collection<string, string>', $collection->make(['string' => 'string'])->unique(function ($stringA, $stringB) {
<ide> assertType('string', $stringA);
<ide> assertType('User', $user);
<ide> assertType('int', $int);
<ide>
<del> return $user->id;
<add> return $user->primaryKey;
<ide> }));
<ide>
<ide> assertType('Illuminate\Support\Collection<int, User>', $collection->values());
<ide><path>types/Support/LazyCollection.php
<ide> assertType('User', $user);
<ide> assertType('int', $int);
<ide>
<del> return $user->id;
<add> return $user->primaryKey;
<ide> }));
<ide> assertType('Illuminate\Support\LazyCollection<string, string>', $collection->make(['string' => 'string'])->unique(function ($stringA, $stringB) {
<ide> assertType('string', $stringA);
<ide> assertType('User', $user);
<ide> assertType('int', $int);
<ide>
<del> return $user->id;
<add> return $user->primaryKey;
<ide> }));
<ide>
<ide> assertType('Illuminate\Support\LazyCollection<int, User>', $collection->values());
| 3 |
| Python | Python | add registry function for reading jsonl | 43fc7a316d415a0e5ef9fecc02112502928c9fd3 |
<ide><path>spacy/training/corpus.py
<ide> def create_jsonl_reader(
<ide> return JsonlTexts(path, min_length=min_length, max_length=max_length, limit=limit)
<ide>
<ide>
<add>@util.registry.readers("srsly.read_json.v1")
<add>def _read_json(loc: Path):
<add> return srsly.read_json(loc)
<add>
<add>
<ide> def walk_corpus(path: Union[str, Path], file_type) -> List[Path]:
<ide> path = util.ensure_path(path)
<ide> if not path.is_dir() and path.parts[-1].endswith(file_type):
| 1 |
| Ruby | Ruby | convert gpg2requirement test to spec | 9fc14e663bc52b5e32a4c2d07ed6114b99fcbf54 |
<ide><path>Library/Homebrew/test/gpg2_requirement_spec.rb
<add>require "requirements/gpg2_requirement"
<add>require "fileutils"
<add>
<add>describe GPG2Requirement do
<add> let(:dir) { @dir = Pathname.new(Dir.mktmpdir) }
<add>
<add> after(:each) do
<add> FileUtils.rm_rf dir unless @dir.nil?
<add> end
<add>
<add> describe "#satisfied?" do
<add> it "returns true if GPG2 is installed" do
<add> ENV["PATH"] = dir/"bin"
<add> (dir/"bin/gpg").write <<-EOS.undent
<add> #!/bin/bash
<add> echo 2.0.30
<add> EOS
<add> FileUtils.chmod 0755, dir/"bin/gpg"
<add>
<add> expect(subject).to be_satisfied
<add> end
<add> end
<add>end
<ide><path>Library/Homebrew/test/gpg2_requirement_test.rb
<del>require "testing_env"
<del>require "requirements/gpg2_requirement"
<del>require "fileutils"
<del>
<del>class GPG2RequirementTests < Homebrew::TestCase
<del> def setup
<del> super
<del> @dir = Pathname.new(mktmpdir)
<del> (@dir/"bin/gpg").write <<-EOS.undent
<del> #!/bin/bash
<del> echo 2.0.30
<del> EOS
<del> FileUtils.chmod 0755, @dir/"bin/gpg"
<del> end
<del>
<del> def teardown
<del> FileUtils.rm_rf @dir
<del> super
<del> end
<del>
<del> def test_satisfied
<del> ENV["PATH"] = @dir/"bin"
<del> assert_predicate GPG2Requirement.new, :satisfied?
<del> end
<del>end
| 2 |
| PHP | PHP | keep name as well | 71fb17e0923b11f5c7f4007c22ac574ffd19e3ca |
<ide><path>lib/Cake/Error/ExceptionRenderer.php
<ide> protected function _cakeError(CakeException $error) {
<ide> $this->controller->response->statusCode($code);
<ide> $this->controller->set(array(
<ide> 'code' => $code,
<del> 'url' => h($url),
<add> 'mame' => h($error->getMessage()),
<ide> 'message' => h($error->getMessage()),
<add> 'url' => h($url),
<ide> 'error' => $error,
<del> '_serialize' => array('code', 'url', 'message')
<add> '_serialize' => array('code', 'name', 'message', 'url')
<ide> ));
<ide> $this->controller->set($error->getAttributes());
<ide> $this->_outputMessage($this->template);
<ide> public function error400($error) {
<ide> $url = $this->controller->request->here();
<ide> $this->controller->response->statusCode($error->getCode());
<ide> $this->controller->set(array(
<del> 'url' => h($url),
<add> 'name' => h($message),
<ide> 'message' => h($message),
<add> 'url' => h($url),
<ide> 'error' => $error,
<del> '_serialize' => array('url', 'message')
<add> '_serialize' => array('name', 'message', 'url')
<ide> ));
<ide> $this->_outputMessage('error400');
<ide> }
<ide> public function error500($error) {
<ide> $code = ($error->getCode() > 500 && $error->getCode() < 506) ? $error->getCode() : 500;
<ide> $this->controller->response->statusCode($code);
<ide> $this->controller->set(array(
<del> 'url' => h($url),
<add> 'name' => h($message),
<ide> 'message' => h($message),
<add> 'url' => h($url),
<ide> 'error' => $error,
<del> '_serialize' => array('url', 'message')
<add> '_serialize' => array('name', 'message', 'url')
<ide> ));
<ide> $this->_outputMessage('error500');
<ide> }
<ide> public function pdoError(PDOException $error) {
<ide> $this->controller->response->statusCode($code);
<ide> $this->controller->set(array(
<ide> 'code' => $code,
<del> 'url' => h($url),
<add> 'name' => h($error->getMessage()),
<ide> 'message' => h($error->getMessage()),
<add> 'url' => h($url),
<ide> 'error' => $error,
<del> '_serialize' => array('code', 'url', 'message', 'error')
<add> '_serialize' => array('code', 'name', 'message', 'url', 'error')
<ide> ));
<ide> $this->_outputMessage($this->template);
<ide> }
| 1 |
| Text | Text | fix code indentation in serializers.md | b3aa512d35b7f3e432bb41bf9de9ac7db7a1ed4c |
<ide><path>docs/api-guide/serializers.md
<ide> You may wish to specify multiple fields as write-only. Instead of adding each f
<ide> fields = ('email', 'username', 'password')
<ide> write_only_fields = ('password',) # Note: Password field is write-only
<ide>
<del> def restore_object(self, attrs, instance=None):
<del> """
<del> Instantiate a new User instance.
<del> """
<del> assert instance is None, 'Cannot update users with CreateUserSerializer'
<del> user = User(email=attrs['email'], username=attrs['username'])
<del> user.set_password(attrs['password'])
<del> return user
<add> def restore_object(self, attrs, instance=None):
<add> """
<add> Instantiate a new User instance.
<add> """
<add> assert instance is None, 'Cannot update users with CreateUserSerializer'
<add> user = User(email=attrs['email'], username=attrs['username'])
<add> user.set_password(attrs['password'])
<add> return user
<ide>
<ide> ## Specifying fields explicitly
<ide>
| 1 |
| Python | Python | add missing abstract method | 12ea236fc8c1fbc38a848f369e613f0625ccf956 |
<ide><path>keras/preprocessing/image.py
<ide> def fit(self, x,
<ide>
<ide>
<ide> class Iterator(Sequence):
<del> """Abstract base class for image data iterators.
<add> """Base class for image data iterators.
<add>
<add> Every `Iterator` must implement the `_get_batches_of_transformed_samples`
<add> method.
<ide>
<ide> # Arguments
<ide> n: Integer, total number of samples in the dataset to loop over.
<ide> def __iter__(self):
<ide> def __next__(self, *args, **kwargs):
<ide> return self.next(*args, **kwargs)
<ide>
<add> def _get_batches_of_transformed_samples(self, index_array):
<add> """Gets a batch of transformed samples.
<add>
<add> # Arguments
<add> index_array: array of sample indices to include in batch.
<add>
<add> # Returns
<add> A batch of transformed samples.
<add> """
<add> raise NotImplementedError
<add>
<ide>
<ide> class NumpyArrayIterator(Iterator):
<ide> """Iterator yielding data from a Numpy array.
| 1 |
| Javascript | Javascript | simplify event dispatching | 216026418cf47787f5393e4db734281ae9b6c5c8 |
<ide><path>src/renderers/dom/client/__tests__/ReactBrowserEventEmitter-test.js
<ide> describe('ReactBrowserEventEmitter', function() {
<ide> expect(idCallOrder[0]).toBe(getID(CHILD));
<ide> });
<ide>
<del> it('should stopPropagation if false is returned, but warn', function() {
<add> it('should not stopPropagation if false is returned', function() {
<ide> ReactBrowserEventEmitter.putListener(
<ide> getID(CHILD),
<ide> ON_CLICK_KEY,
<ide> describe('ReactBrowserEventEmitter', function() {
<ide> );
<ide> spyOn(console, 'error');
<ide> ReactTestUtils.Simulate.click(CHILD);
<del> expect(idCallOrder.length).toBe(1);
<add> expect(idCallOrder.length).toBe(3);
<ide> expect(idCallOrder[0]).toBe(getID(CHILD));
<del> expect(console.error.calls.length).toEqual(1);
<del> expect(console.error.calls[0].args[0]).toBe(
<del> 'Warning: Returning `false` from an event handler is deprecated and ' +
<del> 'will be ignored in a future release. Instead, manually call ' +
<del> 'e.stopPropagation() or e.preventDefault(), as appropriate.'
<del> );
<add> expect(idCallOrder[1]).toBe(getID(PARENT));
<add> expect(idCallOrder[2]).toBe(getID(GRANDPARENT));
<add> expect(console.error.calls.length).toEqual(0);
<ide> });
<ide>
<ide> /**
<ide><path>src/renderers/dom/client/eventPlugins/SimpleEventPlugin.js
<ide>
<ide> var EventConstants = require('EventConstants');
<ide> var EventListener = require('EventListener');
<del>var EventPluginUtils = require('EventPluginUtils');
<ide> var EventPropagators = require('EventPropagators');
<ide> var ReactMount = require('ReactMount');
<ide> var SyntheticClipboardEvent = require('SyntheticClipboardEvent');
<ide> var emptyFunction = require('emptyFunction');
<ide> var getEventCharCode = require('getEventCharCode');
<ide> var invariant = require('invariant');
<ide> var keyOf = require('keyOf');
<del>var warning = require('warning');
<ide>
<ide> var topLevelTypes = EventConstants.topLevelTypes;
<ide>
<ide> var SimpleEventPlugin = {
<ide>
<ide> eventTypes: eventTypes,
<ide>
<del> /**
<del> * Same as the default implementation, except cancels the event when return
<del> * value is false. This behavior will be disabled in a future release.
<del> *
<del> * @param {object} event Event to be dispatched.
<del> * @param {function} listener Application-level callback.
<del> * @param {string} domID DOM ID to pass to the callback.
<del> */
<del> executeDispatch: function(event, listener, domID) {
<del> var returnValue = EventPluginUtils.executeDispatch(event, listener, domID);
<del>
<del> warning(
<del> typeof returnValue !== 'boolean',
<del> 'Returning `false` from an event handler is deprecated and will be ' +
<del> 'ignored in a future release. Instead, manually call ' +
<del> 'e.stopPropagation() or e.preventDefault(), as appropriate.'
<del> );
<del>
<del> if (returnValue === false) {
<del> event.stopPropagation();
<del> event.preventDefault();
<del> }
<del> },
<del>
<ide> /**
<ide> * @param {string} topLevelType Record from `EventConstants`.
<ide> * @param {DOMEventTarget} topLevelTarget The listening component root node.
<ide><path>src/renderers/shared/event/EventPluginHub.js
<ide> var eventQueue = null;
<ide> */
<ide> var executeDispatchesAndRelease = function(event) {
<ide> if (event) {
<del> var executeDispatch = EventPluginUtils.executeDispatch;
<del> // Plugins can provide custom behavior when dispatching events.
<del> var PluginModule = EventPluginRegistry.getPluginModuleForEvent(event);
<del> if (PluginModule && PluginModule.executeDispatch) {
<del> executeDispatch = PluginModule.executeDispatch;
<del> }
<del> EventPluginUtils.executeDispatchesInOrder(event, executeDispatch);
<add> EventPluginUtils.executeDispatchesInOrder(event);
<ide>
<ide> if (!event.isPersistent()) {
<ide> event.constructor.release(event);
<ide><path>src/renderers/shared/event/EventPluginUtils.js
<ide> if (__DEV__) {
<ide> }
<ide>
<ide> /**
<del> * Invokes `cb(event, listener, id)`. Avoids using call if no scope is
<del> * provided. The `(listener,id)` pair effectively forms the "dispatch" but are
<del> * kept separate to conserve memory.
<add> * Dispatch the event to the listener.
<add> * @param {SyntheticEvent} event SyntheticEvent to handle
<add> * @param {function} listener Application-level callback
<add> * @param {string} domID DOM id to pass to the callback.
<add> */
<add>function executeDispatch(event, listener, domID) {
<add> var type = event.type || 'unknown-event';
<add> event.currentTarget = injection.Mount.getNode(domID);
<add> ReactErrorUtils.invokeGuardedCallback(type, listener, event, domID);
<add> event.currentTarget = null;
<add>}
<add>
<add>/**
<add> * Standard/simple iteration through an event's collected dispatches.
<ide> */
<del>function forEachEventDispatch(event, cb) {
<add>function executeDispatchesInOrder(event) {
<ide> var dispatchListeners = event._dispatchListeners;
<ide> var dispatchIDs = event._dispatchIDs;
<ide> if (__DEV__) {
<ide> function forEachEventDispatch(event, cb) {
<ide> break;
<ide> }
<ide> // Listeners and IDs are two parallel arrays that are always in sync.
<del> cb(event, dispatchListeners[i], dispatchIDs[i]);
<add> executeDispatch(event, dispatchListeners[i], dispatchIDs[i]);
<ide> }
<ide> } else if (dispatchListeners) {
<del> cb(event, dispatchListeners, dispatchIDs);
<add> executeDispatch(event, dispatchListeners, dispatchIDs);
<ide> }
<del>}
<del>
<del>/**
<del> * Default implementation of PluginModule.executeDispatch().
<del> * @param {SyntheticEvent} event SyntheticEvent to handle
<del> * @param {function} listener Application-level callback
<del> * @param {string} domID DOM id to pass to the callback.
<del> */
<del>function executeDispatch(event, listener, domID) {
<del> event.currentTarget = injection.Mount.getNode(domID);
<del> var type = event.type || 'unknown-event';
<del> var returnValue =
<del> ReactErrorUtils.invokeGuardedCallback(type, listener, event, domID);
<del> event.currentTarget = null;
<del> return returnValue;
<del>}
<del>
<del>/**
<del> * Standard/simple iteration through an event's collected dispatches.
<del> */
<del>function executeDispatchesInOrder(event, cb) {
<del> forEachEventDispatch(event, cb);
<ide> event._dispatchListeners = null;
<ide> event._dispatchIDs = null;
<ide> }
<ide> var EventPluginUtils = {
<ide> isStartish: isStartish,
<ide>
<ide> executeDirectDispatch: executeDirectDispatch,
<del> executeDispatch: executeDispatch,
<ide> executeDispatchesInOrder: executeDispatchesInOrder,
<ide> executeDispatchesInOrderStopAtTrue: executeDispatchesInOrderStopAtTrue,
<ide> hasDispatches: hasDispatches,
| 4 |
| Mixed | Javascript | bind keystroke `ctrl`+`6` to redo | a0668572c7f66c21946074625648fec1a42c060d |
<ide><path>doc/api/readline.md
<ide> const { createInterface } = require('readline');
<ide> <tr>
<ide> <td><kbd>Ctrl</kbd>+<kbd>-</kbd></td>
<ide> <td>Undo previous change</td>
<del> <td>Any keystroke emits key code <code>0x1F</code> would do this action.</td>
<add> <td>Any keystroke that emits key code <code>0x1F</code> will do this action.
<add> In many terminals, for example <code>xterm</code>,
<add> this is bound to <kbd>Ctrl</kbd>+<kbd>-</kbd>.</td>
<add> <td></td>
<add> </tr>
<add> <tr>
<add> <td><kbd>Ctrl</kbd>+<kbd>6</kbd></td>
<add> <td>Redo previous change</td>
<add> <td>Many terminals don't have a default redo keystroke.
<add> We choose key code <code>0x1E</code> to perform redo.
<add> In <code>xterm</code>, it is bound to <kbd>Ctrl</kbd>+<kbd>6</kbd>
<add> by default.</td>
<ide> <td></td>
<ide> </tr>
<ide> <tr>
<ide><path>lib/internal/readline/interface.js
<ide> class Interface extends InterfaceConstructor {
<ide> [kUndo]() {
<ide> if (this[kUndoStack].length <= 0) return;
<ide>
<del> const entry = this[kUndoStack].pop();
<add> ArrayPrototypePush(
<add> this[kRedoStack],
<add> { text: this.line, cursor: this.cursor },
<add> );
<ide>
<add> const entry = ArrayPrototypePop(this[kUndoStack]);
<ide> this.line = entry.text;
<ide> this.cursor = entry.cursor;
<ide>
<del> ArrayPrototypePush(this[kRedoStack], entry);
<ide> this[kRefreshLine]();
<ide> }
<ide>
<ide> [kRedo]() {
<ide> if (this[kRedoStack].length <= 0) return;
<ide>
<del> const entry = this[kRedoStack].pop();
<add> ArrayPrototypePush(
<add> this[kUndoStack],
<add> { text: this.line, cursor: this.cursor },
<add> );
<ide>
<add> const entry = ArrayPrototypePop(this[kRedoStack]);
<ide> this.line = entry.text;
<ide> this.cursor = entry.cursor;
<ide>
<del> ArrayPrototypePush(this[kUndoStack], entry);
<ide> this[kRefreshLine]();
<ide> }
<ide>
<ide> class Interface extends InterfaceConstructor {
<ide> }
<ide> }
<ide>
<del> // Undo
<del> if (typeof key.sequence === 'string' &&
<del> StringPrototypeCodePointAt(key.sequence, 0) === 0x1f) {
<del> this[kUndo]();
<del> return;
<add> // Undo & Redo
<add> if (typeof key.sequence === 'string') {
<add> switch (StringPrototypeCodePointAt(key.sequence, 0)) {
<add> case 0x1f:
<add> this[kUndo]();
<add> return;
<add> case 0x1e:
<add> this[kRedo]();
<add> return;
<add> default:
<add> break;
<add> }
<ide> }
<ide>
<ide> // Ignore escape key, fixes
<ide><path>test/parallel/test-readline-interface.js
<ide> function assertCursorRowsAndCols(rli, rows, cols) {
<ide> rli.close();
<ide> }
<ide>
<del>// Undo
<add>// Undo & Redo
<ide> {
<ide> const [rli, fi] = getInterface({ terminal: true, prompt: '' });
<ide> fi.emit('data', 'the quick brown fox');
<ide> assertCursorRowsAndCols(rli, 0, 19);
<ide>
<del> // Delete right line from the 5th char
<add> // Delete the last eight chars
<ide> fi.emit('keypress', '.', { ctrl: true, shift: false, name: 'b' });
<ide> fi.emit('keypress', '.', { ctrl: true, shift: false, name: 'b' });
<ide> fi.emit('keypress', '.', { ctrl: true, shift: false, name: 'b' });
<ide> fi.emit('keypress', '.', { ctrl: true, shift: false, name: 'b' });
<ide> fi.emit('keypress', ',', { ctrl: true, shift: false, name: 'k' });
<del> fi.emit('keypress', ',', { ctrl: true, shift: false, name: 'u' });
<del> assertCursorRowsAndCols(rli, 0, 0);
<add>
<add> fi.emit('keypress', '.', { ctrl: true, shift: false, name: 'b' });
<add> fi.emit('keypress', '.', { ctrl: true, shift: false, name: 'b' });
<add> fi.emit('keypress', '.', { ctrl: true, shift: false, name: 'b' });
<add> fi.emit('keypress', '.', { ctrl: true, shift: false, name: 'b' });
<add> fi.emit('keypress', ',', { ctrl: true, shift: false, name: 'k' });
<add>
<add> assertCursorRowsAndCols(rli, 0, 11);
<add> // Perform undo twice
<ide> fi.emit('keypress', ',', { sequence: '\x1F' });
<ide> assert.strictEqual(rli.line, 'the quick brown');
<ide> fi.emit('keypress', ',', { sequence: '\x1F' });
<ide> assert.strictEqual(rli.line, 'the quick brown fox');
<add> // Perform redo twice
<add> fi.emit('keypress', ',', { sequence: '\x1E' });
<add> assert.strictEqual(rli.line, 'the quick brown');
<add> fi.emit('keypress', ',', { sequence: '\x1E' });
<add> assert.strictEqual(rli.line, 'the quick b');
<ide> fi.emit('data', '\n');
<ide> rli.close();
<ide> }
| 3 |
| Text | Text | fix broken link on animations page | 7ee8da9c7f5b6d48115c5b4afa30344ce9399e15 |
<ide><path>docs/configuration/animations.md
<ide> var chart = new Chart(ctx, {
<ide> });
<ide> ```
<ide>
<del>Another example usage of these callbacks can be found on [Github](https://github.com/chartjs/Chart.js/blob/master/samples/animation/progress-bar.html): this sample displays a progress bar showing how far along the animation is.
<add>Another example usage of these callbacks can be found on [Github](https://github.com/chartjs/Chart.js/blob/master/samples/advanced/progress-bar.html): this sample displays a progress bar showing how far along the animation is.
| 1 |
| Text | Text | address more comments | cc620dbecb25efbc2e664c9dde9fc5f02b78a2fa |
<ide><path>docs/Maintainer-Guidelines.md
<ide> definitely not a beginner’s guide.
<ide>
<ide> Maybe you were looking for the [Formula Cookbook](Formula-Cookbook.md)?
<ide>
<del>This document is a work in progress. If you wish to change or discuss any of the below: open a PR to suggest a change.
<add>This document is current practice. If you wish to change or discuss any of the below: open a PR to suggest a change.
<ide>
<ide> ## Quick checklist
<ide>
<ide> of modification that is not whitespace in it. But be careful about
<ide> making changes to inline patches—make sure they still apply.
<ide>
<ide> ### Adding or updating formulae
<del>Only one maintainer is necessary to approve and merge the addition of a new or updated formula which passes CI. However, if the formula addition or update is controversial the maintainer who adds it will be expected to fix issues that arise with it in future.
<add>Any one maintainer is necessary to approve and merge the addition of a new or updated formula which passes CI. However, if the formula addition or update proves controversial the maintainer who adds it will be expected to answer requests and fix problems that arise with it in future.
<ide>
<ide> ### Removing formulae
<ide> Formulae that:
<ide>
<ide> - work on at least 2/3 of our supported macOS versions in the default Homebrew prefix
<ide> - do not require patches rejected by upstream to work
<del>- do not have known security vulnerabilities/CVEs for the version we package
<add>- do not have known security vulnerabilities or CVEs for the version we package
<ide> - are shown to be still installed by users in our analytics with a `BuildError` rate of <25%
<ide>
<ide>
<ide> In the same way that Homebrew maintainers are expected to be spending more of th
<ide>
<ide> Individual Homebrew repositories should not have formal lead maintainers (although those who do the most work will have the loudest voices).
<ide>
<del>Maintainers should feel even more free to pleasantly disagree with the work and decisions of the lead maintainer. With greater authority comes greater responsibility to handle and moderate technical disagreements.
<add>Maintainers should feel even more free to pleasantly disagree with the work and decisions of the lead maintainer: with greater authority comes greater responsibility to handle and moderate technical disagreements.
| 1 |
| Ruby | Ruby | add test for belongs_to? and group initializers | e9b02ae27a837d7ba02681e225590f55ce02af8c |
<ide><path>railties/test/initializable_test.rb
<ide> class Parent
<ide> class Instance
<ide> include Rails::Initializable
<ide>
<del> initializer :one do
<add> initializer :one, :group => :assets do
<ide> $arr << 1
<ide> end
<ide>
<ide> initializer :two do
<ide> $arr << 2
<ide> end
<ide>
<del> initializer :three do
<add> initializer :three, :group => :all do
<ide> $arr << 3
<ide> end
<ide>
<ide> class InstanceTest < ActiveSupport::TestCase
<ide> instance.run_initializers
<ide> assert_equal [1, 2, 3, 4], $arr
<ide> end
<add>
<add> test "running locals with groups" do
<add> $arr = []
<add> instance = Instance.new
<add> instance.run_initializers(:assets)
<add> assert_equal [1, 3], $arr
<add> end
<ide> end
<ide>
<ide> class WithArgsTest < ActiveSupport::TestCase
| 1 |
| Python | Python | update language code in usage example in comment | 42b241ccd02961f215b42733bc55f6c4010b0d2c |
<ide><path>spacy/lang/hi/examples.py
<ide> """
<ide> Example sentences to test spaCy and its language models.
<ide>
<del>>>> from spacy.lang.en.examples import sentences
<add>>>> from spacy.lang.hi.examples import sentences
<ide> >>> docs = nlp.pipe(sentences)
<ide> """
<ide>
| 1 |
| Text | Text | add link to arxiv paper | f4e95139fa70f1ea5f090a9b1f12e3bf47e9ac53 |
<ide><path>research/lm_commonsense/README.md
<ide> # A Simple Method for Commonsense Reasoning
<ide>
<del>This repository contains code to reproduce results from [*A Simple Method for Commonsense Reasoning*](TBD).
<add>This repository contains code to reproduce results from [*A Simple Method for Commonsense Reasoning*](https://arxiv.org/abs/1806.02847).
<ide>
<ide> Authors and contact:
<ide>
<ide> Accuracy of 10 LM(s) on wsc273 = 0.615
<ide> ...(omitted)
<ide>
<ide> Accuracy of 14 LM(s) on wsc273 = 0.637
<del>```
<ide>\ No newline at end of file
<add>```
| 1 |
| PHP | PHP | add day of week orm function | 4ee00d992320799a1d0d7e4576d14896fed2acaf |
<ide><path>src/Database/Dialect/PostgresDialectTrait.php
<ide> protected function _transformFunctionExpression(FunctionExpression $expression)
<ide> ->name('')
<ide> ->type(' + INTERVAL')
<ide> ->iterateParts(function ($p, $key) {
<del> if ($key === 1) {
<add> if ($key === 1) {
<ide> $interval = sprintf("'%s'", key($p));
<ide> $p = [$interval => 'literal'];
<ide> }
<ide> return $p;
<ide> });
<ide> break;
<add> case 'DAYOFWEEK':
<add> $expression
<add> ->name('EXTRACT')
<add> ->add(['DOW' => 'literal'], [], true)
<add> ->type(' FROM');
<add> break;
<ide> }
<ide> }
<ide>
<ide><path>src/Database/Dialect/SqliteDialectTrait.php
<ide> protected function _transformFunctionExpression(FunctionExpression $expression)
<ide> }
<ide> return $p;
<ide> });
<add> break;
<add> case 'DAYOFWEEK':
<add> $expression
<add> ->name('STRFTIME')
<add> ->type(' ,')
<add> ->add(["'%w'" => 'literal'], [], true);
<add> break;
<ide> }
<ide> }
<ide>
<ide><path>src/Database/Dialect/SqlserverDialectTrait.php
<ide> protected function _transformFunctionExpression(FunctionExpression $expression)
<ide> break;
<ide> case 'DATE_ADD':
<ide> $params = [];
<del> $visitor = function ($p, $key) (&$params) {
<add> $visitor = function ($p, $key) use (&$params) {
<ide> if ($key === 0) {
<ide> $params[2] = $value;
<ide> } else {
<ide> protected function _transformFunctionExpression(FunctionExpression $expression)
<ide> $params[1] = $valueUnit[0];
<ide> }
<ide> return $p;
<del> });
<del> $manipulator = function ($p, $key) ($params) {
<add> };
<add> $manipulator = function ($p, $key) use ($params) {
<ide> return $params[$key];
<ide> };
<ide>
<ide> protected function _transformFunctionExpression(FunctionExpression $expression)
<ide> ->iterateParts($manipulator)
<ide> ->add($params[2]);
<ide> break;
<add> case 'DAYOFWEEK':
<add> $expression
<add> ->name('DATEPART')
<add> ->type(' ')
<add> ->add(['weekday, ' => 'literal'], [], true)
<add> ->add([') - (1' => 'literal']); // SqlServer starts on index 1
<add> break;
<ide> }
<ide> }
<ide>
<ide><path>src/Database/FunctionsBuilder.php
<ide> public function dateAdd($expression, $value, $unit, $types = [])
<ide> return $expression;
<ide> }
<ide>
<add> /**
<add> * Returns a FunctionExpression representing a call to SQL WEEKDAY function.
<add> * 0 - Sunday, 1 - Monday, 2 - Tuesday...
<add> *
<add> * @param mixed $expression the function argument
<add> * @param array $types list of types to bind to the arguments
<add> * @return FunctionExpression
<add> */
<add> public function dayOfWeek($expression, $types = [])
<add> {
<add> return $this->_literalArgumentFunction('DAYOFWEEK', $expression, $types);
<add> }
<add>
<add> /**
<add> * Returns a FunctionExpression representing a call to SQL WEEKDAY function.
<add> * 0 - Sunday, 1 - Monday, 2 - Tuesday...
<add> *
<add> * @param mixed $expression the function argument
<add> * @param array $types list of types to bind to the arguments
<add> * @return FunctionExpression
<add> */
<add> public function weekday($expression, $types = [])
<add> {
<add> return $this->dayOfWeek($expression, $types);
<add> }
<add>
<ide> /**
<ide> * Returns a FunctionExpression representing a call that will return the current
<ide> * date and time. By default it returns both date and time, but you can also
<ide><path>tests/TestCase/Database/FunctionsBuilderTest.php
<ide> public function testExtract()
<ide> *
<ide> * @return void
<ide> */
<del> public function testExtract()
<add> public function testDateAdd()
<ide> {
<ide> $function = $this->functions->dateAdd('created', -3, 'day');
<ide> $this->assertInstanceOf('Cake\Database\Expression\FunctionExpression', $function);
<ide> $this->assertEquals("DATE_ADD(created, INTERVAL -3 day)", $function->sql(new ValueBinder));
<ide> }
<add>
<add> /**
<add> * Tests generating a DAYOFWEEK() function
<add> *
<add> * @return void
<add> */
<add> public function testDayOfWeek()
<add> {
<add> $function = $this->functions->dayOfWeek('created');
<add> $this->assertInstanceOf('Cake\Database\Expression\FunctionExpression', $function);
<add> $this->assertEquals("DAYOFWEEK(created)", $function->sql(new ValueBinder));
<add>
<add> $function = $this->functions->weekday('created');
<add> $this->assertInstanceOf('Cake\Database\Expression\FunctionExpression', $function);
<add> $this->assertEquals("DAYOFWEEK(created)", $function->sql(new ValueBinder));
<add> }
<ide> }
| 5 |
| Ruby | Ruby | convert options test to spec | 7b2d8ed4b3a1a71da59b1aa1859e844c37ba24c2 |
<add><path>Library/Homebrew/cask/spec/cask/cli/options_spec.rb
<del><path>Library/Homebrew/cask/test/cask/cli/options_test.rb
<del>require "test_helper"
<add>require "spec_helper"
<ide>
<ide> describe Hbc::CLI do
<ide> it "supports setting the appdir" do
<ide> Hbc::CLI.process_options %w[help --appdir=/some/path/foo]
<ide>
<del> Hbc.appdir.must_equal Pathname("/some/path/foo")
<add> expect(Hbc.appdir).to eq(Pathname.new("/some/path/foo"))
<ide> end
<ide>
<ide> it "supports setting the appdir from ENV" do
<ide> ENV["HOMEBREW_CASK_OPTS"] = "--appdir=/some/path/bar"
<ide>
<ide> Hbc::CLI.process_options %w[help]
<ide>
<del> Hbc.appdir.must_equal Pathname("/some/path/bar")
<add> expect(Hbc.appdir).to eq(Pathname.new("/some/path/bar"))
<ide> end
<ide>
<ide> it "supports setting the prefpanedir" do
<ide> Hbc::CLI.process_options %w[help --prefpanedir=/some/path/foo]
<ide>
<del> Hbc.prefpanedir.must_equal Pathname("/some/path/foo")
<add> expect(Hbc.prefpanedir).to eq(Pathname.new("/some/path/foo"))
<ide> end
<ide>
<ide> it "supports setting the prefpanedir from ENV" do
<ide> ENV["HOMEBREW_CASK_OPTS"] = "--prefpanedir=/some/path/bar"
<ide>
<ide> Hbc::CLI.process_options %w[help]
<ide>
<del> Hbc.prefpanedir.must_equal Pathname("/some/path/bar")
<add> expect(Hbc.prefpanedir).to eq(Pathname.new("/some/path/bar"))
<ide> end
<ide>
<ide> it "supports setting the qlplugindir" do
<ide> Hbc::CLI.process_options %w[help --qlplugindir=/some/path/foo]
<ide>
<del> Hbc.qlplugindir.must_equal Pathname("/some/path/foo")
<add> expect(Hbc.qlplugindir).to eq(Pathname.new("/some/path/foo"))
<ide> end
<ide>
<ide> it "supports setting the qlplugindir from ENV" do
<ide> ENV["HOMEBREW_CASK_OPTS"] = "--qlplugindir=/some/path/bar"
<ide>
<ide> Hbc::CLI.process_options %w[help]
<ide>
<del> Hbc.qlplugindir.must_equal Pathname("/some/path/bar")
<add> expect(Hbc.qlplugindir).to eq(Pathname.new("/some/path/bar"))
<ide> end
<ide>
<ide> it "supports setting the colorpickerdir" do
<ide> Hbc::CLI.process_options %w[help --colorpickerdir=/some/path/foo]
<ide>
<del> Hbc.colorpickerdir.must_equal Pathname("/some/path/foo")
<add> expect(Hbc.colorpickerdir).to eq(Pathname.new("/some/path/foo"))
<ide> end
<ide>
<ide> it "supports setting the colorpickerdir from ENV" do
<ide> ENV["HOMEBREW_CASK_OPTS"] = "--colorpickerdir=/some/path/bar"
<ide>
<ide> Hbc::CLI.process_options %w[help]
<ide>
<del> Hbc.colorpickerdir.must_equal Pathname("/some/path/bar")
<add> expect(Hbc.colorpickerdir).to eq(Pathname.new("/some/path/bar"))
<ide> end
<ide>
<ide> it "supports setting the dictionarydir" do
<ide> Hbc::CLI.process_options %w[help --dictionarydir=/some/path/foo]
<ide>
<del> Hbc.dictionarydir.must_equal Pathname("/some/path/foo")
<add> expect(Hbc.dictionarydir).to eq(Pathname.new("/some/path/foo"))
<ide> end
<ide>
<ide> it "supports setting the dictionarydir from ENV" do
<ide> ENV["HOMEBREW_CASK_OPTS"] = "--dictionarydir=/some/path/bar"
<ide>
<ide> Hbc::CLI.process_options %w[help]
<ide>
<del> Hbc.dictionarydir.must_equal Pathname("/some/path/bar")
<add> expect(Hbc.dictionarydir).to eq(Pathname.new("/some/path/bar"))
<ide> end
<ide>
<ide> it "supports setting the fontdir" do
<ide> Hbc::CLI.process_options %w[help --fontdir=/some/path/foo]
<ide>
<del> Hbc.fontdir.must_equal Pathname("/some/path/foo")
<add> expect(Hbc.fontdir).to eq(Pathname.new("/some/path/foo"))
<ide> end
<ide>
<ide> it "supports setting the fontdir from ENV" do
<ide> ENV["HOMEBREW_CASK_OPTS"] = "--fontdir=/some/path/bar"
<ide>
<ide> Hbc::CLI.process_options %w[help]
<ide>
<del> Hbc.fontdir.must_equal Pathname("/some/path/bar")
<add> expect(Hbc.fontdir).to eq(Pathname.new("/some/path/bar"))
<ide> end
<ide>
<ide> it "supports setting the servicedir" do
<ide> Hbc::CLI.process_options %w[help --servicedir=/some/path/foo]
<ide>
<del> Hbc.servicedir.must_equal Pathname("/some/path/foo")
<add> expect(Hbc.servicedir).to eq(Pathname.new("/some/path/foo"))
<ide> end
<ide>
<ide> it "supports setting the servicedir from ENV" do
<ide> ENV["HOMEBREW_CASK_OPTS"] = "--servicedir=/some/path/bar"
<ide>
<ide> Hbc::CLI.process_options %w[help]
<ide>
<del> Hbc.servicedir.must_equal Pathname("/some/path/bar")
<add> expect(Hbc.servicedir).to eq(Pathname.new("/some/path/bar"))
<ide> end
<ide>
<ide> it "allows additional options to be passed through" do
<ide> rest = Hbc::CLI.process_options %w[edit foo --create --appdir=/some/path/qux]
<ide>
<del> Hbc.appdir.must_equal Pathname("/some/path/qux")
<del> rest.must_equal %w[edit foo --create]
<add> expect(Hbc.appdir).to eq(Pathname.new("/some/path/qux"))
<add> expect(rest).to eq(%w[edit foo --create])
<ide> end
<ide>
<ide> describe "when a mandatory argument is missing" do
<ide> it "shows a user-friendly error message" do
<del> lambda {
<add> expect {
<ide> Hbc::CLI.process_options %w[install -f]
<del> }.must_raise Hbc::CaskError
<add> }.to raise_error(Hbc::CaskError)
<ide> end
<ide> end
<ide>
<ide> describe "given an ambiguous option" do
<ide> it "shows a user-friendly error message" do
<del> lambda {
<add> expect {
<ide> Hbc::CLI.process_options %w[edit -c]
<del> }.must_raise Hbc::CaskError
<add> }.to raise_error(Hbc::CaskError)
<ide> end
<ide> end
<ide>
<ide> describe "--debug" do
<ide> it "sets the Cask debug method to true" do
<ide> Hbc::CLI.process_options %w[help --debug]
<del> Hbc.debug.must_equal true
<add> expect(Hbc.debug).to be true
<ide> Hbc.debug = false
<ide> end
<ide> end
<ide>
<ide> describe "--help" do
<ide> it "sets the Cask help method to true" do
<ide> Hbc::CLI.process_options %w[foo --help]
<del> Hbc.help.must_equal true
<add> expect(Hbc.help).to be true
<ide> Hbc.help = false
<ide> end
<ide> end
<del>
<del> after do
<del> ENV["HOMEBREW_CASK_OPTS"] = nil
<del> end
<ide> end
<ide><path>Library/Homebrew/cask/spec/spec_helper.rb
<ide> RSpec.configure do |config|
<ide> config.order = :random
<ide> config.include(Test::Helper::Shutup)
<del> config.after(:each) do
<del> FileUtils.rm_rf [
<del> Hbc.appdir.children,
<del> Hbc.caskroom.children,
<del> ]
<add> config.around(:each) do |example|
<add> begin
<add> @__appdir = Hbc.appdir
<add> @__caskroom = Hbc.caskroom
<add> @__prefpanedir = Hbc.prefpanedir
<add> @__qlplugindir = Hbc.qlplugindir
<add> @__servicedir = Hbc.servicedir
<add>
<add> @__argv = ARGV.dup
<add> @__env = ENV.to_hash # dup doesn't work on ENV
<add>
<add> example.run
<add> ensure
<add> ARGV.replace(@__argv)
<add> ENV.replace(@__env)
<add>
<add> Hbc.appdir = @__appdir
<add> Hbc.caskroom = @__caskroom
<add> Hbc.prefpanedir = @__prefpanedir
<add> Hbc.qlplugindir = @__qlplugindir
<add> Hbc.servicedir = @__servicedir
<add>
<add> FileUtils.rm_rf [
<add> Hbc.appdir.children,
<add> Hbc.caskroom.children,
<add> ]
<add> end
<ide> end
<ide> end
<ide>
| 2 |
| Python | Python | rewrite backfilljob logic for clarity | c1eb83adbf9d4371dab7dd545c593cc5a853fb8b |
<ide><path>airflow/jobs.py
<ide> def import_errors(self, dagbag):
<ide> filename=filename, stacktrace=stacktrace))
<ide> session.commit()
<ide>
<del>
<ide> def schedule_dag(self, dag):
<ide> """
<ide> This method checks whether a new DagRun needs to be created
<ide> def process_dag(self, dag, executor):
<ide> if task.adhoc or (task.task_id, dttm) in skip_tis:
<ide> continue
<ide> ti = TI(task, dttm)
<add>
<ide> ti.refresh_from_db()
<ide> if ti.state in (
<ide> State.RUNNING, State.QUEUED, State.SUCCESS, State.FAILED):
<ide> def prioritize_queued(self, session, executor, dagbag):
<ide>
<ide> queue_size = len(tis)
<ide> self.logger.info("Pool {pool} has {open_slots} slots, {queue_size} "
<del> "task instances in queue".format(**locals()))
<add> "task instances in queue".format(**locals()))
<ide> if open_slots <= 0:
<ide> continue
<ide> tis = sorted(
<ide> def _execute(self):
<ide> succeeded = []
<ide> started = []
<ide> wont_run = []
<del> could_not_run = set()
<add> not_ready_to_run = set()
<ide>
<ide> for task in self.dag.tasks:
<ide> if (not self.include_adhoc) and task.adhoc:
<ide> def _execute(self):
<ide> for dttm in self.dag.date_range(start_date, end_date=end_date):
<ide> ti = models.TaskInstance(task, dttm)
<ide> tasks_to_run[ti.key] = ti
<add> session.merge(ti)
<add> session.commit()
<ide>
<ide> # Triggering what is ready to get triggered
<ide> deadlocked = False
<ide> def _execute(self):
<ide> ti.state = State.RUNNING
<ide> if key not in started:
<ide> started.append(key)
<del> if ti in could_not_run:
<del> could_not_run.remove(ti)
<del>
<del> # if the task is not runnable and has no state indicating why
<del> # (like FAILED or UP_FOR_RETRY), then it's just not ready
<del> # to run. If the set of tasks that aren't ready ever equals
<del> # the set of tasks to run, then the backfill is deadlocked
<del> elif ti.state is None:
<del> could_not_run.add(ti)
<del> if could_not_run == set(tasks_to_run.values()):
<add> if ti in not_ready_to_run:
<add> not_ready_to_run.remove(ti)
<add>
<add> # Mark the task as not ready to run. If the set of tasks
<add> # that aren't ready ever equals the set of tasks to run,
<add> # then the backfill is deadlocked
<add> elif ti.state in (State.NONE, State.UPSTREAM_FAILED):
<add> not_ready_to_run.add(ti)
<add> if not_ready_to_run == set(tasks_to_run.values()):
<ide> msg = 'BackfillJob is deadlocked: no tasks can be run.'
<ide> if any(
<ide> t.are_dependencies_met() !=
<ide> def _execute(self):
<ide> for t in tasks_to_run.values()):
<ide> msg += (
<ide> ' Some of the tasks that were unable to '
<del> 'run have depends_on_past=True. Try running '
<del> 'the backfill with '
<del> 'ignore_first_depends_on_past=True '
<del> '(or -I from the command line).')
<add> 'run have "depends_on_past=True". Try running '
<add> 'the backfill with the option '
<add> '"ignore_first_depends_on_past=True" '
<add> ' or passing "-I" at the command line.')
<ide> self.logger.error(msg)
<ide> deadlocked = True
<del> wont_run.extend(could_not_run)
<add> wont_run.extend(not_ready_to_run)
<ide> tasks_to_run.clear()
<ide>
<ide> self.heartbeat()
<ide> def _execute(self):
<ide> continue
<ide> ti = tasks_to_run[key]
<ide> ti.refresh_from_db()
<del> if (
<del> ti.state in (State.FAILED, State.SKIPPED) or
<del> state == State.FAILED):
<del> # executor reports failure; task reports running
<del> if ti.state == State.RUNNING and state == State.FAILED:
<add>
<add> # executor reports failure
<add> if state == State.FAILED:
<add>
<add> # task reports running
<add> if ti.state == State.RUNNING:
<ide> msg = (
<ide> 'Executor reports that task instance {} failed '
<ide> 'although the task says it is running.'.format(key))
<ide> self.logger.error(msg)
<ide> ti.handle_failure(msg)
<del> # executor and task report failure
<del> elif ti.state == State.FAILED or state == State.FAILED:
<del> failed.append(key)
<del> self.logger.error("Task instance {} failed".format(key))
<add>
<ide> # task reports skipped
<ide> elif ti.state == State.SKIPPED:
<ide> wont_run.append(key)
<ide> self.logger.error("Skipping {} ".format(key))
<add>
<add> # anything else is a failure
<add> else:
<add> failed.append(key)
<add> self.logger.error("Task instance {} failed".format(key))
<add>
<ide> tasks_to_run.pop(key)
<del> # Removing downstream tasks that also shouldn't run
<del> for t in self.dag.get_task(task_id).get_flat_relatives(
<del> upstream=False):
<del> key = (ti.dag_id, t.task_id, execution_date)
<del> if key in tasks_to_run:
<del> wont_run.append(key)
<del> tasks_to_run.pop(key)
<del> # executor and task report success
<del> elif ti.state == State.SUCCESS and state == State.SUCCESS:
<del> succeeded.append(key)
<del> tasks_to_run.pop(key)
<del> elif state == State.SUCCESS and key in could_not_run:
<del> continue
<del> # executor reports success but task does not -- this is weird
<del> elif (
<del> ti.state not in (
<add>
<add> # executor reports success
<add> elif state == State.SUCCESS:
<add>
<add> # task reports success
<add> if ti.state == State.SUCCESS:
<add> self.logger.info(
<add> 'Task instance {} succeeded'.format(key))
<add> succeeded.append(key)
<add> tasks_to_run.pop(key)
<add>
<add> # task reports failure
<add> elif ti.state == State.FAILED:
<add> self.logger.error("Task instance {} failed".format(key))
<add> failed.append(key)
<add> tasks_to_run.pop(key)
<add>
<add> # this probably won't ever be triggered
<add> elif key in not_ready_to_run:
<add> continue
<add>
<add> # executor reports success but task does not - this is weird
<add> elif ti.state not in (
<ide> State.SUCCESS,
<ide> State.QUEUED,
<del> State.UP_FOR_RETRY) and
<del> state == State.SUCCESS):
<del> self.logger.error(
<del> "The airflow run command failed "
<del> "at reporting an error. This should not occur "
<del> "in normal circumstances. Task state is '{}',"
<del> "reported state is '{}'. TI is {}"
<del> "".format(ti.state, state, ti))
<del>
<del> # if the executor fails 3 or more times, stop trying to
<del> # run the task
<del> executor_fails[key] += 1
<del> if executor_fails[key] >= 3:
<del> msg = (
<del> 'The airflow run command failed to report an '
<del> 'error for task {} three or more times. The task '
<del> 'is being marked as failed. This is very unusual '
<del> 'and probably means that an error is taking place '
<del> 'before the task even starts.'.format(key))
<del> self.logger.error(msg)
<del> ti.handle_failure(msg)
<del> tasks_to_run.pop(key)
<add> State.UP_FOR_RETRY):
<add> self.logger.error(
<add> "The airflow run command failed "
<add> "at reporting an error. This should not occur "
<add> "in normal circumstances. Task state is '{}',"
<add> "reported state is '{}'. TI is {}"
<add> "".format(ti.state, state, ti))
<add>
<add> # if the executor fails 3 or more times, stop trying to
<add> # run the task
<add> executor_fails[key] += 1
<add> if executor_fails[key] >= 3:
<add> msg = (
<add> 'The airflow run command failed to report an '
<add> 'error for task {} three or more times. The '
<add> 'task is being marked as failed. This is very '
<add> 'unusual and probably means that an error is '
<add> 'taking place before the task even '
<add> 'starts.'.format(key))
<add> self.logger.error(msg)
<add> ti.handle_failure(msg)
<add> tasks_to_run.pop(key)
<ide>
<ide> msg = (
<ide> "[backfill progress] "
| 1 |
| Javascript | Javascript | use mustcall in ephemeralkeyinfo test | a06f67f4543ec8d0c2c0c38fcb731ffca8bf507a |
<ide><path>test/parallel/test-tls-client-getephemeralkeyinfo.js
<ide> function test(size, type, name, cipher) {
<ide> const client = tls.connect({
<ide> port: server.address().port,
<ide> rejectUnauthorized: false
<del> }, function() {
<add> }, common.mustCall(function() {
<ide> const ekeyinfo = client.getEphemeralKeyInfo();
<ide> assert.strictEqual(ekeyinfo.type, type);
<ide> assert.strictEqual(ekeyinfo.size, size);
<ide> assert.strictEqual(ekeyinfo.name, name);
<ide> server.close();
<del> });
<add> }));
<add> client.on('secureConnect', common.mustCall());
<ide> }));
<ide> }
<ide>
| 1 |
| PHP | PHP | add insert/delete/update to orm\query | 1cd5f8dac2f98c0622a5aff797576eee1d66c181 |
<ide><path>Cake/ORM/Query.php
<ide> public function _dirty() {
<ide> parent::_dirty();
<ide> }
<ide>
<add>/**
<add> * Create an update query.
<add> *
<add> * This changes the query type to be 'update'.
<add> * Can be combined with set() and where() methods to create update queries.
<add> *
<add> * @param string $table Unused parameter.
<add> * @return Query
<add> */
<add> public function update($table = null) {
<add> $table = $this->repository()->table();
<add> return parent::update($table);
<add> }
<add>
<add>/**
<add> * Create a delete query.
<add> *
<add> * This changes the query type to be 'delete'.
<add> * Can be combined with the where() method to create delete queries.
<add> *
<add> * @param string $table Unused parameter.
<add> * @return Query
<add> */
<add> public function delete($table = null) {
<add> $table = $this->repository()->table();
<add> return parent::delete($table);
<add> }
<add>
<add>/**
<add> * Create an insert query.
<add> *
<add> * This changes the query type to be 'insert'.
<add> * Note calling this method will reset any data previously set
<add> * with Query::values()
<add> *
<add> * Can be combined with the where() method to create delete queries.
<add> *
<add> * @param array $columns The columns to insert into.
<add> * @param array $types A map between columns & their datatypes.
<add> * @param array $unused An unused parameter from the parent class' interface
<add> * @return Query
<add> */
<add> public function insert($columns, $types = [], $unused = []) {
<add> $table = $this->repository()->table();
<add> return parent::insert($table, $columns, $types);
<add> }
<add>
<ide> }
<ide><path>Cake/Test/TestCase/ORM/QueryTest.php
<ide> public function testCountWithGroup() {
<ide> }
<ide>
<ide> /**
<del> * Test that there is no beforeFind event with update queries.
<add> * Test update method.
<ide> *
<ide> * @return void
<ide> */
<del> public function testUpdateNoBeforeFind() {
<add> public function testUpdate() {
<ide> $table = TableRegistry::get('articles');
<del> $table->getEventManager()->attach(function() {
<del> $this->fail('No callback should be fired');
<del> }, 'Model.beforeFind');
<ide>
<del> $query = $table->query()
<del> ->update($table->table())
<del> ->set(['title' => 'First']);
<add> $result = $table->query()
<add> ->update()
<add> ->set(['title' => 'First'])
<add> ->execute();
<ide>
<del> $result = $query->execute();
<ide> $this->assertInstanceOf('Cake\Database\StatementInterface', $result);
<ide> $this->assertTrue($result->rowCount() > 0);
<ide> }
<ide>
<ide> /**
<del> * Test that there is no beforeFind event with delete queries.
<add> * Test insert method.
<ide> *
<ide> * @return void
<ide> */
<del> public function testDeleteNoBeforeFind() {
<add> public function testInsert() {
<ide> $table = TableRegistry::get('articles');
<del> $table->getEventManager()->attach(function() {
<del> $this->fail('No callback should be fired');
<del> }, 'Model.beforeFind');
<ide>
<del> $query = $table->query()
<del> ->delete($table->table());
<add> $result = $table->query()
<add> ->insert(['title'])
<add> ->values(['title' => 'First'])
<add> ->values(['title' => 'Second'])
<add> ->execute();
<add>
<add> $this->assertInstanceOf('Cake\Database\StatementInterface', $result);
<add> $this->assertEquals(2, $result->rowCount());
<add> }
<add>
<add>/**
<add> * Test delete method.
<add> *
<add> * @return void
<add> */
<add> public function testDelete() {
<add> $table = TableRegistry::get('articles');
<add>
<add> $result = $table->query()
<add> ->delete()
<add> ->where(['id >=' => 1])
<add> ->execute();
<ide>
<del> $result = $query->execute();
<ide> $this->assertInstanceOf('Cake\Database\StatementInterface', $result);
<ide> $this->assertTrue($result->rowCount() > 0);
<ide> }
| 2
|
Javascript
|
Javascript
|
remember hash position correctly
|
4ea5b9f771fdb7e3d707f62f9dde0b8b23edda95
|
<ide><path>web/viewer.js
<ide> var PDFView = {
<ide> this.error('An error occurred while reading the PDF.', e);
<ide> }
<ide> var pagesCount = pdf.numPages;
<add> var id = pdf.fingerprint;
<add> var storedHash = null;
<ide> document.getElementById('numPages').innerHTML = pagesCount;
<ide> document.getElementById('pageNumber').max = pagesCount;
<add> PDFView.documentFingerprint = id;
<add>
<add> if (Settings.get(id + '.exists', false)) {
<add> var page = Settings.get(id + '.page', '1');
<add> var zoom = Settings.get(id + '.zoom', PDFView.currentScale);
<add> var left = Settings.get(id + '.scrollLeft', '0');
<add> var top = Settings.get(id + '.scrollTop', '0');
<add>
<add> storedHash = 'page=' + page + '&zoom=' + Math.round(zoom * 100);
<add> storedHash += ',' + left + ',' + top;
<add> }
<ide>
<ide> var pages = this.pages = [];
<ide> var pagesRefMap = {};
<ide> var PDFView = {
<ide> this.setHash(this.initialBookmark);
<ide> this.initialBookmark = null;
<ide> }
<del> else {
<del> var scroll = Settings.get(pdf.fingerprint + '.scroll', -1);
<del> if (scroll != -1) {
<del> setTimeout(function scrollWindow() {
<del> window.scrollTo(0, scroll);
<del> }, 0);
<del> } else
<del> this.page = 1;
<add> else if (storedHash) {
<add> this.setHash(storedHash);
<ide> }
<ide> },
<ide>
<ide> var PDFView = {
<ide> if ('zoom' in params) {
<ide> var zoomArgs = params.zoom.split(','); // scale,left,top
<ide> // building destination array
<del> var dest = [null, new Name('XYZ'), (zoomArgs[1] | 0),
<add> var dest = [null, {name: 'XYZ'}, (zoomArgs[1] | 0),
<ide> (zoomArgs[2] | 0), (zoomArgs[0] | 0) / 100];
<ide> var currentPage = this.pages[pageNumber - 1];
<ide> currentPage.scrollIntoView(dest);
<ide> function updateViewarea() {
<ide> var topLeft = currentPage.getPagePoint(window.pageXOffset,
<ide> window.pageYOffset - firstPage.y - kViewerTopMargin);
<ide> pdfOpenParams += ',' + Math.round(topLeft.x) + ',' + Math.round(topLeft.y);
<add>
<add> var id = PDFView.documentFingerprint;
<add> Settings.set(id + '.exists', true);
<add> Settings.set(id + '.page', pageNumber);
<add> Settings.set(id + '.zoom', PDFView.currentScale);
<add> Settings.set(id + '.scrollLeft', Math.round(topLeft.x));
<add> Settings.set(id + '.scrollTop', Math.round(topLeft.y));
<add>
<ide> document.getElementById('viewBookmark').href = pdfOpenParams;
<ide> }
<ide>
<ide> window.addEventListener('scroll', function webViewerScroll(evt) {
<ide> updateViewarea();
<del> var fingerprint = PDFView.pages[0].content.pdf.fingerprint;
<del> Settings.set(fingerprint + '.scroll', window.pageYOffset);
<ide> }, true);
<ide>
<ide>
| 1
|
Text
|
Text
|
clarify rl.question callback args
|
c300ba22128ce8e675650adcd1f6b869dc1b5126
|
<ide><path>doc/api/readline.md
<ide> rl.question('What is your favorite food?', (answer) => {
<ide> });
<ide> ```
<ide>
<add>*Note*: The `callback` function passed to `rl.question()` does not follow the
<add>typical pattern of accepting an `Error` object or `null` as the first argument.
<add>The `callback` is called with the provided answer as the only argument.
<add>
<ide> ### rl.resume()
<ide> <!-- YAML
<ide> added: v0.3.4
| 1
|
Javascript
|
Javascript
|
remove superfluous fromcharcode function
|
ef76afdf8096bca3eb4e3e3180a7f34de7f193f9
|
<ide><path>src/Angular.js
<ide> var uppercase = function(string){return isString(string) ? string.toUpperCase()
<ide>
<ide> var manualLowercase = function(s) {
<ide> return isString(s)
<del> ? s.replace(/[A-Z]/g, function(ch) {return fromCharCode(ch.charCodeAt(0) | 32);})
<add> ? s.replace(/[A-Z]/g, function(ch) {return String.fromCharCode(ch.charCodeAt(0) | 32);})
<ide> : s;
<ide> };
<ide> var manualUppercase = function(s) {
<ide> return isString(s)
<del> ? s.replace(/[a-z]/g, function(ch) {return fromCharCode(ch.charCodeAt(0) & ~32);})
<add> ? s.replace(/[a-z]/g, function(ch) {return String.fromCharCode(ch.charCodeAt(0) & ~32);})
<ide> : s;
<ide> };
<ide>
<ide> if ('i' !== 'I'.toLowerCase()) {
<ide> uppercase = manualUppercase;
<ide> }
<ide>
<del>function fromCharCode(code) {return String.fromCharCode(code);}
<del>
<ide>
<ide> var /** holds major version number for IE or NaN for real browsers */
<ide> msie = int((/msie (\d+)/.exec(lowercase(navigator.userAgent)) || [])[1]),
| 1
|
Python
|
Python
|
add example to corrcoef function
|
9fd8b2db731d79fabdc40de3a7111381fb4aae5a
|
<ide><path>numpy/lib/function_base.py
<ide> def corrcoef(x, y=None, rowvar=True, bias=np._NoValue, ddof=np._NoValue):
<ide> for backwards compatibility with previous versions of this function. These
<ide> arguments had no effect on the return values of the function and can be
<ide> safely ignored in this and previous versions of numpy.
<add>
<add> Examples
<add> --------
<add> In this example we generate two random arrays, ``xarr`` and ``yarr``, and
<add> compute the row-wise and column-wise Pearson correlation coefficients,
<add> ``R``. Since ``rowvar`` is true by default, we first find the row-wise
<add> Pearson correlation coefficients between the variables of ``xarr``.
<add>
<add> >>> import numpy as np
<add> >>> rng = np.random.default_rng(seed=42)
<add> >>> xarr = rng.random((3, 3))
<add> >>> xarr
<add> array([[0.77395605, 0.43887844, 0.85859792],
<add> [0.69736803, 0.09417735, 0.97562235],
<add> [0.7611397 , 0.78606431, 0.12811363]])
<add> >>> R1 = np.corrcoef(xarr)
<add> >>> R1
<add> array([[ 1. , 0.99256089, -0.68080986],
<add> [ 0.99256089, 1. , -0.76492172],
<add> [-0.68080986, -0.76492172, 1. ]])
<add>
<add> If we add another set of variables and observations ``yarr``, we can
<add> compute the row-wise Pearson correlation coefficients between the
<add> variables in ``xarr`` and ``yarr``.
<add>
<add> >>> yarr = rng.random((3, 3))
<add> >>> yarr
<add> array([[0.45038594, 0.37079802, 0.92676499],
<add> [0.64386512, 0.82276161, 0.4434142 ],
<add> [0.22723872, 0.55458479, 0.06381726]])
<add> >>> R2 = np.corrcoef(xarr, yarr)
<add> >>> R2
<add> array([[ 1. , 0.99256089, -0.68080986, 0.75008178, -0.934284 ,
<add> -0.99004057],
<add> [ 0.99256089, 1. , -0.76492172, 0.82502011, -0.97074098,
<add> -0.99981569],
<add> [-0.68080986, -0.76492172, 1. , -0.99507202, 0.89721355,
<add> 0.77714685],
<add> [ 0.75008178, 0.82502011, -0.99507202, 1. , -0.93657855,
<add> -0.83571711],
<add> [-0.934284 , -0.97074098, 0.89721355, -0.93657855, 1. ,
<add> 0.97517215],
<add> [-0.99004057, -0.99981569, 0.77714685, -0.83571711, 0.97517215,
<add> 1. ]])
<add>
<add> Finally if we use the option ``rowvar=False``, the columns are now
<add> being treated as the variables and we will find the column-wise Pearson
<add> correlation coefficients between variables in ``xarr`` and ``yarr``.
<add>
<add> >>> R3 = np.corrcoef(xarr, yarr, rowvar=False)
<add> >>> R3
<add> array([[ 1. , 0.77598074, -0.47458546, -0.75078643, -0.9665554 ,
<add> 0.22423734],
<add> [ 0.77598074, 1. , -0.92346708, -0.99923895, -0.58826587,
<add> -0.44069024],
<add> [-0.47458546, -0.92346708, 1. , 0.93773029, 0.23297648,
<add> 0.75137473],
<add> [-0.75078643, -0.99923895, 0.93773029, 1. , 0.55627469,
<add> 0.47536961],
<add> [-0.9665554 , -0.58826587, 0.23297648, 0.55627469, 1. ,
<add> -0.46666491],
<add> [ 0.22423734, -0.44069024, 0.75137473, 0.47536961, -0.46666491,
<add> 1. ]])
<ide>
<ide> """
<ide> if bias is not np._NoValue or ddof is not np._NoValue:
| 1
|
Javascript
|
Javascript
|
add tests for no root compilers in multi-compiler
|
e6ec99e1e78cef7e9df5b2f8856adfb47f729d80
|
<ide><path>test/MultiCompiler.test.js
<ide> const createCompiler = function(overrides) {
<ide> };
<ide>
<ide> const setupTwoCompilerEnvironment = function(env, compiler1Values, compiler2Values) {
<del> const compilerEnvironment1 = new CompilerEnvironment();
<del> const compilerEnvironment2 = new CompilerEnvironment();
<del> const compilers = [
<del> Object.assign({
<del> name: "compiler1"
<del> }, (compiler1Values || {}), compilerEnvironment1.getCompilerStub()),
<del> Object.assign({
<del> name: "compiler2"
<del> }, (compiler2Values || {}), compilerEnvironment2.getCompilerStub())
<del> ];
<add> return setupMutliCompilerEnvironment(env, 2, [compiler1Values, compiler2Values]);
<add>};
<add>
<add>const setupMutliCompilerEnvironment = function(env, count, compilerValues) {
<add> const values = Array.isArray(compilerValues) ? compilerValues : new Array(count);
<add> const environments = values.map(() => new CompilerEnvironment());
<add> const compilers = environments.map((e, i) => Object.assign({
<add> name: `compiler${i + 1}`
<add> }, (values[i] || {}), e.getCompilerStub()));
<ide> env.myMultiCompiler = new MultiCompiler(compilers);
<del> env.compiler1EventBindings = compilerEnvironment1.getPluginEventBindings();
<del> env.compiler2EventBindings = compilerEnvironment2.getPluginEventBindings();
<del> env.compiler1WatchCallbacks = compilerEnvironment1.getWatchCallbacks();
<del> env.compiler2WatchCallbacks = compilerEnvironment2.getWatchCallbacks();
<del> env.compiler1RunCallbacks = compilerEnvironment1.getRunCallbacks();
<del> env.compiler2RunCallbacks = compilerEnvironment2.getRunCallbacks();
<add> environments.forEach((compilerEnvironment, i) => {
<add> env[`compiler${i + 1}EventBindings`] = compilerEnvironment.getPluginEventBindings();
<add> env[`compiler${i + 1}WatchCallbacks`] = compilerEnvironment.getWatchCallbacks();
<add> env[`compiler${i + 1}RunCallbacks`] = compilerEnvironment.getRunCallbacks();
<add> });
<ide> };
<ide>
<ide> describe("MultiCompiler", () => {
<ide> describe("MultiCompiler", () => {
<ide> should(env.callback.getCall(0).args[1]).be.undefined();
<ide> });
<ide> });
<add>
<add> describe("with no root compiler", () => {
<add> beforeEach(() => {
<add> setupMutliCompilerEnvironment(env, 3, [{
<add> name: "a",
<add> }, {
<add> name: "b",
<add> dependencies: ["a", "c"]
<add> }, {
<add> name: "c",
<add> dependencies: ["b"]
<add> }]);
<add> env.callback = sinon.spy();
<add> env.options = [{
<add> testWatchOptions: true
<add> }, {
<add> testWatchOptions2: true
<add> }];
<add> env.result = env.myMultiCompiler.watch(env.options, env.callback);
<add> });
<add>
<add> it("should call the callback with an error message", () => {
<add> env.compiler1RunCallbacks.length.should.be.exactly(0);
<add> env.compiler2RunCallbacks.length.should.be.exactly(0);
<add> env.compiler3RunCallbacks.length.should.be.exactly(0);
<add> env.callback.callCount.should.be.exactly(1);
<add> env.callback.getCall(0).args[0].should.be.Error();
<add> should(env.callback.getCall(0).args[1]).be.undefined();
<add> });
<add> });
<ide> });
<ide>
<ide> describe("run", () => {
<ide> describe("MultiCompiler", () => {
<ide> should(env.callback.getCall(0).args[1]).be.undefined();
<ide> });
<ide> });
<add>
<add> describe("with no root compiler", () => {
<add> beforeEach(() => {
<add> setupMutliCompilerEnvironment(env, 3, [{
<add> name: "a",
<add> }, {
<add> name: "b",
<add> dependencies: ["a", "c"]
<add> }, {
<add> name: "c",
<add> dependencies: ["b"]
<add> }]);
<add> env.callback = sinon.spy();
<add> env.myMultiCompiler.run(env.callback);
<add> });
<add>
<add> it("should call the callback with an error message", () => {
<add> env.compiler1RunCallbacks.length.should.be.exactly(0);
<add> env.compiler2RunCallbacks.length.should.be.exactly(0);
<add> env.compiler3RunCallbacks.length.should.be.exactly(0);
<add> env.callback.callCount.should.be.exactly(1);
<add> env.callback.getCall(0).args[0].should.be.Error();
<add> should(env.callback.getCall(0).args[1]).be.undefined();
<add> });
<add> });
<ide> });
<ide>
<ide> describe("purgeInputFileSystem", () => {
| 1
|
Text
|
Text
|
add v2.3.2 to changelog
|
5641c3089180bdd1d4fa54e9dd2d3ac285f088e4
|
<ide><path>CHANGELOG.md
<ide> - [#12664](https://github.com/emberjs/ember.js/pull/12664) Include NaN as a falsey value in the `with` helper's docstring
<ide> - [#12698](https://github.com/emberjs/ember.js/pull/12698) convert all this._super.apply(this, arguments) to this._super(...arguments)
<ide>
<del>### 2.3.1 (February 4, 2016)
<add>### v2.3.2 (March 17, 2016)
<add>
<add>- [#13118](https://github.com/emberjs/ember.js/pull/13118) [BUGFIX] Work around Chrome 49/50 optimization bug affecting helper usage.
<add>
<add>### v2.3.1 (February 4, 2016)
<ide>
<ide> - [#12829](https://github.com/emberjs/ember.js/pull/12829) [BUGFIX] Support tagless components in fastboot.
<ide> - [#12848](https://github.com/emberjs/ember.js/pull/12848) Make dependencies that end in `@each` expand to `[]`.
| 1
|
Text
|
Text
|
improve process.emitwarning() example
|
71d4a7474d234bba9461e28c95841bbd090cc642
|
<ide><path>doc/api/process.md
<ide> so, it is recommended to place the `emitWarning()` behind a simple boolean
<ide> flag as illustrated in the example below:
<ide>
<ide> ```js
<del>var warned = false;
<ide> function emitMyWarning() {
<del> if (!warned) {
<add> if (!emitMyWarning.warned) {
<add> emitMyWarning.warned = true;
<ide> process.emitWarning('Only warn once!');
<del> warned = true;
<ide> }
<ide> }
<ide> emitMyWarning();
| 1
|
Javascript
|
Javascript
|
add crypto check to test-benchmark-tls
|
e9ba0cfd46d75d21e9e2359e40710c3ee46a296a
|
<ide><path>test/sequential/test-benchmark-tls.js
<ide>
<ide> const common = require('../common');
<ide>
<add>if (!common.hasCrypto)
<add> common.skip('missing crypto');
<add>
<ide> if (!common.enoughTestMem)
<ide> common.skip('Insufficient memory for TLS benchmark test');
<ide>
| 1
|
Javascript
|
Javascript
|
propagate sass parser errors from worker
|
73aa6fcca62edeaf99a48a9d39e1a4033410e8f3
|
<ide><path>client/src/client/workers/sass-compile.js
<ide> import Sass from 'sass.js';
<ide> onmessage = e => {
<ide> const data = e.data;
<ide> Sass.compile(data, result => {
<del> self.postMessage(result.text);
<add> if (result.status === 0) {
<add> self.postMessage(result.text);
<add> } else {
<add> throw result.formatted;
<add> }
<ide> });
<ide> };
| 1
|
Text
|
Text
|
add timer usage for es6
|
728922253503f7936066e3bf1d20c54e0b40a5f6
|
<ide><path>docs/Timers.md
<ide> var Component = React.createClass({
<ide> });
<ide> ```
<ide>
<del>We strongly discourage using the global `setTimeout(...)` and recommend instead that you use `this.setTimeout(...)` provided by react-timer-mixin. This will eliminate a lot of hard work tracking down bugs, such as crashes caused by timeouts firing after a component has been unmounted.
<add>This will eliminate a lot of hard work tracking down bugs, such as crashes caused by timeouts firing after a component has been unmounted.
<add>
<add>Keep in mind that if you use ES6 classes for your React components [there is no built-in API for mixins](https://facebook.github.io/react/blog/2015/01/27/react-v0.13.0-beta-1.html#mixins). To use `TimerMixin` with ES6 classes, we recommend [react-mixin](https://github.com/brigand/react-mixin).
| 1
|
Javascript
|
Javascript
|
fix lint errors
|
1bedeeb41d0ca8eb6e4d116fb295121b087ed4e1
|
<ide><path>benchmark/arrays/var-int.js
<ide> 'use strict';
<ide> var common = require('../common.js');
<ide> var bench = common.createBenchmark(main, {
<del> type: 'Array Buffer Int8Array Uint8Array Int16Array Uint16Array Int32Array Uint32Array Float32Array Float64Array'.split(' '),
<add> type: ['Array', 'Buffer', 'Int8Array', 'Uint8Array', 'Int16Array',
<add> 'Uint16Array', 'Int32Array', 'Uint32Array', 'Float32Array',
<add> 'Float64Array'],
<ide> n: [25]
<ide> });
<ide>
<ide><path>benchmark/arrays/zero-float.js
<ide> 'use strict';
<ide> var common = require('../common.js');
<ide> var bench = common.createBenchmark(main, {
<del> type: 'Array Buffer Int8Array Uint8Array Int16Array Uint16Array Int32Array Uint32Array Float32Array Float64Array'.split(' '),
<add> type: ['Array', 'Buffer', 'Int8Array', 'Uint8Array', 'Int16Array',
<add> 'Uint16Array', 'Int32Array', 'Uint32Array', 'Float32Array',
<add> 'Float64Array'],
<ide> n: [25]
<ide> });
<ide>
<ide><path>benchmark/arrays/zero-int.js
<ide> 'use strict';
<ide> var common = require('../common.js');
<ide> var bench = common.createBenchmark(main, {
<del> type: 'Array Buffer Int8Array Uint8Array Int16Array Uint16Array Int32Array Uint32Array Float32Array Float64Array'.split(' '),
<add> type: ['Array', 'Buffer', 'Int8Array', 'Uint8Array', 'Int16Array',
<add> 'Uint16Array', 'Int32Array', 'Uint32Array', 'Float32Array',
<add> 'Float64Array'],
<ide> n: [25]
<ide> });
<ide>
<ide><path>benchmark/buffers/buffer-creation.js
<ide> function main(conf) {
<ide> var clazz = conf.type === 'fast' ? Buffer : SlowBuffer;
<ide> bench.start();
<ide> for (var i = 0; i < n * 1024; i++) {
<del> b = new clazz(len);
<add> new clazz(len);
<ide> }
<ide> bench.end(n);
<ide> }
<ide><path>benchmark/buffers/buffer-indexof.js
<ide> 'use strict';
<ide> var common = require('../common.js');
<ide> var fs = require('fs');
<add>const path = require('path');
<ide>
<ide> var bench = common.createBenchmark(main, {
<ide> search: ['@', 'SQ', '10x', '--l', 'Alice', 'Gryphon', 'Panther',
<ide> var bench = common.createBenchmark(main, {
<ide>
<ide> function main(conf) {
<ide> var iter = (conf.iter) * 100000;
<del> var aliceBuffer = fs.readFileSync(__dirname + '/../fixtures/alice.html');
<add> var aliceBuffer = fs.readFileSync(
<add> path.resolve(__dirname, '../fixtures/alice.html')
<add> );
<ide> var search = conf.search;
<ide> var encoding = conf.encoding;
<ide>
<ide><path>benchmark/compare.js
<ide> function run() {
<ide>
<ide> var out = '';
<ide> var child;
<del> if (Array.isArray(benchmarks) && benchmarks.length)
<del> child = spawn(node, ['benchmark/common.js'].concat(benchmarks), { env: env });
<del> else
<add> if (Array.isArray(benchmarks) && benchmarks.length) {
<add> child = spawn(
<add> node,
<add> ['benchmark/common.js'].concat(benchmarks),
<add> { env: env }
<add> );
<add> } else {
<ide> child = spawn('make', [runBench], { env: env });
<add> }
<ide> child.stdout.setEncoding('utf8');
<ide> child.stdout.on('data', function(c) {
<ide> out += c;
<ide> function compare() {
<ide> if (show === 'green' && !g || show === 'red' && !r)
<ide> return;
<ide>
<del> var r0 = util.format('%s%s: %d%s', g, nodes[0], n0.toPrecision(5), g ? reset : '');
<del> var r1 = util.format('%s%s: %d%s', r, nodes[1], n1.toPrecision(5), r ? reset : '');
<add> var r0 = util.format(
<add> '%s%s: %d%s',
<add> g,
<add> nodes[0],
<add> n0.toPrecision(5), g ? reset : ''
<add> );
<add> var r1 = util.format(
<add> '%s%s: %d%s',
<add> r,
<add> nodes[1],
<add> n1.toPrecision(5), r ? reset : ''
<add> );
<ide> pct = c + pct + '%' + reset;
<ide> var l = util.format('%s: %s %s', bench, r0, r1);
<ide> maxLen = Math.max(l.length + pct.length, maxLen);
<ide><path>benchmark/domain/domain-fn-args.js
<ide> function main(conf) {
<ide> for (var i = 0; i < n; i++) {
<ide> if (myArguments.length >= 2) {
<ide> args = Array.prototype.slice.call(myArguments, 1);
<del> ret = fn.apply(this, args);
<add> fn.apply(this, args);
<ide> } else {
<del> ret = fn.call(this);
<add> fn.call(this);
<ide> }
<ide> }
<ide> bdomain.exit();
<ide><path>benchmark/fs/bench-readdir.js
<ide>
<ide> const common = require('../common');
<ide> const fs = require('fs');
<add>const path = require('path');
<ide>
<ide> const bench = common.createBenchmark(main, {
<ide> n: [1e4],
<ide> function main(conf) {
<ide> (function r(cntr) {
<ide> if (--cntr <= 0)
<ide> return bench.end(n);
<del> fs.readdir(__dirname + '/../../lib/', function() {
<add> fs.readdir(path.resolve(__dirname, '../../lib/'), function() {
<ide> r(cntr);
<ide> });
<ide> }(n));
<ide><path>benchmark/fs/bench-readdirSync.js
<ide>
<ide> const common = require('../common');
<ide> const fs = require('fs');
<add>const path = require('path');
<ide>
<ide> const bench = common.createBenchmark(main, {
<ide> n: [1e4],
<ide> function main(conf) {
<ide>
<ide> bench.start();
<ide> for (var i = 0; i < n; i++) {
<del> fs.readdirSync(__dirname + '/../../lib/');
<add> fs.readdirSync(path.resolve(__dirname, '../../lib/'));
<ide> }
<ide> bench.end(n);
<ide> }
<ide><path>benchmark/http_simple_auto.js
<ide> server.listen(port, function() {
<ide> });
<ide>
<ide> function dump_mm_stats() {
<del> if (typeof gc != 'function') return;
<add> if (typeof global.gc != 'function') return;
<ide>
<ide> var before = process.memoryUsage();
<del> for (var i = 0; i < 10; ++i) gc();
<add> for (var i = 0; i < 10; ++i) global.gc();
<ide> var after = process.memoryUsage();
<ide> setTimeout(print_stats, 250); // give GC time to settle
<ide>
<ide><path>benchmark/http_simple_cluster.js
<ide> 'use strict';
<del>var cluster = require('cluster');
<del>var os = require('os');
<add>const cluster = require('cluster');
<add>const os = require('os');
<add>const path = require('path');
<ide>
<ide> if (cluster.isMaster) {
<ide> console.log('master running on pid %d', process.pid);
<ide> for (var i = 0, n = os.cpus().length; i < n; ++i) cluster.fork();
<ide> } else {
<del> require(__dirname + '/http_simple.js');
<add> require(path.join(__dirname, 'http_simple.js'));
<ide> }
<ide><path>benchmark/idle_clients.js
<ide> function connect() {
<ide>
<ide> s.on('close', function() {
<ide> if (gotConnected) connections--;
<del> lastClose = new Date();
<ide> });
<ide>
<ide> s.on('error', function() {
<ide><path>benchmark/misc/freelist.js
<ide> function main(conf) {
<ide>
<ide> bench.start();
<ide>
<del> for (i = 0; i < n; i++){
<add> for (i = 0; i < n; i++) {
<ide> // Return all the items to the pool
<ide> for (j = 0; j < poolSize; j++) {
<ide> list.free(used[j]);
<ide><path>benchmark/misc/v8-bench.js
<ide> global.load = function(filename) {
<ide> global.RegExp = $RegExp;
<ide> };
<ide>
<del>load('run.js');
<add>global.load('run.js');
<ide><path>benchmark/module/module-loader.js
<ide> function main(conf) {
<ide> var n = +conf.thousands * 1e3;
<ide> for (var i = 0; i <= n; i++) {
<ide> fs.mkdirSync(benchmarkDirectory + i);
<del> fs.writeFileSync(benchmarkDirectory + i + '/package.json', '{"main": "index.js"}');
<del> fs.writeFileSync(benchmarkDirectory + i + '/index.js', 'module.exports = "";');
<add> fs.writeFileSync(
<add> benchmarkDirectory + i + '/package.json',
<add> '{"main": "index.js"}'
<add> );
<add> fs.writeFileSync(
<add> benchmarkDirectory + i + '/index.js',
<add> 'module.exports = "";'
<add> );
<ide> }
<ide>
<ide> measure(n);
<ide><path>benchmark/tls/throughput.js
<ide> function main(conf) {
<ide>
<ide> server = tls.createServer(options, onConnection);
<ide> setTimeout(done, dur * 1000);
<add> var conn;
<ide> server.listen(common.PORT, function() {
<ide> var opt = { port: common.PORT, rejectUnauthorized: false };
<del> var conn = tls.connect(opt, function() {
<add> conn = tls.connect(opt, function() {
<ide> bench.start();
<ide> conn.on('drain', write);
<ide> write();
<ide> function main(conf) {
<ide> function done() {
<ide> var mbits = (received * 8) / (1024 * 1024);
<ide> bench.end(mbits);
<del> conn.destroy();
<add> if (conn)
<add> conn.destroy();
<ide> server.close();
<ide> }
<ide> }
| 16
|
Javascript
|
Javascript
|
remove duplicate bindactioncreators test
|
2f196ae67f05d738f6506e950f917dd512412ab5
|
<ide><path>test/bindActionCreators.spec.js
<del>import expect from 'expect';
<del>import { bindActionCreators, createRedux } from '../src';
<del>import * as helpers from './_helpers';
<del>
<del>const { todoActions, todoStore } = helpers;
<del>
<del>describe('Utils', () => {
<del> describe('bindActionCreators', () => {
<del>
<del> let redux;
<del>
<del> beforeEach(() => {
<del> redux = createRedux({ todoStore });
<del> });
<del>
<del> it('should bind given actions to the dispatcher', done => {
<del> let expectedCallCount = 2;
<del> // just for monitoring the dispatched actions
<del> redux.subscribe(() => {
<del> expectedCallCount--;
<del> if (expectedCallCount === 0) {
<del> const state = redux.getState();
<del> expect(state.todoStore).toEqual([
<del> { id: 2, text: 'World' },
<del> { id: 1, text: 'Hello' }
<del> ]);
<del> done();
<del> }
<del> });
<del> const actions = bindActionCreators(todoActions, redux.dispatch);
<del> expect(Object.keys(actions)).toEqual(Object.keys(todoActions));
<del>
<del> actions.addTodo('Hello');
<del> actions.addTodoAsync('World');
<del> });
<del> });
<del>});
<ide><path>test/utils/bindActionCreators.spec.js
<ide> import expect from 'expect';
<ide> import { bindActionCreators, createRedux } from '../../src';
<add>import * as helpers from '../_helpers';
<ide>
<del>const fakeState = { foo: 'bar' };
<del>
<del>function fakeStore(state = 0, action) {
<del> if (action.type) {
<del> return fakeState;
<del> }
<del> return state;
<del>}
<del>
<del>const fakeActionCreators = {
<del> foo() {
<del> return { type: 'FOO' };
<del> },
<del> fooAsync() {
<del> return dispatch => {
<del> setImmediate(() => {
<del> dispatch({ type: 'FOO_ASYNC' });
<del> });
<del> };
<del> }
<del>};
<add>const { todoActions, todoStore } = helpers;
<ide>
<ide> describe('Utils', () => {
<ide> describe('bindActionCreators', () => {
<ide>
<ide> let redux;
<ide>
<ide> beforeEach(() => {
<del> redux = createRedux({ fakeStore });
<add> redux = createRedux({ todoStore });
<ide> });
<ide>
<ide> it('should bind given actions to the dispatcher', done => {
<ide> let expectedCallCount = 2;
<del> // Let us subscribe to monitor the dispatched actions
<add> // just for monitoring the dispatched actions
<ide> redux.subscribe(() => {
<ide> expectedCallCount--;
<del> const state = redux.getState();
<del>
<del> expect(state.fakeStore).toEqual(fakeState);
<del> if (expectedCallCount === 0) { done(); }
<add> if (expectedCallCount === 0) {
<add> const state = redux.getState();
<add> expect(state.todoStore).toEqual([
<add> { id: 2, text: 'World' },
<add> { id: 1, text: 'Hello' }
<add> ]);
<add> done();
<add> }
<ide> });
<del> const actions = bindActionCreators(fakeActionCreators, redux.dispatch);
<del> expect(Object.keys(actions))
<del> .toEqual(Object.keys(fakeActionCreators));
<add> const actions = bindActionCreators(todoActions, redux.dispatch);
<add> expect(Object.keys(actions)).toEqual(Object.keys(todoActions));
<ide>
<del> actions.foo();
<del> actions.fooAsync();
<add> actions.addTodo('Hello');
<add> actions.addTodoAsync('World');
<ide> });
<ide> });
<ide> });
| 2
|
Python
|
Python
|
fix failing doctests
|
b570fbbc488b4b9480fdf0f5df9fe7369af65d7c
|
<ide><path>numpy/core/fromnumeric.py
<ide> def prod(a, axis=None, dtype=None, out=None):
<ide>
<ide> >>> x = np.array([1, 2, 3], dtype=np.uint8)
<ide> >>> np.prod(x).dtype == np.uint
<add> True
<ide>
<ide> If `x` is of a signed integer type, then the output type
<ide> is the default platform integer:
<ide>
<ide> >>> x = np.array([1, 2, 3], dtype=np.int8)
<ide> >>> np.prod(x).dtype == np.int
<add> True
<ide>
<ide> """
<ide> try:
<ide><path>numpy/core/numeric.py
<ide> def tensordot(a, b, axes=2):
<ide> >>> b = np.arange(24.).reshape(4,3,2)
<ide> >>> c = np.tensordot(a,b, axes=([1,0],[0,1]))
<ide> >>> c.shape
<del> (5,2)
<add> (5, 2)
<ide> >>> c
<ide> array([[ 4400., 4730.],
<ide> [ 4532., 4874.],
<ide> def tensordot(a, b, axes=2):
<ide> [ 4928., 5306.]])
<ide>
<ide> >>> # A slower but equivalent way of computing the same...
<del> >>> c = zeros((5,2))
<add> >>> c = np.zeros((5,2))
<ide> >>> for i in range(5):
<ide> ... for j in range(2):
<ide> ... for k in range(3):
| 2
|
Python
|
Python
|
implement deutsch-jozsa algorithm in qiskit
|
beb2c35dd864f093c49b43934064c5da7155859c
|
<ide><path>quantum/deutsch_jozsa.py
<add>#!/usr/bin/env python3
<add>"""
<add>Deutsch-Josza Algorithm is one of the first examples of a quantum
<add>algorithm that is exponentially faster than any possible deterministic
<add>classical algorithm
<add>
<add>Premise:
<add>We are given a hidden Boolean function f,
<add>which takes as input a string of bits, and returns either 0 or 1:
<add>
<add>f({x0,x1,x2,...}) -> 0 or 1, where xn is 0 or 1
<add>
<add>The property of the given Boolean function is that it is guaranteed to
<add>either be balanced or constant. A constant function returns all 0's
<add>or all 1's for any input, while a balanced function returns 0's for
<add>exactly half of all inputs and 1's for the other half. Our task is to
<add>determine whether the given function is balanced or constant.
<add>
<add>References:
<add>- https://en.wikipedia.org/wiki/Deutsch-Jozsa_algorithm
<add>- https://qiskit.org/textbook/ch-algorithms/deutsch-jozsa.html
<add>"""
<add>
<add>import numpy as np
<add>import qiskit as q
<add>
<add>
<add>def dj_oracle(case: str, num_qubits: int) -> q.QuantumCircuit:
<add> """
<add> Returns a Quantum Circuit for the Oracle function.
<add> The circuit returned can represent balanced or constant function,
<add> according to the arguments passed
<add> """
<add> # This circuit has num_qubits+1 qubits: the size of the input,
<add> # plus one output qubit
<add> oracle_qc = q.QuantumCircuit(num_qubits + 1)
<add>
<add> # First, let's deal with the case in which oracle is balanced
<add> if case == "balanced":
<add> # First generate a random number that tells us which CNOTs to
<add> # wrap in X-gates:
<add> b = np.random.randint(1, 2 ** num_qubits)
<add> # Next, format 'b' as a binary string of length 'n', padded with zeros:
<add> b_str = format(b, f"0{num_qubits}b")
<add> # Next, we place the first X-gates. Each digit in our binary string
<add> # correspopnds to a qubit, if the digit is 0, we do nothing, if it's 1
<add> # we apply an X-gate to that qubit:
<add> for index, bit in enumerate(b_str):
<add> if bit == "1":
<add> oracle_qc.x(index)
<add> # Do the controlled-NOT gates for each qubit, using the output qubit
<add> # as the target:
<add> for index in range(num_qubits):
<add> oracle_qc.cx(index, num_qubits)
<add> # Next, place the final X-gates
<add> for index, bit in enumerate(b_str):
<add> if bit == "1":
<add> oracle_qc.x(index)
<add>
<add> # Case in which oracle is constant
<add> if case == "constant":
<add> # First decide what the fixed output of the oracle will be
<add> # (either always 0 or always 1)
<add> output = np.random.randint(2)
<add> if output == 1:
<add> oracle_qc.x(num_qubits)
<add>
<add> oracle_gate = oracle_qc.to_gate()
<add> oracle_gate.name = "Oracle" # To show when we display the circuit
<add> return oracle_gate
<add>
<add>
<add>def dj_algorithm(oracle: q.QuantumCircuit, num_qubits: int) -> q.QuantumCircuit:
<add> """
<add> Returns the complete Deustch-Jozsa Quantum Circuit,
<add> adding Input & Output registers and Hadamard & Measurement Gates,
<add> to the Oracle Circuit passed in arguments
<add> """
<add> dj_circuit = q.QuantumCircuit(num_qubits + 1, num_qubits)
<add> # Set up the output qubit:
<add> dj_circuit.x(num_qubits)
<add> dj_circuit.h(num_qubits)
<add> # And set up the input register:
<add> for qubit in range(num_qubits):
<add> dj_circuit.h(qubit)
<add> # Let's append the oracle gate to our circuit:
<add> dj_circuit.append(oracle, range(num_qubits + 1))
<add> # Finally, perform the H-gates again and measure:
<add> for qubit in range(num_qubits):
<add> dj_circuit.h(qubit)
<add>
<add> for i in range(num_qubits):
<add> dj_circuit.measure(i, i)
<add>
<add> return dj_circuit
<add>
<add>
<add>def deutsch_jozsa(case: str, num_qubits: int) -> q.result.counts.Counts:
<add> """
<add> Main function that builds the circuit using other helper functions,
<add> runs the experiment 1000 times & returns the resultant qubit counts
<add> >>> deutsch_jozsa("constant", 3)
<add> {'000': 1000}
<add> >>> deutsch_jozsa("balanced", 3)
<add> {'111': 1000}
<add> """
<add> # Use Aer's qasm_simulator
<add> simulator = q.Aer.get_backend("qasm_simulator")
<add>
<add> oracle_gate = dj_oracle(case, num_qubits)
<add> dj_circuit = dj_algorithm(oracle_gate, num_qubits)
<add>
<add> # Execute the circuit on the qasm simulator
<add> job = q.execute(dj_circuit, simulator, shots=1000)
<add>
<add> # Return the histogram data of the results of the experiment.
<add> return job.result().get_counts(dj_circuit)
<add>
<add>
<add>if __name__ == "__main__":
<add> print(f"Deutsch Jozsa - Constant Oracle: {deutsch_jozsa('constant', 3)}")
<add> print(f"Deutsch Jozsa - Balanced Oracle: {deutsch_jozsa('balanced', 3)}")
| 1
|
Python
|
Python
|
fix t5 beam search using parallelize
|
bd3b599c12cfcf5ef517c5ffe526afbdbaa92539
|
<ide><path>src/transformers/models/t5/modeling_t5.py
<ide> def _reorder_cache(self, past, beam_idx):
<ide> for layer_past_state in layer_past_states:
<ide> # need to set correct `past` for each of the four key / value states
<ide> reordered_layer_past_states = reordered_layer_past_states + (
<del> layer_past_state.index_select(0, beam_idx),
<add> layer_past_state.index_select(0, beam_idx.to(layer_past_state.device)),
<ide> )
<ide>
<ide> assert reordered_layer_past_states[0].shape == layer_past_states[0].shape
| 1
|
Text
|
Text
|
add content into activity section
|
eb150cedbc74e6c6585545b32a1d398796803caf
|
<ide><path>guide/english/android-development/core-components/index.md
<ide> An activity facilitates the following key interactions between system and app:
<ide> - Keeping track of what the user currently cares about (what is on screen) to ensure that the system keeps running the process that is hosting the activity.
<ide> - Knowing that previously used processes contain things the user may return to (stopped activities), and thus more highly prioritize keeping those processes around.
<ide> - Helping the app handle having its process killed so the user can return to activities with their previous state restored.
<del>
<add>- Providing a way for apps to implement user flows between each other, and for the system to coordinate these flows. (The most classic example here being share.)
<ide>
<ide> #### [Activity Lifecycle](https://developer.android.com/guide/components/activities/activity-lifecycle)
<ide> 
| 1
|
Javascript
|
Javascript
|
add flow libdefs for hermesinternaltype
|
ff4b33672a6a27d2ed68ae6602e9d29d9b7c3eb1
|
<ide><path>Libraries/Core/polyfillPromise.js
<ide> if (global?.HermesInternal?.hasPromise?.()) {
<ide> if (typeof HermesPromise !== 'function') {
<ide> console.error('HermesPromise does not exist');
<ide> }
<del> global.HermesInternal.enablePromiseRejectionTracker(
<add> global.HermesInternal?.enablePromiseRejectionTracker?.(
<ide> require('../promiseRejectionTrackingOptions').default,
<ide> );
<ide> }
<ide><path>Libraries/Core/setUpTimers.js
<ide> if (__DEV__) {
<ide>
<ide> // Currently, Hermes `Promise` is implemented via Internal Bytecode.
<ide> const hasHermesPromiseQueuedToJSVM =
<del> global?.HermesInternal?.hasPromise?.() &&
<del> global?.HermesInternal?.useEngineQueue?.();
<add> global.HermesInternal?.hasPromise?.() === true &&
<add> global.HermesInternal?.useEngineQueue?.() === true;
<ide>
<ide> const hasNativePromise = isNativeFunction(Promise);
<ide> const hasPromiseQueuedToJSVM = hasNativePromise || hasHermesPromiseQueuedToJSVM;
<ide> if (hasPromiseQueuedToJSVM) {
<ide> */
<ide> if (hasHermesPromiseQueuedToJSVM) {
<ide> // Fast path for Hermes.
<del> polyfillGlobal('queueMicrotask', () => global.HermesInternal.enqueueJob);
<add> polyfillGlobal('queueMicrotask', () => global.HermesInternal?.enqueueJob);
<ide> } else {
<ide> // Polyfill it with promise (regardless it's polyfiled or native) otherwise.
<ide> polyfillGlobal(
<ide><path>flow/HermesInternalType.js
<add>/**
<add> * Copyright (c) Facebook, Inc. and its affiliates.
<add> *
<add> * This source code is licensed under the MIT license found in the
<add> * LICENSE file in the root directory of this source tree.
<add> *
<add> * @flow strict
<add> * @format
<add> */
<add>
<add>// Declarations for functionality exposed by the Hermes VM.
<add>//
<add>// For backwards-compatibility, code that uses such functionality must also
<add>// check explicitly at run-time whether the object(s) and method(s) exist, and
<add>// fail safely if not.
<add>
<add>/**
<add> * HermesInternalType is an object containing functions used to interact with
<add> * the VM in a way that is not standardized by the JS spec.
<add> * There are limited guarantees about these functions, and they should not be
<add> * widely used. Consult with the Hermes team before using any of these.
<add> * There may be other visible properties on this object; however, those are
<add> * only exposed for testing purposes: do not use them.
<add> */
<add>declare type $HermesInternalType = {
<add> // All members are optional because they may not exist when OTA'd to older
<add> // VMs.
<add>
<add> +getNumGCs?: () => number,
<add> +getGCTime?: () => number,
<add> +getNativeCallTime?: () => number,
<add> +getNativeCallCount?: () => number,
<add> +getGCCPUTime?: () => number,
<add>
<add> /**
<add> * Hermes can embed an "epilogue" to the bytecode file with arbitrary bytes.
<add> * At most one epilogue will exist per bytecode module (which can be
<add> * different than a JS module).
<add> * Calling this function will return all such epilogues and convert the
<add> * bytes to numbers in the range of 0-255.
<add> */
<add> +getEpilogues?: () => Array<Array<number>>,
<add>
<add> /**
<add> * Query the VM for various statistics about performance.
<add> * There are no guarantees about what keys exist in it, but they can be
<add> * printed for informational purposes.
<add> * @return An object that maps strings to various types of performance
<add> * statistics.
<add> */
<add> +getInstrumentedStats?: () => {[string]: number | string, ...},
<add>
<add> /**
<add> * Query the VM for any sort of runtime properties that it wants to report.
<add> * There are no guarantees about what keys exist in it, but they can be
<add> * printed for informational purposes.
<add> * @return An object that maps strings to various types of runtime properties.
<add> */
<add> +getRuntimeProperties?: () => {
<add> 'OSS Release Version': string,
<add> 'Build': string,
<add> [string]: mixed,
<add> },
<add>
<add> /**
<add> * Tell Hermes that at this point the surface has transitioned from TTI to
<add> * post-TTI. The VM can change some of its internal behavior to optimize for
<add> * post-TTI scenarios.
<add> * This can be called several times but will have no effect after the first
<add> * call.
<add> */
<add> +ttiReached?: () => void,
<add>
<add> /**
<add> * Tell Hermes that at this point the surface has transitioned from TTRC to
<add> * post-TTRC. The VM can change some of its internal behavior to optimize for
<add> * post-TTRC scenarios.
<add> * This can be called several times but will have no effect after the first
<add> * call.
<add> */
<add> +ttrcReached?: () => void,
<add>
<add> /**
<add> * Query the VM to see whether or not it enabled Promise.
<add> */
<add> +hasPromise?: () => boolean,
<add>
<add> /**
<add> * Enable promise rejection tracking with the given options.
<add> * The API mirrored the `promise` npm package, therefore it's typed same as
<add> * the `enable` function of module `promise/setimmediate/rejection-tracking`
<add> * declared in ./flow-typed/npm/promise_v8.x.x.js.
<add> */
<add> +enablePromiseRejectionTracker?: (
<add> options: ?{
<add> whitelist?: ?Array<mixed>,
<add> allRejections?: ?boolean,
<add> onUnhandled?: ?(number, mixed) => void,
<add> onHandled?: ?(number, mixed) => void,
<add> },
<add> ) => void,
<add>
<add> /**
<add> * Query the VM to see whether or not it use the engine Job queue.
<add> */
<add> +useEngineQueue?: () => boolean,
<add>
<add> /**
<add> * Enqueue a JavaScript callback function as a Job into the engine Job queue.
<add> */
<add> +enqueueJob?: <TArguments: Array<mixed>>(
<add> jobCallback: (...args: TArguments) => mixed,
<add> ) => void,
<add>};
<ide><path>flow/global.js
<ide> * writeability (`+`) when defining types.
<ide> */
<ide> declare var global: {
<add> +HermesInternal: ?$HermesInternalType,
<add>
<ide> // Undeclared properties are implicitly `any`.
<ide> [string | symbol]: any,
<ide> };
| 4
|
Javascript
|
Javascript
|
remove unnecessary event handlers
|
973b8e0b151b2a54ef85001030374db6fce3e3cf
|
<ide><path>lib/internal/http2/compat.js
<ide> function onStreamDrain() {
<ide> response.emit('drain');
<ide> }
<ide>
<del>// TODO Http2Stream does not emit 'close'
<del>function onStreamClosedRequest() {
<del> const request = this[kRequest];
<del> if (request !== undefined)
<del> request.push(null);
<del>}
<del>
<del>// TODO Http2Stream does not emit 'close'
<del>function onStreamClosedResponse() {
<del> const response = this[kResponse];
<del> if (response !== undefined)
<del> response.emit('finish');
<del>}
<del>
<ide> function onStreamAbortedRequest() {
<ide> const request = this[kRequest];
<ide> if (request !== undefined && request[kState].closed === false) {
<ide> class Http2ServerRequest extends Readable {
<ide> stream.on('trailers', onStreamTrailers);
<ide> stream.on('end', onStreamEnd);
<ide> stream.on('error', onStreamError);
<del> stream.on('close', onStreamClosedRequest);
<ide> stream.on('aborted', onStreamAbortedRequest);
<ide> const onfinish = this[kFinish].bind(this);
<ide> stream.on('close', onfinish);
<ide> class Http2ServerResponse extends Stream {
<ide> stream[kResponse] = this;
<ide> this.writable = true;
<ide> stream.on('drain', onStreamDrain);
<del> stream.on('close', onStreamClosedResponse);
<ide> stream.on('aborted', onStreamAbortedResponse);
<ide> const onfinish = this[kFinish].bind(this);
<ide> stream.on('close', onfinish);
| 1
|
Python
|
Python
|
use normalize_axis_index in cross
|
09d4d35bb979dd0d5d0781946f59f362765eca66
|
<ide><path>numpy/core/numeric.py
<ide> def cross(a, b, axisa=-1, axisb=-1, axisc=-1, axis=None):
<ide> a = asarray(a)
<ide> b = asarray(b)
<ide> # Check axisa and axisb are within bounds
<del> axis_msg = "'axis{0}' out of bounds"
<del> if axisa < -a.ndim or axisa >= a.ndim:
<del> raise ValueError(axis_msg.format('a'))
<del> if axisb < -b.ndim or axisb >= b.ndim:
<del> raise ValueError(axis_msg.format('b'))
<add> axisa = normalize_axis_index(axisa, a.ndim, msg_prefix='axisa')
<add> axisb = normalize_axis_index(axisb, b.ndim, msg_prefix='axisb')
<add>
<ide> # Move working axis to the end of the shape
<ide> a = rollaxis(a, axisa, a.ndim)
<ide> b = rollaxis(b, axisb, b.ndim)
<ide> def cross(a, b, axisa=-1, axisb=-1, axisc=-1, axis=None):
<ide> if a.shape[-1] == 3 or b.shape[-1] == 3:
<ide> shape += (3,)
<ide> # Check axisc is within bounds
<del> if axisc < -len(shape) or axisc >= len(shape):
<del> raise ValueError(axis_msg.format('c'))
<add> axisc = normalize_axis_index(axisc, len(shape), msg_prefix='axisc')
<ide> dtype = promote_types(a.dtype, b.dtype)
<ide> cp = empty(shape, dtype)
<ide>
<ide><path>numpy/core/tests/test_numeric.py
<ide> def test_broadcasting_shapes(self):
<ide> u = np.ones((10, 3, 5))
<ide> v = np.ones((2, 5))
<ide> assert_equal(np.cross(u, v, axisa=1, axisb=0).shape, (10, 5, 3))
<del> assert_raises(ValueError, np.cross, u, v, axisa=1, axisb=2)
<del> assert_raises(ValueError, np.cross, u, v, axisa=3, axisb=0)
<add> assert_raises(np.AxisError, np.cross, u, v, axisa=1, axisb=2)
<add> assert_raises(np.AxisError, np.cross, u, v, axisa=3, axisb=0)
<ide> u = np.ones((10, 3, 5, 7))
<ide> v = np.ones((5, 7, 2))
<ide> assert_equal(np.cross(u, v, axisa=1, axisc=2).shape, (10, 5, 3, 7))
<del> assert_raises(ValueError, np.cross, u, v, axisa=-5, axisb=2)
<del> assert_raises(ValueError, np.cross, u, v, axisa=1, axisb=-4)
<add> assert_raises(np.AxisError, np.cross, u, v, axisa=-5, axisb=2)
<add> assert_raises(np.AxisError, np.cross, u, v, axisa=1, axisb=-4)
<ide> # gh-5885
<ide> u = np.ones((3, 4, 2))
<ide> for axisc in range(-2, 2):
| 2
|
PHP
|
PHP
|
replace function is_integer (alias) by is_int
|
9b6b258fede89ec09275234270fe6ee0e2d61af1
|
<ide><path>cake/libs/controller/components/cookie.php
<ide> function __expire($expires = null) {
<ide> return $this->__expires;
<ide> }
<ide> $this->__reset = $this->__expires;
<del> if (is_integer($expires) || is_numeric($expires)) {
<add> if (is_int($expires) || is_numeric($expires)) {
<ide> return $this->__expires = $now + intval($expires);
<ide> }
<ide> return $this->__expires = strtotime($expires, $now);
<ide><path>cake/libs/view/helpers/time.php
<ide> function fromString($dateString, $userOffset = null) {
<ide> if (empty($dateString)) {
<ide> return false;
<ide> }
<del> if (is_integer($dateString) || is_numeric($dateString)) {
<add> if (is_int($dateString) || is_numeric($dateString)) {
<ide> $date = intval($dateString);
<ide> } else {
<ide> $date = strtotime($dateString);
| 2
|
Ruby
|
Ruby
|
add annotations to image index
|
c193cb984847f4e5583d44396016cc66075e34df
|
<ide><path>Library/Homebrew/dev-cmd/pr-upload.rb
<ide> def pr_upload
<ide> Upload bottles described by these JSON files to #{service}:
<ide> #{json_files.join("\n ")}
<ide> EOS
<del> return
<add> return unless github_packages?(bottles_hash)
<ide> end
<ide>
<ide> check_bottled_formulae(bottles_hash)
<ide> def pr_upload
<ide> elsif github_packages?(bottles_hash)
<ide> github_org = args.github_org || "homebrew"
<ide> github_packages = GitHubPackages.new(org: github_org)
<del> github_packages.upload_bottles(bottles_hash)
<add> github_packages.upload_bottles(bottles_hash, dry_run: args.dry_run?)
<ide> else
<ide> odie "Service specified by root_url is not recognized"
<ide> end
<ide><path>Library/Homebrew/github_packages.rb
<ide> class GitHubPackages
<ide>
<ide> URL_DOMAIN = "ghcr.io"
<ide> URL_PREFIX = "https://#{URL_DOMAIN}/v2/"
<del> URL_REGEX = %r{#{Regexp.escape(URL_PREFIX)}([\w-]+)/([\w-]+)}.freeze
<add> DOCKER_PREFIX = "docker://#{URL_DOMAIN}/"
<add> URL_REGEX = %r{(?:#{Regexp.escape(URL_PREFIX)}|#{Regexp.escape(DOCKER_PREFIX)})([\w-]+)/([\w-]+)}.freeze
<ide>
<ide> sig { returns(String) }
<ide> def inspect
<ide> def initialize(org: "homebrew")
<ide> ENV["HOMEBREW_FORCE_HOMEBREW_ON_LINUX"] = "1" if @github_org == "homebrew" && !OS.mac?
<ide> end
<ide>
<del> sig { params(bottles_hash: T::Hash[String, T.untyped]).void }
<del> def upload_bottles(bottles_hash)
<add> sig { params(bottles_hash: T::Hash[String, T.untyped], dry_run: T::Boolean).void }
<add> def upload_bottles(bottles_hash, dry_run:)
<ide> user = Homebrew::EnvConfig.github_packages_user
<ide> token = Homebrew::EnvConfig.github_packages_token
<ide>
<ide> def upload_bottles(bottles_hash)
<ide> load_schemas!
<ide>
<ide> bottles_hash.each_value do |bottle_hash|
<del> upload_bottle(user, token, skopeo, bottle_hash)
<add> upload_bottle(user, token, skopeo, bottle_hash, dry_run: dry_run)
<ide> end
<ide> end
<ide>
<ide> def validate_schema!(schema_uri, json)
<ide> exit 1
<ide> end
<ide>
<del> def upload_bottle(user, token, skopeo, bottle_hash)
<add> def upload_bottle(user, token, skopeo, bottle_hash, dry_run:)
<ide> formula_path = HOMEBREW_REPOSITORY/bottle_hash["formula"]["path"]
<ide> formula = Formulary.factory(formula_path)
<ide> formula_name = formula.name
<ide> def upload_bottle(user, token, skopeo, bottle_hash)
<ide> ".#{rebuild}"
<ide> end
<ide> version_rebuild = "#{version}#{rebuild}"
<del> root = Pathname("#{formula_name}-#{version_rebuild}")
<add> root = Pathname("#{formula_name}--#{version_rebuild}")
<ide> FileUtils.rm_rf root
<ide>
<ide> write_image_layout(root)
<ide> def upload_bottle(user, token, skopeo, bottle_hash)
<ide> git_revision = formula.tap.git_head
<ide> git_path = formula_path.to_s.delete_prefix("#{formula.tap.path}/")
<ide> source = "https://github.com/#{org}/#{repo}/blob/#{git_revision}/#{git_path}"
<add> documentation = if formula.tap.core_tap?
<add> "https://formulae.brew.sh/formula/#{formula_name}"
<add> else
<add> formula.tap.remote
<add> end
<ide>
<ide> formula_annotations_hash = {
<del> "org.opencontainers.image.created" => Time.now.strftime("%F"),
<del> "org.opencontainers.image.description" => formula.desc,
<del> "org.opencontainers.image.license" => formula.license,
<del> "org.opencontainers.image.revision" => git_revision,
<del> "org.opencontainers.image.source" => source,
<del> "org.opencontainers.image.url" => formula.homepage,
<del> "org.opencontainers.image.vendor" => org,
<del> "org.opencontainers.image.version" => version,
<add> "org.opencontainers.image.created" => Time.now.strftime("%F"),
<add> "org.opencontainers.image.description" => formula.desc,
<add> "org.opencontainers.image.documentation" => documentation,
<add> "org.opencontainers.image.license" => formula.license,
<add> "org.opencontainers.image.ref.name" => version_rebuild,
<add> "org.opencontainers.image.revision" => git_revision,
<add> "org.opencontainers.image.source" => source,
<add> "org.opencontainers.image.title" => formula.full_name,
<add> "org.opencontainers.image.url" => formula.homepage,
<add> "org.opencontainers.image.vendor" => org,
<add> "org.opencontainers.image.version" => version,
<ide> }
<ide> formula_annotations_hash.each do |key, value|
<ide> formula_annotations_hash.delete(key) if value.blank?
<ide> end
<ide>
<add> created_times = []
<ide> manifests = bottle_hash["bottle"]["tags"].map do |bottle_tag, tag_hash|
<ide> local_file = tag_hash["local_filename"]
<ide> odebug "Uploading #{local_file}"
<ide> def upload_bottle(user, token, skopeo, bottle_hash)
<ide>
<ide> created_time = tab.source_modified_time
<ide> created_time ||= Time.now
<add> created_times << created_time
<ide> documentation = "https://formulae.brew.sh/#{formulae_dir}/#{formula_name}" if formula.tap.core_tap?
<ide> tag = "#{version}.#{bottle_tag}#{rebuild}"
<ide> title = "#{formula.full_name} #{tag}"
<ide> def upload_bottle(user, token, skopeo, bottle_hash)
<ide> package_name = "#{repo.delete_prefix("homebrew-")}/#{formula_name}"
<ide> image_tag = "#{org_prefix}/#{package_name}:#{version_rebuild}"
<ide> puts
<del> system_command!(skopeo, verbose: true, print_stdout: true, args: [
<del> "copy", "--all", "--dest-creds=#{user}:#{token}",
<del> "oci:#{root}", "docker://#{image_tag}"
<del> ])
<del> ohai "Uploaded to https://github.com/orgs/Homebrew/packages/container/package/#{package_name}"
<add> args = ["copy", "--all", "oci:#{root}", image_tag.to_s]
<add> if dry_run
<add> puts "#{skopeo} #{args.join(" ")} --dest-creds=#{user}:$HOMEBREW_GITHUB_PACKAGES_TOKEN"
<add> else
<add> args << "--dest-creds=#{user}:#{token}"
<add> system_command!(skopeo, verbose: true, print_stdout: true, args: args)
<add> ohai "Uploaded to https://github.com/orgs/Homebrew/packages/container/package/#{package_name}"
<add> end
<ide> end
<ide>
<ide> def write_image_layout(root)
| 2
|
Text
|
Text
|
add the text "intermediary"
|
43e6147f7175d2307095699f3679edad0c528eda
|
<ide><path>guide/english/blockchain/cryptocurrency/index.md
<ide> title: Cryptocurrency
<ide>
<ide> Cryptocurrency is a subset of digital currency, which acts as a medium of exchange amongst two parties. It is known as crypto-currency because of the utilization of cryptography to strictly assure the security of transactions taking place on the network. There are various cryptocurrencies including Litecoin, Dash, Ethereum, Ripple, and the currently most popular currency - Bitcoin.
<ide>
<del>The original purpose of cryptocurrency was to remove the intervention of middlemen or third-parties in order to make transactions swift and secure, with anonymity maintained for all users operating through their respective systems (called 'nodes') on a network.
<add>The original purpose of cryptocurrency was to remove the intervention of an intermediary (middleman) or third-parties in order to make transactions swift and secure, with anonymity maintained for all users operating through their respective systems (called 'nodes') on a network.
<ide>
<ide> There are hundreds to thousands of cryptocurrencies as of 2018 with new ones coming out on a regularly. Most will never take off. This form of peer-to-peer value exchange is in its infancy still; Bitcoin and Cryptocurrencies have yet to become widely used. Venezuela has made history by becoming the first nation to introduce a national cryptocurrency in a time of national emergency with a goal of circumventing US sanctions and access to international financing.
<ide>
| 1
|
Javascript
|
Javascript
|
hold listener during text composition
|
a4e6d962d78b26f5112d48c4f88c1e6234d0cae7
|
<ide><path>src/ng/directive/input.js
<ide> var inputType = {
<ide>
<ide>
<ide> function textInputType(scope, element, attr, ctrl, $sniffer, $browser) {
<add> // In composition mode, users are still inputing intermediate text buffer,
<add> // hold the listener until composition is done.
<add> // More about composition events: https://developer.mozilla.org/en-US/docs/Web/API/CompositionEvent
<add> var composing = false;
<add>
<add> element.on('compositionstart', function() {
<add> composing = true;
<add> });
<add>
<add> element.on('compositionend', function() {
<add> composing = false;
<add> });
<ide>
<ide> var listener = function() {
<add> if (composing) return;
<ide> var value = element.val();
<ide>
<ide> // By default we will trim the value
<ide><path>test/ng/directive/inputSpec.js
<ide> describe('input', function() {
<ide> expect(scope.name).toEqual('adam');
<ide> });
<ide>
<add> it('should not update the model between "compositionstart" and "compositionend"', function() {
<add> compileInput('<input type="text" ng-model="name" name="alias"" />');
<add> changeInputValueTo('a');
<add> expect(scope.name).toEqual('a');
<add> if (!(msie < 9)) {
<add> browserTrigger(inputElm, 'compositionstart');
<add> changeInputValueTo('adam');
<add> expect(scope.name).toEqual('a');
<add> browserTrigger(inputElm, 'compositionend');
<add> }
<add> changeInputValueTo('adam');
<add> expect(scope.name).toEqual('adam');
<add> });
<add>
<ide> describe('"paste" and "cut" events', function() {
<ide> beforeEach(function() {
<ide> // Force browser to report a lack of an 'input' event
| 2
|
Javascript
|
Javascript
|
handle classname properly on svg nodes
|
37f61c479e3d79eebff6049b0e980ff6d82b4ec2
|
<ide><path>src/browser/ui/dom/HTMLDOMPropertyConfig.js
<ide> "use strict";
<ide>
<ide> var DOMProperty = require('DOMProperty');
<add>var ExecutionEnvironment = require('ExecutionEnvironment');
<ide>
<ide> var MUST_USE_ATTRIBUTE = DOMProperty.injection.MUST_USE_ATTRIBUTE;
<ide> var MUST_USE_PROPERTY = DOMProperty.injection.MUST_USE_PROPERTY;
<ide> var HAS_POSITIVE_NUMERIC_VALUE =
<ide> var HAS_OVERLOADED_BOOLEAN_VALUE =
<ide> DOMProperty.injection.HAS_OVERLOADED_BOOLEAN_VALUE;
<ide>
<add>var hasSVG;
<add>if (ExecutionEnvironment.canUseDOM) {
<add> var implementation = document.implementation;
<add> hasSVG = (
<add> implementation &&
<add> implementation.hasFeature &&
<add> implementation.hasFeature(
<add> 'http://www.w3.org/TR/SVG11/feature#BasicStructure',
<add> '1.1'
<add> )
<add> );
<add>}
<add>
<add>
<ide> var HTMLDOMPropertyConfig = {
<ide> isCustomAttribute: RegExp.prototype.test.bind(
<ide> /^(data|aria)-[a-z_][a-z\d_.\-]*$/
<ide> var HTMLDOMPropertyConfig = {
<ide> cellSpacing: null,
<ide> charSet: MUST_USE_ATTRIBUTE,
<ide> checked: MUST_USE_PROPERTY | HAS_BOOLEAN_VALUE,
<del> className: MUST_USE_PROPERTY,
<add> // To set className on SVG elements, it's necessary to use .setAttribute;
<add> // this works on HTML elements too in all browsers except IE8. Conveniently,
<add> // IE8 doesn't support SVG and so we can simply use the attribute in
<add> // browsers that support SVG and the property in browsers that don't,
<add> // regardless of whether the element is HTML or SVG.
<add> className: hasSVG ? MUST_USE_ATTRIBUTE : MUST_USE_PROPERTY,
<ide> cols: MUST_USE_ATTRIBUTE | HAS_POSITIVE_NUMERIC_VALUE,
<ide> colSpan: null,
<ide> content: null,
| 1
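Aside on the preceding record: the `hasSVG` detection can be sketched as a small standalone helper. This is only an illustration of the technique; the `setClass` name is hypothetical and not part of the React commit.

```javascript
// Feature-detect SVG support and pick between setAttribute (needed for SVG
// nodes, fine everywhere except IE8) and the className property (IE8 path,
// where SVG is not supported anyway).
function setClass(node, value) {
  var hasSVG =
    typeof document !== 'undefined' &&
    document.implementation &&
    document.implementation.hasFeature &&
    document.implementation.hasFeature(
      'http://www.w3.org/TR/SVG11/feature#BasicStructure',
      '1.1'
    );

  if (hasSVG) {
    node.setAttribute('class', value); // works for both HTML and SVG elements
  } else {
    node.className = value; // property assignment; sufficient without SVG
  }
}
```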
|
Ruby
|
Ruby
|
require arel master in the benchmark template
|
0267019a55540bcc13952cb720c7869a76168014
|
<ide><path>guides/bug_report_templates/benchmark.rb
<ide> gemfile(true) do
<ide> source "https://rubygems.org"
<ide> gem "rails", github: "rails/rails"
<add> gem "arel", github: "rails/arel"
<ide> gem "benchmark-ips"
<ide> end
<ide>
| 1
|
Javascript
|
Javascript
|
restore correct logic
|
a5b456edff358092fdcf4ac6460ade3bec183352
|
<ide><path>src/data.js
<ide> Data.prototype = {
<ide> hasData: function( owner ) {
<ide> var index = Data.index( this.owners, owner );
<ide>
<del> return index !== -1 && jQuery.isEmptyObject( this.cache[ index ] );
<add> return index !== -1 && !jQuery.isEmptyObject( this.cache[ index ] );
<ide> },
<ide> discard: function( owner ) {
<ide> var index = Data.index( this.owners, owner );
| 1
|
Go
|
Go
|
remove unneeded time.duration()
|
4a3b36f44309ff8e650be2cff74f3ec436353298
|
<ide><path>registry/registry.go
<ide> func pingRegistryEndpoint(endpoint string) (RegistryInfo, error) {
<ide> }
<ide> httpDial := func(proto string, addr string) (net.Conn, error) {
<ide> // Set the connect timeout to 5 seconds
<del> conn, err := net.DialTimeout(proto, addr, time.Duration(5)*time.Second)
<add> conn, err := net.DialTimeout(proto, addr, 5*time.Second)
<ide> if err != nil {
<ide> return nil, err
<ide> }
<ide> // Set the recv timeout to 10 seconds
<del> conn.SetDeadline(time.Now().Add(time.Duration(10) * time.Second))
<add> conn.SetDeadline(time.Now().Add(10 * time.Second))
<ide> return conn, nil
<ide> }
<ide> httpTransport := &http.Transport{
<ide> func NewRegistry(authConfig *AuthConfig, factory *utils.HTTPRequestFactory, inde
<ide> if err != nil {
<ide> return nil, err
<ide> }
<del> conn = utils.NewTimeoutConn(conn, time.Duration(1)*time.Minute)
<add> conn = utils.NewTimeoutConn(conn, 1*time.Minute)
<ide> return conn, nil
<ide> }
<ide> }
| 1
|
Javascript
|
Javascript
|
simplify the readonly properties of icu
|
83a5eef6f2f7a7dc12f408ac23869392c11f2454
|
<ide><path>lib/internal/bootstrap_node.js
<ide> // of possible types.
<ide> const versionTypes = icu.getVersion().split(',');
<ide>
<del> function makeGetter(name) {
<del> return () => {
<del> // With an argument, getVersion(type) returns
<del> // the actual version string.
<del> const version = icu.getVersion(name);
<del> // Replace the current getter with a new property.
<del> delete process.versions[name];
<del> Object.defineProperty(process.versions, name, {
<del> value: version,
<del> writable: false,
<del> enumerable: true
<del> });
<del> return version;
<del> };
<del> }
<del>
<ide> for (var n = 0; n < versionTypes.length; n++) {
<ide> var name = versionTypes[n];
<add> const version = icu.getVersion(name);
<ide> Object.defineProperty(process.versions, name, {
<del> configurable: true,
<add> writable: false,
<ide> enumerable: true,
<del> get: makeGetter(name)
<add> value: version
<ide> });
<ide> }
<ide> }
<ide><path>test/parallel/test-process-versions.js
<ide> assert(commonTemplate.test(process.versions.zlib));
<ide> assert(/^\d+\.\d+\.\d+(?:\.\d+)?(?: \(candidate\))?$/
<ide> .test(process.versions.v8));
<ide> assert(/^\d+$/.test(process.versions.modules));
<add>
<add>for (let i = 0; i < expected_keys.length; i++) {
<add> const key = expected_keys[i];
<add> const descriptor = Object.getOwnPropertyDescriptor(process.versions, key);
<add> assert.strictEqual(descriptor.writable, false);
<add>}
| 2
|
Ruby
|
Ruby
|
allow access to mem_cache_store's stats hash
|
09517e3aeadba204d5b0b8c45a03e8084c4fc0b6
|
<ide><path>activesupport/lib/active_support/cache/mem_cache_store.rb
<ide> def delete_matched(matcher, options = nil)
<ide>
<ide> def clear
<ide> @data.flush_all
<add> end
<add>
<add> def stats
<add> @data.stats
<ide> end
<ide>
<ide> private
| 1
|
Javascript
|
Javascript
|
add shortcut for goto page
|
807e4fef342ad7eeb0bc30ce120e0de1c97545a4
|
<ide><path>web/viewer.js
<ide> window.addEventListener('keydown', function keydown(evt) {
<ide> SecondaryToolbar.presentationModeClick();
<ide> handled = true;
<ide> break;
<add> case 71: // g
<add> // focuses input#pageNumber field
<add> document.getElementById('pageNumber').select();
<add> handled = true;
<add> break;
<ide> }
<ide> }
<ide>
| 1
|
Text
|
Text
|
revise capitalization in title
|
5682f4bbf0543158a435ae08b630123a2919c953
|
<ide><path>guides/source/classic_to_zeitwerk_howto.md
<ide> When upgrading to Rails 6.x, it is highly encouraged to switch to `zeitwerk` mod
<ide>
<ide> Rails 7 ends the transition period and does not include `classic` mode.
<ide>
<del>I am scared
<add>I am Scared
<ide> -----------
<ide>
<ide> Don't :).
| 1
|
Javascript
|
Javascript
|
allow zooming while rotating
|
a58012bf39cfb602007d868505e5b271164379c9
|
<ide><path>examples/js/controls/OrbitControls.js
<ide> THREE.OrbitControls = function ( object, domElement ) {
<ide>
<ide> function onMouseWheel( event ) {
<ide>
<del> if ( scope.enabled === false || scope.enableZoom === false || state !== STATE.NONE ) return;
<add> if ( scope.enabled === false || scope.enableZoom === false || ( state !== STATE.NONE && state !== STATE.ROTATE ) ) return;
<ide>
<ide> event.preventDefault();
<ide> event.stopPropagation();
| 1
|
Python
|
Python
|
fix typo in cpp define
|
52648a4a8064a29fb483cd54995bcbfdc173a6db
|
<ide><path>numpy/core/setup.py
<ide> def generate_config_h(ext, build_dir):
<ide> #endif
<ide>
<ide> #ifdef HAVE_EXPF
<del>#define HAVE_FUNCS_FUNCS
<add>#define HAVE_FLOAT_FUNCS
<ide> #endif
<ide> """)
<ide> target_f.close()
| 1
|
Text
|
Text
|
add deprecated badge to legacy url methods
|
181ba95819565f8f76acffe538d3f06a50499d05
|
<ide><path>doc/api/url.md
<ide> pathToFileURL('/some/path%.c'); // Correct: file:///some/path%25.c (POSIX)
<ide> ```
<ide>
<ide> ## Legacy URL API
<del>
<del>> Stability: 0 - Deprecated: Use the WHATWG URL API instead.
<add><!-- YAML
<add>deprecated: v11.0.0
<add>-->
<ide>
<ide> ### Legacy `urlObject`
<ide> <!-- YAML
<ide> changes:
<ide> description: The Legacy URL API is deprecated. Use the WHATWG URL API.
<ide> -->
<ide>
<add>> Stability: 0 - Deprecated: Use the WHATWG URL API instead.
<add>
<ide> The legacy `urlObject` (`require('url').Url`) is created and returned by the
<ide> `url.parse()` function.
<ide>
<ide> changes:
<ide> times.
<ide> -->
<ide>
<add>> Stability: 0 - Deprecated: Use the WHATWG URL API instead.
<add>
<ide> * `urlObject` {Object|string} A URL object (as returned by `url.parse()` or
<ide> constructed otherwise). If a string, it is converted to an object by passing
<ide> it to `url.parse()`.
<ide> changes:
<ide> when no query string is present.
<ide> -->
<ide>
<add>> Stability: 0 - Deprecated: Use the WHATWG URL API instead.
<add>
<ide> * `urlString` {string} The URL string to parse.
<ide> * `parseQueryString` {boolean} If `true`, the `query` property will always
<ide> be set to an object returned by the [`querystring`][] module's `parse()`
<ide> changes:
<ide> contains a hostname.
<ide> -->
<ide>
<add>> Stability: 0 - Deprecated: Use the WHATWG URL API instead.
<add>
<ide> * `from` {string} The Base URL being resolved against.
<ide> * `to` {string} The HREF URL being resolved.
<ide>
| 1
|
Mixed
|
Ruby
|
allow aliased attributes in update
|
72e63c71bb3f73870e280964def25b7578818b1b
|
<ide><path>activerecord/CHANGELOG.md
<add>* Allow aliased attributes to be used in `#update_columns` and `#update`.
<add>
<add> *Gannon McGibbon*
<add>
<ide> * Allow spaces in postgres table names.
<ide>
<ide> Fixes issue where "user post" is misinterpreted as "\"user\".\"post\"" when quoting table names with the postgres adapter.
<ide><path>activerecord/lib/active_record/attribute_methods/dirty.rb
<ide> def attributes_in_database
<ide> end
<ide>
<ide> private
<del> def write_attribute_without_type_cast(attr_name, _)
<del> result = super
<del> clear_attribute_change(attr_name)
<add> def write_attribute_without_type_cast(attr_name, value)
<add> name = attr_name.to_s
<add> if self.class.attribute_alias?(name)
<add> name = self.class.attribute_alias(name)
<add> end
<add> result = super(name, value)
<add> clear_attribute_change(name)
<ide> result
<ide> end
<ide>
<ide><path>activerecord/test/cases/attribute_methods_test.rb
<ide> def setup
<ide> assert_raises(ActiveModel::UnknownAttributeError) { topic.update(no_column_exists: "Hello!") }
<ide> end
<ide>
<add> test "write_attribute allows writing to aliased attributes" do
<add> topic = Topic.first
<add> assert_nothing_raised { topic.update_columns(heading: "Hello!") }
<add> assert_nothing_raised { topic.update(heading: "Hello!") }
<add> end
<add>
<ide> test "read_attribute" do
<ide> topic = Topic.new
<ide> topic.title = "Don't change the topic"
| 3
|
PHP
|
PHP
|
add resource missing option
|
c887875c23f393e3443b1fd2a8dd0c748e6f13ea
|
<ide><path>src/Illuminate/Routing/ResourceRegistrar.php
<ide> class ResourceRegistrar
<ide> */
<ide> protected $resourceDefaults = ['index', 'create', 'store', 'show', 'edit', 'update', 'destroy'];
<ide>
<del> /**
<del> * Actions that use model binding.
<del> *
<del> * @var string[]
<del> */
<del> protected $modelBoundMethods = ['show', 'edit', 'update', 'destroy'];
<del>
<ide> /**
<ide> * The parameters set for this resource instance.
<ide> *
<ide> protected function addResourceIndex($name, $base, $controller, $options)
<ide> {
<ide> $uri = $this->getResourceUri($name);
<ide>
<add> unset($options['missing']);
<add>
<ide> $action = $this->getResourceAction($name, $controller, 'index', $options);
<ide>
<ide> return $this->router->get($uri, $action);
<ide> protected function addResourceCreate($name, $base, $controller, $options)
<ide> {
<ide> $uri = $this->getResourceUri($name).'/'.static::$verbs['create'];
<ide>
<add> unset($options['missing']);
<add>
<ide> $action = $this->getResourceAction($name, $controller, 'create', $options);
<ide>
<ide> return $this->router->get($uri, $action);
<ide> protected function addResourceStore($name, $base, $controller, $options)
<ide> {
<ide> $uri = $this->getResourceUri($name);
<ide>
<add> unset($options['missing']);
<add>
<ide> $action = $this->getResourceAction($name, $controller, 'store', $options);
<ide>
<ide> return $this->router->post($uri, $action);
<ide> protected function getResourceAction($resource, $controller, $method, $options)
<ide> $action['where'] = $options['wheres'];
<ide> }
<ide>
<del> if (isset($options['missing']) && in_array($method, $this->modelBoundMethods)) {
<add> if (isset($options['missing'])) {
<ide> $action['missing'] = $options['missing'];
<ide> }
<ide>
| 1
|
Python
|
Python
|
add a test for weakref checking when resizing
|
b2b32aaa2a8984ab52dfb2161e4bb9ed83c4aaff
|
<ide><path>numpy/core/tests/test_multiarray.py
<ide> def test_empty_view(self):
<ide> x_view.resize((0, 10))
<ide> x_view.resize((0, 100))
<ide>
<add> def test_check_weakref(self):
<add> x = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
<add> xref = weakref.ref(x)
<add> assert_raises(ValueError, x.resize, (5, 1))
<add> del xref # avoid pyflakes unused variable warning.
<add>
<ide>
<ide> class TestRecord(object):
<ide> def test_field_rename(self):
| 1
|
Ruby
|
Ruby
|
build assets automatically if necessary
|
a942e3128042b8796348909a0a23527340e1d2cb
|
<ide><path>railties/test/isolation/abstract_unit.rb
<ide> class ActiveSupport::TestCase
<ide> f.puts "require 'rails/all'"
<ide> end
<ide>
<add> unless File.exist?("#{RAILS_FRAMEWORK_ROOT}/actionview/lib/assets/compiled/rails-ujs.js")
<add> Dir.chdir("#{RAILS_FRAMEWORK_ROOT}/actionview") { `yarn build` }
<add> end
<add>
<ide> assets_path = "#{RAILS_FRAMEWORK_ROOT}/railties/test/isolation/assets"
<add> unless Dir.exist?("#{assets_path}/node_modules")
<add> Dir.chdir(assets_path) { `yarn install` }
<add> end
<ide> FileUtils.cp("#{assets_path}/package.json", "#{app_template_path}/package.json")
<ide> FileUtils.cp("#{assets_path}/config/webpacker.yml", "#{app_template_path}/config/webpacker.yml")
<ide> FileUtils.cp_r("#{assets_path}/config/webpack", "#{app_template_path}/config/webpack")
| 1
|
Mixed
|
Python
|
update documentation for cifar datasets
|
39a69b3f528a1a83bb1767135d6e57ba81e6e3be
|
<ide><path>docs/sources/datasets.md
<ide>
<ide> `keras.datasets.cifar10`
<ide>
<del>Dataset of 50,000 32x32 color images, labeled over 10 categories.
<add>Dataset of 50,000 32x32 color training images, labeled over 10 categories, and 10,000 test images.
<ide>
<ide> ### Usage:
<ide>
<ide> ```python
<del>(X_train, y_train), (X_test, y_test) = cifar10.load_data(test_split=0.1, seed=113)
<add>(X_train, y_train), (X_test, y_test) = cifar10.load_data()
<ide> ```
<ide>
<ide> - __Return:__
<ide> - 2 tuples:
<ide> - __X_train, X_test__: uint8 array of RGB image data with shape (nb_samples, 3, 32, 32).
<ide> - __y_train, y_test__: uint8 array of category labels (integers in range 0-9) with shape (nb_samples,).
<ide>
<add>---
<add>
<add>## CIFAR100 small image classification
<add>
<add>`keras.datasets.cifar100`
<add>
<add>Dataset of 50,000 32x32 color training images, labeled over 100 categories, and 10,000 test images.
<add>
<add>### Usage:
<add>
<add>```python
<add>(X_train, y_train), (X_test, y_test) = cifar100.load_data(label_mode='fine')
<add>```
<add>
<add>- __Return:__
<add> - 2 tuples:
<add> - __X_train, X_test__: uint8 array of RGB image data with shape (nb_samples, 3, 32, 32).
<add> - __y_train, y_test__: uint8 array of category labels with shape (nb_samples,).
<add>
<ide> - __Arguments:__
<ide>
<del> - __test_split__: float. Fraction of the dataset to be used as test data.
<del> - __seed__: int. Seed for reproducible data shuffling.
<add> - __label_mode__: "fine" or "coarse".
<ide>
<ide> ---
<ide>
<ide><path>examples/cifar10_cnn.py
<ide> data_augmentation = True
<ide>
<ide> # the data, shuffled and split between tran and test sets
<del>(X_train, y_train), (X_test, y_test) = cifar10.load_data(test_split=0.1)
<add>(X_train, y_train), (X_test, y_test) = cifar10.load_data()
<ide> print(X_train.shape[0], 'train samples')
<ide> print(X_test.shape[0], 'test samples')
<ide>
| 2
|
Text
|
Text
|
add note about png files to image static resources
|
dee4ea8ea348937340fc5d5f005c2cfc042d3452
|
<ide><path>docs/Image.md
<ide> When your entire codebase respects this convention, you're able to do interestin
<ide>
<ide> *This process is currently being improved, a much better workflow will be available shortly.*
<ide>
<add>> **NOTE**: PNG images are required when loading with `require('image!my-icon')`
<add>>
<add>> At this time, only PNG images are supported in iOS. There is an [issue](https://github.com/facebook/react-native/issues/646) that is currently addressing this bug. In the meantime a quick fix is to rename your files to image.png or to use the `isStatic` flag like: `source={{ uri: 'image', isStatic: true }}`.
<add>
<ide> ### Adding Static Resources to your Android app
<ide>
<ide> Add your images as [bitmap drawables](http://developer.android.com/guide/topics/resources/drawable-resource.html#Bitmap) to the android project (`<yourapp>/android/app/src/main/res`). To provide different resolutions of your assets, check out [using configuration qualifiers](http://developer.android.com/guide/practices/screens_support.html#qualifiers). Normally, you will want to put your assets in the following directories (create them under `res` if they don't exist):
| 1
|
PHP
|
PHP
|
remove `numeric` from `unsigned` config
|
00fb663f9045eac6c183ed885f50c0ae19c0744a
|
<ide><path>lib/Cake/Model/Datasource/Database/Mysql.php
<ide> class Mysql extends DboSource {
<ide> 'value' => 'UNSIGNED', 'quote' => false, 'join' => ' ', 'column' => false, 'position' => 'beforeDefault',
<ide> 'noVal' => true,
<ide> 'options' => array(true),
<del> 'types' => array('integer', 'float', 'biginteger', 'numeric', 'decimal')
<add> 'types' => array('integer', 'float', 'decimal', 'biginteger')
<ide> )
<ide> );
<ide>
<ide><path>lib/Cake/Test/Case/Model/Datasource/Database/MysqlTest.php
<ide> public function buildColumnUnsignedProvider() {
<ide> array(
<ide> array(
<ide> 'name' => 'testName',
<del> 'type' => 'numeric',
<add> 'type' => 'decimal',
<ide> 'unsigned' => true
<ide> ),
<ide> '`testName` decimal UNSIGNED'
<ide> public function buildColumnUnsignedProvider() {
<ide> array(
<ide> array(
<ide> 'name' => 'testName',
<del> 'type' => 'numeric',
<add> 'type' => 'decimal',
<ide> 'unsigned' => true,
<ide> 'default' => 1
<ide> ),
<ide> '`testName` decimal UNSIGNED DEFAULT 1'
<del> ),
<del> //set #8
<del> array(
<del> array(
<del> 'name' => 'testName',
<del> 'type' => 'decimal',
<del> 'unsigned' => true
<del> ),
<del> '`testName` decimal UNSIGNED'
<ide> )
<ide> );
<ide> }
| 2
|
Javascript
|
Javascript
|
add getcontext/filetimestamps to watcher
|
74a8c45ea10ebfc692dca4b66f490969f2062abe
|
<ide><path>lib/WatchIgnorePlugin.js
<ide> class IgnoringWatchFileSystem {
<ide> const ignoredFiles = files.filter(ignored);
<ide> const ignoredDirs = dirs.filter(ignored);
<ide>
<del> this.wfs.watch(files.filter(notIgnored), dirs.filter(notIgnored), missing, startTime, options, (err, filesModified, dirsModified, missingModified, fileTimestamps, dirTimestamps) => {
<add> const watcher = this.wfs.watch(files.filter(notIgnored), dirs.filter(notIgnored), missing, startTime, options, (err, filesModified, dirsModified, missingModified, fileTimestamps, dirTimestamps) => {
<ide> if(err) return callback(err);
<ide>
<ide> ignoredFiles.forEach(path => {
<ide> class IgnoringWatchFileSystem {
<ide>
<ide> callback(err, filesModified, dirsModified, missingModified, fileTimestamps, dirTimestamps);
<ide> }, callbackUndelayed);
<add>
<add> return {
<add> close: () => watcher.close(),
<add> pause: () => watcher.pause(),
<add> getContextTimestamps: () => {
<add> const dirTimestamps = watcher.getContextTimestamps();
<add> ignoredDirs.forEach(path => {
<add> dirTimestamps[path] = 1;
<add> });
<add> return dirTimestamps;
<add> },
<add> getFileTimestamps: () => {
<add> const fileTimestamps = watcher.getFileTimestamps();
<add> ignoredFiles.forEach(path => {
<add> fileTimestamps[path] = 1;
<add> });
<add> return fileTimestamps;
<add> }
<add> };
<ide> }
<ide> }
<ide><path>lib/Watching.js
<ide> class Watching {
<ide>
<ide> this.compiler.fileTimestamps = fileTimestamps;
<ide> this.compiler.contextTimestamps = contextTimestamps;
<del> this.invalidate();
<add> this._invalidate();
<ide> }, (fileName, changeTime) => {
<ide> this.compiler.hooks.invalid.call(fileName, changeTime);
<ide> });
<ide> class Watching {
<ide> if(callback) {
<ide> this.callbacks.push(callback);
<ide> }
<add> if(this.watcher) {
<add> this.compiler.fileTimestamps = this.watcher.getFileTimestamps();
<add> this.compiler.contextTimestamps = this.watcher.getContextTimestamps();
<add> }
<add> return this._invalidate();
<add> }
<add>
<add> _invalidate() {
<ide> if(this.watcher) {
<ide> this.pausedWatcher = this.watcher;
<ide> this.watcher.pause();
<ide><path>lib/node/NodeWatchFileSystem.js
<ide> class NodeWatchFileSystem {
<ide> if(this.watcher) {
<ide> this.watcher.pause();
<ide> }
<add> },
<add> getFileTimestamps: () => {
<add> if(this.watcher)
<add> return this.watcher.getTimes();
<add> else
<add> return {};
<add> },
<add> getContextTimestamps: () => {
<add> if(this.watcher)
<add> return this.watcher.getTimes();
<add> else
<add> return {};
<ide> }
<ide> };
<ide> }
| 3
|
Python
|
Python
|
remove incubator from the link
|
1d46a20c492729bf15d86de1585b137166ba4258
|
<ide><path>setup.py
<ide> libcloud.utils.SHOW_DEPRECATION_WARNING = False
<ide>
<ide> HTML_VIEWSOURCE_BASE = 'https://svn.apache.org/viewvc/libcloud/trunk'
<del>PROJECT_BASE_DIR = 'http://incubator.apache.org/libcloud/'
<add>PROJECT_BASE_DIR = 'http://libcloud.apache.org'
<ide> TEST_PATHS = [ 'test', 'test/compute', 'test/storage' , 'test/loadbalancer']
<ide> DOC_TEST_MODULES = [ 'libcloud.compute.drivers.dummy',
<ide> 'libcloud.storage.drivers.dummy' ]
| 1
|
Ruby
|
Ruby
|
remove useless check of adapter
|
e8b809070d4a669410a621db70b184d97e29fa3e
|
<ide><path>activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb
<ide> def sqlite3_connection(config) # :nodoc:
<ide> config[:database] = File.expand_path(config[:database], Rails.root)
<ide> end
<ide>
<del> unless 'sqlite3' == config[:adapter]
<del> raise ArgumentError, 'adapter name should be "sqlite3"'
<del> end
<del>
<ide> db = SQLite3::Database.new(
<ide> config[:database],
<ide> :results_as_hash => true
| 1
|
PHP
|
PHP
|
change default view
|
638b261a68913bae9a64f6d540612b862fa3c4dd
|
<ide><path>config/auth.php
<ide> 'passwords' => [
<ide> 'users' => [
<ide> 'provider' => 'users',
<del> 'email' => 'emails.password',
<add> 'email' => 'auth.emails.password',
<ide> 'table' => 'password_resets',
<ide> 'expire' => 60,
<ide> ],
| 1
|
Ruby
|
Ruby
|
update ar tests
|
1d803e51890e842f0c25ee3a016ed0311f2fa1b4
|
<ide><path>activerecord/test/cases/fixtures_test.rb
<ide> def test_foo
<ide> class FixtureLoadingTest < ActiveRecord::TestCase
<ide> uses_mocha 'reloading_fixtures_through_accessor_methods' do
<ide> def test_logs_message_for_failed_dependency_load
<del> Test::Unit::TestCase.expects(:require_dependency).with(:does_not_exist).raises(LoadError)
<add> ActiveRecord::TestCase.expects(:require_dependency).with(:does_not_exist).raises(LoadError)
<ide> ActiveRecord::Base.logger.expects(:warn)
<del> Test::Unit::TestCase.try_to_load_dependency(:does_not_exist)
<add> ActiveRecord::TestCase.try_to_load_dependency(:does_not_exist)
<ide> end
<ide>
<ide> def test_does_not_logs_message_for_successful_dependency_load
<del> Test::Unit::TestCase.expects(:require_dependency).with(:works_out_fine)
<add> ActiveRecord::TestCase.expects(:require_dependency).with(:works_out_fine)
<ide> ActiveRecord::Base.logger.expects(:warn).never
<del> Test::Unit::TestCase.try_to_load_dependency(:works_out_fine)
<add> ActiveRecord::TestCase.try_to_load_dependency(:works_out_fine)
<ide> end
<ide> end
<ide> end
<ide><path>activerecord/test/cases/validations_i18n_test.rb
<ide> require 'models/topic'
<ide> require 'models/reply'
<ide>
<del>class ActiveRecordValidationsI18nTests < Test::Unit::TestCase
<add>class ActiveRecordValidationsI18nTests < ActiveSupport::TestCase
<ide> def setup
<ide> reset_callbacks Topic
<ide> @topic = Topic.new
| 2
|
Javascript
|
Javascript
|
use dumpcache() rather than manual reset
|
32d3d7774afde084888a6cf7885cb0da84b79cd6
|
<ide><path>src/core/__tests__/ReactComponent-test.js
<ide> var reactComponentExpect;
<ide>
<ide> describe('ReactComponent', function() {
<ide> beforeEach(function() {
<add> require('mock-modules').dumpCache();
<add>
<ide> React = require('React');
<ide> ReactMount = require('ReactMount');
<ide> ReactTestUtils = require('ReactTestUtils');
<ide> reactComponentExpect = require('reactComponentExpect');
<del>
<del> // This module is whitelisted from automocking so manually
<del> // reset it.
<del> ReactMount.allowFullPageRender = false;
<ide> });
<ide>
<ide> it('should be able to switch root constructors via state', function() {
| 1
|
Javascript
|
Javascript
|
add object id to streams to prevent infinite loops
|
532d7246ea1b67f5b9752f6b3a2c3dbf2c09c9c6
|
<ide><path>src/core/evaluator.js
<ide> var PartialEvaluator = (function PartialEvaluatorClosure() {
<ide> if (!isStream(xObject)) {
<ide> continue;
<ide> }
<add> if (xObject.dict.objId) {
<add> if (processed[xObject.dict.objId]) {
<add> // stream has objId and is processed already
<add> continue;
<add> }
<add> processed[xObject.dict.objId] = true;
<add> }
<ide> var xResources = xObject.dict.get('Resources');
<ide> // Checking objId to detect an infinite loop.
<ide> if (isDict(xResources) &&
<ide><path>src/core/obj.js
<ide> var XRef = (function XRefClosure() {
<ide> } else {
<ide> xrefEntry = this.fetchCompressed(xrefEntry, suppressEncryption);
<ide> }
<del>
<del> if (isDict(xrefEntry)) {
<add> if (isDict(xrefEntry)){
<ide> xrefEntry.objId = 'R' + ref.num + '.' + ref.gen;
<add> } else if (isStream(xrefEntry)) {
<add> xrefEntry.dict.objId = 'R' + ref.num + '.' + ref.gen;
<ide> }
<ide> return xrefEntry;
<ide> },
| 2
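Aside on the preceding record: the objId bookkeeping is a general cycle guard, tracking already-visited objects so that circular resource references cannot loop forever. The sketch below is a generic illustration with made-up names (`walkResources`, `children`), not pdf.js internals.

```javascript
// Walk a graph of resources, skipping any node whose objId has been seen.
function walkResources(root, visit) {
  var processed = Object.create(null);

  function walk(node) {
    if (!node || typeof node !== 'object') return;
    if (node.objId) {
      if (processed[node.objId]) return; // already processed: break the cycle
      processed[node.objId] = true;
    }
    visit(node);
    (node.children || []).forEach(walk);
  }

  walk(root);
}
```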
|
PHP
|
PHP
|
add dropsoftdeletes method to migrations
|
c77a9fd08e8b0ad3368acc9a700d572c4dff251c
|
<ide><path>src/Illuminate/Database/Schema/Blueprint.php
<ide> public function dropTimestamps()
<ide> $this->dropColumn('created_at', 'updated_at');
<ide> }
<ide>
<add> /**
<add> * Indicate that the soft delete column should be dropped.
<add> *
<add> * @return void
<add> */
<add> public function dropSoftDeletes()
<add> {
<add> $this->dropColumn('deleted_at');
<add> }
<add>
<ide> /**
<ide> * Rename the table to a given name.
<ide> *
| 1
|
Text
|
Text
|
remove stray files
|
2c5ec74e4583271aa95e43c4d422fb3e489ae08f
|
<ide><path>curriculum/challenges/italian/13-relational-databases/learn-relational-databases/learn-advanced-bash-by-building.md
<del>---
<del>id: 602da0de22201c65d2a019f6
<del>title: Impara Bash avanzato costruendo
<del>challengeType: 12
<del>helpCategory: Relational Databases
<del>url: https://github.com/moT01/.learn-advanced-bash-by-building
<del>dashedName: learn-advanced-bash-by-building
<del>---
<del>
<del># --description--
<del>
<del># --instructions--
<del>
<del># --hints--
<del>
<del># --seed--
<del>
<del># --solutions--
<ide><path>curriculum/challenges/portuguese/13-relational-databases/learn-relational-databases/learn-advanced-bash-by-building.md
<del>---
<del>id: 602da0de22201c65d2a019f6
<del>title: Aprenda Bash avançado construindo
<del>challengeType: 12
<del>helpCategory: Relational Databases
<del>url: https://github.com/moT01/.learn-advanced-bash-by-building
<del>dashedName: learn-advanced-bash-by-building
<del>---
<del>
<del># --description--
<del>
<del># --instructions--
<del>
<del># --hints--
<del>
<del># --seed--
<del>
<del># --solutions--
| 2
|
Python
|
Python
|
remove debug output from tests
|
1003e7ccec483ed7d9ad3995224af89350a7bbae
|
<ide><path>spacy/tests/parser/test_parse.py
<ide> def test_arc_eager_finalize_state(EN):
<ide> # right branching
<ide> example = EN.tokenizer.tokens_from_list(u"a b c d e".split(' '))
<ide> apply_transition_sequence(EN, example, ['R-nsubj','D','R-nsubj','R-nsubj','D','R-ROOT'])
<del> print [ '%s/%s' % (t.dep_,t.head.i) for t in example ]
<ide>
<ide> assert example[0].n_lefts == 0
<ide> assert example[0].n_rights == 2
<ide> def test_arc_eager_finalize_state(EN):
<ide> # left branching
<ide> example = EN.tokenizer.tokens_from_list(u"a b c d e".split(' '))
<ide> apply_transition_sequence(EN, example, ['S','L-nsubj','L-ROOT','S','L-nsubj','L-nsubj'])
<del> print [ '%s/%s' % (t.dep_,t.head.i) for t in example ]
<ide>
<ide> assert example[0].n_lefts == 0
<ide> assert example[0].n_rights == 0
<ide><path>spacy/tests/parser/test_sbd.py
<ide> def test_sbd_for_root_label_dependents(EN):
<ide> example = EN.tokenizer.tokens_from_list(u"I saw a firefly It glowed".split(' '))
<ide> EN.tagger(example)
<ide> apply_transition_sequence(EN, example, ['L-nsubj','S','L-det','R-dobj','D','S','L-nsubj','R-ROOT'])
<del> print ['%s/%s' % (t.dep_,t.head.i) for t in example]
<ide>
<ide> assert example[1].head.i == 1
<ide> assert example[5].head.i == 5
| 2
|
Python
|
Python
|
update parser docstrings. closes #968
|
5fa100245cbf71a47c7a1ea7a869d03049380130
|
<ide><path>rest_framework/parsers.py
<ide> class JSONParser(BaseParser):
<ide>
<ide> def parse(self, stream, media_type=None, parser_context=None):
<ide> """
<del> Returns a 2-tuple of `(data, files)`.
<del>
<del> `data` will be an object which is the parsed content of the response.
<del> `files` will always be `None`.
<add> Parses the incoming bytestream as JSON and returns the resulting data.
<ide> """
<ide> parser_context = parser_context or {}
<ide> encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
<ide> class YAMLParser(BaseParser):
<ide>
<ide> def parse(self, stream, media_type=None, parser_context=None):
<ide> """
<del> Returns a 2-tuple of `(data, files)`.
<del>
<del> `data` will be an object which is the parsed content of the response.
<del> `files` will always be `None`.
<add> Parses the incoming bytestream as YAML and returns the resulting data.
<ide> """
<ide> assert yaml, 'YAMLParser requires pyyaml to be installed'
<ide>
<ide> class FormParser(BaseParser):
<ide>
<ide> def parse(self, stream, media_type=None, parser_context=None):
<ide> """
<del> Returns a 2-tuple of `(data, files)`.
<del>
<del> `data` will be a :class:`QueryDict` containing all the form parameters.
<del> `files` will always be :const:`None`.
<add> Parses the incoming bytestream as a URL encoded form,
<add> and returns the resulting QueryDict.
<ide> """
<ide> parser_context = parser_context or {}
<ide> encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
<ide> class MultiPartParser(BaseParser):
<ide>
<ide> def parse(self, stream, media_type=None, parser_context=None):
<ide> """
<del> Returns a DataAndFiles object.
<add> Parses the incoming bytestream as a multipart encoded form,
<add> and returns a DataAndFiles object.
<ide>
<ide> `.data` will be a `QueryDict` containing all the form parameters.
<ide> `.files` will be a `QueryDict` containing all the form files.
<ide> class XMLParser(BaseParser):
<ide> media_type = 'application/xml'
<ide>
<ide> def parse(self, stream, media_type=None, parser_context=None):
<add> """
<add> Parses the incoming bytestream as XML and returns the resulting data.
<add> """
<ide> assert etree, 'XMLParser requires defusedxml to be installed'
<ide>
<ide> parser_context = parser_context or {}
<ide> class FileUploadParser(BaseParser):
<ide>
<ide> def parse(self, stream, media_type=None, parser_context=None):
<ide> """
<del> Returns a DataAndFiles object.
<add> Treats the incoming bytestream as a raw file upload and returns
<add> a `DataAndFiles` object.
<ide>
<ide> `.data` will be None (we expect request body to be a file content).
<ide> `.files` will be a `QueryDict` containing one 'file' element.
| 1
|
Ruby
|
Ruby
|
remove extranous spaces from assignment
|
bd49a4b175e056c77610c78ac109eab0c3411af8
|
<ide><path>actionview/lib/action_view/digestor.rb
<ide>
<ide> module ActionView
<ide> class Digestor
<del> @@digest_mutex = Mutex.new
<add> @@digest_mutex = Mutex.new
<ide>
<ide> class << self
<ide> # Supported options:
| 1
|
Ruby
|
Ruby
|
permit running jobs in system tests
|
c8396e030f4b1edd48340975696dcaf6aa8291c9
|
<ide><path>actionpack/lib/action_dispatch/railtie.rb
<ide> class Railtie < Rails::Railtie # :nodoc:
<ide>
<ide> ActionDispatch.test_app = app
<ide> end
<add>
<add> initializer "action_dispatch.system_tests" do |app|
<add> ActiveSupport.on_load(:action_dispatch_system_test_case) do
<add> include app.routes.url_helpers
<add> end
<add> end
<ide> end
<ide> end
<ide><path>actionpack/lib/action_dispatch/system_test_case.rb
<ide> require "action_dispatch/system_testing/server"
<ide> require "action_dispatch/system_testing/test_helpers/screenshot_helper"
<ide> require "action_dispatch/system_testing/test_helpers/setup_and_teardown"
<del>require "action_dispatch/system_testing/test_helpers/undef_methods"
<ide>
<ide> module ActionDispatch
<ide> # = System Testing
<ide> module ActionDispatch
<ide> # Because <tt>ActionDispatch::SystemTestCase</tt> is a shim between Capybara
<ide> # and Rails, any driver that is supported by Capybara is supported by system
<ide> # tests as long as you include the required gems and files.
<del> class SystemTestCase < IntegrationTest
<add> class SystemTestCase < ActiveSupport::TestCase
<ide> include Capybara::DSL
<ide> include Capybara::Minitest::Assertions
<ide> include SystemTesting::TestHelpers::SetupAndTeardown
<ide> include SystemTesting::TestHelpers::ScreenshotHelper
<del> include SystemTesting::TestHelpers::UndefMethods
<ide>
<ide> def initialize(*) # :nodoc:
<ide> super
<ide> def self.driven_by(driver, using: :chrome, screen_size: [1400, 1400], options: {
<ide>
<ide> driven_by :selenium
<ide>
<add> def url_options # :nodoc:
<add> default_url_options.merge(host: Capybara.app_host)
<add> end
<add>
<ide> ActiveSupport.run_load_hooks(:action_dispatch_system_test_case, self)
<ide> end
<ide>
<ide><path>actionpack/lib/action_dispatch/system_testing/test_helpers/setup_and_teardown.rb
<ide> module SetupAndTeardown # :nodoc:
<ide> DEFAULT_HOST = "http://127.0.0.1"
<ide>
<ide> def host!(host)
<del> super
<ide> Capybara.app_host = host
<ide> end
<ide>
<ide><path>actionpack/lib/action_dispatch/system_testing/test_helpers/undef_methods.rb
<del># frozen_string_literal: true
<del>
<del>module ActionDispatch
<del> module SystemTesting
<del> module TestHelpers
<del> module UndefMethods # :nodoc:
<del> extend ActiveSupport::Concern
<del> included do
<del> METHODS = %i(get post put patch delete).freeze
<del>
<del> METHODS.each do |verb|
<del> undef_method verb
<del> end
<del>
<del> def method_missing(method, *args, &block)
<del> if METHODS.include?(method)
<del> raise NoMethodError, "System tests cannot make direct requests via ##{method}; use #visit and #click_on instead. See http://www.rubydoc.info/github/teamcapybara/capybara/master#The_DSL for more information."
<del> else
<del> super
<del> end
<del> end
<del> end
<del> end
<del> end
<del> end
<del>end
<ide><path>actionpack/test/dispatch/system_testing/system_test_case_test.rb
<ide> class SetHostTest < DrivenByRackTest
<ide> assert_equal "http://example.com", Capybara.app_host
<ide> end
<ide> end
<del>
<del>class UndefMethodsTest < DrivenBySeleniumWithChrome
<del> test "get" do
<del> exception = assert_raise NoMethodError do
<del> get "http://example.com"
<del> end
<del> assert_equal "System tests cannot make direct requests via #get; use #visit and #click_on instead. See http://www.rubydoc.info/github/teamcapybara/capybara/master#The_DSL for more information.", exception.message
<del> end
<del>
<del> test "post" do
<del> exception = assert_raise NoMethodError do
<del> post "http://example.com"
<del> end
<del> assert_equal "System tests cannot make direct requests via #post; use #visit and #click_on instead. See http://www.rubydoc.info/github/teamcapybara/capybara/master#The_DSL for more information.", exception.message
<del> end
<del>
<del> test "put" do
<del> exception = assert_raise NoMethodError do
<del> put "http://example.com"
<del> end
<del> assert_equal "System tests cannot make direct requests via #put; use #visit and #click_on instead. See http://www.rubydoc.info/github/teamcapybara/capybara/master#The_DSL for more information.", exception.message
<del> end
<del>
<del> test "patch" do
<del> exception = assert_raise NoMethodError do
<del> patch "http://example.com"
<del> end
<del> assert_equal "System tests cannot make direct requests via #patch; use #visit and #click_on instead. See http://www.rubydoc.info/github/teamcapybara/capybara/master#The_DSL for more information.", exception.message
<del> end
<del>
<del> test "delete" do
<del> exception = assert_raise NoMethodError do
<del> delete "http://example.com"
<del> end
<del> assert_equal "System tests cannot make direct requests via #delete; use #visit and #click_on instead. See http://www.rubydoc.info/github/teamcapybara/capybara/master#The_DSL for more information.", exception.message
<del> end
<del>end
| 5
|
Javascript
|
Javascript
|
allow listing of months and weekdays
|
de08cc5023d1103ece6d34af9c73c93536185508
|
<ide><path>moment.js
<ide> return units ? unitAliases[units] || units.toLowerCase().replace(/(.)s$/, '$1') : units;
<ide> }
<ide>
<add> function listLocal(field, format) {
<add> var i, m, str, method, count, setter,
<add> months = [];
<add>
<add> if (field.match(/^week/)) {
<add> count = 7;
<add> setter = 'day';
<add> }
<add> else if (field.match(/^month/)) {
<add> count = 12;
<add> setter = 'month';
<add> }
<add> else {
<add> return [];
<add> }
<add>
<add> for (i = 0; i < count; i++) {
<add> m = moment().utc().set(setter, i);
<add> method = moment.fn._lang[field] || Language.prototype[field];
<add> str = method.call(moment.fn._lang, m, format || '');
<add> months.push(str);
<add> }
<add>
<add> return months;
<add> }
<ide>
<ide> /************************************
<ide> Languages
<ide> return getLangDefinition(key);
<ide> };
<ide>
<add> moment.months = function (format) {
<add> return listLocal('months', format);
<add> };
<add>
<add> moment.monthsShort = function (format) {
<add> return listLocal('monthsShort', format);
<add> };
<add>
<add> moment.weekdays = function () {
<add> return listLocal('weekdays');
<add> };
<add>
<add> moment.weekdaysShort = function () {
<add> return listLocal('weekdaysShort');
<add> };
<add>
<add> moment.weekdaysMin = function () {
<add> return listLocal('weekdaysMin');
<add> };
<add>
<ide> // compare moment object
<ide> moment.isMoment = function (obj) {
<ide> return obj instanceof Moment;
<ide> set : function (units, value) {
<ide> units = normalizeUnits(units);
<ide> this[units.toLowerCase()](value);
<add> return this;
<ide> },
<ide>
<ide> // If passed a language key, it will set the language for this
<ide><path>test/moment/listers.js
<add>var moment = require("../../moment");
<add>
<add>exports.listers = {
<add> "default" : function (test) {
<add> moment.lang('en');
<add>
<add> test.expect(5);
<add> test.deepEqual(moment.months(), ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"]);
<add> test.deepEqual(moment.monthsShort(), ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]);
<add> test.deepEqual(moment.weekdays(), ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"]);
<add> test.deepEqual(moment.weekdaysShort(), ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"]);
<add> test.deepEqual(moment.weekdaysMin(), ["Su", "Mo", "Tu", "We", "Th", "Fr", "Sa"]);
<add> test.done();
<add> },
<add>
<add> "localized" : function (test) {
<add> var months = "one_two_three_four_five_six_seven_eight_nine_ten_eleven_twelve".split('_'),
<add> monthsShort = "on_tw_th_fo_fi_si_se_ei_ni_te_el_tw".split("_"),
<add> weekdays = "one_two_three_four_five_six_seven".split("_"),
<add> weekdaysShort = "on_tw_th_fo_fi_si_se".split("_"),
<add> weekdaysMin = "1_2_3_4_5_6_7".split("_");
<add>
<add> moment.lang('numerologists', {
<add> months : months,
<add> monthsShort : monthsShort,
<add> weekdays : weekdays,
<add> weekdaysShort: weekdaysShort,
<add> weekdaysMin: weekdaysMin
<add> });
<add>
<add> test.expect(5);
<add> test.deepEqual(moment.months(), months);
<add> test.deepEqual(moment.monthsShort(), monthsShort);
<add> test.deepEqual(moment.weekdays(), weekdays);
<add> test.deepEqual(moment.weekdaysShort(), weekdaysShort);
<add> test.deepEqual(moment.weekdaysMin(), weekdaysMin);
<add> test.done();
<add> },
<add>
<add> "with functions" : function (test) {
<add> var monthsShort = "one_two_three_four_five_six_seven_eight_nine_ten_eleven_twelve".split('_'),
<add> monthsShortWeird = "onesy_twosy_threesy_foursy_fivesy_sixsy_sevensy_eightsy_ninesy_tensy_elevensy_twelvesy".split('_');
<add>
<add> moment.lang("difficult", {
<add>
<add> monthsShort: function (m, format) {
<add> var arr = format.match(/-MMM-/) ? monthsShortWeird : monthsShort;
<add> return arr[m.month()];
<add> }
<add> });
<add>
<add> test.expect(3);
<add> test.deepEqual(moment.monthsShort(), monthsShort);
<add> test.deepEqual(moment.monthsShort('MMM'), monthsShort);
<add> test.deepEqual(moment.monthsShort('-MMM-'), monthsShortWeird);
<add> test.done();
<add> }
<add>};
| 2
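Aside on the preceding record: outside of moment's language definitions, the same "list the locale's month names" idea can be sketched with the built-in Intl API. Purely an analogy, not part of the commit; the `monthNames` helper is hypothetical.

```javascript
// Build the list of month names for a locale by formatting one date per month.
function monthNames(locale, style) {
  var fmt = new Intl.DateTimeFormat(locale, { month: style || 'long' });
  var names = [];
  for (var m = 0; m < 12; m++) {
    // Fixed year and day so only the month varies.
    names.push(fmt.format(new Date(Date.UTC(2000, m, 1))));
  }
  return names;
}

// monthNames('en', 'long');  // ["January", ..., "December"]
// monthNames('en', 'short'); // ["Jan", ..., "Dec"]
```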
|
Ruby
|
Ruby
|
fix style of recursive_requirements tests
|
33b955a3f24d835f45d439f97ecabb7d6d51b7ad
|
<ide><path>Library/Homebrew/test/formula_test.rb
<ide> def test_requirements
<ide> url "f1-1"
<ide>
<ide> depends_on :python
<del> depends_on :x11 => :recommended
<del> depends_on :xcode => ['1.0', :optional]
<add> depends_on x11: :recommended
<add> depends_on xcode: ["1.0", :optional]
<ide> end
<ide> stub_formula_loader f1
<ide>
<ide> def test_requirements
<ide> assert_equal Set[python, x11, xcode], Set.new(f2.recursive_requirements {})
<ide>
<ide> # Requirements can be pruned
<del> requirements = f2.recursive_requirements do |dependent, requirement|
<add> requirements = f2.recursive_requirements do |_dependent, requirement|
<ide> Requirement.prune if requirement.is_a?(PythonRequirement)
<ide> end
<ide> assert_equal Set[x11, xcode], Set.new(requirements)
| 1
|
Go
|
Go
|
improve some logging
|
068ab5144244648bcba3b7261c41836a4ba5cf0f
|
<ide><path>layer/layer_store.go
<ide> func (ls *layerStore) assembleTarTo(graphID string, metadata io.ReadCloser, size
<ide> func (ls *layerStore) Cleanup() error {
<ide> orphanLayers, err := ls.store.getOrphan()
<ide> if err != nil {
<del> logrus.Errorf("Cannot get orphan layers: %v", err)
<add> logrus.WithError(err).Error("cannot get orphan layers")
<add> }
<add> if len(orphanLayers) > 0 {
<add> logrus.Debugf("found %v orphan layers", len(orphanLayers))
<ide> }
<del> logrus.Debugf("found %v orphan layers", len(orphanLayers))
<ide> for _, orphan := range orphanLayers {
<del> logrus.Debugf("removing orphan layer, chain ID: %v , cache ID: %v", orphan.chainID, orphan.cacheID)
<add> logrus.WithField("cache-id", orphan.cacheID).Debugf("removing orphan layer, chain ID: %v", orphan.chainID)
<ide> err = ls.driver.Remove(orphan.cacheID)
<ide> if err != nil && !os.IsNotExist(err) {
<ide> logrus.WithError(err).WithField("cache-id", orphan.cacheID).Error("cannot remove orphan layer")
| 1
|
Python
|
Python
|
remove duplicate assignment of dictionary key
|
c082254420464aea70b737d8cef7d14397a3196f
|
<ide><path>libcloud/compute/drivers/ec2.py
<ide> def GiB(value):
<ide> 'os1.8xlarge'
<ide> ]
<ide> },
<del> 'us-east-2': {
<del> 'endpoint': 'fcu.us-east-2.outscale.com',
<del> 'api_name': 'osc_sas_us_east_2',
<del> 'country': 'USA',
<del> 'instance_types': [
<del> 't1.micro',
<del> 'm1.small',
<del> 'm1.medium',
<del> 'm1.large',
<del> 'm1.xlarge',
<del> 'c1.medium',
<del> 'c1.xlarge',
<del> 'm2.xlarge',
<del> 'm2.2xlarge',
<del> 'm2.4xlarge',
<del> 'nv1.small',
<del> 'nv1.medium',
<del> 'nv1.large',
<del> 'nv1.xlarge',
<del> 'cc1.4xlarge',
<del> 'cc2.8xlarge',
<del> 'm3.xlarge',
<del> 'm3.2xlarge',
<del> 'cr1.8xlarge',
<del> 'os1.8xlarge'
<del> ]
<del> },
<del> 'us-east-2': {
<del> 'endpoint': 'fcu.us-east-2.outscale.com',
<del> 'api_name': 'osc_sas_us_east_2',
<del> 'country': 'USA',
<del> 'instance_types': [
<del> 't1.micro',
<del> 'm1.small',
<del> 'm1.medium',
<del> 'm1.large',
<del> 'm1.xlarge',
<del> 'c1.medium',
<del> 'c1.xlarge',
<del> 'm2.xlarge',
<del> 'm2.2xlarge',
<del> 'm2.4xlarge',
<del> 'nv1.small',
<del> 'nv1.medium',
<del> 'nv1.large',
<del> 'nv1.xlarge',
<del> 'cc1.4xlarge',
<del> 'cc2.8xlarge',
<del> 'm3.xlarge',
<del> 'm3.2xlarge',
<del> 'p2.xlarge',
<del> 'p2.8xlarge',
<del> 'p2.16xlarge',
<del> 'cr1.8xlarge',
<del> 'os1.8xlarge'
<del> ]
<del> },
<del> 'us-east-2': {
<del> 'endpoint': 'fcu.us-east-2.outscale.com',
<del> 'api_name': 'osc_sas_us_east_2',
<del> 'country': 'USA',
<del> 'instance_types': [
<del> 't1.micro',
<del> 'm1.small',
<del> 'm1.medium',
<del> 'm1.large',
<del> 'm1.xlarge',
<del> 'c1.medium',
<del> 'c1.xlarge',
<del> 'm2.xlarge',
<del> 'm2.2xlarge',
<del> 'm2.4xlarge',
<del> 'nv1.small',
<del> 'nv1.medium',
<del> 'nv1.large',
<del> 'nv1.xlarge',
<del> 'cc1.4xlarge',
<del> 'cc2.8xlarge',
<del> 'm3.xlarge',
<del> 'm3.2xlarge',
<del> 'cr1.8xlarge',
<del> 'os1.8xlarge'
<del> ]
<del> },
<ide> 'us-east-2': {
<ide> 'endpoint': 'fcu.us-east-2.outscale.com',
<ide> 'api_name': 'osc_sas_us_east_2',
| 1
|
Javascript
|
Javascript
|
fix race in test-http-big-proxy-responses.js
|
2219c64a0418d0a1604631b547a644a1ff7fe5aa
|
<ide><path>test/disabled/test-http-big-proxy-responses.js
<ide> var chargen = http.createServer(function (req, res) {
<ide> }
<ide> res.end();
<ide> });
<del>chargen.listen(9000);
<add>chargen.listen(9000, ready);
<ide>
<ide> // Proxy to the chargen server.
<ide> var proxy = http.createServer(function (req, res) {
<ide> var proxy = http.createServer(function (req, res) {
<ide>
<ide> proxy_req.end();
<ide> });
<del>proxy.listen(9001);
<add>proxy.listen(9001, ready);
<ide>
<ide> var done = false;
<ide>
<ide> function call_chargen(list) {
<ide> }
<ide> }
<ide>
<del>call_chargen([ 100, 1000, 10000, 100000, 1000000 ]);
<add>serversRunning = 0;
<add>function ready () {
<add> if (++serversRunning < 2) return;
<add> call_chargen([ 100, 1000, 10000, 100000, 1000000 ]);
<add>}
<ide>
<ide> process.addListener('exit', function () {
<ide> assert.ok(done);
| 1
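Aside on the preceding record: the race fix boils down to counting listen() callbacks before starting the test. A generic sketch, with placeholder ports and handler names:

```javascript
var http = require('http');

// Start several servers and invoke run() only after every one is listening,
// instead of assuming listen() has completed synchronously.
function startServers(handlers, ports, run) {
  var ready = 0;
  handlers.forEach(function (handler, i) {
    http.createServer(handler).listen(ports[i], function () {
      if (++ready === handlers.length) run(); // all ports are bound
    });
  });
}

// Hypothetical usage:
// startServers([chargenHandler, proxyHandler], [9000, 9001], runTest);
```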
|
Text
|
Text
|
fix initial state example for react.createclass
|
c9ec4bc4459b5f0cd8ca6f1ef5c5b1b27948fe98
|
<ide><path>docs/docs/05-reusable-components.md
<ide> With `React.createClass()`, you have to provide a separate `getInitialState` met
<ide> ```javascript
<ide> var Counter = React.createClass({
<ide> getInitialState: function() {
<del> return {count: props.initialCount};
<add> return {count: this.props.initialCount};
<ide> },
<ide> // ...
<ide> });
| 1
|
Text
|
Text
|
add arguments to test case titles.
|
f844ea3620878c04c1348d5e4de5a6b03afea759
|
<ide><path>curriculum/challenges/english/10-coding-interview-prep/rosetta-code/hofstadter-figure-figure-sequences.english.md
<ide> tests:
<ide> testString: assert(Number.isInteger(ffr(1)));
<ide> - text: <code>ffs</code> should return integer.
<ide> testString: assert(Number.isInteger(ffs(1)));
<del> - text: <code>ffr()</code> should return <code>69</code>
<add> - text: <code>ffr(10)</code> should return <code>69</code>
<ide> testString: assert.equal(ffr(ffrParamRes[0][0]), ffrParamRes[0][1]);
<del> - text: <code>ffr()</code> should return <code>1509</code>
<add> - text: <code>ffr(50)</code> should return <code>1509</code>
<ide> testString: assert.equal(ffr(ffrParamRes[1][0]), ffrParamRes[1][1]);
<del> - text: <code>ffr()</code> should return <code>5764</code>
<add> - text: <code>ffr(100)</code> should return <code>5764</code>
<ide> testString: assert.equal(ffr(ffrParamRes[2][0]), ffrParamRes[2][1]);
<del> - text: <code>ffr()</code> should return <code>526334</code>
<add> - text: <code>ffr(1000)</code> should return <code>526334</code>
<ide> testString: assert.equal(ffr(ffrParamRes[3][0]), ffrParamRes[3][1]);
<del> - text: <code>ffs()</code> should return <code>14</code>
<add> - text: <code>ffs(10)</code> should return <code>14</code>
<ide> testString: assert.equal(ffs(ffsParamRes[0][0]), ffsParamRes[0][1]);
<del> - text: <code>ffs()</code> should return <code>59</code>
<add> - text: <code>ffs(50)</code> should return <code>59</code>
<ide> testString: assert.equal(ffs(ffsParamRes[1][0]), ffsParamRes[1][1]);
<del> - text: <code>ffs()</code> should return <code>112</code>
<add> - text: <code>ffs(100)</code> should return <code>112</code>
<ide> testString: assert.equal(ffs(ffsParamRes[2][0]), ffsParamRes[2][1]);
<del> - text: <code>ffs()</code> should return <code>1041</code>
<add> - text: <code>ffs(1000)</code> should return <code>1041</code>
<ide> testString: assert.equal(ffs(ffsParamRes[3][0]), ffsParamRes[3][1]);
<ide>
<ide> ```
| 1
|
Go
|
Go
|
use strconv instead of fmt.sprintf()
|
533ecb44b184d376cdbee6b8cac5ec039c870ab4
|
<ide><path>daemon/logger/awslogs/cloudwatchlogs_test.go
<ide> func TestCollectBatchWithDuplicateTimestamps(t *testing.T) {
<ide> times := maximumLogEventsPerPut
<ide> timestamp := time.Now()
<ide> for i := 0; i < times; i++ {
<del> line := fmt.Sprintf("%d", i)
<add> line := strconv.Itoa(i)
<ide> if i%2 == 0 {
<ide> timestamp.Add(1 * time.Nanosecond)
<ide> }
<ide><path>daemon/logger/jsonfilelog/jsonfilelog_test.go
<ide> import (
<ide> "bytes"
<ide> "compress/gzip"
<ide> "encoding/json"
<del> "fmt"
<ide> "io"
<ide> "os"
<ide> "path/filepath"
<ide> func BenchmarkJSONFileLoggerLog(b *testing.B) {
<ide> bytes.Repeat([]byte("a long string"), 100),
<ide> bytes.Repeat([]byte("a really long string"), 10000),
<ide> } {
<del> b.Run(fmt.Sprintf("%d", len(data)), func(b *testing.B) {
<add> b.Run(strconv.Itoa(len(data)), func(b *testing.B) {
<ide> testMsg := &logger.Message{
<ide> Line: data,
<ide> Source: "stderr",
<ide><path>daemon/logger/local/local_test.go
<ide> package local
<ide> import (
<ide> "bytes"
<ide> "encoding/binary"
<del> "fmt"
<ide> "io"
<ide> "os"
<ide> "path/filepath"
<add> "strconv"
<ide> "testing"
<ide> "time"
<ide>
<ide> func BenchmarkLogWrite(b *testing.B) {
<ide> bytes.Repeat([]byte("a long string"), 100),
<ide> bytes.Repeat([]byte("a really long string"), 10000),
<ide> } {
<del> b.Run(fmt.Sprintf("%d", len(data)), func(b *testing.B) {
<add> b.Run(strconv.Itoa(len(data)), func(b *testing.B) {
<ide> entry := &logdriver.LogEntry{Line: data, Source: "stdout", TimeNano: t.UnixNano()}
<ide> b.SetBytes(int64(entry.Size() + encodeBinaryLen + encodeBinaryLen))
<ide> b.ResetTimer()
<ide><path>daemon/logger/splunk/splunk_test.go
<ide> import (
<ide> "fmt"
<ide> "net/http"
<ide> "runtime"
<add> "strconv"
<ide> "testing"
<ide> "time"
<ide>
<ide> func TestBatching(t *testing.T) {
<ide> }
<ide>
<ide> for i := 0; i < defaultStreamChannelSize*4; i++ {
<del> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<add> if err := loggerDriver.Log(&logger.Message{Line: []byte(strconv.Itoa(i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<ide> t.Fatal(err)
<ide> }
<ide> }
<ide> func TestBatching(t *testing.T) {
<ide> if event, err := message.EventAsMap(); err != nil {
<ide> t.Fatal(err)
<ide> } else {
<del> if event["line"] != fmt.Sprintf("%d", i) {
<add> if event["line"] != strconv.Itoa(i) {
<ide> t.Fatalf("Unexpected event in message %v", event)
<ide> }
<ide> }
<ide> func TestFrequency(t *testing.T) {
<ide> }
<ide>
<ide> for i := 0; i < 10; i++ {
<del> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<add> if err := loggerDriver.Log(&logger.Message{Line: []byte(strconv.Itoa(i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<ide> t.Fatal(err)
<ide> }
<ide> time.Sleep(15 * time.Millisecond)
<ide> func TestFrequency(t *testing.T) {
<ide> if event, err := message.EventAsMap(); err != nil {
<ide> t.Fatal(err)
<ide> } else {
<del> if event["line"] != fmt.Sprintf("%d", i) {
<add> if event["line"] != strconv.Itoa(i) {
<ide> t.Fatalf("Unexpected event in message %v", event)
<ide> }
<ide> }
<ide> func TestOneMessagePerRequest(t *testing.T) {
<ide> }
<ide>
<ide> for i := 0; i < 10; i++ {
<del> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<add> if err := loggerDriver.Log(&logger.Message{Line: []byte(strconv.Itoa(i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<ide> t.Fatal(err)
<ide> }
<ide> }
<ide> func TestOneMessagePerRequest(t *testing.T) {
<ide> if event, err := message.EventAsMap(); err != nil {
<ide> t.Fatal(err)
<ide> } else {
<del> if event["line"] != fmt.Sprintf("%d", i) {
<add> if event["line"] != strconv.Itoa(i) {
<ide> t.Fatalf("Unexpected event in message %v", event)
<ide> }
<ide> }
<ide> func TestSkipVerify(t *testing.T) {
<ide> }
<ide>
<ide> for i := 0; i < defaultStreamChannelSize*2; i++ {
<del> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<add> if err := loggerDriver.Log(&logger.Message{Line: []byte(strconv.Itoa(i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<ide> t.Fatal(err)
<ide> }
<ide> }
<ide> func TestSkipVerify(t *testing.T) {
<ide> hec.simulateErr(false)
<ide>
<ide> for i := defaultStreamChannelSize * 2; i < defaultStreamChannelSize*4; i++ {
<del> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<add> if err := loggerDriver.Log(&logger.Message{Line: []byte(strconv.Itoa(i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<ide> t.Fatal(err)
<ide> }
<ide> }
<ide> func TestSkipVerify(t *testing.T) {
<ide> if event, err := message.EventAsMap(); err != nil {
<ide> t.Fatal(err)
<ide> } else {
<del> if event["line"] != fmt.Sprintf("%d", i) {
<add> if event["line"] != strconv.Itoa(i) {
<ide> t.Fatalf("Unexpected event in message %v", event)
<ide> }
<ide> }
<ide> func TestBufferMaximum(t *testing.T) {
<ide> }
<ide>
<ide> for i := 0; i < 11; i++ {
<del> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<add> if err := loggerDriver.Log(&logger.Message{Line: []byte(strconv.Itoa(i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<ide> t.Fatal(err)
<ide> }
<ide> }
<ide> func TestServerAlwaysDown(t *testing.T) {
<ide> }
<ide>
<ide> for i := 0; i < 5; i++ {
<del> if err := loggerDriver.Log(&logger.Message{Line: []byte(fmt.Sprintf("%d", i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<add> if err := loggerDriver.Log(&logger.Message{Line: []byte(strconv.Itoa(i)), Source: "stdout", Timestamp: time.Now()}); err != nil {
<ide> t.Fatal(err)
<ide> }
<ide> }
| 4
|
Text
|
Text
|
bring releases.md up to date
|
f1653cc9b80b521ef5b129537ce1df9098bbdc1b
|
<ide><path>doc/releases.md
<ide> Notes:
<ide> - Dates listed below as _"YYYY-MM-DD"_ should be the date of the release **as UTC**. Use `date -u +'%Y-%m-%d'` to find out what this is.
<ide> - Version strings are listed below as _"vx.y.z"_. Substitute for the release version.
<ide>
<del>### 1. Ensure that HEAD Is Stable
<add>### 1. Cherry-picking from `master` and other branches
<ide>
<del>Run a **[node-test-pull-request](https://ci.nodejs.org/job/node-test-pull-request/)** test run to ensure that the build is stable and the HEAD commit is ready for release.
<add>Create a new branch named _"vx.y.z-proposal"_, or something similar. Using `git cherry-pick`, bring the appropriate commits into your new branch. To determine the relevant commits, use [`branch-diff`](https://github.com/rvagg/branch-diff) and [`changelog-maker`](https://github.com/rvagg/changelog-maker/) (both are available on npm and should be installed globally). These tools depend on our commit metadata, as well as the `semver-minor` and `semver-major` GitHub labels. One drawback is that when the `PR-URL` metadata is accidentally omitted from a commit, the commit will show up because it's unsure if it's a duplicate or not.
<ide>
<del>### 2. Produce a Nightly Build _(optional)_
<add>Carefully review the list of commits looking for errors (incorrect `PR-URL`, incorrect semver, etc.). Commits labeled as semver minor or semver major should only be cherry-picked when appropriate for the type of release being made. Previous release commits and version bumps do not need to be cherry-picked.
<ide>
<del>If there is a reason to produce a test release for the purpose of having others try out installers or specifics of builds, produce a nightly build using **[iojs+release](https://ci.nodejs.org/job/iojs+release/)** and wait for it to drop in <https://nodejs.org/download/nightly/>. Follow the directions and enter a proper length commit SHA, enter a date string, and select "nightly" for "disttype".
<add>### 2. Update `src/node_version.h`
<ide>
<del>This is particularly recommended if there has been recent work relating to the OS X or Windows installers as they are not tested in any way by CI.
<add>Set the version for the proposed release using the following macros, which are already defined in `src/node_version.h`:
<ide>
<del>### 3. Update the _CHANGELOG.md_
<add>```
<add>#define NODE_MAJOR_VERSION x
<add>#define NODE_MINOR_VERSION y
<add>#define NODE_PATCH_VERSION z
<add>```
<ide>
<del>Collect a formatted list of commits since the last release. Use [changelog-maker](https://github.com/rvagg/changelog-maker) (available from npm: `npm install changelog-maker -g`) to do this.
<add>Set the `NODE_VERSION_IS_RELEASE` macro value to `1`. This causes the build to be produced with a version string that does not have a trailing pre-release tag:
<add>
<add>```
<add>#define NODE_VERSION_IS_RELEASE 1
<add>```
<add>
<add>**Also consider whether to bump `NODE_MODULE_VERSION`**:
<add>
<add>This macro is used to signal an ABI version for native addons. It currently has two common uses in the community:
<add>
<add>* Determining what API to work against for compiling native addons, e.g. [NAN](https://github.com/rvagg/nan) uses it to form a compatibility-layer for much of what it wraps.
<add>* Determining the ABI for downloading pre-built binaries of native addons, e.g. [node-pre-gyp](https://github.com/mapbox/node-pre-gyp) uses this value as exposed via `process.versions.modules` to help determine the appropriate binary to download at install-time.
<add>
<add>The general rule is to bump this version when there are _breaking ABI_ changes and also if there are non-trivial API changes. The rules are not yet strictly defined, so if in doubt, please confer with someone that will have a more informed perspective, such as a member of the NAN team.
<add>
<add>**Note** that it is current TSC policy to bump major version when ABI changes. If you see a need to bump `NODE_MODULE_VERSION` then you should consult the TSC. Commits may need to be reverted or a major version bump may need to happen.
<add>
<add>### 3. Update `CHANGELOG.md`
<add>
<add>Collect a formatted list of commits since the last release. Use [`changelog-maker`](https://github.com/rvagg/changelog-maker) to do this.
<ide>
<ide> ```
<ide> $ changelog-maker --group
<ide> Note that changelog-maker counts commits since the last tag and if the last tag
<ide> $ changelog-maker --group --start-ref v2.3.1
<ide> ```
<ide>
<del>The _CHANGELOG.md_ entry should take the following form:
<add>The `CHANGELOG.md` entry should take the following form:
<ide>
<ide> ```
<del>## YYYY-MM-DD, Version x.y.z, @releaser
<add>## YYYY-MM-DD, Version x.y.z (Release Type), @releaser
<ide>
<ide> ### Notable changes
<ide>
<ide> See https://github.com/nodejs/node/labels/confirmed-bug for complete and current
<ide>
<ide> ### Commits
<ide>
<del>* Include the full list of commits since the last release here
<add>* Include the full list of commits since the last release here. Do not include "Working on X.Y.Z+1" commits.
<ide> ```
<ide>
<del>### 4. Update _src/node_version.h_
<add>The release type should be either Stable, LTS, or Maintenance, depending on the type of release being produced.
<ide>
<del>The following macros should already be set for the release since they will have been updated directly following the last release. They shouldn't require changing:
<add>### 4. Create Release Commit
<ide>
<del>```
<del>#define NODE_MAJOR_VERSION x
<del>#define NODE_MINOR_VERSION y
<del>#define NODE_PATCH_VERSION z
<del>```
<del>
<del>However, the `NODE_VERSION_IS_RELEASE` macro needs to be set to `1` for the build to be produced with a version string that does not have a trailing pre-release tag:
<add>The `CHANGELOG.md` and `src/node_version.h` changes should be the final commit that will be tagged for the release. When committing these to git, use the following message format:
<ide>
<ide> ```
<del>#define NODE_VERSION_IS_RELEASE 1
<del>```
<add>YYYY-MM-DD, Version x.y.z (Release Type)
<ide>
<del>**Also consider whether to bump `NODE_MODULE_VERSION`**:
<add>Notable changes:
<ide>
<del>This macro is used to signal an ABI version for native addons. It currently has two common uses in the community:
<add>* Copy the notable changes list here, reformatted for plain-text
<add>```
<ide>
<del>* Determining what API to work against for compiling native addons, e.g. [NAN](https://github.com/rvagg/nan) uses it to form a compatibility-layer for much of what it wraps.
<del>* Determining the ABI for downloading pre-built binaries of native addons, e.g. [node-pre-gyp](https://github.com/mapbox/node-pre-gyp) uses this value as exposed via `process.versions.modules` to help determine the appropriate binary to download at install-time.
<add>### 5. Propose Release on GitHub
<ide>
<del>The general rule is to bump this version when there are _breaking ABI_ changes and also if there are non-trivial API changes. The rules are not yet strictly defined, so if in doubt, please confer with someone that will have a more informed perspective, such as a member of the NAN team.
<add>Push the release branch to `nodejs/node`, not to your own fork. This allows release branches to be passed more easily between members of the release team if necessary.
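<add>
<add>Assuming the `nodejs/node` repository is configured as the `upstream` remote (adjust the remote and branch names to match your setup), pushing the proposal branch looks like:
<add>
<add>```
<add>$ git push upstream v5.3.0-proposal
<add>```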
<ide>
<del>**Note** that it is current TSC policy to bump major version when ABI changes. If you see a need to bump `NODE_MODULE_VERSION` then you should consult the TSC. Commits may need to be reverted or a major version bump may need to happen.
<add>Create a pull request targeting the correct release line. For example, a v5.3.0-proposal PR should target v5.x, not master. Paste the CHANGELOG modifications into the body of the PR so that collaborators can see what is changing. These PRs should be left open for at least 24 hours, and can be updated as new commits land.
<ide>
<del>### 5. Create Release Commit
<add>If you need any additional information about any of the commits, this PR is a good place to @-mention the relevant contributors.
<ide>
<del>The _CHANGELOG.md_ and _src/node_version.h_ changes should be the final commit that will be tagged for the release.
<add>This is also a good time to update the release commit to include `PR-URL` metadata.
<ide>
<del>When committing these to git, use the following message format:
<add>### 6. Ensure that the Release Branch is Stable
<ide>
<del>```
<del>YYYY-MM-DD node.js vx.y.z Release
<add>Run a **[node-test-pull-request](https://ci.nodejs.org/job/node-test-pull-request/)** test run to ensure that the build is stable and the HEAD commit is ready for release.
<ide>
<del>Notable changes:
<add>Perform some smoke-testing. We have [citgm](https://github.com/nodejs/citgm) for this. You can also manually test important modules from the ecosystem. Remember that node-gyp and npm both take a `--nodedir` flag to point to your local repository so that you can test unreleased versions without needing node-gyp to download headers for you.
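<add>
<add>For example, to build a native module against your local checkout rather than downloaded headers (the paths here are placeholders), something along these lines should work:
<add>
<add>```
<add>$ cd /path/to/some-native-module
<add>$ npm install --nodedir=/path/to/your/node/checkout
<add>```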
<ide>
<del>* Copy the notable changes list here, reformatted for plain-text
<del>```
<add>### 7. Produce a Nightly Build _(optional)_
<ide>
<del>### 6. Push to GitHub
<add>If there is a reason to produce a test release for the purpose of having others try out installers or specifics of builds, produce a nightly build using **[iojs+release](https://ci.nodejs.org/job/iojs+release/)** and wait for it to drop in <https://nodejs.org/download/nightly/>. Follow the directions and enter a proper length commit SHA, enter a date string, and select "nightly" for "disttype".
<ide>
<del>Note that it is not essential that the release builds be created from the Node.js repository. They may be created from your own fork if you desire. It is preferable, but not essential, that the commits remain the same between that used to build and the tagged commit in the Node.js repository.
<add>This is particularly recommended if there has been recent work relating to the OS X or Windows installers as they are not tested in any way by CI.
<ide>
<del>### 7. Produce Release Builds
<add>### 8. Produce Release Builds
<ide>
<ide> Use **[iojs+release](https://ci.nodejs.org/job/iojs+release/)** to produce release artifacts. Enter the commit that you want to build from and select "release" for "disttype".
<ide>
<ide> All release slaves should achieve "SUCCESS" (and be green, not red). A release w
<ide>
<ide> You can rebuild the release as many times as you need prior to promoting them if you encounter problems.
<ide>
<del>Note that you do not have to wait for the ARM builds if they take longer than the others. It is only necessary to have the main Linux (x64 and x86), OS X .pkg and .tar.gz, Windows (x64 and x86) .msi and .exe, source, headers and docs (both produced currently by an OS X slave). That is, the slaves with "arm" in their name don't need to have finished to progress to the next step. However, **if you promote builds _before_ ARM builds have finished, you must repeat the promotion step for the ARM builds when they are ready**.
<add>If you hit an error on Windows and need to start again, be aware that you'll get an immediate failure unless you wait up to 2 minutes for the linker from the previous job to stop. That is, if a build fails after it has started compiling, that slave will still have a linker process running for another couple of minutes, which prevents Jenkins from clearing the workspace to start a new build. This isn't a big deal, just a hassle, because it will result in another failed build if you start again too soon.
<add>
<add>ARMv7 takes the longest to compile. Unfortunately ccache isn't as effective on release builds, probably because of the additional macro settings that go into a release build and invalidate previous builds. Also, most of the release build machines are separate from the test build machines, so they get no benefit from ongoing compiles between releases. Expect around 1.5 hours for the ARMv7 builder to complete, and you should normally wait for it to finish. It is possible to rush a release out and add the remaining builds later, but we normally provide ARMv7 from the initial promotion.
<ide>
<del>### 8. Tag and Sign the Release Commit
<add>You do not have to wait for the ARMv6 / Raspberry Pi builds if they take longer than the others. It is only necessary to have the main Linux (x64 and x86), OS X .pkg and .tar.gz, Windows (x64 and x86) .msi and .exe, source, headers and docs (both currently produced by an OS X slave). **If you promote builds _before_ ARM builds have finished, you must repeat the promotion step for the ARM builds when they are ready**.
<ide>
<del>Tag the release as <b><code>vx.y.z</code></b> and sign **using the same GPG key that will be used to sign SHASUMS256.txt**.
<add>### 9. Test the Build
<add>
<add>Jenkins collects the artifacts from the builds, allowing you to download and install the new build. Make sure that the build appears correct. Check the version numbers, and perform some basic checks to confirm that all is well with the build before moving forward.
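<add>
<add>A few quick manual checks along these lines (the version shown is only an example) will catch obvious problems:
<add>
<add>```
<add>$ ./node --version
<add>v5.3.0
<add>$ ./node -p "process.versions"
<add>$ ./node -e "require('http'); console.log('ok')"
<add>```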
<add>
<add>### 10. Tag and Sign the Release Commit
<add>
<add>Once you have produced builds that you're happy with, create a new tag. By waiting until this stage to create tags, you can discard a proposed release if something goes wrong or additional commits are required. Once you have created a tag and pushed it to GitHub, you ***should not*** delete and re-tag. If you make a mistake after tagging then you'll have to version-bump and start again and count that tag/version as lost.
<add>
<add>Tag summaries have a predictable format; to see one, inspect a recent tag with `git tag -v v5.3.0`. The message should look something like `2015-12-16 Node.js v5.3.0 (Stable) Release`.
<add>
<add>Create a tag using the following command:
<ide>
<ide> ```
<del>$ git tag <vx.y.z> <commit-sha> -sm 'YYYY-MM-DD node.js vz.y.x Release'
<add>$ git tag <vx.y.z> <commit-sha> -sm 'YYYY-MM-DD Node.js vx.y.z (Release Type) Release'
<ide> ```
<ide>
<del>Push the tag to GitHub.
<add>The tag **must** be signed using the GPG key that's listed for you on the project README.
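<add>
<add>If you are unsure which key git will use for the `-s` flag, check your configuration and your available secret keys first (the key ID shown is a placeholder):
<add>
<add>```
<add>$ git config --get user.signingkey
<add>DEADBEEF
<add>$ gpg --list-secret-keys
<add>```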
<add>
<add>Push the tag to the repo before you promote the builds. If you haven't pushed your tag first, then build promotion won't work properly. Push the tag using the following command:
<ide>
<ide> ```
<del>$ git push origin <vx.y.z>
<add>$ git push <remote> <vx.y.z>
<ide> ```
<ide>
<del>### 9. Set Up For the Next Release
<add>### 11. Set Up For the Next Release
<ide>
<del>Edit _src/node_version.h_ again and:
<add>On the release proposal branch, edit `src/node_version.h` again and:
<ide>
<ide> * Increment `NODE_PATCH_VERSION` by one
<ide> * Change `NODE_VERSION_IS_RELEASE` back to `0`
<ide>
<del>Commit this change with:
<add>Commit this change with the following commit message format:
<ide>
<ide> ```
<del>$ git commit -am 'Working on vx.y.z' # where 'z' is the incremented patch number
<add>Working on vx.y.z # where 'z' is the incremented patch number
<add>
<add>PR-URL: <full URL to your release proposal PR>
<ide> ```
<ide>
<ide> This sets up the branch so that nightly builds are produced with the next version number _and_ a pre-release tag.
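<add>
<add>As a rough sanity check (the exact suffix may differ), a local build from this state should now report a pre-release version string rather than a bare version:
<add>
<add>```
<add>$ ./node --version
<add>v5.3.1-pre
<add>```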
<ide>
<del>### 10. Promote and Sign the Release Builds
<add>Merge your release branch into the stable branch that you are releasing from (not master).
<ide>
<del>**It is important that the same individual who signed the release tag be the one to promote the builds as the SHASUMS256.txt file needs to be signed with the same GPG key!**
<add>Cherry-pick the release commit to `master`. After cherry-picking, edit `src/node_version.h` to ensure the version macros contain whatever values were previously on `master`. `NODE_VERSION_IS_RELEASE` should be `0`.
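<add>
<add>Roughly, and assuming the release tag is `v5.3.0` (conflicts, if any, must be resolved by hand):
<add>
<add>```
<add>$ git checkout master
<add>$ git cherry-pick v5.3.0
<add>$ $EDITOR src/node_version.h   # restore master's version macros and set NODE_VERSION_IS_RELEASE back to 0
<add>$ git add src/node_version.h
<add>$ git commit --amend --no-edit
<add>```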
<ide>
<del>When you are confident that the build slaves have properly produced usable artifacts and uploaded them to the web server, you can promote them to release status. This is done by interacting with the web server via the _dist_ user.
<add>### 12. Promote and Sign the Release Builds
<ide>
<del>The _tools/release.sh_ script should be used to promote and sign the build. When run, it will perform the following actions:
<add>**It is important that the same individual who signed the release tag be the one to promote the builds as the SHASUMS256.txt file needs to be signed with the same GPG key!**
<add>
<add>Use `tools/release.sh` to promote and sign the build. When run, it will perform the following actions:
<ide>
<ide> **a.** Select a GPG key from your private keys. It will use a command similar to: `gpg --list-secret-keys` to list your keys. If you don't have any keys, it will bail. (Why are you releasing? Your tag should be signed!) If you have only one key, it will use that. If you have more than one key it will ask you to select one from the list. Be sure to use the same key that you signed your git tag with.
<ide>
<ide> The _tools/release.sh_ script should be used to promote and sign the build. When
<ide>
<ide> **f.** Output an ASCII armored version of your public GPG key using a command similar to: `gpg --default-key YOURKEY --armor --export --output /path/to/SHASUMS256.txt.gpg`. This does not require your password and is mainly a convenience for users, although not the recommended way to get a copy of your key.
<ide>
<del>**g.** Upload the SHASUMS256.txt\* files back to the server into the release directory.
<add>**g.** Upload the SHASUMS256.txt files back to the server into the release directory.
<ide>
<del>If you didn't wait for ARM builds in the previous step before promoting the release, you should re-run _tools/release.sh_ after the ARM builds have finished. That will move the ARM artifacts into the correct location. You will be prompted to re-sign SHASUMS256.txt.
<add>If you didn't wait for ARM builds in the previous step before promoting the release, you should re-run `tools/release.sh` after the ARM builds have finished. That will move the ARM artifacts into the correct location. You will be prompted to re-sign SHASUMS256.txt.
<ide>
<del>### 11. Check the Release
<add>### 13. Check the Release
<ide>
<ide> Your release should be available at <https://nodejs.org/dist/vx.y.z/> and <https://nodejs.org/dist/latest/>. Check that the appropriate files are in place. You may want to check that the binaries are working as appropriate and have the right internal version strings. Check that the API docs are available at <https://nodejs.org/api/>. Check that the release catalog files are correct at <https://nodejs.org/dist/index.tab> and <https://nodejs.org/dist/index.json>.
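<add>
<add>One way to spot-check the catalog and the signed checksums from the command line (the version is a placeholder; confirm the exact filenames against the release directory):
<add>
<add>```
<add>$ curl -s https://nodejs.org/dist/index.json | head
<add>$ curl -sO https://nodejs.org/dist/vx.y.z/SHASUMS256.txt.asc
<add>$ gpg --verify SHASUMS256.txt.asc
<add>```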
<ide>
<del>### 12. Announce
<add>### 14. Create a Blog Post
<add>
<add>There is an automatic build that is kicked off when you promote new builds, so within a few minutes nodejs.org will be listing your new version as the latest release. However, the blog post is not yet fully automatic.
<add>
<add>Create a new blog post by running the [nodejs.org release-post.js script](https://github.com/nodejs/nodejs.org/blob/master/scripts/release-post.js). This script will use the promoted builds and changelog to generate the post. Run `npm serve` to preview the post locally before pushing to the [nodejs.org](https://github.com/nodejs/nodejs.org) repo.
<add>
<add>* You can add a short blurb just under the main heading if you want to say something important; otherwise, the text should be publication-ready.
<add>* The links to the download files won't be complete unless you waited for the ARMv6 builds. Any downloads that are missing will have `*Coming soon*` next to them. It's your responsibility to manually update these later when you have the outstanding builds.
<add>* The SHASUMS256.txt.asc content is at the bottom of the post. When you update the list of tarballs you'll need to copy/paste the new contents of this file to reflect those changes.
<add>* Always use pull requests on the nodejs.org repo. Be respectful of that working group, but you shouldn't have to wait for PR sign-off. Opening a PR and merging it immediately _should_ be fine.
<add>* Changes to `master` on the nodejs.org repo will trigger a new build of nodejs.org, so your changes should appear a few minutes after pushing.
<add>
<add>### 15. Announce
<add>
<add>The nodejs.org website will automatically rebuild and include the new version. You simply need to announce the build, preferably via Twitter with a message such as:
<add>
<add>> v5.3.0 of @nodejs is out @ https://nodejs.org/dist/latest/ changelog @ https://github.com/nodejs/node/blob/master/CHANGELOG.md#2015-12-16-version-530-stable-cjihrig … something here about notable changes
<ide>
<del>The nodejs.org website will automatically rebuild and include the new version. You simply need to announce the build, preferably via twitter with a message such as:
<add>### 16. Cleanup
<ide>
<del>> v2.3.2 of @official_iojs is out @ https://nodejs.org/dist/latest/ changelog @ https://github.com/nodejs/node/blob/master/CHANGELOG.md#2015-07-01-version-232-rvagg … something here about notable changes
<add>Close your release proposal PR and remove the proposal branch.
<ide>
<del>### 13. Celebrate
<add>### 17. Celebrate
<ide>
<ide> _In whatever form you do this..._
| 1
|
PHP
|
PHP
|
fix custom cast
|
62b1f8d7af46cc288ab95a5ecf087d1f736f4922
|
<ide><path>src/Illuminate/Database/Eloquent/Concerns/HasAttributes.php
<ide> protected function getClassCastableAttributeValue($key)
<ide> $caster = $this->resolveCasterClass($key);
<ide>
<ide> return $this->classCastCache[$key] = $caster instanceof CastsInboundAttributes
<del> ? $this->attributes[$key]
<add> ? ($this->attributes[$key] ?? null)
<ide> : $caster->get($this, $key, $this->attributes[$key] ?? null, $this->attributes);
<ide> }
<ide> }
<ide><path>tests/Database/DatabaseEloquentModelTest.php
<ide> use DateTimeInterface;
<ide> use Exception;
<ide> use Foo\Bar\EloquentModelNamespacedStub;
<add>use Illuminate\Contracts\Database\Eloquent\CastsInboundAttributes;
<ide> use Illuminate\Contracts\Events\Dispatcher;
<ide> use Illuminate\Database\Connection;
<ide> use Illuminate\Database\ConnectionResolverInterface;
<ide> public function testGetOriginalCastsAttributes()
<ide> $this->assertEquals(['foo' => 'bar'], $model->getOriginal('collectionAttribute')->toArray());
<ide> $this->assertEquals(['foo' => 'bar2'], $model->getAttribute('collectionAttribute')->toArray());
<ide> }
<add>
<add> public function testUnsavedModel()
<add> {
<add> $user = new UnsavedModel;
<add> $user->name = null;
<add>
<add> $this->assertNull($user->name);
<add> }
<ide> }
<ide>
<ide> class EloquentTestObserverStub
<ide> class EloquentModelWithUpdatedAtNull extends Model
<ide> protected $table = 'stub';
<ide> const UPDATED_AT = null;
<ide> }
<add>
<add>class UnsavedModel extends Model
<add>{
<add> protected $casts = ['name' => Uppercase::class];
<add>}
<add>
<add>class Uppercase implements CastsInboundAttributes
<add>{
<add> public function set($model, string $key, $value, array $attributes)
<add> {
<add> return is_string($value) ? strtoupper($value) : $value;
<add> }
<add>}
| 2
|
Python
|
Python
|
remove unused arguments in example scripts
|
aa50fd196f16c693daa0d15f53272849819bc75b
|
<ide><path>examples/run_classifier.py
<ide> def main():
<ide> type=float,
<ide> help="Proportion of training to perform linear learning rate warmup for. "
<ide> "E.g., 0.1 = 10%% of training.")
<del> parser.add_argument("--save_checkpoints_steps",
<del> default=1000,
<del> type=int,
<del> help="How often to save the model checkpoint.")
<ide> parser.add_argument("--no_cuda",
<ide> default=False,
<ide> action='store_true',
<ide><path>examples/run_squad.py
<ide> def main():
<ide> parser.add_argument("--warmup_proportion", default=0.1, type=float,
<ide> help="Proportion of training to perform linear learning rate warmup for. E.g., 0.1 = 10% "
<ide> "of training.")
<del> parser.add_argument("--save_checkpoints_steps", default=1000, type=int,
<del> help="How often to save the model checkpoint.")
<del> parser.add_argument("--iterations_per_loop", default=1000, type=int,
<del> help="How many steps to make in each estimator call.")
<ide> parser.add_argument("--n_best_size", default=20, type=int,
<ide> help="The total number of n-best predictions to generate in the nbest_predictions.json "
<ide> "output file.")
| 2
|
Ruby
|
Ruby
|
add missing test for callbacks
|
f1a3e7ba01418ab4219eb2cfce8bc1f5c60acbca
|
<ide><path>actionpack/test/dispatch/callbacks_test.rb
<ide> def test_before_and_after_callbacks
<ide> dispatch
<ide> assert_equal 4, Foo.a
<ide> assert_equal 4, Foo.b
<add>
<add> dispatch do |env|
<add> raise "error"
<add> end rescue nil
<add> assert_equal 6, Foo.a
<add> assert_equal 6, Foo.b
<ide> end
<ide>
<ide> def test_to_prepare_and_cleanup_delegation
<ide> def test_to_prepare_and_cleanup_delegation
<ide> private
<ide>
<ide> def dispatch(&block)
<del> @dispatcher ||= ActionDispatch::Callbacks.new(block || DummyApp.new)
<del> @dispatcher.call({'rack.input' => StringIO.new('')})
<add> ActionDispatch::Callbacks.new(block || DummyApp.new).call(
<add> {'rack.input' => StringIO.new('')}
<add> )
<ide> end
<ide>
<ide> end
| 1
|
PHP
|
PHP
|
add basic querystring support
|
de00c98f8a733fb09f5a44b37953d901bf570a19
|
<ide><path>lib/Cake/Network/Http/Client.php
<ide> public function config($config = null) {
<ide> */
<ide> public function get($url, $data = [], $options = []) {
<ide> $options = $this->_mergeOptions($options);
<del> $request = $this->_createRequest(Request::METHOD_GET, $url, $data, $options);
<add> $url = $this->buildUrl($url, $data, $options);
<add> $request = $this->_createRequest(
<add> Request::METHOD_GET,
<add> $url,
<add> [],
<add> $options
<add> );
<ide> return $this->send($request, $options);
<ide> }
<ide>
<ide> public function send(Request $request, $options = []) {
<ide> * Generate a URL based on the scoped client options.
<ide> *
<ide> * @param string $url Either a full URL or just the path.
<add> * @param array $query The query data for the URL.
<ide> * @param array $options The config options stored with Client::config()
<ide> * @return string A complete url with scheme, port, host, path.
<ide> */
<del> public function buildUrl($url, $options = []) {
<del> if (empty($options)) {
<add> public function buildUrl($url, $query = [], $options = []) {
<add> if (empty($options) && empty($query)) {
<ide> return $url;
<ide> }
<add> if ($query) {
<add> $url .= '?' . http_build_query($query);
<add> }
<ide> if (preg_match('#^https?://#', $url)) {
<ide> return $url;
<ide> }
<ide> public function buildUrl($url, $options = []) {
<ide> /**
<ide> * Creates a new request object based on the parameters.
<ide> *
<del> *
<add> * @param string $method HTTP method name.
<add> * @param string $url The url including query string.
<add> * @param mixed $data The request body content.
<add> * @param array $options The options to use. Contains auth, proxy etc.
<ide> * @return Cake\Network\Http\Request
<ide> */
<ide> protected function _createRequest($method, $url, $data, $options) {
<del> $url = $this->buildUrl($url, $options);
<ide> $request = new Request();
<ide> $request->method($method)
<ide> ->url($url)
<ide><path>lib/Cake/Test/TestCase/Network/Http/ClientTest.php
<ide> public static function urlProvider() {
<ide> [
<ide> 'http://example.com/test.html',
<ide> 'http://example.com/test.html',
<add> [],
<ide> null,
<ide> 'Null options'
<ide> ],
<ide> [
<ide> 'http://example.com/test.html',
<ide> 'http://example.com/test.html',
<ide> [],
<add> [],
<ide> 'Simple string'
<ide> ],
<ide> [
<ide> 'http://example.com/test.html',
<ide> '/test.html',
<add> [],
<ide> ['host' => 'example.com'],
<ide> 'host name option',
<ide> ],
<ide> [
<ide> 'https://example.com/test.html',
<ide> '/test.html',
<add> [],
<ide> ['host' => 'example.com', 'scheme' => 'https'],
<ide> 'HTTPS',
<ide> ],
<ide> [
<ide> 'http://example.com:8080/test.html',
<ide> '/test.html',
<add> [],
<ide> ['host' => 'example.com', 'port' => '8080'],
<ide> 'Non standard port',
<ide> ],
<ide> [
<ide> 'http://example.com/test.html',
<ide> '/test.html',
<add> [],
<ide> ['host' => 'example.com', 'port' => '80'],
<ide> 'standard port, does not display'
<ide> ],
<ide> [
<ide> 'https://example.com/test.html',
<ide> '/test.html',
<add> [],
<ide> ['host' => 'example.com', 'scheme' => 'https', 'port' => '443'],
<ide> 'standard port, does not display'
<ide> ],
<ide> [
<ide> 'http://example.com/test.html',
<ide> 'http://example.com/test.html',
<add> [],
<ide> ['host' => 'example.com', 'scheme' => 'https'],
<ide> 'options do not duplicate'
<ide> ],
<add> [
<add> 'http://example.com/search?q=hi+there&cat%5Bid%5D%5B0%5D=2&cat%5Bid%5D%5B1%5D=3',
<add> 'http://example.com/search',
<add> ['q' => 'hi there', 'cat' => ['id' => [2, 3]]],
<add> [],
<add> 'query string data.'
<add> ],
<ide> ];
<ide> }
<ide>
<ide> /**
<ide> * @dataProvider urlProvider
<ide> */
<del> public function testBuildUrl($expected, $url, $opts) {
<add> public function testBuildUrl($expected, $url, $query, $opts) {
<ide> $http = new Client();
<ide>
<del> $result = $http->buildUrl($url, $opts);
<add> $result = $http->buildUrl($url, $query, $opts);
<ide> $this->assertEquals($expected, $result);
<ide> }
<ide>
<ide> public function testGetSimpleWithHeadersAndCookies() {
<ide> $this->assertSame($result, $response);
<ide> }
<ide>
<add>/**
<add> * test get request with querystring data
<add> *
<add> * @return void
<add> */
<add> public function testGetQuerystring() {
<add> $response = new Response();
<add>
<add> $mock = $this->getMock('Cake\Network\Http\Adapter\Stream', ['send']);
<add> $mock->expects($this->once())
<add> ->method('send')
<add> ->with($this->logicalAnd(
<add> $this->isInstanceOf('Cake\Network\Http\Request'),
<add> $this->attributeEqualTo('_url', 'http://cakephp.org/search?q=hi+there&Category%5Bid%5D%5B0%5D=2&Category%5Bid%5D%5B1%5D=3')
<add> ))
<add> ->will($this->returnValue($response));
<add>
<add> $http = new Client([
<add> 'host' => 'cakephp.org',
<add> 'adapter' => $mock
<add> ]);
<add> $result = $http->get('/search', [
<add> 'q' => 'hi there',
<add> 'Category' => ['id' => [2, 3]]
<add> ]);
<add> $this->assertSame($result, $response);
<add> }
<ide> }
| 2
|
Text
|
Text
|
improve python version of merge sort.
|
01ae4e25a173dfda78ba77978ba8e18167416686
|
<ide><path>guide/english/algorithms/sorting-algorithms/merge-sort/index.md
<ide> void mergesort(int A[],int size_a,int B[],int size_b,int C[])
<ide> ### Implementation in Python
<ide>
<ide> ```python
<del>temp = None
<del>def merge(arr, left, right):
<del> global temp, inversions
<del> mid = (left + right) // 2
<del> for i in range(left, right + 1):
<del> temp[i] = arr[i]
<del>
<del> k, L, R = left, left, mid + 1
<del> while L <= mid and R <= right:
<del> if temp[L] <= temp[R]:
<del> arr[k] = temp[L]
<del> L += 1
<del> else:
<del> arr[k] = temp[R]
<del> R += 1
<del> k += 1
<del>
<del> while L <= mid:
<del> arr[k] = temp[L]
<del> L += 1
<del> k += 1
<del>
<del> while R <= right:
<del> arr[k] = temp[R]
<del> R += 1
<del> k += 1
<del>
<del>def merge_sort(arr, left, right):
<del> if left >= right:
<del> return
<del>
<del> mid = (left + right) // 2
<del> merge_sort(arr, left, mid)
<del> merge_sort(arr, mid + 1, right)
<del> merge(arr, left, right)
<add>def merge(left,right,compare):
<add> result = []
<add> i,j = 0,0
<add> while (i < len(left) and j < len(right)):
<add> if compare(left[i],right[j]):
<add> result.append(left[i])
<add> i += 1
<add> else:
<add> result.append(right[j])
<add> j += 1
<add> while (i < len(left)):
<add> result.append(left[i])
<add> i += 1
<add> while (j < len(right)):
<add> result.append(right[j])
<add> j += 1
<add> return result
<add>
<add>def merge_sort(arr, compare = lambda x, y: x < y):
<add>    # A lambda comparator is used so the same routine can sort in either increasing or decreasing order.
<add>    # By default the array is sorted in increasing order.
<add> if len(arr) < 2:
<add> return arr[:]
<add> else:
<add> middle = len(arr) // 2
<add> left = merge_sort(arr[:middle], compare)
<add> right = merge_sort(arr[middle:], compare)
<add> return merge(left, right, compare)
<ide>
<del>arr = [1,6,3,1,8,4,2,9,3]
<del>temp = [None for _ in range(len(arr))]
<del>merge_sort(arr, 0, len(arr) - 1)
<del>print(arr, inversions)
<add>arr = [2,1,4,5,3]
<add>print(merge_sort(arr))
<ide> ```
<ide>
<ide> ### Implementation in Java
| 1
|
Python
|
Python
|
fix tf template
|
161a6461db3c673672edd5b994848b0d50db1b67
|
<ide><path>templates/adding_a_new_model/cookiecutter-template-{{cookiecutter.modelname}}/modeling_tf_{{cookiecutter.lowercase_modelname}}.py
<ide> class TF{{cookiecutter.camelcase_modelname}}Intermediate(tf.keras.layers.Layer):
<ide> def __init__(self, config, **kwargs):
<ide> super().__init__(**kwargs)
<ide>
<del> self.dense = tf.keras.layers.Dense(
<del> config.intermediate_size, kernel_initializer=get_initializer(config.initializer_range), name="dense"
<add> self.dense = tf.keras.layers.experimental.EinsumDense(
<add> equation="abc,cd->abd",
<add> output_shape=(None, config.intermediate_size),
<add> bias_axes="d",
<add> kernel_initializer=get_initializer(initializer_range=config.initializer_range),
<add> name="dense",
<ide> )
<ide>
<ide> if isinstance(config.hidden_act, str):
<del> self.intermediate_act_fn = get_tf_activation(config.hidden_act)
<add> self.intermediate_act_fn = get_tf_activation(activation_string=config.hidden_act)
<ide> else:
<ide> self.intermediate_act_fn = config.hidden_act
<ide>
<ide> def call(self, hidden_states):
<del> hidden_states = self.dense(hidden_states)
<del> hidden_states = self.intermediate_act_fn(hidden_states)
<add> hidden_states = self.dense(inputs=hidden_states)
<add> hidden_states = self.intermediate_act_fn(inputs=hidden_states)
<ide>
<ide> return hidden_states
<ide>
<ide> class TF{{cookiecutter.camelcase_modelname}}Output(tf.keras.layers.Layer):
<ide> def __init__(self, config, **kwargs):
<ide> super().__init__(**kwargs)
<ide>
<del> self.dense = tf.keras.layers.Dense(
<del> config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), name="dense"
<add> self.dense = tf.keras.layers.experimental.EinsumDense(
<add> equation="abc,cd->abd",
<add> bias_axes="d",
<add> output_shape=(None, config.hidden_size),
<add> kernel_initializer=get_initializer(config.initializer_range),
<add> name="dense",
<ide> )
<ide> self.LayerNorm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name="LayerNorm")
<del> self.dropout = tf.keras.layers.Dropout(config.hidden_dropout_prob)
<add> self.dropout = tf.keras.layers.Dropout(rate=config.hidden_dropout_prob)
<ide>
<ide> def call(self, hidden_states, input_tensor, training=False):
<del> hidden_states = self.dense(hidden_states)
<del> hidden_states = self.dropout(hidden_states, training=training)
<del> hidden_states = self.LayerNorm(hidden_states + input_tensor)
<add> hidden_states = self.dense(inputs=hidden_states)
<add> hidden_states = self.dropout(inputs=hidden_states, training=training)
<add> hidden_states = self.LayerNorm(inputs=hidden_states + input_tensor)
<ide>
<ide> return hidden_states
<ide>
| 1
|
Javascript
|
Javascript
|
add duration.fn back
|
eeb83f8176febcc121ca85f63557558e12d13885
|
<ide><path>src/lib/duration/create.js
<ide> export function createDuration (input, key) {
<ide> return ret;
<ide> }
<ide>
<add>createDuration.fn = Duration.prototype;
<add>
<ide> function parseIso (inp, sign) {
<ide> // We'd normally use ~~inp for this, but unfortunately it also
<ide> // converts floats to ints.
<ide><path>src/test/moment/duration.js
<ide> test('JSON.stringify duration', function (assert) {
<ide>
<ide> assert.equal(JSON.stringify(d), '"' + d.toISOString() + '"', 'JSON.stringify on duration should return ISO string');
<ide> });
<add>
<add>test('duration plugins', function (assert) {
<add> var durationObject = moment.duration();
<add> moment.duration.fn.foo = function (arg) {
<add> assert.equal(this, durationObject);
<add> assert.equal(arg, 5);
<add> };
<add> durationObject.foo(5);
<add>});
<add>
| 2
|
Javascript
|
Javascript
|
fix mmd audio example
|
d9f437b84fa116a48908e20913cb15303802bb06
|
<ide><path>examples/js/loaders/MMDLoader.js
<ide> THREE.MMDAudioManager = function ( audio, listener, p ) {
<ide> this.currentTime = 0.0;
<ide> this.delayTime = params.delayTime !== undefined ? params.delayTime : 0.0;
<ide>
<del> this.audioDuration = this.audio.source.buffer.duration;
<add> this.audioDuration = this.audio.buffer.duration;
<ide> this.duration = this.audioDuration + this.delayTime;
<ide>
<ide> };
| 1
|
PHP
|
PHP
|
add configure timezone to boostrap
|
708d462bf980f85c31d9e6584ad8f22ce3fae41f
|
<ide><path>app/Config/core.php
<ide> */
<ide> //date_default_timezone_set('UTC');
<ide>
<add>/**
<add> * ‘Config.timezone’ is available, which you can set to the user’s timezone string.
<add> * If a method of the CakeTime class is called with its $timezone parameter as null and ‘Config.timezone’ is set,
<add> * then the value of ‘Config.timezone’ will be used. This feature allows you to set the user’s timezone just
<add> * once instead of passing it in each function call.
<add> */
<add> //Configure::write('Config.timezone', 'Europe/Paris');
<add>
<ide> /**
<ide> *
<ide> * Cache Engine Configuration
| 1
|
Text
|
Text
|
add changelog entry for 9d6e28
|
0185aae747676e636a52eb079a0a10a6f053fa2c
|
<ide><path>actionpack/CHANGELOG.md
<add>* Changed the default system test screenshot output from `inline` to `simple`.
<add>
<add> `inline` works well for iTerm2 but not everyone uses iTerm2. Some terminals like
<add> Terminal.app ignore the `inline` and output the path to the file since it can't
<add> render the image. Other terminals, like those on Ubuntu, cannot handle the image
<add> inline, but also don't handle it gracefully and instead of outputting the file
<add> path, it dumps binary into the terminal.
<add>
<add> Commit 9d6e28 fixes this by changing the default for screenshot to be `simple`.
<add>
<add> *Eileen M. Uchitelle*
<add>
<ide> * Register most popular audio/video/font mime types supported by modern browsers.
<ide>
<ide> *Guillermo Iguaran*
| 1
|
PHP
|
PHP
|
apply fixes from styleci
|
8e618d00d880f75e31230891a6461379eb704180
|
<ide><path>src/Illuminate/Queue/Queue.php
<ide>
<ide> namespace Illuminate\Queue;
<ide>
<del>use Illuminate\Support\Arr;
<ide> use Illuminate\Container\Container;
<ide>
<ide> abstract class Queue
| 1
|
Ruby
|
Ruby
|
pass explicit sort to handle apfs
|
16ea29a641f1672baa1fead658de2b0ad4f308b2
|
<ide><path>Library/Homebrew/diagnostic.rb
<ide> def __check_linked_brew(f)
<ide> def check_for_linked_keg_only_brews
<ide> return unless HOMEBREW_CELLAR.exist?
<ide>
<del> linked = Formula.installed.select do |f|
<add> linked = Formula.installed.sort.select do |f|
<ide> f.keg_only? && __check_linked_brew(f)
<ide> end
<ide> return if linked.empty?
| 1
|