content_type stringclasses 8 values | main_lang stringclasses 7 values | message stringlengths 1 50 | sha stringlengths 40 40 | patch stringlengths 52 962k | file_count int64 1 300 |
|---|---|---|---|---|---|
Go | Go | move names to package api | dba271a42ab4841dbcf2e953491e9ee728cd8e16 | <add><path>api/names.go
<del><path>utils/names.go
<del>package utils
<add>package api
<ide>
<ide> import "regexp"
<ide>
<ide><path>daemon/checkpoint.go
<ide> import (
<ide> "os"
<ide> "path/filepath"
<ide>
<add> "github.com/docker/docker/api"
<ide> "github.com/docker/docker/api/types"
<del> "github.com/docker/docker/utils"
<ide> )
<ide>
<ide> var (
<del> validCheckpointNameChars = utils.RestrictedNameChars
<del> validCheckpointNamePattern = utils.RestrictedNamePattern
<add> validCheckpointNameChars = api.RestrictedNameChars
<add> validCheckpointNamePattern = api.RestrictedNamePattern
<ide> )
<ide>
<ide> // CheckpointCreate checkpoints the process running in a container with CRIU
<ide><path>daemon/names.go
<ide> import (
<ide> "strings"
<ide>
<ide> "github.com/Sirupsen/logrus"
<add> "github.com/docker/docker/api"
<ide> "github.com/docker/docker/container"
<ide> "github.com/docker/docker/pkg/namesgenerator"
<ide> "github.com/docker/docker/pkg/registrar"
<ide> "github.com/docker/docker/pkg/stringid"
<del> "github.com/docker/docker/utils"
<ide> )
<ide>
<ide> var (
<del> validContainerNameChars = utils.RestrictedNameChars
<del> validContainerNamePattern = utils.RestrictedNamePattern
<add> validContainerNameChars = api.RestrictedNameChars
<add> validContainerNamePattern = api.RestrictedNamePattern
<ide> )
<ide>
<ide> func (daemon *Daemon) registerName(container *container.Container) error {
<ide><path>volume/local/local.go
<ide> import (
<ide> "github.com/pkg/errors"
<ide>
<ide> "github.com/Sirupsen/logrus"
<add> "github.com/docker/docker/api"
<ide> "github.com/docker/docker/pkg/idtools"
<ide> "github.com/docker/docker/pkg/mount"
<del> "github.com/docker/docker/utils"
<ide> "github.com/docker/docker/volume"
<ide> )
<ide>
<ide> var (
<ide> // volumeNameRegex ensures the name assigned for the volume is valid.
<ide> // This name is used to create the bind directory, so we need to avoid characters that
<ide> // would make the path to escape the root directory.
<del> volumeNameRegex = utils.RestrictedNamePattern
<add> volumeNameRegex = api.RestrictedNamePattern
<ide> )
<ide>
<ide> type validationError struct {
<ide> func (r *Root) validateName(name string) error {
<ide> return validationError{fmt.Errorf("volume name is too short, names should be at least two alphanumeric characters")}
<ide> }
<ide> if !volumeNameRegex.MatchString(name) {
<del> return validationError{fmt.Errorf("%q includes invalid characters for a local volume name, only %q are allowed. If you intented to pass a host directory, use absolute path", name, utils.RestrictedNameChars)}
<add> return validationError{fmt.Errorf("%q includes invalid characters for a local volume name, only %q are allowed. If you intented to pass a host directory, use absolute path", name, api.RestrictedNameChars)}
<ide> }
<ide> return nil
<ide> } | 4 |
Javascript | Javascript | use index instead of debugid | 2d7a7adadefab7accc153feedd519a888e5f44ed | <ide><path>lib/optimize/RemoveParentModulesPlugin.js
<ide> function listToSet(list, chunk) {
<ide> var set = {};
<ide> list.forEach(function(module) {
<del> set[module.debugId] = {
<add> set[module._RemoveParentModulesPlugin_index] = {
<ide> module: module,
<ide> chunks: [chunk]
<ide> };
<ide> module.exports = RemoveParentModulesPlugin;
<ide> RemoveParentModulesPlugin.prototype.apply = function(compiler) {
<ide> compiler.plugin("compilation", function(compilation) {
<ide> compilation.plugin(["optimize-chunks-basic", "optimize-extracted-chunks-basic"], function(chunks) {
<del> var todo = chunks.slice().reverse();
<add> this.modules.forEach(function(module, idx) {
<add> module._RemoveParentModulesPlugin_index = idx;
<add> })
<add> var todo = chunks.slice();
<ide> todo.forEach(function(chunk, idx) {
<ide> chunk._RemoveParentModulesPlugin_processed = false;
<ide> chunk._RemoveParentModulesPlugin_availableModulesByChunk = {};
<ide> RemoveParentModulesPlugin.prototype.apply = function(compiler) {
<ide> chunk._RemoveParentModulesPlugin_processed = true;
<ide> continue;
<ide> }
<del> var set = listToSet(chunk.modules);
<add> var set = listToSet(chunk.modules, chunk);
<ide> if(availableModules)
<ide> set = mergeSets(set, availableModules);
<ide> chunk.chunks.forEach(function(child) {
<ide> RemoveParentModulesPlugin.prototype.apply = function(compiler) {
<ide> delete chunk._RemoveParentModulesPlugin_availableModules;
<ide> delete chunk._RemoveParentModulesPlugin_processed;
<ide> delete chunk._RemoveParentModulesPlugin_index;
<add> if(chunk.entry) return;
<ide> if(!availableModules) return;
<ide> chunk.modules.slice().forEach(function(module) {
<del> if(chunk.entry) return;
<del> var info = availableModules[module.debugId];
<add> var info = availableModules[module._RemoveParentModulesPlugin_index];
<ide> if(!info) return;
<ide> var parentChunksWithModule = info.chunks;
<ide> parentChunksWithModule = parentChunksWithModule.filter(function(chunk, idx) { | 1 |
PHP | PHP | fix another connection() call | c43e414c61e109f1320cbf6283eb9e9c231cd3d1 | <ide><path>tests/TestCase/ORM/Association/HasManyTest.php
<ide> public function testValueBinderUpdateOnSubQueryStrategy()
<ide> protected function assertJoin($expected, $query)
<ide> {
<ide> if ($this->autoQuote) {
<del> $driver = $query->connection()->driver();
<add> $driver = $query->getConnection()->getDriver();
<ide> $quoter = new IdentifierQuoter($driver);
<ide> foreach ($expected as &$join) {
<ide> $join['table'] = $driver->quoteIdentifier($join['table']);
<ide> protected function assertOrderClause($expected, $query)
<ide> protected function assertSelectClause($expected, $query)
<ide> {
<ide> if ($this->autoQuote) {
<del> $connection = $query->connection();
<add> $connection = $query->getConnection();
<ide> foreach ($expected as $key => $value) {
<ide> $expected[$connection->quoteIdentifier($key)] = $connection->quoteIdentifier($value);
<ide> unset($expected[$key]); | 1 |
Javascript | Javascript | add suspenselist to devtools | 60ba723bf78b9a28f60dce854e88e206fab52301 | <ide><path>packages/react-devtools-shared/src/__tests__/utils-test.js
<ide> * @flow
<ide> */
<ide>
<del>import {getDisplayName} from 'react-devtools-shared/src/utils';
<add>import {
<add> getDisplayName,
<add> getDisplayNameForReactElement,
<add>} from 'react-devtools-shared/src/utils';
<add>import {
<add> REACT_SUSPENSE_LIST_TYPE as SuspenseList,
<add> REACT_STRICT_MODE_TYPE as StrictMode,
<add>} from 'shared/ReactSymbols';
<add>import {createElement} from 'react/src/ReactElement';
<ide>
<ide> describe('utils', () => {
<ide> describe('getDisplayName', () => {
<ide> describe('utils', () => {
<ide> expect(getDisplayName(FauxComponent, 'Fallback')).toEqual('Fallback');
<ide> });
<ide> });
<add> describe('getDisplayNameForReactElement', () => {
<add> it('should return correct display name for an element with function type', () => {
<add> function FauxComponent() {}
<add> FauxComponent.displayName = 'OverrideDisplayName';
<add> const element = createElement(FauxComponent);
<add> expect(getDisplayNameForReactElement(element)).toEqual(
<add> 'OverrideDisplayName',
<add> );
<add> });
<add> it('should return correct display name for an element with a type of StrictMode', () => {
<add> const element = createElement(StrictMode);
<add> expect(getDisplayNameForReactElement(element)).toEqual('StrictMode');
<add> });
<add> it('should return correct display name for an element with a type of SuspenseList', () => {
<add> const element = createElement(SuspenseList);
<add> expect(getDisplayNameForReactElement(element)).toEqual('SuspenseList');
<add> });
<add> it('should return NotImplementedInDevtools for an element with invalid symbol type', () => {
<add> const element = createElement(Symbol('foo'));
<add> expect(getDisplayNameForReactElement(element)).toEqual(
<add> 'NotImplementedInDevtools',
<add> );
<add> });
<add> it('should return NotImplementedInDevtools for an element with invalid type', () => {
<add> const element = createElement(true);
<add> expect(getDisplayNameForReactElement(element)).toEqual(
<add> 'NotImplementedInDevtools',
<add> );
<add> });
<add> it('should return Element for null type', () => {
<add> const element = createElement();
<add> expect(getDisplayNameForReactElement(element)).toEqual('Element');
<add> });
<add> });
<ide> });
<ide><path>packages/react-devtools-shared/src/utils.js
<ide> import {
<ide> StrictMode,
<ide> Suspense,
<ide> } from 'react-is';
<add>import {REACT_SUSPENSE_LIST_TYPE as SuspenseList} from 'shared/ReactSymbols';
<ide> import {
<ide> TREE_OPERATION_ADD,
<ide> TREE_OPERATION_REMOVE,
<ide> import {
<ide> } from 'react-devtools-shared/src/types';
<ide> import {localStorageGetItem, localStorageSetItem} from './storage';
<ide> import {meta} from './hydration';
<del>
<ide> import type {ComponentFilter, ElementType} from './types';
<ide>
<ide> const cachedDisplayNames: WeakMap<Function, string> = new WeakMap();
<ide> export function getDisplayNameForReactElement(
<ide> return 'StrictMode';
<ide> case Suspense:
<ide> return 'Suspense';
<add> case SuspenseList:
<add> return 'SuspenseList';
<ide> default:
<ide> const {type} = element;
<ide> if (typeof type === 'string') {
<ide> return type;
<del> } else if (type != null) {
<add> } else if (typeof type === 'function') {
<ide> return getDisplayName(type, 'Anonymous');
<add> } else if (type != null) {
<add> return 'NotImplementedInDevtools';
<ide> } else {
<ide> return 'Element';
<ide> }
<ide><path>packages/react-is/src/ReactIs.js
<ide> import {
<ide> REACT_PROVIDER_TYPE,
<ide> REACT_STRICT_MODE_TYPE,
<ide> REACT_SUSPENSE_TYPE,
<add> REACT_SUSPENSE_LIST_TYPE,
<ide> } from 'shared/ReactSymbols';
<ide> import isValidElementType from 'shared/isValidElementType';
<ide>
<ide> export function typeOf(object: any) {
<ide> case REACT_PROFILER_TYPE:
<ide> case REACT_STRICT_MODE_TYPE:
<ide> case REACT_SUSPENSE_TYPE:
<add> case REACT_SUSPENSE_LIST_TYPE:
<ide> return type;
<ide> default:
<ide> const $$typeofType = type && type.$$typeof; | 3 |
Ruby | Ruby | add bigserial pk support | 6dc178711de036ae9a441e83978b7ed740be2f63 | <ide><path>activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
<ide> class PostgreSQLAdapter < AbstractAdapter
<ide>
<ide> NATIVE_DATABASE_TYPES = {
<ide> primary_key: "serial primary key",
<add> bigserial: "bigserial",
<ide> string: { name: "character varying" },
<ide> text: { name: "text" },
<ide> integer: { name: "integer" },
<ide><path>activerecord/lib/active_record/schema_dumper.rb
<ide> def table(table, stream)
<ide> if pkcol
<ide> if pk != 'id'
<ide> tbl.print %Q(, primary_key: "#{pk}")
<add> elsif pkcol.sql_type == 'bigint'
<add> tbl.print ", id: :bigserial"
<ide> elsif pkcol.sql_type == 'uuid'
<ide> tbl.print ", id: :uuid"
<ide> tbl.print %Q(, default: "#{pkcol.default_function}") if pkcol.default_function
<ide><path>activerecord/test/cases/adapters/postgresql/uuid_test.rb
<ide> def test_uuid_formats
<ide> end
<ide> end
<ide>
<add>class PostgresqlLargeKeysTest < ActiveRecord::TestCase
<add> include PostgresqlUUIDHelper
<add> def setup
<add> connection.create_table('big_serials', id: :bigserial) do |t|
<add> t.string 'name'
<add> end
<add> end
<add>
<add> def test_omg
<add> schema = StringIO.new
<add> ActiveRecord::SchemaDumper.dump(connection, schema)
<add> assert_match "create_table \"big_serials\", id: :bigserial, force: true",
<add> schema.string
<add> end
<add>
<add> def teardown
<add> drop_table "big_serials"
<add> end
<add>end
<add>
<ide> class PostgresqlUUIDGenerationTest < ActiveRecord::TestCase
<ide> include PostgresqlUUIDHelper
<ide> | 3 |
Ruby | Ruby | provide a middleware to debug misbehaving locks | 04b4a0666bdf900f9a4e797022dfe0197eebef6a | <ide><path>actionpack/lib/action_dispatch.rb
<ide> class IllegalStateError < StandardError
<ide> autoload :Callbacks
<ide> autoload :Cookies
<ide> autoload :DebugExceptions
<add> autoload :DebugLocks
<ide> autoload :ExceptionWrapper
<ide> autoload :Executor
<ide> autoload :Flash
<ide><path>actionpack/lib/action_dispatch/middleware/debug_locks.rb
<add>module ActionDispatch
<add> # This middleware can be used to diagnose deadlocks in the autoload interlock.
<add> #
<add> # To use it, insert it near the top of the middleware stack, using
<add> # <tt>config/application.rb</tt>:
<add> #
<add> # config.middleware.insert_before Rack::Sendfile, ActionDispatch::DebugLocks
<add> #
<add> # After restarting the application and re-triggering the deadlock condition,
<add> # <tt>/rails/locks</tt> will show a summary of all threads currently known to
<add> # the interlock, which lock level they are holding or awaiting, and their
<add> # current backtrace.
<add> #
<add> # Generally a deadlock will be caused by the interlock conflicting with some
<add> # other external lock or blocking I/O call. These cannot be automatically
<add> # identified, but should be visible in the displayed backtraces.
<add> #
<add> # NOTE: The formatting and content of this middleware's output is intended for
<add> # human consumption, and should be expected to change between releases.
<add> #
<add> # This middleware exposes operational details of the server, with no access
<add> # control. It should only be enabled when in use, and removed thereafter.
<add> class DebugLocks
<add> def initialize(app, path = '/rails/locks')
<add> @app = app
<add> @path = path
<add> end
<add>
<add> def call(env)
<add> req = ActionDispatch::Request.new env
<add>
<add> if req.get?
<add> path = req.path_info.chomp('/'.freeze)
<add> if path == @path
<add> return render_details(req)
<add> end
<add> end
<add>
<add> @app.call(env)
<add> end
<add>
<add> private
<add> def render_details(req)
<add> threads = ActiveSupport::Dependencies.interlock.raw_state do |threads|
<add> # The Interlock itself comes to a complete halt as long as this block
<add> # is executing. That gives us a more consistent picture of everything,
<add> # but creates a pretty strong Observer Effect.
<add> #
<add> # Most directly, that means we need to do as little as possible in
<add> # this block. More widely, it means this middleware should remain a
<add> # strictly diagnostic tool (to be used when something has gone wrong),
<add> # and not for any sort of general monitoring.
<add>
<add> threads.each.with_index do |(thread, info), idx|
<add> info[:index] = idx
<add> info[:backtrace] = thread.backtrace
<add> end
<add>
<add> threads
<add> end
<add>
<add> str = threads.map do |thread, info|
<add> if info[:exclusive]
<add> lock_state = 'Exclusive'
<add> elsif info[:sharing] > 0
<add> lock_state = 'Sharing'
<add> lock_state << " x#{info[:sharing]}" if info[:sharing] > 1
<add> else
<add> lock_state = 'No lock'
<add> end
<add>
<add> if info[:waiting]
<add> lock_state << ' (yielded share)'
<add> end
<add>
<add> msg = "Thread #{info[:index]} [0x#{thread.__id__.to_s(16)} #{thread.status || 'dead'}] #{lock_state}\n"
<add>
<add> if info[:sleeper]
<add> msg << " Waiting in #{info[:sleeper]}"
<add> msg << " to #{info[:purpose].to_s.inspect}" unless info[:purpose].nil?
<add> msg << "\n"
<add>
<add> if info[:compatible]
<add> compat = info[:compatible].map { |c| c == false ? "share" : c.to_s.inspect }
<add> msg << " may be pre-empted for: #{compat.join(', ')}\n"
<add> end
<add>
<add> blockers = threads.values.select { |binfo| blocked_by?(info, binfo, threads.values) }
<add> msg << " blocked by: #{blockers.map {|i| i[:index] }.join(', ')}\n" if blockers.any?
<add> end
<add>
<add> blockees = threads.values.select { |binfo| blocked_by?(binfo, info, threads.values) }
<add> msg << " blocking: #{blockees.map {|i| i[:index] }.join(', ')}\n" if blockees.any?
<add>
<add> msg << "\n#{info[:backtrace].join("\n")}\n" if info[:backtrace]
<add> end.join("\n\n---\n\n\n")
<add>
<add> [200, { "Content-Type" => "text/plain", "Content-Length" => str.size }, [str]]
<add> end
<add>
<add> def blocked_by?(victim, blocker, all_threads)
<add> return false if victim.equal?(blocker)
<add>
<add> case victim[:sleeper]
<add> when :start_sharing
<add> blocker[:exclusive] ||
<add> (!victim[:waiting] && blocker[:compatible] && !blocker[:compatible].include?(false))
<add> when :start_exclusive
<add> blocker[:sharing] > 0 ||
<add> blocker[:exclusive] ||
<add> (blocker[:compatible] && !blocker[:compatible].include?(victim[:purpose]))
<add> when :yield_shares
<add> blocker[:exclusive]
<add> when :stop_exclusive
<add> blocker[:exclusive] ||
<add> victim[:compatible] &&
<add> victim[:compatible].include?(blocker[:purpose]) &&
<add> all_threads.all? { |other| !other[:compatible] || blocker.equal?(other) || other[:compatible].include?(blocker[:purpose]) }
<add> end
<add> end
<add> end
<add>end
<ide><path>activesupport/lib/active_support/concurrency/share_lock.rb
<ide> class ShareLock
<ide> # to upgrade share locks to exclusive.
<ide>
<ide>
<add> def raw_state # :nodoc:
<add> synchronize do
<add> threads = @sleeping.keys | @sharing.keys | @waiting.keys
<add> threads |= [@exclusive_thread] if @exclusive_thread
<add>
<add> data = {}
<add>
<add> threads.each do |thread|
<add> purpose, compatible = @waiting[thread]
<add>
<add> data[thread] = {
<add> thread: thread,
<add> sharing: @sharing[thread],
<add> exclusive: @exclusive_thread == thread,
<add> purpose: purpose,
<add> compatible: compatible,
<add> waiting: !!@waiting[thread],
<add> sleeper: @sleeping[thread],
<add> }
<add> end
<add>
<add> # NB: Yields while holding our *internal* synchronize lock,
<add> # which is supposed to be used only for a few instructions at
<add> # a time. This allows the caller to inspect additional state
<add> # without things changing out from underneath, but would have
<add> # disastrous effects upon normal operation. Fortunately, this
<add> # method is only intended to be called when things have
<add> # already gone wrong.
<add> yield data
<add> end
<add> end
<add>
<ide> def initialize
<ide> super()
<ide>
<ide> @cv = new_cond
<ide>
<ide> @sharing = Hash.new(0)
<ide> @waiting = {}
<add> @sleeping = {}
<ide> @exclusive_thread = nil
<ide> @exclusive_depth = 0
<ide> end
<ide> def start_exclusive(purpose: nil, compatible: [], no_wait: false)
<ide> return false if no_wait
<ide>
<ide> yield_shares(purpose: purpose, compatible: compatible, block_share: true) do
<del> @cv.wait_while { busy_for_exclusive?(purpose) }
<add> wait_for(:start_exclusive) { busy_for_exclusive?(purpose) }
<ide> end
<ide> end
<ide> @exclusive_thread = Thread.current
<ide> def stop_exclusive(compatible: [])
<ide>
<ide> if eligible_waiters?(compatible)
<ide> yield_shares(compatible: compatible, block_share: true) do
<del> @cv.wait_while { @exclusive_thread || eligible_waiters?(compatible) }
<add> wait_for(:stop_exclusive) { @exclusive_thread || eligible_waiters?(compatible) }
<ide> end
<ide> end
<ide> @cv.broadcast
<ide> def start_sharing
<ide> elsif @waiting[Thread.current]
<ide> # We're nested inside a +yield_shares+ call: we'll resume as
<ide> # soon as there isn't an exclusive lock in our way
<del> @cv.wait_while { @exclusive_thread }
<add> wait_for(:start_sharing) { @exclusive_thread }
<ide> else
<ide> # This is an initial / outermost share call: any outstanding
<ide> # requests for an exclusive lock get to go first
<del> @cv.wait_while { busy_for_sharing?(false) }
<add> wait_for(:start_sharing) { busy_for_sharing?(false) }
<ide> end
<ide> @sharing[Thread.current] += 1
<ide> end
<ide> def yield_shares(purpose: nil, compatible: [], block_share: false)
<ide> yield
<ide> ensure
<ide> synchronize do
<del> @cv.wait_while { @exclusive_thread && @exclusive_thread != Thread.current }
<add> wait_for(:yield_shares) { @exclusive_thread && @exclusive_thread != Thread.current }
<ide>
<ide> if previous_wait
<ide> @waiting[Thread.current] = previous_wait
<ide> def busy_for_sharing?(purpose)
<ide> def eligible_waiters?(compatible)
<ide> @waiting.any? { |t, (p, _)| compatible.include?(p) && @waiting.all? { |t2, (_, c2)| t == t2 || c2.include?(p) } }
<ide> end
<add>
<add> def wait_for(method)
<add> @sleeping[Thread.current] = method
<add> @cv.wait_while { yield }
<add> ensure
<add> @sleeping.delete Thread.current
<add> end
<ide> end
<ide> end
<ide> end
<ide><path>activesupport/lib/active_support/dependencies/interlock.rb
<ide> def permit_concurrent_loads
<ide> yield
<ide> end
<ide> end
<add>
<add> def raw_state(&block) # :nodoc:
<add> @lock.raw_state(&block)
<add> end
<ide> end
<ide> end
<ide> end | 4 |
PHP | PHP | remove trait from test | ff15da8d59a494efdbde201dfcf4e5922433b09f | <ide><path>tests/Unit/ExampleTest.php
<ide> namespace Tests\Unit;
<ide>
<ide> use Tests\TestCase;
<del>use Illuminate\Foundation\Testing\WithoutMiddleware;
<ide> use Illuminate\Foundation\Testing\DatabaseMigrations;
<ide> use Illuminate\Foundation\Testing\DatabaseTransactions;
<ide> | 1 |
Java | Java | support macros in cronexpression | 1a8906bdc3a9e78e5036ef4874345a742fb151f7 | <ide><path>spring-context/src/main/java/org/springframework/scheduling/support/CronExpression.java
<ide> public final class CronExpression {
<ide>
<ide> static final int MAX_ATTEMPTS = 366;
<ide>
<add> private static final String[] MACROS = new String[] {
<add> "@yearly", "0 0 0 1 1 *",
<add> "@annually", "0 0 0 1 1 *",
<add> "@monthly", "0 0 0 1 * *",
<add> "@weekly", "0 0 0 * * 0",
<add> "@daily", "0 0 0 * * *",
<add> "@midnight", "0 0 0 * * *",
<add> "@hourly", "0 0 * * * *"
<add> };
<add>
<ide>
<ide> private final CronField[] fields;
<ide>
<ide> private CronExpression(
<ide> * <li>{@code "0 0 0 25 12 ?"} = every Christmas Day at midnight</li>
<ide> * </ul>
<ide> *
<add> * <p>The following macros are also supported:
<add> * <ul>
<add> * <li>{@code "@yearly"} (or {@code "@annually"}) to run un once a year, i.e. {@code "0 0 0 1 1 *"},</li>
<add> * <li>{@code "@monthly"} to run once a month, i.e. {@code "0 0 0 1 * *"},</li>
<add> * <li>{@code "@weekly"} to run once a week, i.e. {@code "0 0 0 * * 0"},</li>
<add> * <li>{@code "@daily"} (or {@code "@midnight"}) to run once a day, i.e. {@code "0 0 0 * * *"},</li>
<add> * <li>{@code "@hourly"} to run once an hour, i.e. {@code "0 0 * * * *"}.</li>
<add> * </ul>
<add> *
<ide> * @param expression the expression string to parse
<ide> * @return the parsed {@code CronExpression} object
<ide> * @throws IllegalArgumentException in the expression does not conform to
<ide> private CronExpression(
<ide> public static CronExpression parse(String expression) {
<ide> Assert.hasLength(expression, "Expression string must not be empty");
<ide>
<add> expression = resolveMacros(expression);
<add>
<ide> String[] fields = StringUtils.tokenizeToStringArray(expression, " ");
<ide> if (fields.length != 6) {
<ide> throw new IllegalArgumentException(String.format(
<ide> public static CronExpression parse(String expression) {
<ide> }
<ide>
<ide>
<add> private static String resolveMacros(String expression) {
<add> expression = expression.trim();
<add> for (int i = 0; i < MACROS.length; i = i + 2) {
<add> if (MACROS[i].equalsIgnoreCase(expression)) {
<add> return MACROS[i + 1];
<add> }
<add> }
<add> return expression;
<add> }
<add>
<add>
<ide> /**
<ide> * Calculate the next {@link Temporal} that matches this expression.
<ide> * @param temporal the seed value
<ide><path>spring-context/src/test/java/org/springframework/scheduling/support/CronExpressionTests.java
<ide>
<ide> import static java.time.DayOfWeek.FRIDAY;
<ide> import static java.time.DayOfWeek.MONDAY;
<add>import static java.time.DayOfWeek.SUNDAY;
<ide> import static java.time.DayOfWeek.TUESDAY;
<ide> import static java.time.DayOfWeek.WEDNESDAY;
<ide> import static java.time.temporal.TemporalAdjusters.next;
<ide> void friday13th() {
<ide> assertThat(actual.getDayOfMonth()).isEqualTo(13);
<ide> }
<ide>
<add> @Test
<add> void yearly() {
<add> CronExpression expression = CronExpression.parse("@yearly");
<add> assertThat(expression).isEqualTo(CronExpression.parse("0 0 0 1 1 *"));
<add>
<add> LocalDateTime last = LocalDateTime.now().withMonth(10).withDayOfMonth(10);
<add> LocalDateTime expected = LocalDateTime.of(last.getYear() + 1, 1, 1, 0, 0);
<add>
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = expected.plusYears(1);
<add> actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = expected.plusYears(1);
<add> assertThat(expression.next(last)).isEqualTo(expected);
<add> }
<add>
<add> @Test
<add> void annually() {
<add> CronExpression expression = CronExpression.parse("@annually");
<add> assertThat(expression).isEqualTo(CronExpression.parse("0 0 0 1 1 *"));
<add> assertThat(expression).isEqualTo(CronExpression.parse("@yearly"));
<add> }
<add>
<add> @Test
<add> void monthly() {
<add> CronExpression expression = CronExpression.parse("@monthly");
<add> assertThat(expression).isEqualTo(CronExpression.parse("0 0 0 1 * *"));
<add>
<add> LocalDateTime last = LocalDateTime.now().withMonth(10).withDayOfMonth(10);
<add> LocalDateTime expected = LocalDateTime.of(last.getYear(), 11, 1, 0, 0);
<add>
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = expected.plusMonths(1);
<add> actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = expected.plusMonths(1);
<add> assertThat(expression.next(last)).isEqualTo(expected);
<add> }
<add>
<add> @Test
<add> void weekly() {
<add> CronExpression expression = CronExpression.parse("@weekly");
<add> assertThat(expression).isEqualTo(CronExpression.parse("0 0 0 * * 0"));
<add>
<add> LocalDateTime last = LocalDateTime.now();
<add> LocalDateTime expected = last.with(next(SUNDAY)).withHour(0).withMinute(0).withSecond(0).withNano(0);
<add>
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = expected.plusWeeks(1);
<add> actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = expected.plusWeeks(1);
<add> assertThat(expression.next(last)).isEqualTo(expected);
<add> }
<add>
<add> @Test
<add> void daily() {
<add> CronExpression expression = CronExpression.parse("@daily");
<add> assertThat(expression).isEqualTo(CronExpression.parse("0 0 0 * * *"));
<add>
<add> LocalDateTime last = LocalDateTime.now();
<add> LocalDateTime expected = last.plusDays(1).withHour(0).withMinute(0).withSecond(0).withNano(0);
<add>
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = expected.plusDays(1);
<add> actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = expected.plusDays(1);
<add> assertThat(expression.next(last)).isEqualTo(expected);
<add> }
<add>
<add> @Test
<add> void midnight() {
<add> CronExpression expression = CronExpression.parse("@midnight");
<add> assertThat(expression).isEqualTo(CronExpression.parse("0 0 0 * * *"));
<add> assertThat(expression).isEqualTo(CronExpression.parse("@daily"));
<add> }
<add>
<add> @Test
<add> void hourly() {
<add> CronExpression expression = CronExpression.parse("@hourly");
<add> assertThat(expression).isEqualTo(CronExpression.parse("0 0 * * * *"));
<add>
<add> LocalDateTime last = LocalDateTime.now();
<add> LocalDateTime expected = last.plusHours(1).withMinute(0).withSecond(0).withNano(0);
<add>
<add> LocalDateTime actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = expected.plusHours(1);
<add> actual = expression.next(last);
<add> assertThat(actual).isEqualTo(expected);
<add>
<add> last = actual;
<add> expected = expected.plusHours(1);
<add> assertThat(expression.next(last)).isEqualTo(expected);
<add> }
<add>
<add>
<ide> } | 2 |
Javascript | Javascript | require electron once | aec88679047a408ea1c51a47bb7ab7c0236573fd | <ide><path>static/index.js
<ide> (function () {
<add> const electron = require('electron')
<ide> const path = require('path')
<ide> const getWindowLoadSettings = require('../src/get-window-load-settings')
<ide> const entryPointDirPath = __dirname
<ide> }
<ide>
<ide> function handleSetupError (error) {
<del> const currentWindow = require('electron').remote.getCurrentWindow()
<add> const currentWindow = electron.remote.getCurrentWindow()
<ide> currentWindow.setSize(800, 600)
<ide> currentWindow.center()
<ide> currentWindow.show()
<ide> const initScriptPath = path.relative(entryPointDirPath, getWindowLoadSettings().windowInitializationScript)
<ide> const initialize = requireFunction(initScriptPath)
<ide> return initialize({blobStore: blobStore}).then(function () {
<del> require('electron').ipcRenderer.send('window-command', 'window:loaded')
<add> electron.ipcRenderer.send('window-command', 'window:loaded')
<ide> })
<ide> }
<ide>
<ide> })
<ide> }
<ide>
<del> const webContents = require('electron').remote.getCurrentWindow().webContents
<add> const webContents = electron.remote.getCurrentWindow().webContents
<ide> if (webContents.devToolsWebContents) {
<ide> profile()
<ide> } else { | 1 |
Ruby | Ruby | relocate bottles on linux using patchelf | 1b688a3a25fa4ca077de88e2177241b6cb76a5f6 | <ide><path>Library/Homebrew/dev-cmd/bottle.rb
<ide> def bottle
<ide> end
<ide>
<ide> return merge if args.merge?
<add> ensure_relocation_formulae_installed!
<ide> ARGV.resolved_formulae.each do |f|
<ide> bottle_formula f
<ide> end
<ide> end
<ide>
<add> def ensure_relocation_formulae_installed!
<add> Keg.relocation_formulae.each do |f|
<add> next if Formula[f].installed?
<add> ohai "Installing #{f}..."
<add> safe_system HOMEBREW_BREW_FILE, "install", f
<add> end
<add> end
<add>
<ide> def keg_contain?(string, keg, ignores)
<ide> @put_string_exists_header, @put_filenames = nil
<ide>
<ide><path>Library/Homebrew/extend/os/keg_relocate.rb
<del>require "extend/os/mac/keg_relocate" if OS.mac?
<add>if OS.mac?
<add> require "extend/os/mac/keg_relocate"
<add>elsif OS.linux?
<add> require "extend/os/linux/keg_relocate"
<add>end
<ide><path>Library/Homebrew/extend/os/linux/keg_relocate.rb
<add>class Keg
<add> def relocate_dynamic_linkage(relocation)
<add> # Patching patchelf using itself fails with "Text file busy" or SIGBUS.
<add> return if name == "patchelf"
<add>
<add> elf_files.each do |file|
<add> file.ensure_writable do
<add> change_rpath(file, relocation.old_prefix, relocation.new_prefix)
<add> end
<add> end
<add> end
<add>
<add> def change_rpath(file, old_prefix, new_prefix)
<add> return if !file.elf? || !file.dynamic_elf?
<add>
<add> patchelf = DevelopmentTools.locate "patchelf"
<add> cmd_rpath = [patchelf, "--print-rpath", file]
<add> old_rpath = Utils.popen_read(*cmd_rpath, err: :out).strip
<add>
<add> # patchelf requires that the ELF file have a .dynstr section.
<add> # Skip ELF files that do not have a .dynstr section.
<add> return if ["cannot find section .dynstr", "strange: no string table"].include?(old_rpath)
<add> raise ErrorDuringExecution, "#{cmd_rpath}\n#{old_rpath}" unless $CHILD_STATUS.success?
<add>
<add> rpath = old_rpath
<add> .split(":")
<add> .map { |x| x.sub(old_prefix, new_prefix) }
<add> .select { |x| x.start_with?(new_prefix, "$ORIGIN") }
<add>
<add> lib_path = "#{new_prefix}/lib"
<add> rpath << lib_path unless rpath.include? lib_path
<add> new_rpath = rpath.join(":")
<add> cmd = [patchelf, "--force-rpath", "--set-rpath", new_rpath]
<add>
<add> if file.binary_executable?
<add> old_interpreter = Utils.safe_popen_read(patchelf, "--print-interpreter", file).strip
<add> new_interpreter = if File.readable? "#{new_prefix}/lib/ld.so"
<add> "#{new_prefix}/lib/ld.so"
<add> else
<add> old_interpreter.sub old_prefix, new_prefix
<add> end
<add> cmd << "--set-interpreter" << new_interpreter if old_interpreter != new_interpreter
<add> end
<add>
<add> return if old_rpath == new_rpath && old_interpreter == new_interpreter
<add> safe_system(*cmd, file)
<add> end
<add>
<add> def detect_cxx_stdlibs(options = {})
<add> skip_executables = options.fetch(:skip_executables, false)
<add> results = Set.new
<add> elf_files.each do |file|
<add> next unless file.dynamic_elf?
<add> next if file.binary_executable? && skip_executables
<add> dylibs = file.dynamically_linked_libraries
<add> results << :libcxx if dylibs.any? { |s| s.include? "libc++.so" }
<add> results << :libstdcxx if dylibs.any? { |s| s.include? "libstdc++.so" }
<add> end
<add> results.to_a
<add> end
<add>
<add> def elf_files
<add> hardlinks = Set.new
<add> elf_files = []
<add> path.find do |pn|
<add> next if pn.symlink? || pn.directory?
<add> next if !pn.dylib? && !pn.binary_executable?
<add>
<add> # If we've already processed a file, ignore its hardlinks (which have the
<add> # same dev ID and inode). This prevents relocations from being performed
<add> # on a binary more than once.
<add> next unless hardlinks.add? [pn.stat.dev, pn.stat.ino]
<add> elf_files << pn
<add> end
<add> elf_files
<add> end
<add>
<add> def self.relocation_formulae
<add> ["patchelf"]
<add> end
<add>end
<ide><path>Library/Homebrew/formula_installer.rb
<ide> def expand_requirements
<ide>
<ide> def expand_dependencies(deps)
<ide> inherited_options = Hash.new { |hash, key| hash[key] = Options.new }
<add> pour_bottle = pour_bottle?
<ide>
<ide> expanded_deps = Dependency.expand(formula, deps) do |dependent, dep|
<ide> inherited_options[dep.name] |= inherited_options_for(dep)
<ide> build = effective_build_options_for(
<ide> dependent,
<ide> inherited_options.fetch(dependent.name, []),
<ide> )
<add> pour_bottle = true if install_bottle_for?(dep.to_formula, build)
<ide>
<ide> if dep.prune_from_option?(build)
<ide> Dependency.prune
<ide> def expand_dependencies(deps)
<ide> end
<ide> end
<ide>
<add> if pour_bottle
<add> bottle_deps = Keg.relocation_formulae
<add> .map { |formula| Dependency.new(formula) }
<add> .reject do |dep|
<add> inherited_options[dep.name] |= inherited_options_for(dep)
<add> dep.satisfied? inherited_options[dep.name]
<add> end
<add> expanded_deps = Dependency.merge_repeats(bottle_deps + expanded_deps) unless bottle_deps.empty?
<add> end
<add>
<ide> expanded_deps.map { |dep| [dep, inherited_options[dep.name]] }
<ide> end
<ide>
<ide><path>Library/Homebrew/keg_relocate.rb
<ide> def symlink_files
<ide> def self.file_linked_libraries(_file, _string)
<ide> []
<ide> end
<add>
<add> def self.relocation_formulae
<add> []
<add> end
<ide> end
<ide>
<ide> require "extend/os/keg_relocate"
<ide><path>Library/Homebrew/test/dev-cmd/bottle_spec.rb
<ide> expect { brew "install", "--build-bottle", testball }
<ide> .to be_a_success
<ide>
<add> setup_test_formula "patchelf"
<add> (HOMEBREW_CELLAR/"patchelf/1.0/bin").mkpath
<add>
<ide> expect { brew "bottle", "--no-rebuild", testball }
<ide> .to output(/Formula not from core or any taps/).to_stderr
<ide> .and not_to_output.to_stdout
<ide><path>Library/Homebrew/test/formula_installer_bottle_spec.rb
<ide> def temporarily_install_bottle(formula)
<ide> expect(formula).to be_bottled
<ide> expect(formula).to pour_bottle
<ide>
<add> stub_formula_loader formula
<add> stub_formula_loader formula("patchelf") { url "patchelf-1.0" }
<add> allow(Formula["patchelf"]).to receive(:installed?).and_return(true)
<ide> described_class.new(formula).install
<ide>
<ide> keg = Keg.new(formula.prefix)
<ide><path>Library/Homebrew/test/formulary_spec.rb
<ide> class Wrong#{described_class.class_s(formula_name)} < Formula
<ide> end
<ide>
<ide> context "with installed Formula" do
<del> let(:formula) { described_class.factory(formula_path) }
<del> let(:installer) { FormulaInstaller.new(formula) }
<add> before do
<add> allow(Formulary).to receive(:loader_for).and_call_original
<add> stub_formula_loader formula("patchelf") { url "patchelf-1.0" }
<add> allow(Formula["patchelf"]).to receive(:installed?).and_return(true)
<add> end
<add>
<add> let(:installed_formula) { described_class.factory(formula_path) }
<add> let(:installer) { FormulaInstaller.new(installed_formula) }
<ide>
<ide> it "returns a Formula when given a rack" do
<ide> installer.install
<ide>
<del> f = described_class.from_rack(formula.rack)
<add> f = described_class.from_rack(installed_formula.rack)
<ide> expect(f).to be_kind_of(Formula)
<ide> end
<ide>
<ide> it "returns a Formula when given a Keg" do
<ide> installer.install
<ide>
<del> keg = Keg.new(formula.prefix)
<add> keg = Keg.new(installed_formula.prefix)
<ide> f = described_class.from_keg(keg)
<ide> expect(f).to be_kind_of(Formula)
<ide> end
<ide><path>Library/Homebrew/test/support/helper/spec/shared_context/integration_test.rb
<ide> def install
<ide> url "https://example.com/#{name}-1.0"
<ide> depends_on "foo"
<ide> RUBY
<add> when "patchelf"
<add> content = <<~RUBY
<add> url "https://example.com/#{name}-1.0"
<add> RUBY
<ide> end
<ide>
<ide> Formulary.core_path(name).tap do |formula_path|
<ide><path>Library/Homebrew/utils/popen.rb
<ide> def self.popen_read(*args, **options, &block)
<ide> popen(args, "rb", options, &block)
<ide> end
<ide>
<add> def self.safe_popen_read(*args, **options, &block)
<add> output = popen_read(*args, **options, &block)
<add> raise ErrorDuringExecution, args unless $CHILD_STATUS.success?
<add> output
<add> end
<add>
<ide> def self.popen_write(*args, **options, &block)
<ide> popen(args, "wb", options, &block)
<ide> end
<ide>
<add> def self.safe_popen_write(*args, **options, &block)
<add> output = popen_write(args, **options, &block)
<add> raise ErrorDuringExecution, args unless $CHILD_STATUS.success?
<add> output
<add> end
<add>
<ide> def self.popen(args, mode, options = {})
<ide> IO.popen("-", mode) do |pipe|
<ide> if pipe | 10 |
Javascript | Javascript | define the last day of 'heisei' era | f98d5a26420e0aa6c1cef5134865b488276fdf33 | <ide><path>src/locale/ja.js
<ide> export default moment.defineLocale('ja', {
<ide> },
<ide> {
<ide> since: '1989-01-08',
<add> until: '2019-04-30',
<ide> offset: 1,
<ide> name: '平成',
<ide> narrow: '㍻', | 1 |
Text | Text | handle backpressure when write() return false | e9044c83a9b997bde60432cd056d36e3a7d8d1e3 | <ide><path>doc/api/stream.md
<ide> first argument. To reliably detect write errors, add a listener for the
<ide> The return value is `true` if the internal buffer does not exceed
<ide> `highWaterMark` configured when the stream was created after admitting `chunk`.
<ide> If `false` is returned, further attempts to write data to the stream should
<del>stop until the [`'drain'`][] event is emitted. However, the `false` return
<del>value is only advisory and the writable stream will unconditionally accept and
<del>buffer `chunk` even if it has not not been allowed to drain.
<add>stop until the [`'drain'`][] event is emitted.
<add>
<add>While a stream is not draining, calls to `write()` will buffer `chunk`, and
<add>return false. Once all currently buffered chunks are drained (accepted for
<add>delivery by the operating system), the `'drain'` event will be emitted.
<add>It is recommended that once write() returns false, no more chunks be written
<add>until the `'drain'` event is emitted. While calling `write()` on a stream that
<add>is not draining is allowed, Node.js will buffer all written chunks until
<add>maximum memory usage occurs, at which point it will abort unconditionally.
<add>Even before it aborts, high memory usage will cause poor garbage collector
<add>performance and high RSS (which is not typically released back to the system,
<add>even after the memory is no longer required). Since TCP sockets may never
<add>drain if the remote peer does not read the data, writing a socket that is
<add>not draining may lead to a remotely exploitable vulnerability.
<add>
<add>Writing data while the stream is not draining is particularly
<add>problematic for a [Transform][], because the `Transform` streams are paused
<add>by default until they are piped or an `'data'` or `'readable'` event handler
<add>is added.
<add>
<add>If the data to be written can be generated or fetched on demand, it is
<add>recommended to encapsulate the logic into a [Readable][] and use
<add>[`stream.pipe()`][]. However, if calling `write()` is preferred, it is
<add>possible to respect backpressure and avoid memory issues using the
<add>the [`'drain'`][] event:
<add>
<add>```js
<add>function write (data, cb) {
<add> if (!stream.write(data)) {
<add> stream.once('drain', cb)
<add> } else {
<add> process.nextTick(cb)
<add> }
<add>}
<add>
<add>// Wait for cb to be called before doing any other write.
<add>write('hello', () => {
<add> console.log('write completed, do more writes now')
<add>})
<add>```
<ide>
<ide> A Writable stream in object mode will always ignore the `encoding` argument.
<ide> | 1 |
Javascript | Javascript | move branch to the correct location | e12f48ef07e837553ea9c537b08d3e4a44d3fad2 | <ide><path>lib/path.js
<ide> const win32 = {
<ide> lastCommonSep = 3;
<ide> }
<ide> }
<add> if (lastCommonSep === -1)
<add> lastCommonSep = 0;
<ide> }
<ide>
<ide> let out = '';
<del> if (lastCommonSep === -1)
<del> lastCommonSep = 0;
<ide> // Generate the relative path based on the path difference between `to` and
<ide> // `from`
<ide> for (i = fromStart + lastCommonSep + 1; i <= fromEnd; ++i) { | 1 |
Text | Text | use comment for pull request template | 6f90e0799e7a8a080f5ecc76928bebb76c672946 | <ide><path>.github/PULL_REQUEST_TEMPLATE.md
<add><!--
<ide> Please consider the following before submitting a pull request:
<ide>
<ide> Guidelines for contributing: https://github.com/chartjs/Chart.js/blob/master/docs/developers/contributing.md
<ide> Example of changes on an interactive website such as the following:
<ide> - http://jsbin.com/
<ide> - http://jsfiddle.net/
<ide> - http://codepen.io/pen/
<del>- Premade template: http://codepen.io/pen?template=JXVYzq
<ide>\ No newline at end of file
<add>- Premade template: http://codepen.io/pen?template=JXVYzq
<add>--> | 1 |
Text | Text | fix typo in cli.md | 71de711189659847ea18313b7b0a57703960d0da | <ide><path>docs/sources/reference/commandline/cli.md
<ide> the proxy
<ide>
<ide> `--default-ulimit` allows you to set the default `ulimit` options to use for all
<ide> containers. It takes the same options as `--ulimit` for `docker run`. If these
<del>defaults are not set, `ulimit` settings will be inheritted, if not set on
<add>defaults are not set, `ulimit` settings will be inherited, if not set on
<ide> `docker run`, from the Docker daemon. Any `--ulimit` options passed to
<ide> `docker run` will overwrite these defaults.
<ide>
<ide> attaching to a tty-enabled container (i.e.: launched with `-t`).
<ide>
<ide> #### Examples
<ide>
<del> $ docker run -d --name topdemo ubuntu /usr/bin/top -b)
<add> $ docker run -d --name topdemo ubuntu /usr/bin/top -b
<ide> $ docker attach topdemo
<ide> top - 02:05:52 up 3:05, 0 users, load average: 0.01, 0.02, 0.05
<ide> Tasks: 1 total, 1 running, 0 sleeping, 0 stopped, 0 zombie
<ide> is returned by the `docker attach` command to its caller too:
<ide> -m, --memory="" Memory limit for all build containers
<ide> --memory-swap="" Total memory (memory + swap), `-1` to disable swap
<ide> -c, --cpu-shares CPU Shares (relative weight)
<del> --cpuset-cpus="" CPUs in which to allow exection, e.g. `0-3`, `0,1`
<add> --cpuset-cpus="" CPUs in which to allow execution, e.g. `0-3`, `0,1`
<ide>
<ide> Builds Docker images from a Dockerfile and a "context". A build's context is
<ide> the files located in the specified `PATH` or `URL`. The build process can
<ide> labels file in the current directory:
<ide> $ docker run --label-file ./labels ubuntu bash
<ide>
<ide> The label-file format is similar to the format for loading environment
<del>variables. (Unlike environment variables, labels are not visislbe to processes
<add>variables. (Unlike environment variables, labels are not visible to processes
<ide> running inside a container.) The following example illustrates a label-file
<ide> format:
<ide> | 1 |
Javascript | Javascript | fix merge conflict in | 3a4e1dbb5b2957875cab6af11a5e5934e125a0d8 | <ide><path>src/test/ReactTestUtils.js
<ide> ReactShallowRenderer.prototype._render = function(element, transaction, context)
<ide> if (this._instance) {
<ide> this._instance.receiveComponent(element, transaction, context);
<ide> } else {
<del> var instance = new ShallowComponentWrapper(element.type);
<add> var instance = new ShallowComponentWrapper(element);
<ide> instance.mountComponent(transaction, null, null, context);
<ide> this._instance = instance;
<ide> } | 1 |
Text | Text | use caret range in blog instructions | aa1f8687d7c93a00c07d63237bcd54b23b4f50af | <ide><path>docs/_posts/2017-04-07-react-v15.5.0.md
<ide> We recommend using [Yarn](https://yarnpkg.com/) or [npm](https://www.npmjs.com/)
<ide> To install React with Yarn, run:
<ide>
<ide> ```bash
<del>yarn add react@15.5.0 react-dom@15.5.0
<add>yarn add react@^15.5.0 react-dom@^15.5.0
<ide> ```
<ide>
<ide> To install React with npm, run:
<ide>
<ide> ```bash
<del>npm install --save react@15.5.0 react-dom@15.5.0
<add>npm install --save react@^15.5.0 react-dom@^15.5.0
<ide> ```
<ide>
<ide> We recommend using a bundler like [webpack](https://webpack.js.org/) or [Browserify](http://browserify.org/) so you can write modular code and bundle it together into small packages to optimize load time. | 1 |
PHP | PHP | remove useless (array) cast | 00eebd513c8fa317438488080a6eb05767869a4c | <ide><path>src/Illuminate/Console/AppNamespaceDetectorTrait.php
<ide> trait AppNamespaceDetectorTrait {
<ide> */
<ide> protected function getAppNamespace()
<ide> {
<del> $composer = (array) json_decode(file_get_contents(base_path().'/composer.json', true));
<add> $composer = json_decode(file_get_contents(base_path().'/composer.json', true), true);
<ide>
<ide> foreach ((array) data_get($composer, 'autoload.psr-4') as $namespace => $path)
<ide> { | 1 |
Mixed | Javascript | check bytelength in readint(b|l)e | d964ffeec356167038b4060c867b355d5fea6987 | <ide><path>benchmark/buffers/buffer-read.js
<ide> const types = [
<ide> 'FloatLE',
<ide> 'FloatBE',
<ide> 'DoubleLE',
<del> 'DoubleBE'
<add> 'DoubleBE',
<add> 'IntLE',
<add> 'IntBE',
<ide> ];
<ide>
<ide> const bench = common.createBenchmark(main, {
<ide> function main(conf) {
<ide> const fn = `read${type}`;
<ide>
<ide> buff.writeDoubleLE(0, 0, noAssert);
<del> const testFunction = new Function('buff', `
<del> for (var i = 0; i !== ${len}; i++) {
<del> buff.${fn}(0, ${JSON.stringify(noAssert)});
<del> }
<del> `);
<add>
<add> var call;
<add> if (fn === 'readIntLE' || fn === 'readIntBE') {
<add> call = `buff.${fn}(0, 1, ${JSON.stringify(noAssert)})`;
<add> } else {
<add> call = `buff.${fn}(0, ${JSON.stringify(noAssert)})`;
<add> }
<add>
<add> const testFunction = new Function(
<add> 'buff',
<add> `for (var i = 0; i !== ${len}; ++i) { ${call}; }`
<add> );
<add>
<ide> bench.start();
<ide> testFunction(buff);
<ide> bench.end(len / 1e6);
<ide><path>doc/api/buffer.md
<ide> console.log(buf.readIntLE(0, 6).toString(16));
<ide> // Prints: 1234567890ab
<ide> console.log(buf.readIntBE(0, 6).toString(16));
<ide>
<del>// Throws an exception: RangeError: Index out of range
<add>// Throws ERR_INDEX_OUT_OF_RANGE:
<ide> console.log(buf.readIntBE(1, 6).toString(16));
<add>
<add>// Throws ERR_OUT_OF_RANGE:
<add>console.log(buf.readIntBE(1, 0).toString(16));
<ide> ```
<ide>
<ide> ### buf.readUInt8(offset[, noAssert])
<ide><path>lib/buffer.js
<ide> Buffer.from = function from(value, encodingOrOffset, length) {
<ide> );
<ide> }
<ide>
<del> if (typeof value === 'number')
<add> if (typeof value === 'number') {
<ide> throw new errors.TypeError(
<ide> 'ERR_INVALID_ARG_TYPE', 'value', 'not number', value
<ide> );
<add> }
<ide>
<ide> const valueOf = value.valueOf && value.valueOf();
<ide> if (valueOf !== null && valueOf !== undefined && valueOf !== value)
<ide> Buffer[kIsEncodingSymbol] = Buffer.isEncoding;
<ide>
<ide> Buffer.concat = function concat(list, length) {
<ide> var i;
<del> if (!Array.isArray(list))
<add> if (!Array.isArray(list)) {
<ide> throw new errors.TypeError(
<ide> 'ERR_INVALID_ARG_TYPE', 'list', ['Array', 'Buffer', 'Uint8Array']
<ide> );
<add> }
<ide>
<ide> if (list.length === 0)
<ide> return new FastBuffer();
<ide> Buffer.concat = function concat(list, length) {
<ide> var pos = 0;
<ide> for (i = 0; i < list.length; i++) {
<ide> var buf = list[i];
<del> if (!isUint8Array(buf))
<add> if (!isUint8Array(buf)) {
<ide> throw new errors.TypeError(
<ide> 'ERR_INVALID_ARG_TYPE', 'list', ['Array', 'Buffer', 'Uint8Array']
<ide> );
<add> }
<ide> _copy(buf, buffer, pos);
<ide> pos += buf.length;
<ide> }
<ide> function checkOffset(offset, ext, length) {
<ide> throw new errors.RangeError('ERR_INDEX_OUT_OF_RANGE');
<ide> }
<ide>
<add>function checkByteLength(byteLength) {
<add> if (byteLength < 1 || byteLength > 6) {
<add> throw new errors.RangeError('ERR_OUT_OF_RANGE',
<add> 'byteLength',
<add> '>= 1 and <= 6');
<add> }
<add>}
<add>
<ide>
<ide> Buffer.prototype.readUIntLE =
<ide> function readUIntLE(offset, byteLength, noAssert) {
<ide> Buffer.prototype.readUInt32BE = function readUInt32BE(offset, noAssert) {
<ide> Buffer.prototype.readIntLE = function readIntLE(offset, byteLength, noAssert) {
<ide> offset = offset >>> 0;
<ide> byteLength = byteLength >>> 0;
<del> if (!noAssert)
<add>
<add> if (!noAssert) {
<add> checkByteLength(byteLength);
<ide> checkOffset(offset, byteLength, this.length);
<add> }
<ide>
<ide> var val = this[offset];
<ide> var mul = 1;
<ide> Buffer.prototype.readIntLE = function readIntLE(offset, byteLength, noAssert) {
<ide> Buffer.prototype.readIntBE = function readIntBE(offset, byteLength, noAssert) {
<ide> offset = offset >>> 0;
<ide> byteLength = byteLength >>> 0;
<del> if (!noAssert)
<add>
<add> if (!noAssert) {
<add> checkByteLength(byteLength);
<ide> checkOffset(offset, byteLength, this.length);
<add> }
<ide>
<ide> var i = byteLength;
<ide> var mul = 1;
<ide> if (process.binding('config').hasIntl) {
<ide> // Transcodes the Buffer from one encoding to another, returning a new
<ide> // Buffer instance.
<ide> transcode = function transcode(source, fromEncoding, toEncoding) {
<del> if (!isUint8Array(source))
<add> if (!isUint8Array(source)) {
<ide> throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'source',
<ide> ['Buffer', 'Uint8Array'], source);
<add> }
<ide> if (source.length === 0) return Buffer.alloc(0);
<ide>
<ide> fromEncoding = normalizeEncoding(fromEncoding) || fromEncoding;
<ide><path>test/parallel/test-buffer-read.js
<ide> function read(buff, funx, args, expected) {
<ide>
<ide> assert.strictEqual(buff[funx](...args), expected);
<ide> common.expectsError(
<del> () => buff[funx](-1),
<add> () => buff[funx](-1, args[1]),
<ide> {
<ide> code: 'ERR_INDEX_OUT_OF_RANGE'
<ide> }
<ide> assert.throws(() => Buffer.allocUnsafe(8).readFloatLE(-1), RangeError);
<ide> assert.strictEqual(buf.readIntLE(0, 6), 0x060504030201);
<ide> assert.strictEqual(buf.readIntBE(0, 6), 0x010203040506);
<ide> }
<add>
<add>// test for byteLength parameter not between 1 and 6 (inclusive)
<add>common.expectsError(() => { buf.readIntLE(1); }, { code: 'ERR_OUT_OF_RANGE' });
<add>common.expectsError(() => { buf.readIntLE(1, 'string'); },
<add> { code: 'ERR_OUT_OF_RANGE' });
<add>common.expectsError(() => { buf.readIntLE(1, 0); },
<add> { code: 'ERR_OUT_OF_RANGE' });
<add>common.expectsError(() => { buf.readIntLE(1, 7); },
<add> { code: 'ERR_OUT_OF_RANGE' });
<add>common.expectsError(() => { buf.readIntBE(1); }, { code: 'ERR_OUT_OF_RANGE' });
<add>common.expectsError(() => { buf.readIntBE(1, 'string'); },
<add> { code: 'ERR_OUT_OF_RANGE' });
<add>common.expectsError(() => { buf.readIntBE(1, 0); },
<add> { code: 'ERR_OUT_OF_RANGE' });
<add>common.expectsError(() => { buf.readIntBE(1, 7); },
<add> { code: 'ERR_OUT_OF_RANGE' }); | 4 |
Javascript | Javascript | fix accidental overriding of imports | dd9a0a4e0b68e36b1bac39ca3b98f29f2d6b7c0a | <ide><path>packages/ember-htmlbars/tests/helpers/log_test.js
<ide> import EmberView from 'ember-views/views/view';
<ide> import compile from 'ember-template-compiler/system/compile';
<ide> import { runAppend, runDestroy } from "ember-runtime/tests/utils";
<ide>
<del>var originalLookup, originalLog, logCalls, lookup, view, compile;
<add>var originalLookup, originalLog, logCalls, lookup, view;
<ide>
<ide> QUnit.module('ember-htmlbars: {{#log}} helper', {
<ide> setup: function() {
<ide><path>packages/ember-htmlbars/tests/integration/binding_integration_test.js
<ide> import { registerHelper } from "ember-htmlbars/helpers";
<ide>
<ide> import { set } from 'ember-metal/property_set';
<ide>
<del>var compile, view, MyApp, originalLookup, lookup;
<add>var view, MyApp, originalLookup, lookup;
<ide>
<ide> var trim = jQuery.trim;
<ide>
<ide><path>packages/ember-htmlbars/tests/integration/escape_integration_test.js
<ide> import { set } from 'ember-metal/property_set';
<ide> import { create as o_create } from 'ember-metal/platform';
<ide> import { runAppend, runDestroy } from "ember-runtime/tests/utils";
<ide>
<del>var compile, view;
<add>var view;
<ide>
<ide> QUnit.module('ember-htmlbars: Integration with Globals', {
<ide> teardown: function() {
<ide><path>packages/ember-htmlbars/tests/integration/globals_integration_test.js
<ide> import EmberView from 'ember-views/views/view';
<ide> import compile from 'ember-template-compiler/system/compile';
<ide> import { runAppend, runDestroy } from "ember-runtime/tests/utils";
<ide>
<del>var compile, view, originalLookup, lookup;
<add>var view, originalLookup, lookup;
<ide>
<ide> var originalLookup = Ember.lookup;
<ide> | 4 |
Text | Text | improve ecdh example | 64cf96d684cc8c713c8d12daeb2b0c68ab6dd760 | <ide><path>doc/api/crypto.md
<ide> If the `inputEncoding` is not provided, `key` is expected to be a [`Buffer`][],
<ide> Example (uncompressing a key):
<ide>
<ide> ```js
<del>const { ECDH } = require('crypto');
<add>const { createECDH, ECDH } = require('crypto');
<ide>
<del>const ecdh = ECDH('secp256k1');
<add>const ecdh = createECDH('secp256k1');
<ide> ecdh.generateKeys();
<ide>
<ide> const compressedKey = ecdh.getPublicKey('hex', 'compressed'); | 1 |
PHP | PHP | fix failing tests | 02f66ed3fa997056818e3b254f23e7a139b39baa | <ide><path>src/Console/Command/BakeShell.php
<ide> public function getOptionParser() {
<ide> 'help' => __d('cake_console', 'Theme to use when baking code.')
<ide> ]);
<ide>
<del> foreach ($this->tasks as $task) {
<add> foreach ($this->_taskMap as $task => $config) {
<ide> $taskParser = $this->{$task}->getOptionParser();
<ide> $parser->addSubcommand(Inflector::underscore($task), [
<ide> 'help' => $taskParser->description(),
<ide><path>tests/TestCase/Console/Command/CompletionShellTest.php
<ide> public function testSubCommandsCorePlugin() {
<ide> $this->Shell->runCommand('subCommands', array('subCommands', 'CORE.bake'));
<ide> $output = $this->Shell->stdout->output;
<ide>
<del> $expected = "behavior component controller fixture helper model plugin project test view\n";
<add> $expected = "behavior component controller fixture helper model plugin project test view widget zerg\n";
<ide> $this->assertEquals($expected, $output);
<ide> }
<ide>
<ide> public function testSubCommands() {
<ide> $this->Shell->runCommand('subCommands', array('subCommands', 'bake'));
<ide> $output = $this->Shell->stdout->output;
<ide>
<del> $expected = "behavior component controller fixture helper model plugin project test view\n";
<add> $expected = "behavior component controller fixture helper model plugin project test view widget zerg\n";
<ide> $this->assertEquals($expected, $output);
<ide> }
<ide> | 2 |
Python | Python | add .output_shape attribute in all layers (+tests) | c506fbda4ac1d3df0f9a6111ba9bf75a20bc7b68 | <ide><path>keras/layers/containers.py
<ide> def add(self, layer):
<ide> self.constraints += constraints
<ide> self.updates += updates
<ide>
<add> @property
<add> def output_shape(self):
<add> return self.layers[-1].output_shape
<add>
<ide> def get_output(self, train=False):
<ide> return self.layers[-1].get_output(train)
<ide>
<ide> def get_config(self):
<ide> def count_params(self):
<ide> return sum([layer.count_params() for layer in self.layers])
<ide>
<add>
<ide> class Graph(Layer):
<ide> '''
<ide> Implement a NN graph with arbitrary layer connections,
<ide> def get_input(self, train=False):
<ide> def input(self):
<ide> return self.get_input()
<ide>
<add> @property
<add> def output_shape(self):
<add> if self.nb_output == 1:
<add> # return tuple
<add> return self.outputs[self.output_order[0]].output_shape
<add> else:
<add> # return dictionary mapping output names to shape tuples
<add> return dict([(k, v.output_shape) for k, v in self.outputs.items()])
<add>
<ide> def get_output(self, train=False):
<ide> if len(self.inputs) == len(self.outputs) == 1:
<ide> return self.outputs[self.output_order[0]].get_output(train)
<ide><path>keras/layers/convolutional.py
<ide> from theano.sandbox.cuda import dnn
<ide>
<ide>
<add>def conv_output_length(input_length, filter_size, border_mode, stride):
<add> assert border_mode in {'same', 'full', 'valid'}
<add> if border_mode == 'same':
<add> output_length = input_length
<add> elif border_mode == 'full':
<add> output_length = input_length + filter_size - 1
<add> elif border_mode == 'valid':
<add> output_length = input_length - filter_size + 1
<add> return (output_length + stride - 1) // stride
<add>
<add>
<add>def pool_output_length(input_length, pool_size, ignore_border, stride):
<add> if ignore_border:
<add> output_length = input_length - pool_size + 1
<add> output_length = (output_length + stride - 1) // stride
<add> else:
<add> if pool_size == input_length:
<add> output_length = min(input_length, stride - stride % 2)
<add> if output_length <= 0:
<add> output_length = 1
<add> elif stride >= pool_size:
<add> output_length = (input_length + stride - 1) // stride
<add> else:
<add> output_length = (input_length - pool_size + stride - 1) // stride
<add> if output_length <= 0:
<add> output_length = 1
<add> else:
<add> output_length += 1
<add> return output_length
<add>
<add>
<ide> class Convolution1D(Layer):
<ide> def __init__(self, input_dim, nb_filter, filter_length,
<ide> init='uniform', activation='linear', weights=None,
<ide> def __init__(self, input_dim, nb_filter, filter_length,
<ide> self.subsample_length = subsample_length
<ide> self.init = initializations.get(init)
<ide> self.activation = activations.get(activation)
<del> self.subsample = (1, subsample_length)
<add> self.subsample = (subsample_length, 1)
<ide> self.border_mode = border_mode
<ide>
<ide> self.input = T.tensor3()
<ide> def __init__(self, input_dim, nb_filter, filter_length,
<ide> if weights is not None:
<ide> self.set_weights(weights)
<ide>
<del> def get_output(self, train):
<add> @property
<add> def output_shape(self):
<add> length = conv_output_length(self.input_shape[1], self.filter_length, self.border_mode, self.subsample[0])
<add> return (self.input_shape[0], length, self.nb_filter)
<add>
<add> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> X = T.reshape(X, (X.shape[0], X.shape[1], X.shape[2], 1)).dimshuffle(0, 2, 1, 3)
<ide>
<ide> border_mode = self.border_mode
<ide> if border_mode == 'same':
<ide> border_mode = 'full'
<add> assert self.subsample == (1, 1)
<ide>
<ide> conv_out = T.nnet.conv.conv2d(X, self.W, border_mode=border_mode, subsample=self.subsample)
<ide> if self.border_mode == 'same':
<ide> def __init__(self, nb_filter, stack_size, nb_row, nb_col,
<ide> if weights is not None:
<ide> self.set_weights(weights)
<ide>
<del> def get_output(self, train):
<add> @property
<add> def output_shape(self):
<add> input_shape = self.input_shape
<add> rows = input_shape[2]
<add> cols = input_shape[3]
<add> rows = conv_output_length(rows, self.nb_row, self.border_mode, self.subsample[0])
<add> cols = conv_output_length(cols, self.nb_col, self.border_mode, self.subsample[1])
<add> return (input_shape[0], self.nb_filter, rows, cols)
<add>
<add> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> border_mode = self.border_mode
<ide> if on_gpu() and dnn.dnn_available():
<ide> def get_output(self, train):
<ide> else:
<ide> if border_mode == 'same':
<ide> border_mode = 'full'
<add> assert(self.subsample == (1, 1))
<ide>
<ide> conv_out = T.nnet.conv.conv2d(X, self.W,
<ide> border_mode=border_mode,
<ide> subsample=self.subsample)
<ide> if self.border_mode == 'same':
<del> shift_x = (self.nb_row - 1) // 2
<del> shift_y = (self.nb_col - 1) // 2
<del> conv_out = conv_out[:, :, shift_x:X.shape[2] + shift_x, shift_y:X.shape[3] + shift_y]
<add> shift_x = (self.nb_row - 1) // 2
<add> shift_y = (self.nb_col - 1) // 2
<add> conv_out = conv_out[:, :, shift_x:X.shape[2] + shift_x, shift_y:X.shape[3] + shift_y]
<ide>
<ide> return self.activation(conv_out + self.b.dimshuffle('x', 0, 'x', 'x'))
<ide>
<ide> def get_config(self):
<ide>
<ide>
<ide> class MaxPooling1D(Layer):
<del> def __init__(self, pool_length=2, stride=None, ignore_border=True):
<add> def __init__(self, pool_length=2, stride=1, ignore_border=True):
<ide> super(MaxPooling1D, self).__init__()
<add> if type(stride) is not int or not stride:
<add> raise Exception('"stride" argument in MaxPooling1D should be an int > 0.')
<ide> self.pool_length = pool_length
<ide> self.stride = stride
<del> if self.stride:
<del> self.st = (self.stride, 1)
<del> else:
<del> self.st = None
<add> self.st = (self.stride, 1)
<ide>
<ide> self.input = T.tensor3()
<ide> self.poolsize = (pool_length, 1)
<ide> self.ignore_border = ignore_border
<ide>
<del> def get_output(self, train):
<add> @property
<add> def output_shape(self):
<add> input_shape = self.input_shape
<add> length = pool_output_length(input_shape[1], self.pool_length, self.ignore_border, self.stride)
<add> return (input_shape[0], length, input_shape[2])
<add>
<add> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> X = T.reshape(X, (X.shape[0], X.shape[1], X.shape[2], 1)).dimshuffle(0, 2, 1, 3)
<ide> output = downsample.max_pool_2d(X, ds=self.poolsize, st=self.st, ignore_border=self.ignore_border)
<ide> def get_config(self):
<ide>
<ide>
<ide> class MaxPooling2D(Layer):
<del> def __init__(self, poolsize=(2, 2), stride=None, ignore_border=True):
<add> def __init__(self, poolsize=(2, 2), stride=(1, 1), ignore_border=True):
<ide> super(MaxPooling2D, self).__init__()
<ide> self.input = T.tensor4()
<ide> self.poolsize = tuple(poolsize)
<del> self.stride = stride
<add> self.stride = tuple(stride)
<ide> self.ignore_border = ignore_border
<ide>
<del> def get_output(self, train):
<add> @property
<add> def output_shape(self):
<add> input_shape = self.input_shape
<add> rows = pool_output_length(input_shape[2], self.poolsize[0], self.ignore_border, self.stride[0])
<add> cols = pool_output_length(input_shape[3], self.poolsize[1], self.ignore_border, self.stride[1])
<add> return (input_shape[0], input_shape[1], rows, cols)
<add>
<add> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> output = downsample.max_pool_2d(X, ds=self.poolsize, st=self.stride, ignore_border=self.ignore_border)
<ide> return output
<ide> def __init__(self, length=2):
<ide> self.length = length
<ide> self.input = T.tensor3()
<ide>
<del> def get_output(self, train):
<add> @property
<add> def output_shape(self):
<add> input_shape = self.input_shape
<add> return (input_shape[0], self.length * input_shape[1], input_shape[2])
<add>
<add> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> output = theano.tensor.extra_ops.repeat(X, self.length, axis=1)
<ide> return output
<ide> def __init__(self, size=(2, 2)):
<ide> self.input = T.tensor4()
<ide> self.size = tuple(size)
<ide>
<del> def get_output(self, train):
<add> @property
<add> def output_shape(self):
<add> input_shape = self.input_shape
<add> return (input_shape[0], input_shape[1], self.size[0] * input_shape[2], self.size[1] * input_shape[3])
<add>
<add> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> Y = theano.tensor.extra_ops.repeat(X, self.size[0], axis=2)
<ide> output = theano.tensor.extra_ops.repeat(Y, self.size[1], axis=3)
<ide> def __init__(self, pad=(1, 1)):
<ide> self.pad = tuple(pad)
<ide> self.input = T.tensor4()
<ide>
<del> def get_output(self, train):
<add> @property
<add> def output_shape(self):
<add> input_shape = self.input_shape
<add> return (input_shape[0], input_shape[1], input_shape[2] + 2 * self.pad[0], input_shape[3] + 2 * self.pad[1])
<add>
<add> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> pad = self.pad
<ide> in_shape = X.shape
<ide><path>keras/layers/core.py
<ide> import numpy as np
<ide>
<ide> from collections import OrderedDict
<add>import copy
<ide>
<ide> from .. import activations, initializations, regularizers, constraints
<ide> from ..utils.theano_utils import shared_zeros, floatX
<ide> def nb_input(self):
<ide> def nb_output(self):
<ide> return 1
<ide>
<add> @property
<add> def input_shape(self):
<add> # if layer is not connected (e.g. input layer),
<add> # input shape can be set manually via _input_shape attribute.
<add> if hasattr(self, 'previous'):
<add> return self.previous.output_shape
<add> elif hasattr(self, '_input_shape'):
<add> return self._input_shape
<add> else:
<add> raise NotImplementedError
<add>
<add> @property
<add> def output_shape(self):
<add> # default assumption: tensor shape unchanged.
<add> return self.input_shape
<add>
<ide> def get_output(self, train=False):
<ide> return self.get_input(train)
<ide>
<ide>     def get_input(self, train=False):
<ide>         if hasattr(self, 'previous'):
<ide>             return self.previous.get_output(train=train)
<del>         else:
<add>         elif hasattr(self, 'input'):
<ide>             return self.input
<add>         else:
<add>             # Implicit adjacent-literal concatenation keeps the message on
<add>             # one logical line; a backslash-continued literal would embed
<add>             # the continuation line's leading spaces in the message text.
<add>             raise Exception('Layer is not connected '
<add>                             'and is not an input layer.')
<ide>
<ide> def supports_masked_input(self):
<ide> ''' Whether or not this layer respects the output mask of its previous layer in its calculations. If you try
<ide> def set_name(self, name):
<ide> def count_params(self):
<ide> return sum([np.prod(p.shape.eval()) for p in self.params])
<ide>
<add>
<ide> class MaskedLayer(Layer):
<ide> '''
<ide> If your layer trivially supports masking (by simply copying the input mask to the output), then subclass MaskedLayer
<ide> def get_config(self):
<ide> return {"name": self.__class__.__name__,
<ide> "mask_value": self.mask_value}
<ide>
<add>
<ide> class TimeDistributedMerge(Layer):
<add> '''Sum/multiply/average over the outputs of a TimeDistributed layer.
<add>
<add> mode: {'sum', 'mul', 'ave'}
<add> Tensor input dimensions: (nb_sample, time, features)
<add> Tensor output dimensions: (nb_sample, features)
<add> '''
<ide> def __init__(self, mode='sum'):
<del> '''
<del> Sum/multiply/avearge over a time distributed layer's outputs.
<del> mode: {'sum', 'mul', 'ave'}
<del> Tensor input dimensions: (nb_sample, shared_dimension, input_dim)
<del> Tensor output dimensions: (nb_sample, output_dim)
<del> '''
<ide> self.mode = mode
<ide> self.params = []
<ide> self.regularizers = []
<ide> self.constraints = []
<ide> self.updates = []
<ide>
<add> @property
<add> def output_shape(self):
<add> return (None, self.input_shape[2])
<add>
<ide> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> if self.mode == 'sum' or self.mode == 'ave':
<ide> def __init__(self, layers, mode='sum', concat_axis=-1):
<ide> '''
<ide> if len(layers) < 2:
<ide> raise Exception("Please specify two or more input layers (or containers) to merge")
<add> if mode not in {'sum', 'mul', 'concat', 'ave'}:
<add> raise Exception("Invalid merge mode: " + str(mode))
<ide> self.mode = mode
<ide> self.concat_axis = concat_axis
<ide> self.layers = layers
<ide> def __init__(self, layers, mode='sum', concat_axis=-1):
<ide> self.params.append(p)
<ide> self.constraints.append(c)
<ide>
<add>    @property
<add>    def output_shape(self):
<add>        # Elementwise modes ('sum', 'mul', 'ave') preserve the shape of the
<add>        # first input; 'concat' sums the sizes along the concat axis.
<add>        input_shapes = [layer.output_shape for layer in self.layers]
<add>        if self.mode in ['sum', 'mul', 'ave']:
<add>            return input_shapes[0]
<add>        elif self.mode == 'concat':
<add>            output_shape = list(input_shapes[0])
<add>            for shape in input_shapes[1:]:
<add>                # must read self.concat_axis: a bare `concat_axis` is not in
<add>                # scope inside this property and would raise NameError
<add>                output_shape[self.concat_axis] += shape[self.concat_axis]
<add>            return tuple(output_shape)
<add>
<ide> def get_params(self):
<ide> return self.params, self.regularizers, self.constraints, self.updates
<ide>
<ide> def __init__(self, *dims):
<ide> dims = dims[0]
<ide> self.dims = tuple(dims)
<ide>
<add> @property
<add> def output_shape(self):
<add> return make_tuple(self.input_shape[0], *self.dims)
<add>
<ide> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> nshape = make_tuple(X.shape[0], *self.dims)
<ide> def get_config(self):
<ide>
<ide> class Permute(Layer):
<ide> '''
<del> Permute the dimensions of the data according to the given tuple
<add> Permute the dimensions of the input according to the given tuple.
<ide> '''
<ide> def __init__(self, dims):
<ide> super(Permute, self).__init__()
<ide> self.dims = tuple(dims)
<ide>
<del> def get_output(self, train):
<add> @property
<add> def output_shape(self):
<add> input_shape = list(self.input_shape)
<add> output_shape = copy.copy(input_shape)
<add> for i, dim in enumerate(self.dims):
<add> target_dim = input_shape[dim]
<add> output_shape[i+1] = target_dim
<add> return tuple(output_shape)
<add>
<add> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> return X.dimshuffle((0,) + self.dims)
<ide>
<ide> class Flatten(Layer):
<ide> def __init__(self):
<ide> super(Flatten, self).__init__()
<ide>
<add> @property
<add> def output_shape(self):
<add> input_shape = self.input_shape
<add> return (input_shape[0], np.prod(input_shape[1:]))
<add>
<ide> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> size = theano.tensor.prod(X.shape) // X.shape[0]
<ide> def __init__(self, n):
<ide> super(RepeatVector, self).__init__()
<ide> self.n = n
<ide>
<add> @property
<add> def output_shape(self):
<add> input_shape = self.input_shape
<add> return (input_shape[0], self.n, input_shape[1])
<add>
<ide> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> tensors = [X]*self.n
<ide> def set_name(self, name):
<ide> self.W.name = '%s_W' % name
<ide> self.b.name = '%s_b' % name
<ide>
<add> @property
<add> def output_shape(self):
<add> return (self.input_shape[0], self.output_dim)
<add>
<ide> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> output = self.activation(T.dot(X, self.W) + self.b)
<ide> def get_config(self):
<ide>
<ide> class TimeDistributedDense(MaskedLayer):
<ide> '''
<del> Apply a same DenseLayer for each dimension[1] (shared_dimension) input
<del> Especially useful after a recurrent network with 'return_sequence=True'
<del> Tensor input dimensions: (nb_sample, shared_dimension, input_dim)
<del> Tensor output dimensions: (nb_sample, shared_dimension, output_dim)
<add> Apply a same Dense layer for each dimension[1] (time_dimension) input.
<add> Especially useful after a recurrent network with 'return_sequence=True'.
<add> Tensor input dimensions: (nb_sample, time_dimension, input_dim)
<add> Tensor output dimensions: (nb_sample, time_dimension, output_dim)
<ide>
<ide> '''
<ide> def __init__(self, input_dim, output_dim, init='glorot_uniform', activation='linear', weights=None,
<ide> def __init__(self, input_dim, output_dim, init='glorot_uniform', activation='lin
<ide> if weights is not None:
<ide> self.set_weights(weights)
<ide>
<add> @property
<add> def output_shape(self):
<add> input_shape = self.input_shape
<add> return (input_shape[0], input_shape[1], self.output_dim)
<add>
<ide> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> output = self.activation(T.dot(X.dimshuffle(1, 0, 2), self.W) + self.b)
<ide> def get_config(self):
<ide>
<ide>
<ide> class AutoEncoder(Layer):
<del> '''
<del> A customizable autoencoder model.
<del> If output_reconstruction then dim(input) = dim(output)
<del> else dim(output) = dim(hidden)
<add> '''A customizable autoencoder model.
<add>
<add> Tensor input dimensions: same as encoder input
<add> Tensor output dimensions:
<add> if output_reconstruction:
<add> same as encoder output
<add> else:
<add> same as decoder output
<ide> '''
<ide> def __init__(self, encoder, decoder, output_reconstruction=True, weights=None):
<ide>
<ide> def input(self):
<ide> def _get_hidden(self, train=False):
<ide> return self.encoder.get_output(train)
<ide>
<add>    @property
<add>    def input_shape(self):
<add>        # Propagate the shape of whatever feeds the encoder. Without the
<add>        # explicit `return` the property would silently evaluate to None.
<add>        return self.encoder.previous.output_shape
<add>
<add> @property
<add> def output_shape(self):
<add> if self.output_reconstruction:
<add> return self.encoder.previous.output_shape
<add> else:
<add> return self.decoder.previous.output_shape
<add>
<ide> def get_output(self, train=False):
<ide> if not train and not self.output_reconstruction:
<ide> return self.encoder.get_output(train)
<ide> def __init__(self, input_dim, output_dim, nb_feature=4, init='glorot_uniform', w
<ide> if weights is not None:
<ide> self.set_weights(weights)
<ide>
<add> @property
<add> def output_shape(self):
<add> return (self.input_shape[0], self.output_dim)
<add>
<ide> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> # -- don't need activation since it's just linear.
<ide><path>keras/layers/embeddings.py
<ide> def get_output_mask(self, train=None):
<ide> else:
<ide> return T.ones_like(X) * (1 - T.eq(X, 0))
<ide>
<add> @property
<add> def output_shape(self):
<add> return (self.input_shape[0], None, self.output_dim)
<add>
<ide> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> out = self.W[X]
<ide> def __init__(self, input_dim, proj_dim=128,
<ide> if weights is not None:
<ide> self.set_weights(weights)
<ide>
<add> @property
<add> def output_shape(self):
<add> return (self.input_shape[0], 1)
<add>
<ide> def get_output(self, train=False):
<ide> X = self.get_input(train)
<ide> w = self.W_w[X[:, 0]] # nb_samples, proj_dim
<ide><path>keras/layers/normalization.py
<ide> class BatchNormalization(Layer):
<ide> def __init__(self, input_shape, epsilon=1e-6, mode=0, momentum=0.9, weights=None):
<ide> super(BatchNormalization, self).__init__()
<ide> self.init = initializations.get("uniform")
<del> self.input_shape = input_shape
<add> self._input_shape = input_shape
<ide> self.epsilon = epsilon
<ide> self.mode = mode
<ide> self.momentum = momentum
<del> self.input = ndim_tensor(len(self.input_shape) + 1)
<add> self.input = ndim_tensor(len(input_shape) + 1)
<ide>
<del> self.gamma = self.init((self.input_shape))
<del> self.beta = shared_zeros(self.input_shape)
<add> self.gamma = self.init((input_shape))
<add> self.beta = shared_zeros(input_shape)
<ide>
<ide> self.params = [self.gamma, self.beta]
<del> self.running_mean = shared_zeros(self.input_shape)
<del> self.running_std = shared_ones((self.input_shape))
<add> self.running_mean = shared_zeros(input_shape)
<add> self.running_std = shared_ones((input_shape))
<ide> if weights is not None:
<ide> self.set_weights(weights)
<ide>
<ide> def get_output(self, train):
<ide>
<ide> def get_config(self):
<ide> return {"name": self.__class__.__name__,
<del> "input_shape": self.input_shape,
<add> "input_shape": self._input_shape,
<ide> "epsilon": self.epsilon,
<ide> "mode": self.mode}
<ide>
<ide><path>keras/layers/recurrent.py
<ide> def get_padded_shuffled_mask(self, train, X, pad=0):
<ide> mask = T.concatenate([padding, mask], axis=0)
<ide> return mask.astype('int8')
<ide>
<add> @property
<add> def output_shape(self):
<add> input_shape = self.input_shape
<add> if self.return_sequences:
<add> return (input_shape[0], input_shape[1], self.output_dim)
<add> else:
<add> return (input_shape[0], self.output_dim)
<add>
<ide>
<ide> class SimpleRNN(Recurrent):
<ide> '''
<ide><path>tests/auto/keras/layers/test_convolutional.py
<ide> def test_convolution_1d(self):
<ide> for weight in [None, weights_in]:
<ide> for border_mode in ['valid', 'full', 'same']:
<ide> for subsample_length in [1, 3]:
<add> if border_mode == 'same' and subsample_length != 1:
<add> continue
<ide> for W_regularizer in [None, 'l2']:
<ide> for b_regularizer in [None, 'l2']:
<ide> for act_regularizer in [None, 'l2']:
<ide> def test_maxpooling_1d(self):
<ide>
<ide> input = np.ones((nb_samples, nb_steps, input_dim))
<ide> for ignore_border in [True, False]:
<del> for stride in [None, 2]:
<add> for stride in [1, 2]:
<ide> layer = convolutional.MaxPooling1D(stride=stride, ignore_border=ignore_border)
<ide> layer.input = theano.shared(value=input)
<ide> for train in [True, False]:
<ide> def test_convolution_2d(self):
<ide> for weight in [None, weights_in]:
<ide> for border_mode in ['valid', 'full', 'same']:
<ide> for subsample in [(1, 1), (2, 3)]:
<add> if border_mode == 'same' and subsample != (1, 1):
<add> continue
<ide> for W_regularizer in [None, 'l2']:
<ide> for b_regularizer in [None, 'l2']:
<ide> for act_regularizer in [None, 'l2']:
<ide> def test_maxpooling_2d(self):
<ide>
<ide> input = np.ones((nb_samples, stack_size, input_nb_row, input_nb_col))
<ide> for ignore_border in [True, False]:
<del> for stride in [None, (2, 2)]:
<add> for stride in [(1, 1), (2, 2)]:
<ide> layer = convolutional.MaxPooling2D(stride=stride, ignore_border=ignore_border, poolsize=poolsize)
<ide> layer.input = theano.shared(value=input)
<ide> for train in [True, False]:
<ide><path>tests/auto/keras/layers/test_core.py
<ide> def test_input_output(self):
<ide> input_dim = 5
<ide> layer = core.Layer()
<ide>
<del> # As long as there is no input, an error should be raised.
<del> for train in [True, False]:
<del> self.assertRaises(AttributeError, layer.get_input, train)
<del> self.assertRaises(AttributeError, layer.get_output, train)
<del>
<ide> # Once an input is provided, it should be reachable through the
<ide> # appropriate getters
<ide> input = np.ones((nb_samples, input_dim))
<ide> def test_connections(self):
<ide> input = np.ones((nb_samples, input_dim))
<ide> layer1.input = theano.shared(value=input)
<ide>
<del> # As long as there is no previous layer, an error should be raised.
<del> for train in [True, False]:
<del> self.assertRaises(AttributeError, layer2.get_input, train)
<del>
<ide> # After connecting, input of layer1 should be passed through
<ide> layer2.set_previous(layer1)
<ide> for train in [True, False]:
<ide><path>tests/auto/test_shape_inference.py
<add>import unittest
<add>import numpy as np
<add>import theano
<add>from keras.utils.theano_utils import ndim_tensor
<add>from keras.layers.core import *
<add>from keras.layers.convolutional import *
<add>from keras.layers.recurrent import SimpleRNN
<add>
<add>
<add>def check_layer_output_shape(layer, input_data):
<add> ndim = len(input_data.shape)
<add> layer.input = ndim_tensor(ndim)
<add> layer._input_shape = input_data.shape
<add> expected_output_shape = layer.output_shape
<add>
<add> function = theano.function([layer.input], [layer.get_output()])
<add> output = function(input_data)[0]
<add> print output.shape, '(exact) vs.', expected_output_shape, '(predicted)'
<add>
<add> assert output.shape == expected_output_shape
<add>
<add>
<add>class TestShapeInference(unittest.TestCase):
<add> # ########
<add> # # Core #
<add> # ########
<add> def test_Reshape(self):
<add> layer = Reshape(2, 3)
<add> input_data = np.random.random((2, 6))
<add> check_layer_output_shape(layer, input_data)
<add>
<add> def test_Permute(self):
<add> layer = Permute(dims=(1, 3, 2))
<add> input_data = np.random.random((2, 2, 4, 3))
<add> check_layer_output_shape(layer, input_data)
<add>
<add> def test_Flatten(self):
<add> layer = Flatten()
<add> input_data = np.random.random((2, 2, 3))
<add> check_layer_output_shape(layer, input_data)
<add>
<add> def test_RepeatVector(self):
<add> layer = RepeatVector(2)
<add> input_data = np.random.random((2, 2))
<add> check_layer_output_shape(layer, input_data)
<add>
<add> def test_Dense(self):
<add> layer = Dense(2, 3)
<add> input_data = np.random.random((2, 2))
<add> check_layer_output_shape(layer, input_data)
<add>
<add> def test_TimeDistributedDense(self):
<add> layer = TimeDistributedDense(3, 2)
<add> input_data = np.random.random((2, 2, 3))
<add> check_layer_output_shape(layer, input_data)
<add>
<add> #################
<add> # Convolutional #
<add> #################
<add> def test_Convolution1D(self):
<add> for border_mode in ['same', 'full', 'valid']:
<add> for filter_length in [2, 3]:
<add> for subsample_length in [1, 2]:
<add> if subsample_length > 1 and border_mode == 'same':
<add> continue
<add> for input_data_shape in [(2, 3, 2), (2, 4, 2)]:
<add> layer = Convolution1D(input_dim=2, nb_filter=1, filter_length=filter_length,
<add> border_mode=border_mode, subsample_length=subsample_length)
<add> input_data = np.random.random(input_data_shape)
<add> check_layer_output_shape(layer, input_data)
<add>
<add>    def test_Convolution2D(self):
<add>        for border_mode in ['same', 'full', 'valid']:
<add>            for nb_row, nb_col in [(2, 1), (3, 2)]:
<add>                for subsample in [(1, 1), (2, 2)]:
<add>                    # 'same' border mode does not support strided convolution
<add>                    if (subsample[0] > 1 or subsample[1] > 1) and border_mode == 'same':
<add>                        continue
<add>                    for input_data_shape in [(2, 1, 3, 3), (2, 1, 4, 4)]:
<add>                        print 'border_mode:', border_mode
<add>                        print 'subsample:', subsample
<add>                        print 'input_data_shape:', input_data_shape
<add>                        # pass nb_col for the column dim; using nb_row for both
<add>                        # would make the non-square (2, 1) / (3, 2) cases no-ops
<add>                        layer = Convolution2D(nb_filter=1, stack_size=1, nb_row=nb_row, nb_col=nb_col,
<add>                                              border_mode=border_mode, subsample=subsample)
<add>                        input_data = np.random.random(input_data_shape)
<add>                        check_layer_output_shape(layer, input_data)
<add>
<add> def test_MaxPooling1D(self):
<add> for ignore_border in [True, False]:
<add> for stride in [1, 2]:
<add> for pool_length in [1, 2]:
<add> for input_data_shape in [(2, 1, 3), (2, 1, 4)]:
<add> layer = MaxPooling1D(pool_length=pool_length, stride=stride, ignore_border=ignore_border)
<add> input_data = np.random.random(input_data_shape)
<add> check_layer_output_shape(layer, input_data)
<add>
<add> def test_MaxPooling2D(self):
<add> for ignore_border in [True, False]:
<add> for stride in [(1, 1), (2, 2)]:
<add> for poolsize in [(2, 2), (3, 3), (4, 4)]:
<add> for input_data_shape in [(2, 1, 3, 3), (2, 1, 4, 4), (2, 1, 5, 5), (2, 1, 6, 6)]:
<add> layer = MaxPooling2D(poolsize=poolsize, stride=stride, ignore_border=ignore_border)
<add> input_data = np.random.random(input_data_shape)
<add> check_layer_output_shape(layer, input_data)
<add>
<add> def test_UpSample1D(self):
<add> layer = UpSample1D(length=2)
<add> input_data = np.random.random((2, 2, 3))
<add> check_layer_output_shape(layer, input_data)
<add>
<add> def test_UpSample2D(self):
<add> layer = UpSample2D(size=(2, 2))
<add> input_data = np.random.random((2, 1, 2, 3))
<add> check_layer_output_shape(layer, input_data)
<add>
<add> def test_ZeroPadding2D(self):
<add> layer = ZeroPadding2D((1, 2))
<add> input_data = np.random.random((2, 1, 2, 3))
<add> check_layer_output_shape(layer, input_data)
<add>
<add> # #############
<add> # # Recurrent #
<add> # #############
<add> def test_SimpleRNN(self):
<add> # all recurrent layers inherit output_shape
<add> # from the same base recurrent layer
<add> layer = SimpleRNN(3, 2)
<add> input_data = np.random.random((2, 2, 3))
<add> check_layer_output_shape(layer, input_data)
<add>
<add>
<add>if __name__ == "__main__":
<add> unittest.main() | 9 |
Mixed | Ruby | make disable_with default in submit_tag | 3822a322a82a19a9341a21a0cb1e36653da09c46 | <ide><path>actionview/CHANGELOG.md
<add>* Make `disable_with` the default behavior for submit tags. Disables the
<add> button on submit to prevent double submits.
<add>
<add> *Justin Schiff*
<add>
<ide> * Add a break_sequence option to word_wrap so you can specify a custom break.
<ide>
<ide> * Mauricio Gomez *
<ide><path>actionview/lib/action_view/base.rb
<ide> class Base
<ide> cattr_accessor :raise_on_missing_translations
<ide> @@raise_on_missing_translations = false
<ide>
<add> # Specify whether submit_tag should automatically disable on click
<add> cattr_accessor :automatically_disable_submit_tag
<add> @@automatically_disable_submit_tag = true
<add>
<ide> class_attribute :_routes
<ide> class_attribute :logger
<ide>
<ide><path>actionview/lib/action_view/helpers/form_tag_helper.rb
<ide> def radio_button_tag(name, value, checked = false, options = {})
<ide> # the form is processed normally, otherwise no action is taken.
<ide> # * <tt>:disable_with</tt> - Value of this parameter will be used as the value for a
<ide> # disabled version of the submit button when the form is submitted. This feature is
<del> # provided by the unobtrusive JavaScript driver.
<add> # provided by the unobtrusive JavaScript driver. To disable this feature for a single submit tag
<add> # pass <tt>:data => { disable_with: false }</tt> Defaults to value attribute.
<ide> #
<ide> # ==== Examples
<ide> # submit_tag
<del> # # => <input name="commit" type="submit" value="Save changes" />
<add> # # => <input name="commit" data-disable-with="Save changes" type="submit" value="Save changes" />
<ide> #
<ide> # submit_tag "Edit this article"
<del> # # => <input name="commit" type="submit" value="Edit this article" />
<add> # # => <input name="commit" data-disable-with="Edit this article" type="submit" value="Edit this article" />
<ide> #
<ide> # submit_tag "Save edits", disabled: true
<del> # # => <input disabled="disabled" name="commit" type="submit" value="Save edits" />
<add> # # => <input disabled="disabled" name="commit" data-disable-with="Save edits" type="submit" value="Save edits" />
<ide> #
<del> # submit_tag "Complete sale", data: { disable_with: "Please wait..." }
<del> # # => <input name="commit" data-disable-with="Please wait..." type="submit" value="Complete sale" />
<add> # submit_tag "Complete sale", data: { disable_with: "Submitting..." }
<add> # # => <input name="commit" data-disable-with="Submitting..." type="submit" value="Complete sale" />
<ide> #
<ide> # submit_tag nil, class: "form_submit"
<ide> # # => <input class="form_submit" name="commit" type="submit" />
<ide> #
<ide> # submit_tag "Edit", class: "edit_button"
<del> # # => <input class="edit_button" name="commit" type="submit" value="Edit" />
<add> # # => <input class="edit_button" data-disable-with="Edit" name="commit" type="submit" value="Edit" />
<ide> #
<ide> # submit_tag "Save", data: { confirm: "Are you sure?" }
<del> # # => <input name='commit' type='submit' value='Save' data-confirm="Are you sure?" />
<add> # # => <input name='commit' type='submit' value='Save' data-disable-with="Save" data-confirm="Are you sure?" />
<ide> #
<ide> def submit_tag(value = "Save changes", options = {})
<ide> options = options.stringify_keys
<add> tag_options = { "type" => "submit", "name" => "commit", "value" => value }.update(options)
<add>
<add> if ActionView::Base.automatically_disable_submit_tag
<add> unless tag_options["data-disable-with"] == false || (tag_options["data"] && tag_options["data"][:disable_with] == false)
<add> disable_with_text = tag_options["data-disable-with"]
<add> disable_with_text ||= tag_options["data"][:disable_with] if tag_options["data"]
<add> disable_with_text ||= value.clone
<add> tag_options.deep_merge!("data" => { "disable_with" => disable_with_text })
<add> else
<add> tag_options.delete("data-disable-with")
<add> tag_options["data"].delete(:disable_with) if tag_options["data"]
<add> end
<add> end
<ide>
<del> tag :input, { "type" => "submit", "name" => "commit", "value" => value }.update(options)
<add> tag :input, tag_options
<ide> end
<ide>
<ide> # Creates a button element that defines a <tt>submit</tt> button,
<ide><path>actionview/test/template/form_helper_test.rb
<ide> def test_form_for
<ide> "<textarea name='post[body]' id='post_body'>\nBack to the hill and over it again!</textarea>" +
<ide> "<input name='post[secret]' type='hidden' value='0' />" +
<ide> "<input name='post[secret]' checked='checked' type='checkbox' id='post_secret' value='1' />" +
<del> "<input name='commit' type='submit' value='Create post' />" +
<add> "<input name='commit' data-disable-with='Create post' type='submit' value='Create post' />" +
<ide> "<button name='button' type='submit'>Create post</button>" +
<ide> "<button name='button' type='submit'><span>Create post</span></button>"
<ide> end
<ide> def test_form_for_with_model_using_relative_model_naming
<ide>
<ide> expected = whole_form("/posts/44", "edit_post_44", "edit_post", method: "patch") do
<ide> "<input name='post[title]' type='text' id='post_title' value='And his name will be forty and four.' />" +
<del> "<input name='commit' type='submit' value='Edit post' />"
<add> "<input name='commit' data-disable-with='Edit post' type='submit' value='Edit post' />"
<ide> end
<ide>
<ide> assert_dom_equal expected, output_buffer
<ide> def test_form_for_with_symbol_object_name
<ide> "<textarea name='other_name[body]' id='other_name_body'>\nBack to the hill and over it again!</textarea>" +
<ide> "<input name='other_name[secret]' value='0' type='hidden' />" +
<ide> "<input name='other_name[secret]' checked='checked' id='other_name_secret' value='1' type='checkbox' />" +
<del> "<input name='commit' value='Create post' type='submit' />"
<add> "<input name='commit' value='Create post' data-disable-with='Create post' type='submit' />"
<ide> end
<ide>
<ide> assert_dom_equal expected, output_buffer
<ide> def test_form_for_label_error_wrapping
<ide> expected = whole_form('/posts/123', 'edit_post_123', 'edit_post', method: 'patch') do
<ide> "<div class='field_with_errors'><label for='post_author_name' class='label'>Author name</label></div>" +
<ide> "<div class='field_with_errors'><input name='post[author_name]' type='text' id='post_author_name' value='' /></div>" +
<del> "<input name='commit' type='submit' value='Create post' />"
<add> "<input name='commit' data-disable-with='Create post' type='submit' value='Create post' />"
<ide> end
<ide>
<ide> assert_dom_equal expected, output_buffer
<ide> def test_form_for_label_error_wrapping_without_conventional_instance_variable
<ide> expected = whole_form('/posts/123', 'edit_post_123', 'edit_post', method: 'patch') do
<ide> "<div class='field_with_errors'><label for='post_author_name' class='label'>Author name</label></div>" +
<ide> "<div class='field_with_errors'><input name='post[author_name]' type='text' id='post_author_name' value='' /></div>" +
<del> "<input name='commit' type='submit' value='Create post' />"
<add> "<input name='commit' data-disable-with='Create post' type='submit' value='Create post' />"
<ide> end
<ide>
<ide> assert_dom_equal expected, output_buffer
<ide> def test_submit_with_object_as_new_record_and_locale_strings
<ide> end
<ide>
<ide> expected = whole_form('/posts', 'new_post', 'new_post') do
<del> "<input name='commit' type='submit' value='Create Post' />"
<add> "<input name='commit' data-disable-with='Create Post' type='submit' value='Create Post' />"
<ide> end
<ide>
<ide> assert_dom_equal expected, output_buffer
<ide> def test_submit_with_object_as_existing_record_and_locale_strings
<ide> end
<ide>
<ide> expected = whole_form('/posts/123', 'edit_post_123', 'edit_post', method: 'patch') do
<del> "<input name='commit' type='submit' value='Confirm Post changes' />"
<add> "<input name='commit' data-disable-with='Confirm Post changes' type='submit' value='Confirm Post changes' />"
<ide> end
<ide>
<ide> assert_dom_equal expected, output_buffer
<ide> def test_submit_without_object_and_locale_strings
<ide> end
<ide>
<ide> expected = whole_form do
<del> "<input name='commit' class='extra' type='submit' value='Save changes' />"
<add> "<input name='commit' class='extra' data-disable-with='Save changes' type='submit' value='Save changes' />"
<ide> end
<ide>
<ide> assert_dom_equal expected, output_buffer
<ide> def test_submit_with_object_and_nested_lookup
<ide> end
<ide>
<ide> expected = whole_form('/posts/123', 'edit_another_post', 'edit_another_post', method: 'patch') do
<del> "<input name='commit' type='submit' value='Update your Post' />"
<add> "<input name='commit' data-disable-with='Update your Post' type='submit' value='Update your Post' />"
<ide> end
<ide>
<ide> assert_dom_equal expected, output_buffer
<ide><path>actionview/test/template/form_tag_helper_test.rb
<ide> def test_submit_tag
<ide> )
<ide> end
<ide>
<add> def test_empty_submit_tag
<add> assert_dom_equal(
<add> %(<input data-disable-with="Save" name='commit' type="submit" value="Save" />),
<add> submit_tag("Save")
<add> )
<add> end
<add>
<add> def test_empty_submit_tag_with_opt_out
<add> ActionView::Base.automatically_disable_submit_tag = false
<add> assert_dom_equal(
<add> %(<input name='commit' type="submit" value="Save" />),
<add> submit_tag("Save")
<add> )
<add> ensure
<add> ActionView::Base.automatically_disable_submit_tag = true
<add> end
<add>
<add> def test_data_disable_with_string
<add> assert_dom_equal(
<add> %(<input data-disable-with="Processing..." data-confirm="Are you sure?" name='commit' type="submit" value="Save" />),
<add> submit_tag("Save", { "data-disable-with" => "Processing...", "data-confirm" => "Are you sure?" })
<add> )
<add> end
<add>
<add> def test_data_disable_with_boolean
<add> assert_dom_equal(
<add> %(<input data-confirm="Are you sure?" name='commit' type="submit" value="Save" />),
<add> submit_tag("Save", { "data-disable-with" => false, "data-confirm" => "Are you sure?" })
<add> )
<add> end
<add>
<add> def test_data_hash_disable_with_boolean
<add> assert_dom_equal(
<add> %(<input data-confirm="Are you sure?" name='commit' type="submit" value="Save" />),
<add> submit_tag("Save", { :data => { :confirm => "Are you sure?", :disable_with => false } })
<add> )
<add> end
<add>
<ide> def test_submit_tag_with_no_onclick_options
<ide> assert_dom_equal(
<ide> %(<input name='commit' data-disable-with="Saving..." type="submit" value="Save" />),
<ide> def test_submit_tag_with_no_onclick_options
<ide>
<ide> def test_submit_tag_with_confirmation
<ide> assert_dom_equal(
<del> %(<input name='commit' type='submit' value='Save' data-confirm="Are you sure?" />),
<add> %(<input name='commit' type='submit' value='Save' data-confirm="Are you sure?" data-disable-with="Save" />),
<ide> submit_tag("Save", :data => { :confirm => "Are you sure?" })
<ide> )
<ide> end
<ide><path>guides/source/configuring.md
<ide> encrypted cookies salt value. Defaults to `'signed encrypted cookie'`.
<ide> * `config.action_view.raise_on_missing_translations` determines whether an
<ide> error should be raised for missing translations.
<ide>
<add>* `config.action_view.automatically_disable_submit_tag` determines whether
<add>  `submit_tag` should automatically disable on click; this defaults to `true`.
<add>
<ide> ### Configuring Action Mailer
<ide>
<ide> There are a number of settings available on `config.action_mailer`: | 6 |
Ruby | Ruby | improve tests (closes ) [josh] | f0dc2774e80496bbc46f5b44e76deaa575131a42 | <ide><path>actionpack/test/template/javascript_helper_test.rb
<ide> def test_link_to_function_with_href
<ide> link_to_function("Greeting", "alert('Hello world!')", :href => 'http://example.com/')
<ide> end
<ide>
<add> def test_link_to_function_with_href
<add> assert_dom_equal %(<a href="http://example.com/" onclick="alert('Hello world!'); return false;">Greeting</a>),
<add> link_to_function("Greeting", "alert('Hello world!')", :href => 'http://example.com/')
<add> end
<add>
<ide> def test_button_to_function
<ide> assert_dom_equal %(<input type="button" onclick="alert('Hello world!');" value="Greeting" />),
<ide> button_to_function("Greeting", "alert('Hello world!')") | 1 |
PHP | PHP | integrate the new actiondispatcher into dispatcher | 268de40f2f264e93f0f315dbf90e8f0a2d725931 | <ide><path>src/Http/ActionDispatcher.php
<ide> public function __construct($factory = null, $eventManager = null)
<ide> */
<ide> public function dispatch(Request $request, Response $response)
<ide> {
<del> Router::pushRequest($request);
<add> if (Router::getRequest(true) !== $request) {
<add> Router::pushRequest($request);
<add>        // Use the controller built by a beforeDispatch
<ide> $beforeEvent = $this->dispatchEvent('Dispatcher.beforeDispatch', compact('request', 'response'));
<ide>
<ide> $request = $beforeEvent->data['request'];
<ide> if ($beforeEvent->result instanceof Response) {
<ide> return $beforeEvent->result;
<ide> }
<del> $controller = $this->factory->create($request, $response);
<add>
<add> // Use the controller built by an beforeDispatch
<add> // event handler if there is one.
<add> if (isset($beforeEvent->data['controller'])) {
<add> $controller = $beforeEvent->data['controller'];
<add> } else {
<add> $controller = $this->factory->create($request, $response);
<add> }
<add>
<ide> $response = $this->_invoke($controller);
<ide> if (isset($request->params['return'])) {
<ide> return $response;
<ide> public function dispatch(Request $request, Response $response)
<ide> */
<ide> protected function _invoke(Controller $controller)
<ide> {
<add> $this->dispatchEvent('Dispatcher.invokeController', ['controller' => $controller]);
<add>
<ide> $result = $controller->startupProcess();
<ide> if ($result instanceof Response) {
<ide> return $result;
<ide><path>src/Routing/Dispatcher.php
<ide> use Cake\Controller\Controller;
<ide> use Cake\Event\EventDispatcherTrait;
<ide> use Cake\Event\EventListenerInterface;
<add>use Cake\Http\ActionDispatcher;
<ide> use Cake\Network\Request;
<ide> use Cake\Network\Response;
<ide> use LogicException;
<ide> class Dispatcher
<ide> */
<ide> public function dispatch(Request $request, Response $response)
<ide> {
<del> $beforeEvent = $this->dispatchEvent('Dispatcher.beforeDispatch', compact('request', 'response'));
<del>
<del> $request = $beforeEvent->data['request'];
<del> if ($beforeEvent->result instanceof Response) {
<del> if (isset($request->params['return'])) {
<del> return $beforeEvent->result->body();
<del> }
<del> $beforeEvent->result->send();
<del> return null;
<del> }
<del>
<del> if (!isset($beforeEvent->data['controller'])) {
<del> throw new LogicException(
<del> 'The Dispatcher.beforeDispatch event did not create a controller. ' .
<del> 'Ensure you have added the ControllerFactoryFilter.'
<del> );
<add> $actionDispatcher = new ActionDispatcher(null, $this->eventManager());
<add> foreach ($this->_filters as $filter) {
<add> $actionDispatcher->addFilter($filter);
<ide> }
<del> $controller = $beforeEvent->data['controller'];
<del>
<del> $response = $this->_invoke($controller);
<add> $response = $actionDispatcher->dispatch($request, $response);
<ide> if (isset($request->params['return'])) {
<ide> return $response->body();
<ide> }
<del>
<del> $afterEvent = $this->dispatchEvent('Dispatcher.afterDispatch', compact('request', 'response'));
<del> $afterEvent->data['response']->send();
<del> }
<del>
<del> /**
<del> * Initializes the components and models a controller will be using.
<del> * Triggers the controller action and invokes the rendering if Controller::$autoRender
<del> * is true. If a response object is returned by controller action that is returned
<del> * else controller's $response property is returned.
<del> *
<del> * @param \Cake\Controller\Controller $controller Controller to invoke
<del> * @return \Cake\Network\Response The resulting response object
<del> * @throws \LogicException If data returned by controller action is not an
<del> * instance of Response
<del> */
<del> protected function _invoke(Controller $controller)
<del> {
<del> $result = $controller->startupProcess();
<del> if ($result instanceof Response) {
<del> return $result;
<del> }
<del>
<del> $response = $controller->invokeAction();
<del> if ($response !== null && !($response instanceof Response)) {
<del> throw new LogicException('Controller action can only return an instance of Response');
<del> }
<del>
<del> if (!$response && $controller->autoRender) {
<del> $response = $controller->render();
<del> } elseif (!$response) {
<del> $response = $controller->response;
<del> }
<del>
<del> $result = $controller->shutdownProcess();
<del> if ($result instanceof Response) {
<del> return $result;
<del> }
<del>
<del> return $response;
<add> return $response->send();
<ide> }
<ide>
<ide> /**
<ide> protected function _invoke(Controller $controller)
<ide> public function addFilter(EventListenerInterface $filter)
<ide> {
<ide> $this->_filters[] = $filter;
<del> $this->eventManager()->on($filter);
<ide> }
<ide>
<ide> /**
<ide><path>src/Routing/Filter/RoutingFilter.php
<ide> class RoutingFilter extends DispatcherFilter
<ide> public function beforeDispatch(Event $event)
<ide> {
<ide> $request = $event->data['request'];
<del> Router::setRequestInfo($request);
<add> if (Router::getRequest(true) !== $request) {
<add> Router::setRequestInfo($request);
<add> }
<ide>
<ide> try {
<ide> if (empty($request->params['controller'])) {
<ide><path>src/TestSuite/IntegrationTestCase.php
<ide> abstract class IntegrationTestCase extends TestCase
<ide> protected $_csrfToken = false;
<ide>
<ide> /**
<del> *
<ide> *
<ide> * @var null|string
<ide> */
<ide> protected function _sendRequest($url, $method, $data = [])
<ide> $response = new Response();
<ide> $dispatcher = DispatcherFactory::create();
<ide> $dispatcher->eventManager()->on(
<del> 'Dispatcher.beforeDispatch',
<add> 'Dispatcher.invokeController',
<ide> ['priority' => 999],
<ide> [$this, 'controllerSpy']
<ide> );
<ide> protected function _sendRequest($url, $method, $data = [])
<ide> * Adds additional event spies to the controller/view event manager.
<ide> *
<ide> * @param \Cake\Event\Event $event A dispatcher event.
<add> * @param \Cake\Controller\Controller $controller Controller instance.
<ide> * @return void
<ide> */
<del> public function controllerSpy($event)
<add> public function controllerSpy($event, $controller)
<ide> {
<del> if (empty($event->data['controller'])) {
<del> return;
<del> }
<del> $this->_controller = $event->data['controller'];
<del> $events = $this->_controller->eventManager();
<add> $this->_controller = $controller;
<add> $events = $controller->eventManager();
<ide> $events->on('View.beforeRender', function ($event, $viewFile) {
<ide> if (!$this->_viewName) {
<ide> $this->_viewName = $viewFile;
<ide><path>tests/TestCase/Routing/DispatcherTest.php
<ide> use Cake\Routing\Filter\ControllerFactoryFilter;
<ide> use Cake\TestSuite\TestCase;
<ide>
<del>/**
<del> * A testing stub that doesn't send headers.
<del> */
<del>class DispatcherMockResponse extends Response
<del>{
<del>
<del> protected function _sendHeader($name, $value = null)
<del> {
<del> return $name . ' ' . $value;
<del> }
<del>}
<del>
<del>/**
<del> * TestDispatcher class
<del> */
<del>class TestDispatcher extends Dispatcher
<del>{
<del>
<del> /**
<del> * Controller instance, made publicly available for testing
<del> *
<del> * @var Controller
<del> */
<del> public $controller;
<del>
<del> /**
<del> * invoke method
<del> *
<del> * @param \Cake\Controller\Controller $controller
<del> * @return \Cake\Network\Response $response
<del> */
<del> protected function _invoke(Controller $controller)
<del> {
<del> $this->controller = $controller;
<del> return parent::_invoke($controller);
<del> }
<del>}
<del>
<del>/**
<del> * MyPluginAppController class
<del> *
<del> */
<del>class MyPluginAppController extends Controller
<del>{
<del>}
<del>
<del>/**
<del> * MyPluginController class
<del> *
<del> */
<del>class MyPluginController extends MyPluginAppController
<del>{
<del>
<del> /**
<del> * name property
<del> *
<del> * @var string
<del> */
<del> public $name = 'MyPlugin';
<del>
<del> /**
<del> * index method
<del> *
<del> * @return void
<del> */
<del> public function index()
<del> {
<del> return true;
<del> }
<del>
<del> /**
<del> * add method
<del> *
<del> * @return void
<del> */
<del> public function add()
<del> {
<del> return true;
<del> }
<del>
<del> /**
<del> * admin_add method
<del> *
<del> * @param mixed $id
<del> * @return void
<del> */
<del> public function admin_add($id = null)
<del> {
<del> return $id;
<del> }
<del>}
<del>
<del>/**
<del> * OtherPagesController class
<del> *
<del> */
<del>class OtherPagesController extends MyPluginAppController
<del>{
<del>
<del> /**
<del> * name property
<del> *
<del> * @var string
<del> */
<del> public $name = 'OtherPages';
<del>
<del> /**
<del> * display method
<del> *
<del> * @param string $page
<del> * @return void
<del> */
<del> public function display($page = null)
<del> {
<del> return $page;
<del> }
<del>
<del> /**
<del> * index method
<del> *
<del> * @return void
<del> */
<del> public function index()
<del> {
<del> return true;
<del> }
<del>}
<del>
<del>/**
<del> * ArticlesTestAppController class
<del> *
<del> */
<del>class ArticlesTestAppController extends Controller
<del>{
<del>}
<del>
<del>/**
<del> * ArticlesTestController class
<del> *
<del> */
<del>class ArticlesTestController extends ArticlesTestAppController
<del>{
<del>
<del> /**
<del> * name property
<del> *
<del> * @var string
<del> */
<del> public $name = 'ArticlesTest';
<del>
<del> /**
<del> * admin_index method
<del> *
<del> * @return void
<del> */
<del> public function admin_index()
<del> {
<del> return true;
<del> }
<del>
<del> /**
<del> * fake index method.
<del> *
<del> * @return void
<del> */
<del> public function index()
<del> {
<del> return true;
<del> }
<del>}
<del>
<ide> /**
<ide> * DispatcherTest class
<ide> *
<ide> public function setUp()
<ide> Configure::write('App.webroot', 'webroot');
<ide> Configure::write('App.namespace', 'TestApp');
<ide>
<del> $this->dispatcher = new TestDispatcher();
<add> $this->dispatcher = new Dispatcher();
<ide> $this->dispatcher->addFilter(new ControllerFactoryFilter());
<ide> }
<ide>
<ide> public function testDispatchBasic()
<ide> 'controller' => 'Pages',
<ide> 'action' => 'display',
<ide> 'pass' => ['extract'],
<del> 'return' => 1
<ide> ]
<ide> ]);
<ide> $response = $this->getMock('Cake\Network\Response');
<add> $response->expects($this->once())
<add> ->method('send');
<ide>
<del> $this->dispatcher->dispatch($url, $response);
<del> $expected = 'Pages';
<del> $this->assertEquals($expected, $this->dispatcher->controller->name);
<add> $result = $this->dispatcher->dispatch($url, $response);
<add> $this->assertNull($result);
<ide> }
<ide>
<ide> /**
<ide> public function testDispatchActionReturnsResponse()
<ide> $this->assertEquals('new response', $result);
<ide> }
<ide>
<del> /**
<del> * testPrefixDispatch method
<del> *
<del> * @return void
<del> */
<del> public function testPrefixDispatch()
<del> {
<del> $request = new Request([
<del> 'url' => 'admin/posts/index',
<del> 'params' => [
<del> 'prefix' => 'Admin',
<del> 'controller' => 'Posts',
<del> 'action' => 'index',
<del> 'pass' => [],
<del> 'return' => 1
<del> ]
<del> ]);
<del> $response = $this->getMock('Cake\Network\Response');
<del>
<del> $this->dispatcher->dispatch($request, $response);
<del>
<del> $this->assertInstanceOf(
<del> 'TestApp\Controller\Admin\PostsController',
<del> $this->dispatcher->controller
<del> );
<del> $expected = '/admin/posts/index';
<del> $this->assertSame($expected, $request->here);
<del> }
<del>
<del> /**
<del> * test prefix dispatching in a plugin.
<del> *
<del> * @return void
<del> */
<del> public function testPrefixDispatchPlugin()
<del> {
<del> Plugin::load('TestPlugin');
<del>
<del> $request = new Request([
<del> 'url' => 'admin/test_plugin/comments/index',
<del> 'params' => [
<del> 'plugin' => 'TestPlugin',
<del> 'prefix' => 'Admin',
<del> 'controller' => 'Comments',
<del> 'action' => 'index',
<del> 'pass' => [],
<del> 'return' => 1
<del> ]
<del> ]);
<del> $response = $this->getMock('Cake\Network\Response');
<del>
<del> $this->dispatcher->dispatch($request, $response);
<del>
<del> $this->assertInstanceOf(
<del> 'TestPlugin\Controller\Admin\CommentsController',
<del> $this->dispatcher->controller
<del> );
<del> }
<del>
<ide> /**
<ide> * test forbidden controller names.
<ide> *
<ide><path>tests/TestCase/Routing/RequestActionTraitTest.php
<ide> public function testRequestAction()
<ide>
<ide> $result = $this->object->requestAction('');
<ide> $this->assertFalse($result);
<add> $this->assertNull(Router::getRequest(), 'requests were not popped off the stack, this will break url generation');
<ide>
<ide> $result = $this->object->requestAction('/request_action/test_request_action');
<ide> $expected = 'This is a test';
<ide> $this->assertEquals($expected, $result);
<add> $this->assertNull(Router::getRequest(), 'requests were not popped off the stack, this will break url generation');
<ide>
<ide> $result = $this->object->requestAction(Configure::read('App.fullBaseUrl') . '/request_action/test_request_action');
<ide> $expected = 'This is a test';
<ide> $this->assertEquals($expected, $result);
<add> $this->assertNull(Router::getRequest(), 'requests were not popped off the stack, this will break url generation');
<ide>
<ide> $result = $this->object->requestAction('/request_action/another_ra_test/2/5');
<ide> $expected = 7;
<ide> $this->assertEquals($expected, $result);
<add> $this->assertNull(Router::getRequest(), 'requests were not popped off the stack, this will break url generation');
<ide>
<ide> $result = $this->object->requestAction('/tests_apps/index', ['return']);
<ide> $expected = 'This is the TestsAppsController index view ';
<ide> $this->assertEquals($expected, $result);
<add> $this->assertNull(Router::getRequest(), 'requests were not popped off the stack, this will break url generation');
<ide>
<ide> $result = $this->object->requestAction('/tests_apps/some_method');
<ide> $expected = 5;
<ide> $this->assertEquals($expected, $result);
<add> $this->assertNull(Router::getRequest(), 'requests were not popped off the stack, this will break url generation');
<ide>
<ide> $result = $this->object->requestAction('/request_action/paginate_request_action');
<ide> $this->assertNull($result);
<add> $this->assertNull(Router::getRequest(), 'requests were not popped off the stack, this will break url generation');
<ide>
<ide> $result = $this->object->requestAction('/request_action/normal_request_action');
<ide> $expected = 'Hello World';
<ide><path>tests/TestCase/TestSuite/IntegrationTestCaseTest.php
<ide> public function testRequestSetsProperties()
<ide> {
<ide> $this->post('/posts/index');
<ide> $this->assertInstanceOf('Cake\Controller\Controller', $this->_controller);
<add> $this->assertNotEmpty($this->_viewName, 'View name not set');
<ide> $this->assertContains('Template' . DS . 'Posts' . DS . 'index.ctp', $this->_viewName);
<add> $this->assertNotEmpty($this->_layoutName, 'Layout name not set');
<ide> $this->assertContains('Template' . DS . 'Layout' . DS . 'default.ctp', $this->_layoutName);
<ide>
<ide> $this->assertTemplate('index'); | 7 |
Ruby | Ruby | remove old rubygems require | da84c9d0d243bb3b27748e250cb7ef71df70bb65 | <ide><path>activerecord/lib/active_record/connection_adapters/abstract/connection_specification.rb
<ide> def self.establish_connection(spec = nil)
<ide> unless spec.key?(:adapter) then raise AdapterNotSpecified, "database configuration does not specify adapter" end
<ide>
<ide> begin
<del> require 'rubygems'
<del> gem "activerecord-#{spec[:adapter]}-adapter"
<ide> require "active_record/connection_adapters/#{spec[:adapter]}_adapter"
<ide> rescue LoadError
<del> begin
<del> require "active_record/connection_adapters/#{spec[:adapter]}_adapter"
<del> rescue LoadError
<del> raise "Please install the #{spec[:adapter]} adapter: `gem install activerecord-#{spec[:adapter]}-adapter` (#{$!})"
<del> end
<add> raise "Please install the #{spec[:adapter]} adapter: `gem install activerecord-#{spec[:adapter]}-adapter` (#{$!})"
<ide> end
<ide>
<ide> adapter_method = "#{spec[:adapter]}_connection" | 1 |
PHP | PHP | remove redundant check | 2c91f119cb0bb5dc19d33100d7d256ef887cb697 | <ide><path>lib/Cake/Model/Datasource/Database/Sqlite.php
<ide> public function column($real) {
<ide> if (in_array($col, array('text', 'integer', 'float', 'boolean', 'timestamp', 'date', 'datetime', 'time'))) {
<ide> return $col;
<ide> }
<del> if (strpos($col, 'varchar') !== false || strpos($col, 'char') !== false) {
<add> if (strpos($col, 'char') !== false) {
<ide> return 'string';
<ide> }
<ide> if (in_array($col, array('blob', 'clob'))) { | 1 |
PHP | PHP | avoid use of reflection | bbc736bab11fd6b6a0152375368b8e0dcfedb1c3 | <ide><path>src/TestSuite/MiddlewareDispatcher.php
<ide> use Laminas\Diactoros\Stream;
<ide> use LogicException;
<ide> use Psr\Http\Message\ResponseInterface;
<del>use ReflectionClass;
<del>use ReflectionException;
<ide>
<ide> /**
<ide> * Dispatches a request capturing the response for integration
<ide> public function __construct(
<ide> $this->_class = $class;
<ide> $this->_constructorArgs = $constructorArgs ?: [CONFIG];
<ide>
<del> try {
<del> $reflect = new ReflectionClass($this->_class);
<del> /** @var \Cake\Core\HttpApplicationInterface $app */
<del> $app = $reflect->newInstanceArgs($this->_constructorArgs);
<del> $this->app = $app;
<del> } catch (ReflectionException $e) {
<del> throw new LogicException("Cannot load `{$this->_class}` for use in integration testing.", 0, $e);
<add> if (!class_exists($this->_class)) {
<add> throw new LogicException("Cannot load `{$this->_class}` for use in integration testing.", 0);
<ide> }
<add>
<add> $this->app = new $this->_class(...$this->_constructorArgs);
<ide> }
<ide>
<ide> /** | 1 |
Python | Python | improve tokenizer tests | 66ea739168cdb53a8a3ade35febf3a27f4cd1387 | <ide><path>tests/test_tokenization_common.py
<ide> def convert_batch_encode_plus_format_to_encode_plus(batch_encode_plus_sequences)
<ide> # TODO: this test can be combined with `test_sentencepiece_tokenize_and_convert_tokens_to_string` after the latter is extended to all tokenizers.
<ide> def test_tokenize_special_tokens(self):
<ide> """Test `tokenize` with special tokens."""
<del> tokenizers = self.get_tokenizers(fast=True)
<add> tokenizers = self.get_tokenizers(fast=True, do_lower_case=True)
<ide> for tokenizer in tokenizers:
<ide> with self.subTest(f"{tokenizer.__class__.__name__}"):
<ide> SPECIAL_TOKEN_1 = "[SPECIAL_TOKEN_1]"
<ide> def test_pickle_added_tokens(self):
<ide> self.assertEqual(tok1.__getstate__(), tok2.__getstate__())
<ide>
<ide> def test_added_tokens_do_lower_case(self):
<del> # TODO(thom) activate fast tokenizer tests once Rust tokenizers accepts white spaces in added tokens.
<del> tokenizers = [self.get_tokenizer(do_lower_case=True)] if self.test_slow_tokenizer else []
<add> tokenizers = self.get_tokenizers(do_lower_case=True)
<ide> for tokenizer in tokenizers:
<ide> with self.subTest(f"{tokenizer.__class__.__name__}"):
<ide> if not hasattr(tokenizer, "do_lower_case") or not tokenizer.do_lower_case:
<ide> def test_added_tokens_do_lower_case(self):
<ide> text = special_token + " aaaaa bbbbbb low cccccccccdddddddd l " + special_token
<ide> text2 = special_token + " AAAAA BBBBBB low CCCCCCCCCDDDDDDDD l " + special_token
<ide>
<del> toks0 = tokenizer.tokenize(text) # toks before adding new_toks
<add> toks_before_adding = tokenizer.tokenize(text) # toks before adding new_toks
<ide>
<ide> new_toks = ["aaaaa bbbbbb", "cccccccccdddddddd", "AAAAA BBBBBB", "CCCCCCCCCDDDDDDDD"]
<del> added = tokenizer.add_tokens(new_toks)
<del> self.assertEqual(added, 2)
<add> added = tokenizer.add_tokens([AddedToken(tok, lstrip=True, rstrip=True) for tok in new_toks])
<ide>
<del> toks = tokenizer.tokenize(text)
<del> toks2 = tokenizer.tokenize(text2)
<add> toks_after_adding = tokenizer.tokenize(text)
<add> toks_after_adding2 = tokenizer.tokenize(text2)
<ide>
<del> self.assertEqual(len(toks), len(toks2))
<del> self.assertListEqual(toks, toks2)
<del> if not isinstance(tokenizer, PreTrainedTokenizerFast):
<del> # Python tokenizers can have added tokens with spaces inside them
<del> # cf https://github.com/huggingface/tokenizers/issues/302
<del> self.assertNotEqual(len(toks), len(toks0)) # toks0 should be longer
<add> # Rust tokenizers dont't lowercase added tokens at the time calling `tokenizer.add_tokens`,
<add> # while python tokenizers do, so new_toks 0 and 2 would be treated as the same, so do new_toks 1 and 3.
<add> self.assertIn(added, [2, 4])
<add>
<add> self.assertListEqual(toks_after_adding, toks_after_adding2)
<add> self.assertTrue(
<add> len(toks_before_adding) > len(toks_after_adding), # toks_before_adding should be longer
<add> )
<ide>
<ide> # Check that none of the special tokens are lowercased
<ide> sequence_with_special_tokens = "A " + " yEs ".join(tokenizer.all_special_tokens) + " B"
<del> tokenized_sequence = tokenizer.tokenize(sequence_with_special_tokens)
<add> # Convert the tokenized list to str as some special tokens are tokenized like normal tokens
<add> # which have a prefix spacee e.g. the mask token of Albert, and cannot match the original
<add> # special tokens exactly.
<add> tokenized_sequence = "".join(tokenizer.tokenize(sequence_with_special_tokens))
<ide>
<ide> for special_token in tokenizer.all_special_tokens:
<ide> self.assertTrue(special_token in tokenized_sequence)
<ide>
<del> tokenizers = [self.get_tokenizer(do_lower_case=True)] if self.test_slow_tokenizer else []
<add> tokenizers = self.get_tokenizers(do_lower_case=True)
<ide> for tokenizer in tokenizers:
<ide> with self.subTest(f"{tokenizer.__class__.__name__}"):
<ide> if hasattr(tokenizer, "do_lower_case") and tokenizer.do_lower_case:
<ide> def test_added_tokens_do_lower_case(self):
<ide> text = special_token + " aaaaa bbbbbb low cccccccccdddddddd l " + special_token
<ide> text2 = special_token + " AAAAA BBBBBB low CCCCCCCCCDDDDDDDD l " + special_token
<ide>
<del> new_toks = ["aaaaa bbbbbb", "cccccccccdddddddd", "AAAAA BBBBBB", "CCCCCCCCCDDDDDDDD"]
<del>
<del> toks0 = tokenizer.tokenize(text) # toks before adding new_toks
<add> toks_before_adding = tokenizer.tokenize(text) # toks before adding new_toks
<ide>
<del> added = tokenizer.add_tokens(new_toks)
<add> new_toks = ["aaaaa bbbbbb", "cccccccccdddddddd", "AAAAA BBBBBB", "CCCCCCCCCDDDDDDDD"]
<add> added = tokenizer.add_tokens([AddedToken(tok, lstrip=True, rstrip=True) for tok in new_toks])
<ide> self.assertIn(added, [2, 4])
<ide>
<del> toks = tokenizer.tokenize(text)
<del> toks2 = tokenizer.tokenize(text2)
<add> toks_after_adding = tokenizer.tokenize(text)
<add> toks_after_adding2 = tokenizer.tokenize(text2)
<ide>
<del> self.assertEqual(len(toks), len(toks2)) # Length should still be the same
<del> self.assertNotEqual(toks[1], toks2[1]) # But at least the first non-special tokens should differ
<del> if not isinstance(tokenizer, PreTrainedTokenizerFast):
<del> # Python tokenizers can have added tokens with spaces inside them
<del> # cf https://github.com/huggingface/tokenizers/issues/302
<del> self.assertNotEqual(len(toks), len(toks0)) # toks0 should be longer
<add> self.assertEqual(len(toks_after_adding), len(toks_after_adding2)) # Length should still be the same
<add> self.assertNotEqual(
<add> toks_after_adding[1], toks_after_adding2[1]
<add> ) # But at least the first non-special tokens should differ
<add> self.assertTrue(
<add> len(toks_before_adding) > len(toks_after_adding), # toks_before_adding should be longer
<add> )
<ide>
<ide> def test_add_tokens_tokenizer(self):
<ide> tokenizers = self.get_tokenizers(do_lower_case=False)
<ide> def test_encode_decode_with_spaces(self):
<ide> for tokenizer in tokenizers:
<ide> with self.subTest(f"{tokenizer.__class__.__name__}"):
<ide>
<del> # new_toks = ["[ABC]", "[DEF]"] # TODO(thom) add this one back when Rust toks are ready: , "GHI IHG"]
<del> new_toks = [AddedToken("[ABC]", normalized=False), AddedToken("[DEF]", normalized=False)]
<add> new_toks = [
<add> AddedToken("[ABC]", normalized=False),
<add> AddedToken("[DEF]", normalized=False),
<add> AddedToken("GHI IHG", normalized=False),
<add> ]
<ide> tokenizer.add_tokens(new_toks)
<del> input = "[ABC][DEF][ABC][DEF]" # TODO(thom) add back cf above: "[ABC] [DEF] [ABC] GHI IHG [DEF]"
<add> input = "[ABC][DEF][ABC]GHI IHG[DEF]"
<ide> if self.space_between_special_tokens:
<del> output = "[ABC] [DEF] [ABC] [DEF]"
<add> output = "[ABC] [DEF] [ABC] GHI IHG [DEF]"
<ide> else:
<ide> output = input
<ide> encoded = tokenizer.encode(input, add_special_tokens=False) | 1 |
Javascript | Javascript | use common/fixtures in test-https-close | a9a0146c8b85fd807ad5ccdcc33b182e65efebab | <ide><path>test/parallel/test-https-close.js
<ide> const common = require('../common');
<ide> if (!common.hasCrypto)
<ide> common.skip('missing crypto');
<ide>
<del>const fs = require('fs');
<add>const fixtures = require('../common/fixtures');
<ide> const https = require('https');
<ide>
<ide> const options = {
<del> key: fs.readFileSync(`${common.fixturesDir}/keys/agent1-key.pem`),
<del> cert: fs.readFileSync(`${common.fixturesDir}/keys/agent1-cert.pem`)
<add> key: fixtures.readKey('agent1-key.pem'),
<add> cert: fixtures.readKey('agent1-cert.pem')
<ide> };
<ide>
<ide> const connections = {}; | 1 |
Python | Python | fix typo in `unwrap` docstring | a562cb2d44760d01a5481280d98248026fabeb40 | <ide><path>numpy/lib/function_base.py
<ide> def unwrap(p, discont=None, axis=-1, *, period=2*pi):
<ide> difference from their predecessor of more than ``max(discont, period/2)``
<ide> to their `period`-complementary values.
<ide>
<del> For the default case where `period` is :math:`2\pi` and is `discont` is
<add> For the default case where `period` is :math:`2\pi` and `discont` is
<ide> :math:`\pi`, this unwraps a radian phase `p` such that adjacent differences
<ide> are never greater than :math:`\pi` by adding :math:`2k\pi` for some
<ide> integer :math:`k`. | 1 |
Mixed | Python | interleave trials in benchmark script | c0007d56e9d653d42276dcc10dd1896507346bff | <ide><path>scripts/bench/README.md
<ide> Work-in-progress benchmarks.
<ide> ## Running the suite
<ide>
<ide> ```
<del>$ ./measure.py react-a.min.js >a.txt
<del>$ ./measure.py react-b.min.js >b.txt
<add>$ ./measure.py react-a.min.js a.txt react-b.min.js b.txt
<ide> $ ./analyze.py a.txt b.txt
<ide> ```
<ide>
<ide><path>scripts/bench/measure.py
<ide> import functools
<ide> import json
<ide> import os
<add>import random
<ide> import subprocess
<ide> import sys
<ide>
<ide>
<ide> def _run_js_in_jsc(jit, js, env):
<del> return subprocess.check_call(
<add> return subprocess.check_output(
<ide> ['jsc', '-e', """
<ide> function now() {
<ide> return preciseTime() * 1000;
<ide> def _run_js_in_jsc(jit, js, env):
<ide>
<ide>
<ide> def _run_js_in_node(js, env):
<del> return subprocess.check_call(
<add> return subprocess.check_output(
<ide> ['node', '-e', """
<ide> function now() {
<ide> var hrTime = process.hrtime();
<ide> def _run_js_in_node(js, env):
<ide>
<ide>
<ide> def _measure_ssr_ms(engine, react_path, bench_name, bench_path, measure_warm):
<del> engine(
<add> return engine(
<ide> """
<ide> var reactCode = readFile(ENV.react_path);
<ide> var START = now();
<ide> def _measure_ssr_ms(engine, react_path, bench_name, bench_path, measure_warm):
<ide>
<ide>
<ide> def _main():
<del> if len(sys.argv) != 2:
<del> sys.stderr.write("usage: measure.py react.min.js >out.txt\n")
<add> if len(sys.argv) < 2 or len(sys.argv) % 2 == 0:
<add> sys.stderr.write("usage: measure.py react.min.js out.txt react2.min.js out2.txt\n")
<ide> return 1
<del> react_path = sys.argv[1]
<add> # [(react_path, out_path)]
<add> react_paths = sys.argv[1::2]
<add> files = [open(out_path, 'w') for out_path in sys.argv[2::2]]
<ide>
<ide> trials = 30
<ide> sys.stderr.write("Measuring SSR for PE benchmark (%d trials)\n" % trials)
<add> sys.stderr.write("_" * trials + "\n")
<ide> for i in range(trials):
<ide> for engine in [
<ide> _run_js_in_jsc_jit,
<ide> _run_js_in_jsc_nojit,
<ide> _run_js_in_node
<ide> ]:
<del> _measure_ssr_ms(engine, react_path, 'pe', 'bench-pe-es5.js', False)
<add> engines = range(len(react_paths))
<add> random.shuffle(engines)
<add> for i in engines:
<add> out = _measure_ssr_ms(engine, react_paths[i], 'pe', 'bench-pe-es5.js', False)
<add> files[i].write(out)
<ide> sys.stderr.write(".")
<ide> sys.stderr.flush()
<ide> sys.stderr.write("\n")
<add> sys.stderr.flush()
<ide>
<del> trials = 3
<add> trials = 0
<ide> sys.stderr.write("Measuring SSR for PE with warm JIT (%d slow trials)\n" % trials)
<add> sys.stderr.write("_" * trials + "\n")
<ide> for i in range(trials):
<ide> for engine in [
<ide> _run_js_in_jsc_jit,
<ide> _run_js_in_jsc_nojit,
<ide> _run_js_in_node
<ide> ]:
<del> _measure_ssr_ms(engine, react_path, 'pe', 'bench-pe-es5.js', True)
<add> engines = range(len(react_paths))
<add> random.shuffle(engines)
<add> for i in engines:
<add> out = _measure_ssr_ms(engine, react_paths[i], 'pe', 'bench-pe-es5.js', True)
<add> files[i].write(out)
<ide> sys.stderr.write(".")
<ide> sys.stderr.flush()
<ide> sys.stderr.write("\n")
<add> sys.stderr.flush()
<add>
<add> for f in files:
<add> f.close()
<ide>
<ide>
<ide> if __name__ == '__main__': | 2 |
Javascript | Javascript | absorb `path` error cases | 4692e284e305e3ec2418f7f5005bed8d3e62ad11 | <ide><path>lib/internal/bootstrap/pre_execution.js
<ide> function patchProcessObject(expandArgv1) {
<ide> if (expandArgv1 && process.argv[1] && !process.argv[1].startsWith('-')) {
<ide> // Expand process.argv[1] into a full path.
<ide> const path = require('path');
<del> process.argv[1] = path.resolve(process.argv[1]);
<add> try {
<add> process.argv[1] = path.resolve(process.argv[1]);
<add> } catch {}
<ide> }
<ide>
<ide> // TODO(joyeecheung): most of these should be deprecated and removed, | 1 |
Python | Python | remove experimental optimizer usage on github | 36a140b8765eaa07525ac42a00cbd01a8b03b98e | <ide><path>official/modeling/optimization/optimizer_factory.py
<ide>
<ide> OPTIMIZERS_CLS = {
<ide> 'sgd': tf.keras.optimizers.SGD,
<del> 'sgd_experimental': tf.keras.optimizers.experimental.SGD,
<add> # TODO(chenmoneygithub): experimental.SGD
<ide> 'adam': tf.keras.optimizers.Adam,
<del> 'adam_experimental': tf.keras.optimizers.experimental.Adam,
<add> # TODO(chenmoneygithub): experimental.Adam
<ide> 'adamw': nlp_optimization.AdamWeightDecay,
<ide> 'lamb': tfa_optimizers.LAMB,
<ide> 'rmsprop': tf.keras.optimizers.RMSprop, | 1 |
Python | Python | simplify dag.set_dag_runs_state method | e945439abead0daf4cf6f8065069e6549c7e5205 | <ide><path>airflow/models/dag.py
<ide> def set_dag_runs_state(
<ide> query = query.filter(DagRun.execution_date >= start_date)
<ide> if end_date:
<ide> query = query.filter(DagRun.execution_date <= end_date)
<del> drs = query.all()
<del>
<del> dirty_ids = []
<del> for dr in drs:
<del> dr.state = state
<del> dirty_ids.append(dr.dag_id)
<add> query.update({DagRun.state: state})
<ide>
<ide> @provide_session
<ide> def clear(
<ide><path>tests/models/test_dag.py
<ide> def test_normalized_schedule_interval(
<ide> self.assertEqual(dag.normalized_schedule_interval, expected_n_schedule_interval)
<ide> self.assertEqual(dag.schedule_interval, schedule_interval)
<ide>
<add> def test_set_dag_runs_state(self):
<add> clear_db_runs()
<add> dag_id = "test_set_dag_runs_state"
<add> dag = DAG(dag_id=dag_id)
<add>
<add> for i in range(3):
<add> dag.create_dagrun(run_id=f"test{i}", state=State.RUNNING)
<add>
<add> dag.set_dag_runs_state(state=State.NONE)
<add> drs = DagRun.find(dag_id=dag_id)
<add>
<add> assert len(drs) == 3
<add> assert all(dr.state == State.NONE for dr in drs)
<add>
<ide>
<ide> class TestQueries(unittest.TestCase):
<ide> | 2 |
Javascript | Javascript | describe unflushed http requests | 080357e906e2ec34e669091ef345fc4442e23ea0 | <ide><path>src/ngMock/angular-mocks.js
<ide> function createHttpBackendMock($rootScope, $timeout, $delegate, $browser) {
<ide> }
<ide> }
<ide>
<add> handleResponse.description = method + ' ' + url;
<ide> return handleResponse;
<ide>
<ide> function handleResponse() {
<ide> function createHttpBackendMock($rootScope, $timeout, $delegate, $browser) {
<ide> $httpBackend.verifyNoOutstandingRequest = function(digest) {
<ide> if (digest !== false) $rootScope.$digest();
<ide> if (responses.length) {
<del> throw new Error('Unflushed requests: ' + responses.length);
<add> var unflushedDescriptions = responses.map(function(res) { return res.description; });
<add> throw new Error('Unflushed requests: ' + responses.length + '\n ' +
<add> unflushedDescriptions.join('\n '));
<ide> }
<ide> };
<ide>
<ide><path>test/ngMock/angular-mocksSpec.js
<ide> describe('ngMock', function() {
<ide>
<ide> expect(function() {
<ide> hb.verifyNoOutstandingRequest();
<del> }).toThrowError('Unflushed requests: 1');
<add> }).toThrowError('Unflushed requests: 1\n' +
<add> ' GET /some');
<ide> });
<ide>
<ide>
<ide> describe('ngMock', function() {
<ide>
<ide> expect(function() {
<ide> hb.verifyNoOutstandingRequest();
<del> }).toThrowError('Unflushed requests: 1');
<add> }).toThrowError('Unflushed requests: 1\n' +
<add> ' GET /some');
<ide> }));
<add>
<add>
<add> it('should describe multiple unflushed requests', function() {
<add> hb.when('GET').respond(200);
<add> hb.when('PUT').respond(200);
<add> hb('GET', '/some', null, noop, {});
<add> hb('PUT', '/elsewhere', null, noop, {});
<add>
<add> expect(function() {
<add> hb.verifyNoOutstandingRequest();
<add> }).toThrowError('Unflushed requests: 2\n' +
<add> ' GET /some\n' +
<add> ' PUT /elsewhere');
<add> });
<ide> });
<ide>
<ide> | 2 |
PHP | PHP | remove leading slash | 29901220e92daa31912cd116f7826dd73f8c1ddc | <ide><path>Cake/ORM/Table.php
<ide> public function findThreaded(Query $query, array $options = []) {
<ide> *
<ide> * @param mixed primary key value to find
<ide> * @param array $options options accepted by `Table::find()`
<del> * @throws \Cake\ORM\Error\RecordNotFoundException if the record with such id
<add> * @throws Cake\ORM\Error\RecordNotFoundException if the record with such id
<ide> * could not be found
<ide> * @return \Cake\ORM\Entity
<ide> * @see Table::find() | 1 |
Python | Python | add download script for librispeech dataset | 9bf9a837866425d0bc316848e5248d0eda1a3a2c | <ide><path>research/deep_speech/data/download.py
<add># Copyright 2018 The TensorFlow Authors. All Rights Reserved.
<add>#
<add># Licensed under the Apache License, Version 2.0 (the "License");
<add># you may not use this file except in compliance with the License.
<add># You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing, software
<add># distributed under the License is distributed on an "AS IS" BASIS,
<add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<add># See the License for the specific language governing permissions and
<add># limitations under the License.
<add># ==============================================================================
<add>"""Download and preprocess LibriSpeech dataset for DeepSpeech model."""
<add>
<add>from __future__ import absolute_import
<add>from __future__ import division
<add>from __future__ import print_function
<add>
<add>import codecs
<add>import fnmatch
<add>import os
<add>import sys
<add>import tarfile
<add>import tempfile
<add>import unicodedata
<add>
<add>from absl import app as absl_app
<add>from absl import flags as absl_flags
<add>import pandas
<add>from six.moves import urllib
<add>from sox import Transformer
<add>import tensorflow as tf
<add>
<add>LIBRI_SPEECH_URLS = {
<add> "train-clean-100":
<add> "http://www.openslr.org/resources/12/train-clean-100.tar.gz",
<add> "train-clean-360":
<add> "http://www.openslr.org/resources/12/train-clean-360.tar.gz",
<add> "train-other-500":
<add> "http://www.openslr.org/resources/12/train-other-500.tar.gz",
<add> "dev-clean":
<add> "http://www.openslr.org/resources/12/dev-clean.tar.gz",
<add> "dev-other":
<add> "http://www.openslr.org/resources/12/dev-other.tar.gz",
<add> "test-clean":
<add> "http://www.openslr.org/resources/12/test-clean.tar.gz",
<add> "test-other":
<add> "http://www.openslr.org/resources/12/test-other.tar.gz"
<add>}
<add>
<add>
<add>def download_and_extract(directory, url):
<add> """Download and extract the given split of dataset.
<add>
<add> Args:
<add> directory: the directory where to extract the tarball.
<add> url: the url to download the data file.
<add> """
<add>
<add> if not tf.gfile.Exists(directory):
<add> tf.gfile.MakeDirs(directory)
<add>
<add> _, tar_filepath = tempfile.mkstemp(suffix=".tar.gz")
<add>
<add> try:
<add> tf.logging.info("Downloading %s to %s" % (url, tar_filepath))
<add>
<add> def _progress(count, block_size, total_size):
<add> sys.stdout.write("\r>> Downloading {} {:.1f}%".format(
<add> tar_filepath, 100.0 * count * block_size / total_size))
<add> sys.stdout.flush()
<add>
<add> urllib.request.urlretrieve(url, tar_filepath, _progress)
<add> print()
<add> statinfo = os.stat(tar_filepath)
<add> tf.logging.info(
<add> "Successfully downloaded %s, size(bytes): %d" % (url, statinfo.st_size))
<add> with tarfile.open(tar_filepath, "r") as tar:
<add> tar.extractall(directory)
<add> finally:
<add> tf.gfile.Remove(tar_filepath)
<add>
<add>
<add>def convert_audio_and_split_transcript(input_dir, source_name, target_name,
<add> output_dir, output_file):
<add> """Convert FLAC to WAV and split the transcript.
<add>
<add> For audio file, convert the format from FLAC to WAV using the sox.Transformer
<add> library.
<add> For transcripts, each line contains the sequence id and the corresponding
<add> transcript (separated by space):
<add> Input data format: seq-id transcript_of_seq-id
<add> For example:
<add> 1-2-0 transcript_of_1-2-0.flac
<add> 1-2-1 transcript_of_1-2-1.flac
<add> ...
<add>
<add> Each sequence id has a corresponding .flac file.
<add> Parse the transcript file and generate a new csv file which has three columns:
<add> "wav_filename": the absolute path to a wav file.
<add> "wav_filesize": the size of the corresponding wav file.
<add> "transcript": the transcript for this audio segement.
<add>
<add> Args:
<add> input_dir: the directory which holds the input dataset.
<add> source_name: the name of the specified dataset. e.g. test-clean
<add> target_name: the directory name for the newly generated audio files.
<add> e.g. test-clean-wav
<add> output_dir: the directory to place the newly generated csv files.
<add> output_file: the name of the newly generated csv file. e.g. test-clean.csv
<add> """
<add>
<add> tf.logging.info("Preprocessing audio and transcript for %s" % source_name)
<add> source_dir = os.path.join(input_dir, source_name)
<add> target_dir = os.path.join(input_dir, target_name)
<add>
<add> if not tf.gfile.Exists(target_dir):
<add> tf.gfile.MakeDirs(target_dir)
<add>
<add> files = []
<add> tfm = Transformer()
<add> # Convert all FLAC file into WAV format. At the same time, generate the csv
<add> # file.
<add> for root, _, filenames in tf.gfile.Walk(source_dir):
<add> for filename in fnmatch.filter(filenames, "*.trans.txt"):
<add> trans_file = os.path.join(root, filename)
<add> with codecs.open(trans_file, "r", "utf-8") as fin:
<add> for line in fin:
<add> seqid, transcript = line.split(" ", 1)
<add> # We do a encode-decode transformation here because the output type
<add> # of encode is a bytes object, we need convert it to string.
<add> transcript = unicodedata.normalize("NFKD", transcript).encode(
<add> "ascii", "ignore").decode("ascii", "ignore").strip().lower()
<add>
<add> # Convert FLAC to WAV.
<add> flac_file = os.path.join(root, seqid + ".flac")
<add> wav_file = os.path.join(target_dir, seqid + ".wav")
<add> if not tf.gfile.Exists(wav_file):
<add> tfm.build(flac_file, wav_file)
<add> wav_filesize = os.path.getsize(wav_file)
<add>
<add> files.append((os.path.abspath(wav_file), wav_filesize, transcript))
<add>
<add> # Write to CSV file which contains three columns:
<add> # "wav_filename", "wav_filesize", "transcript".
<add> csv_file_path = os.path.join(output_dir, output_file)
<add> df = pandas.DataFrame(
<add> data=files, columns=["wav_filename", "wav_filesize", "transcript"])
<add> df.to_csv(csv_file_path, index=False, sep="\t")
<add> tf.logging.info("Successfully generated csv file {}".format(csv_file_path))
<add>
<add>
<add>def download_and_process_datasets(directory, datasets):
<add> """Download and pre-process the specified list of LibriSpeech dataset.
<add>
<add> Args:
<add> directory: the directory to put all the downloaded and preprocessed data.
<add> datasets: list of dataset names that will be downloaded and processed.
<add> """
<add>
<add> tf.logging.info("Preparing LibriSpeech dataset: {}".format(
<add> ",".join(datasets)))
<add> for dataset in datasets:
<add> tf.logging.info("Preparing dataset %s", dataset)
<add> dataset_dir = os.path.join(directory, dataset)
<add> download_and_extract(dataset_dir, LIBRI_SPEECH_URLS[dataset])
<add> convert_audio_and_split_transcript(
<add> dataset_dir + "/LibriSpeech", dataset, dataset + "-wav",
<add> dataset_dir + "/LibriSpeech", dataset + ".csv")
<add>
<add>
<add>def define_data_download_flags():
<add> """Define flags for data downloading."""
<add> absl_flags.DEFINE_string(
<add> "data_dir", "/tmp/librispeech_data",
<add> "Directory to download data and extract the tarball")
<add> absl_flags.DEFINE_bool("train_only", False,
<add> "If true, only download the training set")
<add> absl_flags.DEFINE_bool("dev_only", False,
<add> "If true, only download the dev set")
<add> absl_flags.DEFINE_bool("test_only", False,
<add> "If true, only download the test set")
<add>
<add>
<add>def main(_):
<add> if not tf.gfile.Exists(FLAGS.data_dir):
<add> tf.gfile.MakeDirs(FLAGS.data_dir)
<add>
<add> if FLAGS.train_only:
<add> download_and_process_datasets(
<add> FLAGS.data_dir,
<add> ["train-clean-100", "train-clean-360", "train-other-500"])
<add> elif FLAGS.dev_only:
<add> download_and_process_datasets(FLAGS.data_dir, ["dev-clean", "dev-other"])
<add> elif FLAGS.test_only:
<add> download_and_process_datasets(FLAGS.data_dir, ["test-clean", "test-other"])
<add> else:
<add> # By default we download the entire dataset.
<add> download_and_process_datasets(FLAGS.data_dir, LIBRI_SPEECH_URLS.keys())
<add>
<add>
<add>if __name__ == "__main__":
<add> tf.logging.set_verbosity(tf.logging.INFO)
<add> define_data_download_flags()
<add> FLAGS = absl_flags.FLAGS
<add> absl_app.run(main) | 1 |
PHP | PHP | make methods protected | 76cb5505845665a93816f05df980fc4a6533212c | <ide><path>src/Illuminate/Foundation/Console/PresetCommand.php
<ide> protected function none()
<ide> }
<ide>
<ide> /**
<del> * Install the "fresh" preset.
<add> * Install the "bootstrap" preset.
<ide> *
<ide> * @return void
<ide> */
<ide> protected function bootstrap()
<ide> *
<ide> * @return void
<ide> */
<del> public function vue()
<add> protected function vue()
<ide> {
<ide> Presets\Vue::install();
<ide>
<ide> public function vue()
<ide> *
<ide> * @return void
<ide> */
<del> public function react()
<add> protected function react()
<ide> {
<ide> Presets\React::install();
<ide> | 1 |
Javascript | Javascript | fix warnings in create view | bd004568d3e006b61391417fc405e9483996dc81 | <ide><path>Libraries/Components/Touchable/TouchableHighlight.js
<ide>
<ide> var ColorPropType = require('ColorPropType');
<ide> var NativeMethodsMixin = require('NativeMethodsMixin');
<del>var React = require('React');
<ide> const PropTypes = require('prop-types');
<add>var React = require('React');
<ide> var ReactNativeViewAttributes = require('ReactNativeViewAttributes');
<ide> var StyleSheet = require('StyleSheet');
<ide> var TimerMixin = require('react-timer-mixin');
<ide> var TouchableHighlight = React.createClass({
<ide> },
<ide>
<ide> getInitialState: function() {
<add> this._isMounted = false;
<ide> return merge(
<ide> this.touchableGetInitialState(), this._computeSyntheticState(this.props)
<ide> );
<ide> },
<ide>
<ide> componentDidMount: function() {
<add> this._isMounted = true;
<ide> ensurePositiveDelayProps(this.props);
<ide> ensureComponentIsNative(this.refs[CHILD_REF]);
<ide> },
<ide>
<add> componentWillMount: function() {
<add> this._isMounted = false;
<add> },
<add>
<ide> componentDidUpdate: function() {
<ide> ensureComponentIsNative(this.refs[CHILD_REF]);
<ide> },
<ide> var TouchableHighlight = React.createClass({
<ide> },
<ide>
<ide> _showUnderlay: function() {
<del> if (!this.isMounted() || !this._hasPressHandler()) {
<add> if (!this._isMounted || !this._hasPressHandler()) {
<ide> return;
<ide> }
<ide> | 1 |
Javascript | Javascript | prefer strict equality, type validation | e9b6fbbf170d4ef0031d3194d4c0148269037030 | <ide><path>lib/dgram.js
<ide> function lookup6(address, callback) {
<ide>
<ide>
<ide> function newHandle(type) {
<del> if (type == 'udp4') {
<add> if (type === 'udp4') {
<ide> const handle = new UDP();
<ide> handle.lookup = lookup4;
<ide> return handle;
<ide> }
<ide>
<del> if (type == 'udp6') {
<add> if (type === 'udp6') {
<ide> const handle = new UDP();
<ide> handle.lookup = lookup6;
<ide> handle.bind = handle.bind6;
<ide> exports._createSocketHandle = function(address, port, addressType, fd, flags) {
<ide> function Socket(type, listener) {
<ide> EventEmitter.call(this);
<ide>
<del> if (typeof type === 'object') {
<add> if (type !== null && typeof type === 'object') {
<ide> var options = type;
<ide> type = options.type;
<ide> }
<ide> Socket.prototype.bind = function(port_ /*, address, callback*/) {
<ide>
<ide> self._healthCheck();
<ide>
<del> if (this._bindState != BIND_STATE_UNBOUND)
<add> if (this._bindState !== BIND_STATE_UNBOUND)
<ide> throw new Error('Socket is already bound');
<ide>
<ide> this._bindState = BIND_STATE_BINDING;
<ide> Socket.prototype.send = function(buffer,
<ide>
<ide> self._healthCheck();
<ide>
<del> if (self._bindState == BIND_STATE_UNBOUND)
<add> if (self._bindState === BIND_STATE_UNBOUND)
<ide> self.bind({port: 0, exclusive: true}, null);
<ide>
<ide> if (list.length === 0)
<ide> list.push(Buffer.allocUnsafe(0));
<ide>
<ide> // If the socket hasn't been bound yet, push the outbound packet onto the
<ide> // send queue and send after binding is complete.
<del> if (self._bindState != BIND_STATE_BOUND) {
<add> if (self._bindState !== BIND_STATE_BOUND) {
<ide> enqueue(self, self.send.bind(self, list, port, address, callback));
<ide> return;
<ide> }
<ide><path>test/parallel/test-dgram-createSocket-type.js
<add>'use strict';
<add>require('../common');
<add>const assert = require('assert');
<add>const dgram = require('dgram');
<add>const invalidTypes = [
<add> 'test',
<add> ['udp4'],
<add> new String('udp4'),
<add> 1,
<add> {},
<add> true,
<add> false,
<add> null,
<add> undefined
<add>];
<add>const validTypes = [
<add> 'udp4',
<add> 'udp6',
<add> { type: 'udp4' },
<add> { type: 'udp6' }
<add>];
<add>
<add>// Error must be thrown with invalid types
<add>invalidTypes.forEach((invalidType) => {
<add> assert.throws(() => {
<add> dgram.createSocket(invalidType);
<add> }, /Bad socket type specified/);
<add>});
<add>
<add>// Error must not be thrown with valid types
<add>validTypes.forEach((validType) => {
<add> assert.doesNotThrow(() => {
<add> const socket = dgram.createSocket(validType);
<add> socket.close();
<add> });
<add>}); | 2 |
Python | Python | add regression test for #401 | 98c13d8aa9f51f4afe409f7ea6f730393de48308 | <ide><path>spacy/tests/regression/test_issue401.py
<add># coding: utf8
<add>from __future__ import unicode_literals
<add>
<add>import pytest
<add>
<add>
<add>@pytest.mark.xfail
<add>@pytest.mark.models
<add>@pytest.mark.parametrize('text,i', [("Jane's got a new car", 1),
<add> ("Jane thinks that's a nice car", 3)])
<add>def test_issue401(EN, text, i):
<add> """Text that 's in contractions is not lemmatized as '."""
<add> tokens = EN(text)
<add> assert tokens[i].lemma_ != "'" | 1 |
Text | Text | add information on hot module replacement | 4d2c268cd608bd3e9c72e981ecfa748c011324f0 | <ide><path>guide/english/react/hot-module-replacement/index.md
<add>---
<add>title: Hot Module Replacement
<add>---
<add>
<add># Hot Module Replacement
<add>Hot Module Replacement (HMR) will improve your experience while working within React. This helpful little tool allows you to reload the page you are working on without refreshing the page.
<add>
<add>### Why is this important?
<add>Imagine you are working on a dialog box that has five steps and you happen to be debugging step five. Without HMR, everytime you update the code and check your changes the page will refresh and you will need to go through steps 1-4 again. With HMR, the page loads the changes but the page does not refresh and you can continue debugging while on step five.
<add>
<add>To add HMR to your application you must do the following to your `src/index.js` file.
<add>```javascript
<add>import React from 'react';
<add>import ReactDOM from 'react-dom';
<add>import App from './App';
<add>import './index.css';
<add>
<add>ReactDOM.render(
<add> <App />
<add> document.getElementById('root')
<add>);
<add>
<add>if(module.hot) {
<add> module.hot.accept();
<add>}
<add>```
<add>### More Information
<add>[Hot Reloading with Time Travel (Video)](https://www.youtube.com/watch?v=xsSnOQynTHs) | 1 |
Javascript | Javascript | use semicolon for clarity | 04d07bf52575295f9cef1f752c8ae5cbc36cc5ca | <ide><path>test/parallel/test-child-process-double-pipe.js
<ide> const spawn = require('child_process').spawn;
<ide> let grep, sed, echo;
<ide>
<ide> if (common.isWindows) {
<del> grep = spawn('grep', ['--binary', 'o']),
<del> sed = spawn('sed', ['--binary', 's/o/O/']),
<add> grep = spawn('grep', ['--binary', 'o']);
<add> sed = spawn('sed', ['--binary', 's/o/O/']);
<ide> echo = spawn('cmd.exe',
<ide> ['/c', 'echo', 'hello&&', 'echo',
<ide> 'node&&', 'echo', 'and&&', 'echo', 'world']);
<ide> } else {
<del> grep = spawn('grep', ['o']),
<del> sed = spawn('sed', ['s/o/O/']),
<add> grep = spawn('grep', ['o']);
<add> sed = spawn('sed', ['s/o/O/']);
<ide> echo = spawn('echo', ['hello\nnode\nand\nworld\n']);
<ide> }
<ide>
<ide><path>test/parallel/test-tls-cert-chains-concat.js
<ide> connect({
<ide> const peer = pair.client.conn.getPeerCertificate();
<ide> debug('peer:\n', peer);
<ide> assert.strictEqual(peer.subject.emailAddress, 'adam.lippai@tresorit.com');
<del> assert.strictEqual(peer.subject.CN, 'Ádám Lippai'),
<add> assert.strictEqual(peer.subject.CN, 'Ádám Lippai');
<ide> assert.strictEqual(peer.issuer.CN, 'ca3');
<ide> assert.strictEqual(peer.serialNumber, 'D0082F458B6EFBE8');
<ide> | 2 |
Ruby | Ruby | remove array subclassing | 429caf69a9bc3f86e510933d45c8d0cad6db7d10 | <ide><path>Library/Homebrew/compilers.rb
<del>class Compilers < Array
<del> def include? cc
<add>class Compilers
<add> include Enumerable
<add>
<add> def initialize(*args)
<add> @compilers = Array.new(*args)
<add> end
<add>
<add> def each(*args, &block)
<add> @compilers.each(*args, &block)
<add> end
<add>
<add> def include?(cc)
<ide> cc = cc.name if cc.is_a? Compiler
<del> self.any? { |c| c.name == cc }
<add> @compilers.any? { |c| c.name == cc }
<add> end
<add>
<add> def <<(o)
<add> @compilers << o
<add> self
<ide> end
<ide> end
<ide>
<ide>
<del>class CompilerFailures < Array
<del> def include? cc
<add>class CompilerFailures
<add> include Enumerable
<add>
<add> def initialize(*args)
<add> @failures = Array.new(*args)
<add> end
<add>
<add> def each(*args, &block)
<add> @failures.each(*args, &block)
<add> end
<add>
<add> def include?(cc)
<ide> cc = Compiler.new(cc) unless cc.is_a? Compiler
<del> self.any? { |failure| failure.compiler == cc.name }
<add> @failures.any? { |failure| failure.compiler == cc.name }
<ide> end
<ide>
<del> def <<(failure)
<del> super(failure) unless self.include? failure.compiler
<add> def <<(o)
<add> @failures << o unless include? o.compiler
<add> self
<ide> end
<ide> end
<ide>
<ide> def select_compiler
<ide> # @compilers is our list of available compilers. If @f declares a
<ide> # failure with compiler foo, then we remove foo from the list if
<ide> # the failing build is >= the currently installed version of foo.
<del> @compilers.reject! do |cc|
<add> @compilers = @compilers.reject do |cc|
<ide> failure = @f.fails_with? cc
<ide> next unless failure
<ide> failure.build >= cc.build
<ide><path>Library/Homebrew/dependencies.rb
<ide> def parse_symbol_spec spec, tag
<ide>
<ide> end
<ide>
<add>class Dependencies
<add> include Enumerable
<add>
<add> def initialize(*args)
<add> @deps = Array.new(*args)
<add> end
<add>
<add> def each(*args, &block)
<add> @deps.each(*args, &block)
<add> end
<ide>
<del># A list of formula dependencies.
<del>class Dependencies < Array
<ide> def <<(o)
<del> super(o) unless include? o
<add> @deps << o unless @deps.include? o
<add> self
<add> end
<add>
<add> def empty?
<add> @deps.empty?
<add> end
<add>
<add> def *(arg)
<add> @deps * arg
<ide> end
<ide> end
<ide>
<ide><path>Library/Homebrew/test/test_dependencies.rb
<ide> def test_dependency_tags
<ide> def test_no_duplicate_dependencies
<ide> @d.add 'foo'
<ide> @d.add 'foo' => :build
<del> assert_equal 1, @d.deps.length
<add> assert_equal 1, @d.deps.count
<ide> assert_empty @d.find_dependency('foo').tags
<ide> end
<ide> end | 3 |
Ruby | Ruby | remove collection references from the superclass | 974ddb035c6fdba92883309b25fdd25befd74bd0 | <ide><path>actionview/lib/action_view/renderer/partial_renderer.rb
<ide> def initialize(*)
<ide> end
<ide>
<ide> def render(context, options, block)
<del> as = as_variable(options)
<del> setup(context, options, as)
<add> @as = as_variable(options)
<add> setup(context, options, @as)
<ide>
<ide> if @path
<del> template = find_template(@path, template_keys(@path, as, @collection))
<add> template = find_template(@path, template_keys(@path, @as))
<ide> else
<ide> if options[:cached]
<ide> raise NotImplementedError, "render caching requires a template. Please specify a partial when rendering"
<ide> def render(context, options, block)
<ide> end
<ide>
<ide> if !block && (layout = @options[:layout])
<del> layout = find_template(layout.to_s, template_keys(@path, as, @collection))
<add> layout = find_template(layout.to_s, template_keys(@path, @as))
<ide> end
<ide>
<ide> render_partial(context, template, layout, block)
<ide> end
<ide>
<ide> private
<del> def template_keys(path, as, collection)
<add> def template_keys(path, as)
<ide> if @has_object
<del> @locals.keys + retrieve_variable(path, as, collection)
<add> @locals.keys + retrieve_variable(path, as)
<ide> else
<ide> @locals.keys
<ide> end
<ide> def merge_prefix_into_object_path(prefix, object_path)
<ide> end
<ide> end
<ide>
<del> def retrieve_variable(path, as, collection)
<add> def retrieve_variable(path, as)
<ide> variable = as || begin
<ide> base = path[-1] == "/" ? "" : File.basename(path)
<ide> raise_invalid_identifier(path) unless base =~ /\A_?(.*?)(?:\.\w+)*\z/
<ide> $1.to_sym
<ide> end
<del> if collection
<del> variable_counter = :"#{variable}_counter"
<del> variable_iteration = :"#{variable}_iteration"
<del> end
<del> [variable, variable_counter, variable_iteration]
<add> [variable]
<ide> end
<ide>
<ide> IDENTIFIER_ERROR_MESSAGE = "The partial name (%s) is not a valid Ruby identifier; " \
<ide> def render_collection_with_partial(collection, partial, context, options, block)
<ide> raise NotImplementedError, "render caching requires a template. Please specify a partial when rendering"
<ide> end
<ide>
<del> template = find_template(partial, template_keys(partial, as, collection)) if partial
<add> template = find_template(partial, template_keys(partial, as)) if partial
<ide>
<ide> if !block && (layout = options[:layout])
<del> layout = find_template(layout.to_s, template_keys(partial, as, collection))
<add> layout = find_template(layout.to_s, template_keys(partial, as))
<ide> end
<ide>
<ide> render_collection(context, template, layout)
<ide> def render_collection_derive_partial(collection, context, options, block)
<ide> end
<ide>
<ide> private
<add> def retrieve_variable(path, as)
<add> vars = super
<add> variable = vars.first
<add> vars << :"#{variable}_counter"
<add> vars << :"#{variable}_iteration"
<add> vars
<add> end
<add>
<ide> def build_collection_iterator(collection, path, as, context)
<ide> if path
<del> SameCollectionIterator.new(collection, path, retrieve_variable(path, as, collection))
<add> SameCollectionIterator.new(collection, path, retrieve_variable(path, as))
<ide> else
<ide> paths = collection.map { |o| partial_path(o, context) }
<del> paths.map! { |path| retrieve_variable(path, as, collection).unshift(path) }
<add> paths.map! { |path| retrieve_variable(path, as).unshift(path) }
<ide> @path = nil
<ide> MixedCollectionIterator.new(collection, paths)
<ide> end | 1 |
Ruby | Ruby | add a test for default_url_options in am | 7fe4ca3253e902c67d4765eeece285ffc49f3d89 | <ide><path>railties/test/application/initializers/frameworks_test.rb
<ide> def setup
<ide> ActionMailer::Base.view_paths.include?(File.expand_path("app/views", app_path))
<ide> end
<ide>
<add> test "allows me to configure default url options for ActionMailer" do
<add> app_file "config/environments/development.rb", <<-RUBY
<add> Rails::Application.configure do
<add> config.action_mailer.default_url_options = { :host => "test.rails" }
<add> end
<add> RUBY
<add>
<add> require "#{app_path}/config/environment"
<add> assert "test.rails", ActionMailer::Base.default_url_options[:host]
<add> end
<add>
<ide> # AS
<ide> test "if there's no config.active_support.bare, all of ActiveSupport is required" do
<ide> use_frameworks [] | 1 |
Text | Text | update the redux flow.md | ae1a2e8ae22558c2dd566a561185f33aecede7c9 | <ide><path>docs/Basics/The Redux Flow.md
<ide> Redux implements an architecture with unidirectional data flow. What does this m
<ide>
<ide> Here is how any change in a Redux app happens:
<ide>
<del>1. You call `store.dispatch(action)`. An `action` is just a plain object describing “what happened”. For example, `{ type: 'LIKE_ARTICLE', articleId: 42 }`, `{ type: 'USER_FETCHED', response: ... }`, or `{ type: 'ADD_TODO', text: 'Use Redux' }`. Actions are like newspapers, reporting anything that may result in changing the state of your app. You can call `store.dispatch(action)` from your components, XHR callbacks, scheduled intervals, or anywhere else.
<del>2. The Redux store will call the [reducer function](../Reference/Glossary.md#reducer) you gave it. It will pass the current state tree as the first argument, and the action as the second one. For example, your root reducer may receive `{ todos: ['Read docs'] }` as `state` and `{ type: 'ADD_TODO', text: 'Understand the flow' }` as `action`.
<del>3. Now, how you structure your root reducer function is completely up to you. However, Redux ships with `combineReducers` helper which is useful for “splitting” the root reducer into several separate reducer functions that each manage a slice of the state tree. Let’s imagine your passed `combineReducers({ todos: todos })` as your root reducer, where `todos` is a reducer function you wrote, and that just manages the `todos` array. The way `combineReducers` works is that, given `{ a: someFunction, b: someOtherFunction }` as its arguments, it will call `someFunction` with `state.a`, `someOtherFunction` with `state.b`, and combine their results into the new root `state`. In our example, it will pass `state.todos` as `state` to your `todos` reducer, and assemble the next root state with the same `{ todos: Array }` shape. The `todos` reducer might get `['Read docs']` as the current state, and return `['Read docs', 'Understand the flow']` as the next state. The reducer returned by `combineReducers` will then return `{ todos: ['Read docs', 'Understand the flow'] }` as the next root state. While `combineReducers` is a handy helper, you don’t have to use it, and you can write your own root reducer just fine.
<del>4. The Redux store saves the next state tree returned by the reducer. This is now the next state of your app! It will invoke every listener registered with `store.subscribe(listener)`. The listeners may call `store.getState()` to read the current state. This is where you can update your UI using the new state. If you use bindings like [React Redux](https://github.com/gaearon/react-redux), that’s exactly where they schedule a `component.setState()` call.
<add>1. You call `store.dispatch(action)`. An `action` is just a plain object describing “what happened”. For example, `{ type: 'LIKE_ARTICLE', articleId: 42 }`, `{ type: 'USER_FETCHED', response: ... }`, or `{ type: 'ADD_TODO', text: 'Use Redux' }`. **Actions are like newspapers, reporting anything that may result in changing the state of your app.** You can call `store.dispatch(action)` from your components, XHR callbacks, scheduled intervals, or anywhere else.
<add>2. The Redux store will call the [reducer function](../Reference/Glossary.md#reducer) you gave it. It will **pass the current state tree as the first argument, and the action as the second one.** For example, your root reducer may receive `{ todos: ['Read docs'] }` as `state` and `{ type: 'ADD_TODO', text: 'Understand the flow' }` as `action`.
<add>3. Now, how you structure your root reducer function is completely up to you. However, **Redux ships with `combineReducers` helper which is useful for “splitting” the root reducer into several separate reducer functions that each manage a slice of the state tree.** Let’s imagine your passed `combineReducers({ todos: todos })` as your root reducer, where `todos` is a reducer function you wrote, and that just manages the `todos` array. The way `combineReducers` works is that, given `{ a: someFunction, b: someOtherFunction }` as its arguments, it will call `someFunction` with `state.a`, `someOtherFunction` with `state.b`, and combine their results into the new root `state`. In our example, it will pass `state.todos` as `state` to your `todos` reducer, and assemble the next root state with the same `{ todos: Array }` shape. The `todos` reducer might get `['Read docs']` as the current state, and return `['Read docs', 'Understand the flow']` as the next state. The reducer returned by `combineReducers` will then return `{ todos: ['Read docs', 'Understand the flow'] }` as the next root state. While `combineReducers` is a handy helper, you don’t have to use it, and you can write your own root reducer just fine.
<add>4. **The Redux store saves the next state tree returned by the reducer.** This is now the next state of your app! It will invoke every listener registered with `store.subscribe(listener)`. The listeners may call `store.getState()` to read the current state. This is where you can update your UI using the new state. If you use bindings like [React Redux](https://github.com/gaearon/react-redux), that’s exactly where they schedule a `component.setState()` call.
<ide>
<ide> This is all there is to it.
<ide>
<del>One important addition is that, if you use any middleware on the store, it wraps the store’s `dispatch` function and may add support for dispatching promises, [thunks](https://github.com/gaearon/redux-thunk) or other potentially asynchronous [intermediate actions](../Reference/Glossary.md#intermediate-action). In the end, after being processed with middleware, they all become “raw” plain objects, which your reducer function will receive.
<add>One important addition is that, **if you use any middleware on the store, it wraps the store’s `dispatch` function** and may add support for dispatching promises, [thunks](https://github.com/gaearon/redux-thunk) or other potentially asynchronous [intermediate actions](../Reference/Glossary.md#intermediate-action). In the end, after being processed with middleware, they all become “raw” plain objects, which your reducer function will receive.
<ide>
<ide> --------------------------
<ide> Next: [Userland and Core](Userland and Core.md) | 1 |
Python | Python | add creating_job_id to dagrun table | 112f7d716900556a7a41e3a8eea197f6bcfe9ed9 | <ide><path>airflow/jobs/backfill_job.py
<ide> def _get_dag_run(self, run_date: datetime, dag: DAG, session: Session = None):
<ide> session=session,
<ide> conf=self.conf,
<ide> run_type=DagRunType.BACKFILL_JOB,
<add> creating_job_id=self.id,
<ide> )
<ide>
<ide> # set required transient field
<ide><path>airflow/jobs/base_job.py
<ide> from airflow.configuration import conf
<ide> from airflow.exceptions import AirflowException
<ide> from airflow.executors.executor_loader import ExecutorLoader
<add>from airflow.models import DagRun
<ide> from airflow.models.base import ID_LEN, Base
<ide> from airflow.models.taskinstance import TaskInstance
<ide> from airflow.stats import Stats
<ide> class BaseJob(Base, LoggingMixin):
<ide> foreign_keys=id,
<ide> backref=backref('queued_by_job', uselist=False),
<ide> )
<add>
<add> dag_runs = relationship(
<add> DagRun,
<add> primaryjoin=id == DagRun.creating_job_id,
<add> foreign_keys=id,
<add> backref=backref('creating_job'),
<add> )
<add>
<ide> """
<ide> TaskInstances which have been enqueued by this Job.
<ide>
<ide><path>airflow/jobs/scheduler_job.py
<ide> def _create_dag_runs(self, dag_models: Iterable[DagModel], session: Session) ->
<ide> state=State.RUNNING,
<ide> external_trigger=False,
<ide> session=session,
<del> dag_hash=dag_hash
<add> dag_hash=dag_hash,
<add> creating_job_id=self.id,
<ide> )
<ide>
<ide> self._update_dag_next_dagruns(dag_models, session)
<ide><path>airflow/migrations/versions/364159666cbd_add_job_id_to_dagrun_table.py
<add>#
<add># Licensed to the Apache Software Foundation (ASF) under one
<add># or more contributor license agreements. See the NOTICE file
<add># distributed with this work for additional information
<add># regarding copyright ownership. The ASF licenses this file
<add># to you under the Apache License, Version 2.0 (the
<add># "License"); you may not use this file except in compliance
<add># with the License. You may obtain a copy of the License at
<add>#
<add># http://www.apache.org/licenses/LICENSE-2.0
<add>#
<add># Unless required by applicable law or agreed to in writing,
<add># software distributed under the License is distributed on an
<add># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
<add># KIND, either express or implied. See the License for the
<add># specific language governing permissions and limitations
<add># under the License.
<add>
<add>"""Add creating_job_id to DagRun table
<add>
<add>Revision ID: 364159666cbd
<add>Revises: 849da589634d
<add>Create Date: 2020-10-10 09:08:07.332456
<add>
<add>"""
<add>
<add>import sqlalchemy as sa
<add>from alembic import op
<add>
<add># revision identifiers, used by Alembic.
<add>revision = '364159666cbd'
<add>down_revision = '849da589634d'
<add>branch_labels = None
<add>depends_on = None
<add>
<add>
<add>def upgrade():
<add> """Apply Add creating_job_id to DagRun table"""
<add> op.add_column('dag_run', sa.Column('creating_job_id', sa.Integer))
<add>
<add>
<add>def downgrade():
<add> """Unapply Add job_id to DagRun table"""
<add> op.drop_column('dag_run', 'creating_job_id')
<ide><path>airflow/models/dag.py
<ide> def create_dagrun(
<ide> conf=None,
<ide> run_type=None,
<ide> session=None,
<del> dag_hash=None
<add> dag_hash=None,
<add> creating_job_id=None,
<ide> ):
<ide> """
<ide> Creates a dag run from this dag including the tasks associated with this dag.
<ide> def create_dagrun(
<ide> :type external_trigger: bool
<ide> :param conf: Dict containing configuration/parameters to pass to the DAG
<ide> :type conf: dict
<add> :param creating_job_id: id of the job creating this DagRun
<add> :type creating_job_id: int
<ide> :param session: database session
<ide> :type session: sqlalchemy.orm.session.Session
<ide> :param dag_hash: Hash of Serialized DAG
<ide> def create_dagrun(
<ide> conf=conf,
<ide> state=state,
<ide> run_type=run_type.value,
<del> dag_hash=dag_hash
<add> dag_hash=dag_hash,
<add> creating_job_id=creating_job_id
<ide> )
<ide> session.add(run)
<ide> session.flush()
<ide><path>airflow/models/dagrun.py
<ide> class DagRun(Base, LoggingMixin):
<ide> end_date = Column(UtcDateTime)
<ide> _state = Column('state', String(50), default=State.RUNNING)
<ide> run_id = Column(String(ID_LEN))
<add> creating_job_id = Column(Integer)
<ide> external_trigger = Column(Boolean, default=True)
<ide> run_type = Column(String(50), nullable=False)
<ide> conf = Column(PickleType)
<ide> def __init__(
<ide> state: Optional[str] = None,
<ide> run_type: Optional[str] = None,
<ide> dag_hash: Optional[str] = None,
<add> creating_job_id: Optional[int] = None,
<ide> ):
<ide> self.dag_id = dag_id
<ide> self.run_id = run_id
<ide> def __init__(
<ide> self.state = state
<ide> self.run_type = run_type
<ide> self.dag_hash = dag_hash
<add> self.creating_job_id = creating_job_id
<ide> super().__init__()
<ide>
<ide> def __repr__(self):
<ide><path>tests/jobs/test_backfill_job.py
<ide> def test_reset_orphaned_tasks_specified_dagrun(self):
<ide> ti2.refresh_from_db(session=session)
<ide> self.assertEqual(State.SCHEDULED, ti1.state)
<ide> self.assertEqual(State.NONE, ti2.state)
<add>
<add> def test_job_id_is_assigned_to_dag_run(self):
<add> dag_id = 'test_job_id_is_assigned_to_dag_run'
<add> dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, schedule_interval='@daily')
<add> DummyOperator(task_id="dummy_task", dag=dag)
<add>
<add> job = BackfillJob(
<add> dag=dag,
<add> executor=MockExecutor(),
<add> start_date=datetime.datetime.now() - datetime.timedelta(days=1)
<add> )
<add> job.run()
<add> dr: DagRun = dag.get_last_dagrun()
<add> assert dr.creating_job_id == job.id
<ide><path>tests/jobs/test_scheduler_job.py
<ide> def test_send_sla_callbacks_to_processor_sla_with_task_slas(self):
<ide> full_filepath=dag.fileloc, dag_id=dag_id
<ide> )
<ide>
<add> def test_scheduler_sets_job_id_on_dag_run(self):
<add> dag = DAG(
<add> dag_id='test_scheduler_sets_job_id_on_dag_run',
<add> start_date=DEFAULT_DATE)
<add>
<add> DummyOperator(
<add> task_id='dummy',
<add> dag=dag,
<add> )
<add>
<add> dagbag = DagBag(
<add> dag_folder=os.path.join(settings.DAGS_FOLDER, "no_dags.py"),
<add> include_examples=False,
<add> read_dags_from_db=True
<add> )
<add> dagbag.bag_dag(dag=dag, root_dag=dag)
<add> dagbag.sync_to_db()
<add> dag_model = DagModel.get_dagmodel(dag.dag_id)
<add>
<add> scheduler = SchedulerJob(executor=self.null_exec)
<add> scheduler.processor_agent = mock.MagicMock()
<add>
<add> with create_session() as session:
<add> scheduler._create_dag_runs([dag_model], session)
<add>
<add> assert dag.get_last_dagrun().creating_job_id == scheduler.id
<add>
<ide>
<ide> @pytest.mark.xfail(reason="Work out where this goes")
<ide> def test_task_with_upstream_skip_process_task_instances():
<ide><path>tests/models/test_dag.py
<ide> def test_create_dagrun_run_type_is_obtained_from_run_id(self):
<ide> dr = dag.create_dagrun(run_id="custom_is_set_to_manual", state=State.NONE)
<ide> assert dr.run_type == DagRunType.MANUAL.value
<ide>
<add> def test_create_dagrun_job_id_is_set(self):
<add> job_id = 42
<add> dag = DAG(dag_id="test_create_dagrun_job_id_is_set")
<add> dr = dag.create_dagrun(
<add> run_id="test_create_dagrun_job_id_is_set", state=State.NONE, creating_job_id=job_id
<add> )
<add> assert dr.creating_job_id == job_id
<add>
<ide> @parameterized.expand(
<ide> [
<ide> (State.NONE,), | 9 |
Python | Python | remove stray comments | d96c6359a488b2ccdd0eb61b4ba04f3cc5ad45a7 | <ide><path>numpy/random/tests/test_generator_mt19937.py
<ide> def test_multivariate_normal(self, method):
<ide> assert_array_almost_equal(actual, desired, decimal=15)
<ide>
<ide> # Check for default size, was raising deprecation warning
<del> #random = Generator(MT19937(self.seed))
<ide> actual = random.multivariate_normal(mean, cov, method=method)
<del> # the factor matrix is the same for all methods
<del> #random = Generator(MT19937(self.seed))
<del> #desired = np.array([-1.747478062846581, 11.25613495182354])
<ide> desired = np.array([0.233278563284287, 9.424140804347195])
<ide> assert_array_almost_equal(actual, desired, decimal=15)
<ide> # Check that non symmetric covariance input raises exception when | 1 |
Text | Text | add checkbox to verify changes in a browser | 72fcb6c65919766c22793e56792b3b3bb5d1ee4a | <ide><path>.github/PULL_REQUEST_TEMPLATE.md
<ide> Please list the specific changes involved in this pull request.
<ide> ## Requirements Checklist
<ide> - [ ] Feature implemented / Bug fixed
<ide> - [ ] If necessary, more likely in a feature request than a bug fix
<add> - [ ] Change has been verified in an actual browser (Chome, Firefox, IE)
<ide> - [ ] Unit Tests updated or fixed
<ide> - [ ] Docs/guides updated
<ide> - [ ] Example created ([starter template on JSBin](http://jsbin.com/axedog/edit?html,output)) | 1 |
Ruby | Ruby | add yosemite to mac versions | 6bbe82062ec862d3edc22cade7c401688ae68837 | <ide><path>Library/Homebrew/os/mac/version.rb
<ide> module OS
<ide> module Mac
<ide> class Version < ::Version
<ide> SYMBOLS = {
<add> :yosemite => '10.10',
<ide> :mavericks => '10.9',
<ide> :mountain_lion => '10.8',
<ide> :lion => '10.7', | 1 |
PHP | PHP | implement translation behavior default locale | 7472b01428c67ec873d096c992a02a4d42991b2a | <ide><path>src/Model/Behavior/TranslateBehavior.php
<ide> use ArrayObject;
<ide> use Cake\Collection\Collection;
<ide> use Cake\Event\Event;
<add>use Cake\I18n\I18n;
<ide> use Cake\ORM\Behavior;
<ide> use Cake\ORM\Entity;
<ide> use Cake\ORM\Query;
<ide> class TranslateBehavior extends Behavior {
<ide> 'implementedFinders' => ['translations' => 'findTranslations'],
<ide> 'implementedMethods' => ['locale' => 'locale'],
<ide> 'fields' => [],
<del> 'translationTable' => 'i18n'
<add> 'translationTable' => 'i18n',
<add> 'defaultLocale' => 'en_US'
<ide> ];
<ide>
<ide> /**
<ide> public function setupFieldAssociations($fields, $table) {
<ide> public function beforeFind(Event $event, $query) {
<ide> $locale = $this->locale();
<ide>
<del> if (empty($locale)) {
<add> if ($locale === $this->config('defaultLocale')) {
<ide> return;
<ide> }
<ide>
<ide> public function beforeSave(Event $event, Entity $entity, ArrayObject $options) {
<ide>
<ide> $this->_bundleTranslatedFields($entity);
<ide>
<del> if (!$locale) {
<add> if ($locale === $this->config('defaultLocale')) {
<ide> return;
<ide> }
<ide>
<ide> public function afterSave(Event $event, Entity $entity) {
<ide> */
<ide> public function locale($locale = null) {
<ide> if ($locale === null) {
<del> return $this->_locale;
<add> return $this->_locale ?: I18n::defaultLocale();
<ide> }
<ide> return $this->_locale = (string)$locale;
<ide> }
<ide> public function findTranslations(Query $query, array $options) {
<ide> protected function _rowMapper($results, $locale) {
<ide> return $results->map(function($row) use ($locale) {
<ide> $options = ['setter' => false, 'guard' => false];
<add> $hydrated = !is_array($row);
<ide>
<ide> foreach ($this->_config['fields'] as $field) {
<add>
<ide> $name = $field . '_translation';
<del> $translation = $row->get($name);
<add> $translation = isset($row[$name]) ? $row[$name] : null;
<ide>
<ide> if ($translation === null || $translation === false) {
<del> $row->unsetProperty($name);
<add> unset($row[$name]);
<ide> continue;
<ide> }
<ide>
<del> $content = $translation->get('content');
<add> $content = isset($translation['content']) ? $translation['content'] : null;
<ide> if ($content !== null) {
<del> $row->set($field, $content, $options);
<add> $row[$field] = $content;
<ide> }
<ide>
<ide> unset($row[$name]);
<ide> }
<ide>
<del> $row->set('_locale', $locale, $options);
<del> $row->clean();
<add> $row['_locale'] = $locale;
<add> if ($hydrated) {
<add> $row->clean();
<add> }
<add>
<ide> return $row;
<ide> });
<ide> }
<ide><path>tests/TestCase/Model/Behavior/TranslateBehaviorTest.php
<ide>
<ide> use Cake\Collection\Collection;
<ide> use Cake\Event\Event;
<add>use Cake\I18n\I18n;
<ide> use Cake\Model\Behavior\TranslateBehavior;
<ide> use Cake\ORM\Entity;
<ide> use Cake\ORM\TableRegistry;
<ide> class TranslateBehaviorTest extends TestCase {
<ide> ];
<ide>
<ide> public function tearDown() {
<add> I18n::defaultLocale('en_US');
<ide> parent::tearDown();
<ide> TableRegistry::clear();
<ide> }
<ide> public function testFindSingleLocale() {
<ide> $this->assertSame($expected, $results);
<ide> }
<ide>
<add>/**
<add> * Tests that fields from a translated model use the I18n class locale
<add> * and that it propogates to associated models
<add> *
<add> * @return void
<add> */
<add> public function testFindSingleLocaleAssociatedEnv() {
<add> I18n::defaultLocale('eng');
<add>
<add> $table = TableRegistry::get('Articles');
<add> $table->addBehavior('Translate', ['fields' => ['title', 'body']]);
<add>
<add> $table->hasMany('Comments');
<add> $table->Comments->addBehavior('Translate', ['fields' => ['comment']]);
<add>
<add> $results = $table->find()
<add> ->select(['id', 'title', 'body'])
<add> ->contain(['Comments' => ['fields' => ['article_id', 'comment']]])
<add> ->hydrate(false)
<add> ->toArray();
<add>
<add> $expected = [
<add> [
<add> 'id' => 1,
<add> 'title' => 'Title #1',
<add> 'body' => 'Content #1',
<add> 'comments' => [
<add> ['article_id' => 1, 'comment' => 'Comment #1', '_locale' => 'eng'],
<add> ['article_id' => 1, 'comment' => 'Comment #2', '_locale' => 'eng'],
<add> ['article_id' => 1, 'comment' => 'Comment #3', '_locale' => 'eng'],
<add> ['article_id' => 1, 'comment' => 'Comment #4', '_locale' => 'eng']
<add> ],
<add> '_locale' => 'eng'
<add> ],
<add> [
<add> 'id' => 2,
<add> 'title' => 'Title #2',
<add> 'body' => 'Content #2',
<add> 'comments' => [
<add> ['article_id' => 2, 'comment' => 'First Comment for Second Article', '_locale' => 'eng'],
<add> ['article_id' => 2, 'comment' => 'Second Comment for Second Article', '_locale' => 'eng']
<add> ],
<add> '_locale' => 'eng'
<add> ],
<add> [
<add> 'id' => 3,
<add> 'title' => 'Title #3',
<add> 'body' => 'Content #3',
<add> 'comments' => [],
<add> '_locale' => 'eng'
<add> ]
<add> ];
<add> $this->assertSame($expected, $results);
<add>
<add> I18n::defaultLocale('spa');
<add>
<add> $results = $table->find()
<add> ->select(['id', 'title', 'body'])
<add> ->contain(['Comments' => ['fields' => ['article_id', 'comment']]])
<add> ->hydrate(false)
<add> ->toArray();
<add>
<add> $expected = [
<add> [
<add> 'id' => 1,
<add> 'title' => 'First Article',
<add> 'body' => 'First Article Body',
<add> 'comments' => [
<add> ['article_id' => 1, 'comment' => 'First Comment for First Article', '_locale' => 'spa'],
<add> ['article_id' => 1, 'comment' => 'Second Comment for First Article', '_locale' => 'spa'],
<add> ['article_id' => 1, 'comment' => 'Third Comment for First Article', '_locale' => 'spa'],
<add> ['article_id' => 1, 'comment' => 'Comentario #4', '_locale' => 'spa']
<add> ],
<add> '_locale' => 'spa'
<add> ],
<add> [
<add> 'id' => 2,
<add> 'title' => 'Second Article',
<add> 'body' => 'Second Article Body',
<add> 'comments' => [
<add> ['article_id' => 2, 'comment' => 'First Comment for Second Article', '_locale' => 'spa'],
<add> ['article_id' => 2, 'comment' => 'Second Comment for Second Article', '_locale' => 'spa']
<add> ],
<add> '_locale' => 'spa'
<add> ],
<add> [
<add> 'id' => 3,
<add> 'title' => 'Third Article',
<add> 'body' => 'Third Article Body',
<add> 'comments' => [],
<add> '_locale' => 'spa'
<add> ]
<add> ];
<add> $this->assertSame($expected, $results);
<add>
<add> }
<add>
<ide> /**
<ide> * Tests that fields from a translated model are not overriden if translation
<ide> * is null | 2 |
Java | Java | add viewmanager param to @reactmodulelist | c27cc9c1ac9c2bda9b88a8af66aa13efd8ca9a0a | <ide><path>ReactAndroid/src/main/java/com/facebook/react/CoreModulesPackage.java
<ide> * require special integration with other framework parts (e.g. with the list of packages to load
<ide> * view managers from).
<ide> */
<del>@ReactModuleList({
<del> AndroidInfoModule.class,
<del> AnimationsDebugModule.class,
<del> DeviceEventManagerModule.class,
<del> ExceptionsManagerModule.class,
<del> HeadlessJsTaskSupportModule.class,
<del> SourceCodeModule.class,
<del> Timing.class,
<del> UIManagerModule.class,
<del> // Debug only
<del> DebugComponentOwnershipModule.class,
<del> JSCHeapCapture.class,
<del> JSCSamplingProfiler.class,
<del>})
<add>@ReactModuleList(
<add> javaModules = {
<add> AndroidInfoModule.class,
<add> AnimationsDebugModule.class,
<add> DeviceEventManagerModule.class,
<add> ExceptionsManagerModule.class,
<add> HeadlessJsTaskSupportModule.class,
<add> SourceCodeModule.class,
<add> Timing.class,
<add> UIManagerModule.class,
<add> // Debug only
<add> DebugComponentOwnershipModule.class,
<add> JSCHeapCapture.class,
<add> JSCSamplingProfiler.class,
<add> }
<add>)
<ide> /* package */ class CoreModulesPackage extends LazyReactPackage {
<ide>
<ide> private final ReactInstanceManager mReactInstanceManager;
<ide><path>ReactAndroid/src/main/java/com/facebook/react/module/annotations/ReactModuleList.java
<ide> public @interface ReactModuleList {
<ide>
<ide> /**
<del> * The native modules in this list should be annotated with {@link ReactModule}.
<del> * @return List of native modules.
<add> * The Java modules in this list should be annotated with {@link ReactModule}.
<add> * @return List of Java modules in the package.
<ide> */
<del> Class<? extends NativeModule>[] value();
<add> Class<? extends NativeModule>[] javaModules();
<add>
<add> /**
<add> * The View Managers in this list should be annotated with {@link ReactModule}.
<add> * @return List of view manager in the package.
<add> */
<add> Class<? extends NativeModule>[] viewManagers() default {};
<ide> }
<ide><path>ReactAndroid/src/main/java/com/facebook/react/module/processing/ReactModuleSpecProcessor.java
<ide> public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment
<ide> ReactModuleList reactModuleList = typeElement.getAnnotation(ReactModuleList.class);
<ide> List<String> nativeModules = new ArrayList<>();
<ide> try {
<del> reactModuleList.value(); // throws MirroredTypesException
<add> reactModuleList.javaModules(); // throws MirroredTypesException
<ide> } catch (MirroredTypesException mirroredTypesException) {
<ide> List<? extends TypeMirror> typeMirrors = mirroredTypesException.getTypeMirrors();
<ide> for (TypeMirror typeMirror : typeMirrors) { | 3 |
Javascript | Javascript | remove a unnecessary concatenation | c9802801598a138df4bf58767c151e83ce328856 | <ide><path>lib/repl.js
<ide> function REPLServer(prompt, stream, eval_, useGlobal, ignoreUndefined) {
<ide> // Check to see if a REPL keyword was used. If it returns true,
<ide> // display next prompt and return.
<ide> if (cmd && cmd.charAt(0) === '.' && isNaN(parseFloat(cmd))) {
<del> var matches = cmd.match(/^(\.[^\s]+)\s*(.*)$/);
<add> var matches = cmd.match(/^\.([^\s]+)\s*(.*)$/);
<ide> var keyword = matches && matches[1];
<ide> var rest = matches && matches[2];
<ide> if (self.parseREPLKeyword(keyword, rest) === true) {
<ide> REPLServer.prototype.defineCommand = function(keyword, cmd) {
<ide> } else if (!util.isFunction(cmd.action)) {
<ide> throw new Error('bad argument, action must be a function');
<ide> }
<del> this.commands['.' + keyword] = cmd;
<add> this.commands[keyword] = cmd;
<ide> };
<ide>
<ide> REPLServer.prototype.memory = function memory(cmd) { | 1 |
Javascript | Javascript | remove unnecessary require('buffer').buffer | 5e68e168b8e44356f9d97a672836bef718130565 | <ide><path>test/parallel/test-buffer-nopendingdep-map.js
<ide> 'use strict';
<ide>
<ide> const common = require('../common');
<del>const Buffer = require('buffer').Buffer;
<ide>
<ide> process.on('warning', common.mustNotCall('A warning should not be emitted'));
<ide>
<ide><path>test/parallel/test-buffer-pending-deprecation.js
<ide> 'use strict';
<ide>
<ide> const common = require('../common');
<del>const Buffer = require('buffer').Buffer;
<ide>
<ide> const bufferWarning = 'The Buffer() and new Buffer() constructors are not ' +
<ide> 'recommended for use due to security and usability ' +
<ide><path>test/parallel/test-buffer-sharedarraybuffer.js
<ide>
<ide> require('../common');
<ide> const assert = require('assert');
<del>const Buffer = require('buffer').Buffer;
<ide>
<ide> const sab = new SharedArrayBuffer(24);
<ide> const arr1 = new Uint16Array(sab);
<ide><path>test/parallel/test-buffer-slow.js
<ide> const common = require('../common');
<ide> const assert = require('assert');
<ide> const buffer = require('buffer');
<del>const Buffer = buffer.Buffer;
<ide> const SlowBuffer = buffer.SlowBuffer;
<ide>
<ide> const ones = [1, 1, 1, 1];
<ide><path>test/parallel/test-buffer-tojson.js
<ide>
<ide> require('../common');
<ide> const assert = require('assert');
<del>const Buffer = require('buffer').Buffer;
<ide>
<ide> {
<ide> assert.strictEqual(JSON.stringify(Buffer.alloc(0)),
<ide><path>test/parallel/test-buffer-zero-fill.js
<ide>
<ide> require('../common');
<ide> const assert = require('assert');
<del>const Buffer = require('buffer').Buffer;
<ide>
<ide> const buf1 = Buffer(100);
<ide> const buf2 = new Buffer(100); | 6 |
Text | Text | remove unnecessary whitespace | 3ad89fced21d896a70a8bffb5e6e2146ff36135d | <ide><path>guides/source/rails_on_rack.md
<ide> Here's how it loads the middlewares:
<ide> ```ruby
<ide> def middleware
<ide> middlewares = []
<del> middlewares << [Rails::Rack::Debugger] if options[:debugger]
<add> middlewares << [Rails::Rack::Debugger] if options[:debugger]
<ide> middlewares << [::Rack::ContentLength]
<ide> Hash.new(middlewares)
<ide> end | 1 |
Python | Python | add assert_array_max_ulp comparison function | b325f7350facb82a1fc72b4a52e61d90a0101962 | <ide><path>numpy/testing/tests/test_utils.py
<ide> def test_simple(self):
<ide> x = np.array([1e-5, 1, 1000, 10500], dtype=dt)
<ide> assert_array_almost_equal(spacing(x), ref[dt], decimal=dec)
<ide>
<add>class TestULP(unittest.TestCase):
<add> def test_equal(self):
<add> x = np.random.randn(10)
<add> assert_array_max_ulp(x, x, maxulp=0)
<add>
<add> def test_single(self):
<add> # Generate 1 + small deviation, check that adding eps gives a few UNL
<add> x = np.ones(10).astype(np.float32)
<add> x += 0.01 * np.random.randn(10).astype(np.float32)
<add> eps = np.finfo(np.float32).eps
<add> assert_array_max_ulp(x, x+eps, maxulp=20)
<add>
<add> def test_double(self):
<add> # Generate 1 + small deviation, check that adding eps gives a few UNL
<add> x = np.ones(10).astype(np.float32)
<add> x += 0.01 * np.random.randn(10).astype(np.float64)
<add> eps = np.finfo(np.float64).eps
<add> assert_array_max_ulp(x, x+eps, maxulp=200)
<add>
<add> def test_inf(self):
<add> for dt in [np.float32, np.float64]:
<add> inf = np.array([np.inf]).astype(dt)
<add> big = np.array([np.finfo(dt).max])
<add> assert_array_max_ulp(inf, big, maxulp=200)
<add>
<add> def test_nan(self):
<add> # Test that nan is 'far' from small, tiny, inf, max and min
<add> for dt in [np.float32, np.float64]:
<add> if dt == np.float32:
<add> maxulp = 1e6
<add> else:
<add> maxulp = 1e12
<add> inf = np.array([np.inf]).astype(dt)
<add> nan = np.array([np.nan]).astype(dt)
<add> big = np.array([np.finfo(dt).max])
<add> tiny = np.array([np.finfo(dt).tiny])
<add> zero = np.array([np.PZERO]).astype(dt)
<add> nzero = np.array([np.NZERO]).astype(dt)
<add> self.failUnlessRaises(AssertionError,
<add> lambda: assert_array_max_ulp(nan, inf,
<add> maxulp=maxulp))
<add> self.failUnlessRaises(AssertionError,
<add> lambda: assert_array_max_ulp(nan, big,
<add> maxulp=maxulp))
<add> self.failUnlessRaises(AssertionError,
<add> lambda: assert_array_max_ulp(nan, tiny,
<add> maxulp=maxulp))
<add> self.failUnlessRaises(AssertionError,
<add> lambda: assert_array_max_ulp(nan, zero,
<add> maxulp=maxulp))
<add> self.failUnlessRaises(AssertionError,
<add> lambda: assert_array_max_ulp(nan, nzero,
<add> maxulp=maxulp))
<ide> if __name__ == '__main__':
<ide> run_module_suite()
<ide><path>numpy/testing/utils.py
<ide> 'assert_array_almost_equal', 'assert_raises', 'build_err_msg',
<ide> 'decorate_methods', 'jiffies', 'memusage', 'print_assert_equal',
<ide> 'raises', 'rand', 'rundocs', 'runstring', 'verbose', 'measure',
<del> 'assert_', 'spacing', 'assert_array_almost_equal_nulp']
<add> 'assert_', 'spacing', 'assert_array_almost_equal_nulp',
<add> 'assert_array_max_ulp']
<ide>
<ide> verbose = 0
<ide>
<ide> def assert_array_almost_equal_nulp(x, y, nulp=1):
<ide> raise AssertionError("X and Y are not equal to %d ULP "\
<ide> "(max is %d)" % (nulp, max_nulp))
<ide>
<add>def assert_array_max_ulp(a, b, maxulp=1, dtype=None):
<add> """Given two arrays a and b, check that every item differs in at most N
<add> Unit in the Last Place."""
<add> import numpy as np
<add> ret = nulp_diff(a, b, dtype)
<add> if not np.all(ret <= maxulp):
<add> raise AssertionError("Arrays are not almost equal up to %d ULP" % \
<add> maxulp)
<add> return ret
<add>
<ide> def nulp_diff(x, y, dtype=None):
<ide> """For each item in x and y, eeturn the number of representable floating
<ide> points between them. | 2 |
Ruby | Ruby | give better suggestion on git/hg dependency | 40e64263226d4017b2437fbd5fd777c41ab3f063 | <ide><path>Library/Homebrew/cmd/audit.rb
<ide> def audit_deps
<ide> Or if it is indeed a runtime denpendency
<ide> depends_on "#{dep}" => :run
<ide> EOS
<del> when "git", "ruby", "mercurial"
<del> problem "Don't use #{dep} as a dependency. We allow non-Homebrew #{dep} installations."
<add> when "git"
<add> problem "Use `depends_on :git` instead of `depends_on 'git'`"
<add> when "mercurial"
<add> problem "Use `depends_on :hg` instead of `depends_on 'mercurial'`"
<add> when "ruby"
<add> problem "Don't use ruby as a dependency. We allow non-Homebrew ruby installations."
<ide> when 'gfortran'
<ide> problem "Use `depends_on :fortran` instead of `depends_on 'gfortran'`"
<ide> when 'open-mpi', 'mpich2' | 1 |
Go | Go | improve error for getstore() | 79bf46fd79f70edc93b0336db788419912a399fe | <ide><path>libnetwork/error.go
<ide> func (mr ManagerRedirectError) Error() string {
<ide>
<ide> // Maskable denotes the type of this error
<ide> func (mr ManagerRedirectError) Maskable() {}
<add>
<add>// ErrDataStoreNotInitialized is returned if an invalid data scope is passed
<add>// for getting data store
<add>type ErrDataStoreNotInitialized string
<add>
<add>func (dsni ErrDataStoreNotInitialized) Error() string {
<add> return fmt.Sprintf("datastore for scope %q is not initialized", string(dsni))
<add>}
<ide><path>libnetwork/store.go
<ide> func (n *network) getEndpointsFromStore() ([]*endpoint, error) {
<ide> func (c *controller) updateToStore(kvObject datastore.KVObject) error {
<ide> cs := c.getStore(kvObject.DataScope())
<ide> if cs == nil {
<del> return fmt.Errorf("datastore for scope %q is not initialized ", kvObject.DataScope())
<add> return ErrDataStoreNotInitialized(kvObject.DataScope())
<ide> }
<ide>
<ide> if err := cs.PutObjectAtomic(kvObject); err != nil {
<ide> func (c *controller) updateToStore(kvObject datastore.KVObject) error {
<ide> func (c *controller) deleteFromStore(kvObject datastore.KVObject) error {
<ide> cs := c.getStore(kvObject.DataScope())
<ide> if cs == nil {
<del> return fmt.Errorf("datastore for scope %q is not initialized ", kvObject.DataScope())
<add> return ErrDataStoreNotInitialized(kvObject.DataScope())
<ide> }
<ide>
<ide> retry: | 2 |
Text | Text | fix syntax error [ci skip] | 8cdeeb5b26c4e5e2a45bd8500ca5db0e025cb39f | <ide><path>guides/source/caching_with_rails.md
<ide> gem 'connection_pool'
<ide> Next, pass the `:pool_size` and/or `:pool_timeout` options when configuring the cache store:
<ide>
<ide> ```ruby
<del>config.cache_store = :mem_cache_store, "cache.example.com", pool_size: 5, pool_timeout: 5
<add>config.cache_store = :mem_cache_store, "cache.example.com", { pool_size: 5, pool_timeout: 5 }
<ide> ```
<ide>
<ide> * `:pool_size` - This option sets the number of connections per process (defaults to 5). | 1 |
Go | Go | forbid certain paths within docker build add | f712e10cb21d0056593ea23152d347637396c810 | <ide><path>buildfile.go
<ide> func (b *buildFile) addContext(container *Container, orig, dest string) error {
<ide> if strings.HasSuffix(dest, "/") {
<ide> destPath = destPath + "/"
<ide> }
<add> if !strings.HasPrefix(origPath, b.context) {
<add> return fmt.Errorf("Forbidden path: %s", origPath)
<add> }
<ide> fi, err := os.Stat(origPath)
<ide> if err != nil {
<ide> return err
<ide><path>buildfile_test.go
<ide> func TestBuildEntrypoint(t *testing.T) {
<ide> if img.Config.Entrypoint[0] != "/bin/echo" {
<ide> }
<ide> }
<add>
<add>func TestForbiddenContextPath(t *testing.T) {
<add> runtime, err := newTestRuntime()
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> defer nuke(runtime)
<add>
<add> srv := &Server{
<add> runtime: runtime,
<add> pullingPool: make(map[string]struct{}),
<add> pushingPool: make(map[string]struct{}),
<add> }
<add>
<add> context := testContextTemplate{`
<add> from {IMAGE}
<add> maintainer dockerio
<add> add ../../ test/
<add> `,
<add> [][2]string{{"test.txt", "test1"}, {"other.txt", "other"}}, nil}
<add>
<add> httpServer, err := mkTestingFileServer(context.remoteFiles)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> defer httpServer.Close()
<add>
<add> idx := strings.LastIndex(httpServer.URL, ":")
<add> if idx < 0 {
<add> t.Fatalf("could not get port from test http server address %s", httpServer.URL)
<add> }
<add> port := httpServer.URL[idx+1:]
<add>
<add> ip := srv.runtime.networkManager.bridgeNetwork.IP
<add> dockerfile := constructDockerfile(context.dockerfile, ip, port)
<add>
<add> buildfile := NewBuildFile(srv, ioutil.Discard, false)
<add> _, err = buildfile.Build(mkTestContext(dockerfile, context.files, t))
<add>
<add> if err == nil {
<add> t.Log("Error should not be nil")
<add> t.Fail()
<add> }
<add>
<add> if err.Error() != "Forbidden path: /" {
<add> t.Logf("Error message is not expected: %s", err.Error())
<add> t.Fail()
<add> }
<add>} | 2 |
Python | Python | remove unreachable code from manualschema | 588b61e1718807cce6691090ed28175bf6fdb268 | <ide><path>rest_framework/schemas/inspectors.py
<ide> def get_link(self, path, method, base_url):
<ide> description=self._description
<ide> )
<ide>
<del> return self._link
<del>
<ide>
<ide> class DefaultSchema(object):
<ide> """Allows overriding AutoSchema using DEFAULT_SCHEMA_CLASS setting""" | 1 |
Python | Python | remove trailing whitespace | 33bacd5114e1b22f9ff50670342385dbf04050e6 | <ide><path>numpy/lib/npyio.py
<ide> def __getattribute__(self, key):
<ide> return object.__getattribute__(self, '_obj')[key]
<ide> except KeyError:
<ide> raise AttributeError(key)
<del>
<add>
<ide> def __dir__(self):
<ide> """
<ide> Enables dir(bagobj) to list the files in an NpzFile.
<del>
<add>
<ide> This also enables tab-completion in an interpreter or IPython.
<ide> """
<ide> return object.__getattribute__(self, '_obj').keys() | 1 |
Javascript | Javascript | use async/await in `openitem()` | 77ea97e623cdef220de6e933b56ef30539e2c94b | <ide><path>src/workspace.js
<del>'use strict'
<add>'use babel'
<ide>
<ide> const _ = require('underscore-plus')
<ide> const url = require('url')
<ide> module.exports = class Workspace extends Model {
<ide> }
<ide> }
<ide>
<del> openItem (item, options = {}) {
<add> async openItem (item, options = {}) {
<ide> let {pane, split} = options
<ide>
<del> if (item == null) return Promise.resolve()
<del> if (pane != null && pane.isDestroyed()) return Promise.resolve(item)
<add> if (item == null) return undefined
<add> if (pane != null && pane.isDestroyed()) return item
<ide>
<ide> const uri = options.uri == null && typeof item.getURI === 'function' ? item.getURI() : options.uri
<ide>
<ide> module.exports = class Workspace extends Model {
<ide> // in the center location (legacy behavior)
<ide> let location
<ide> if (paneContainer == null && pane == null && split == null && uri != null) {
<del> location = this.itemLocationStore.load(uri)
<add> location = await this.itemLocationStore.load(uri)
<ide> }
<ide>
<del> return Promise.resolve(location)
<del> .then(location => {
<del> if (paneContainer == null) {
<del> if (location == null && typeof item.getDefaultLocation === 'function') {
<del> location = item.getDefaultLocation()
<del> }
<del> paneContainer = this.docks[location] || this.getCenter()
<del> }
<del> })
<del> .then(() => {
<del> if (pane != null) return pane
<del> pane = paneContainer.getActivePane()
<del> switch (split) {
<del> case 'left': return pane.findLeftmostSibling()
<del> case 'right': return pane.findOrCreateRightmostSibling()
<del> case 'up': return pane.findTopmostSibling()
<del> case 'down': return pane.findOrCreateBottommostSibling()
<del> default: return pane
<del> }
<del> })
<del> .then(pane => {
<del> if (!options.pending && (pane.getPendingItem() === item)) {
<del> pane.clearPendingItem()
<del> }
<add> if (paneContainer == null) {
<add> if (location == null && typeof item.getDefaultLocation === 'function') {
<add> location = item.getDefaultLocation()
<add> }
<add> paneContainer = this.docks[location] || this.getCenter()
<add> }
<ide>
<del> const activatePane = options.activatePane != null ? options.activatePane : true
<del> const activateItem = options.activateItem != null ? options.activateItem : true
<del> this.itemOpened(item)
<del> if (activateItem) {
<del> pane.activateItem(item, {pending: options.pending})
<del> }
<del> if (activatePane) {
<del> pane.activate()
<del> }
<del> paneContainer.activate()
<add> if (pane == null) {
<add> pane = paneContainer.getActivePane()
<add> switch (split) {
<add> case 'left':
<add> pane = pane.findLeftmostSibling()
<add> break
<add> case 'right':
<add> pane = pane.findOrCreateRightmostSibling()
<add> break
<add> case 'up':
<add> pane = pane.findTopmostSibling()
<add> break
<add> case 'down':
<add> pane = pane.findOrCreateBottommostSibling()
<add> break
<add> }
<add> }
<ide>
<del> let initialColumn = 0
<del> let initialLine = 0
<del> if (!Number.isNaN(options.initialLine)) {
<del> initialLine = options.initialLine
<del> }
<del> if (!Number.isNaN(options.initialColumn)) {
<del> initialColumn = options.initialColumn
<del> }
<del> if ((initialLine >= 0) || (initialColumn >= 0)) {
<del> if (typeof item.setCursorBufferPosition === 'function') {
<del> item.setCursorBufferPosition([initialLine, initialColumn])
<del> }
<del> }
<add> if (!options.pending && (pane.getPendingItem() === item)) {
<add> pane.clearPendingItem()
<add> }
<ide>
<del> const index = pane.getActiveItemIndex()
<del> this.emitter.emit('did-open', {uri, pane, item, index})
<del> return item
<del> })
<add> const activatePane = options.activatePane != null ? options.activatePane : true
<add> const activateItem = options.activateItem != null ? options.activateItem : true
<add> this.itemOpened(item)
<add> if (activateItem) {
<add> pane.activateItem(item, {pending: options.pending})
<add> }
<add> if (activatePane) {
<add> pane.activate()
<add> }
<add> paneContainer.activate()
<add>
<add> let initialColumn = 0
<add> let initialLine = 0
<add> if (!Number.isNaN(options.initialLine)) {
<add> initialLine = options.initialLine
<add> }
<add> if (!Number.isNaN(options.initialColumn)) {
<add> initialColumn = options.initialColumn
<add> }
<add> if ((initialLine >= 0) || (initialColumn >= 0)) {
<add> if (typeof item.setCursorBufferPosition === 'function') {
<add> item.setCursorBufferPosition([initialLine, initialColumn])
<add> }
<add> }
<add>
<add> const index = pane.getActiveItemIndex()
<add> this.emitter.emit('did-open', {uri, pane, item, index})
<add> return item
<ide> }
<ide>
<ide> openTextFile (uri, options) { | 1 |
Ruby | Ruby | use quiet_safe_system to silence bzr updates | 5d811c519beee45edac5387e92f1f2bee4f72452 | <ide><path>Library/Homebrew/download_strategy.rb
<ide> def clone_repo
<ide> end
<ide>
<ide> def update
<del> @clone.cd { safe_system bzrpath, 'update' }
<add> @clone.cd { quiet_safe_system bzrpath, "update" }
<ide> end
<ide>
<ide> def bzrpath | 1 |
Ruby | Ruby | fix unstated usage of inflector | 8bb162f008bb8e3a66b4a0d98b9f56cad45a4ab3 | <ide><path>railties/lib/rails/railtie.rb
<ide> require 'rails/initializable'
<ide> require 'rails/configuration'
<add>require 'active_support/inflector'
<ide>
<ide> module Rails
<ide> class Railtie
<ide> def inherited(base)
<ide> end
<ide>
<ide> def railtie_name(railtie_name = nil)
<del> @railtie_name ||= name.demodulize.underscore
<ide> @railtie_name = railtie_name if railtie_name
<del> @railtie_name
<add> @railtie_name ||= default_name
<ide> end
<ide>
<ide> def railtie_names
<ide> def generators(&blk)
<ide> def abstract_railtie?(base)
<ide> ABSTRACT_RAILTIES.include?(base.name)
<ide> end
<add>
<add> def default_name
<add> ActiveSupport::Inflector.underscore(ActiveSupport::Inflector.demodulize(name))
<add> end
<ide> end
<ide>
<ide> def rake_tasks | 1 |
Java | Java | fix some warning | 0b2c0cfb4f8194c3dbb0b1fb1c0a6e08e18207a0 | <ide><path>spring-expression/src/test/java/org/springframework/expression/spel/IndexingTests.java
<ide> public void emptyList() {
<ide> assertEquals("", expression.getValue(this, String.class));
<ide> }
<ide>
<add> @SuppressWarnings("unchecked")
<ide> @Test
<ide> public void resolveCollectionElementType() {
<ide> listNotGeneric = new ArrayList(2);
<ide> public void resolveCollectionElementTypeNull() {
<ide>
<ide> }
<ide>
<add> @SuppressWarnings("unchecked")
<ide> @Test
<ide> public void resolveMapKeyValueTypes() {
<ide> mapNotGeneric = new HashMap();
<ide> public void resolveMapKeyValueTypes() {
<ide> @FieldAnnotation
<ide> public Map mapNotGeneric;
<ide>
<add> @SuppressWarnings("unchecked")
<ide> @Test
<ide> public void testListOfScalar() {
<ide> listOfScalarNotGeneric = new ArrayList(1);
<ide> public void testListOfScalar() {
<ide> public List listOfScalarNotGeneric;
<ide>
<ide>
<add> @SuppressWarnings("unchecked")
<ide> @Test
<ide> public void testListsOfMap() {
<ide> listOfMapsNotGeneric = new ArrayList();
<ide><path>spring-expression/src/test/java/org/springframework/expression/spel/SpelCompilationCoverageTests.java
<ide> public int getKey2() {
<ide> return 1;
<ide> }
<ide> }
<del>
<add>
<add> @SuppressWarnings("serial")
<ide> public static class MessageHeaders extends HashMap<String,Object> { }
<ide>
<ide> public static class GenericMessageTestHelper<T> {
<ide><path>spring-test/src/main/java/org/springframework/test/context/ContextConfiguration.java
<ide> * The term <em>annotated class</em> can refer to any of the following.
<ide> *
<ide> * <ul>
<del> * <li>A class annotated with {@link org.springframework.context.annotation.Configuration
<del> * @Configuration}</li>
<add> * <li>A class annotated with @{@link org.springframework.context.annotation.Configuration
<add> * Configuration}</li>
<ide> * <li>A component (i.e., a class annotated with
<ide> * {@link org.springframework.stereotype.Component @Component},
<ide> * {@link org.springframework.stereotype.Service @Service},
<ide><path>spring-test/src/main/java/org/springframework/test/context/transaction/TransactionalTestExecutionListener.java
<ide> * {@link org.springframework.transaction.annotation.TransactionManagementConfigurer TransactionManagementConfigurer}
<ide> * can be implemented by an
<ide> * {@link org.springframework.context.annotation.Configuration @Configuration}
<del> * class. See {@link TestContextTransactionUtils#retrieveTransactionManager()}
<add> * class. See {@link TestContextTransactionUtils#retrieveTransactionManager}
<ide> * for details on the algorithm used to look up a transaction manager in
<ide> * the test's {@code ApplicationContext}.
<ide> * | 4 |
Ruby | Ruby | use the host glibc version only | 0db0db95169a9d8437c8b058d9b82be40f1766b3 | <ide><path>Library/Homebrew/extend/os/linux/development_tools.rb
<ide> def default_compiler
<ide> end
<ide>
<ide> def build_system_info
<del> brewed_glibc_version = begin
<del> Formula["glibc"].any_installed_version
<del> rescue FormulaUnavailableError
<del> nil
<del> end
<del> glibc_version = brewed_glibc_version || OS::Linux::Glibc.system_version
<del> generic_build_system_info.merge "glibc_version" => glibc_version
<add> generic_build_system_info.merge "glibc_version" => OS::Linux::Glibc.system_version
<ide> end
<ide> end
<ide> end | 1 |
Python | Python | fix tests in tests/www/test_views.py | 52604a3444e4473f5c20f56624624869ea50ef9b | <ide><path>tests/www/test_views.py
<ide> def test_trigger_dag_form_origin_url(self, test_origin, expected_origin):
<ide> # https://github.com/python/cpython/pull/24297/files
<ide> # Check if tests are running with a Python version containing the above fix
<ide> # where ";" is removed as a separator
<del> if parse_qsl(";a=b") != [(';a', 'b')]:
<del> expected_url = expected_origin.replace("%3B", "&")
<del> expected_url += "="
<add> if parse_qsl(";a=b") != [(';a', 'b')] and ";" in test_origin:
<add> expected_origin = expected_origin.replace("%3B", "&")
<add> expected_origin += "="
<ide>
<ide> resp = self.client.get(f'trigger?dag_id={test_dag_id}&origin={test_origin}')
<ide> self.check_content_in_response(
<ide> def test_get_safe_url(self, test_url, expected_url, mock_url_for):
<ide> # https://github.com/python/cpython/pull/24297/files
<ide> # Check if tests are running with a Python version containing the above fix
<ide> # where ";" is removed as a separator
<del> if parse_qsl(";a=b") != [(';a', 'b')]:
<add> if parse_qsl(";a=b") != [(';a', 'b')] and ";" in test_url:
<ide> expected_url = expected_url.replace("%3B", "&")
<ide> expected_url += "="
<ide> | 1 |
PHP | PHP | use created object to access atom constant | 68f2f8a1ea9c7bf08f486e3d995661951d1af4cd | <ide><path>Cake/Test/TestCase/Utility/TimeTest.php
<ide> public function testToServer() {
<ide> */
<ide> public function testToAtom() {
<ide> $dateTime = new \DateTime;
<del> $this->assertEquals($dateTime->format(DATE_ATOM), $this->Time->toAtom(time()));
<add> $this->assertEquals($dateTime->format($dateTime::ATOM), $this->Time->toAtom(time()));
<ide> }
<ide>
<ide> /**
<ide><path>Cake/Utility/Time.php
<ide> public static function toServer($dateString, $timezone = null, $format = 'Y-m-d
<ide> public static function toAtom($dateString, $timezone = null) {
<ide> $dateTime = new \DateTime;
<ide> return $dateTime->setTimestamp(static::fromString($dateString, $timezone))
<del> ->format(DATE_ATOM);
<add> ->format($dateTime::ATOM);
<ide> }
<ide>
<ide> /** | 2 |
Ruby | Ruby | fix #postgresql_version docs | 4d0464187cebdf513f91a61c356a2452043d27e7 | <ide><path>activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
<ide> def distinct(columns, orders) #:nodoc:
<ide> end
<ide>
<ide> protected
<del> # Returns the version of the connected PostgreSQL version.
<add> # Returns the version of the connected PostgreSQL server.
<ide> def postgresql_version
<ide> @postgresql_version ||=
<ide> if @connection.respond_to?(:server_version) | 1 |
Python | Python | fix versionadded tags | 209889c55989ae5cfde6d48503761af33b84e128 | <ide><path>numpy/core/fromnumeric.py
<ide> def amax(a, axis=None, out=None, keepdims=np._NoValue):
<ide> Axis or axes along which to operate. By default, flattened input is
<ide> used.
<ide>
<del> .. versionadded: 1.7.0
<add> .. versionadded:: 1.7.0
<ide>
<ide> If this is a tuple of ints, the maximum is selected over multiple axes,
<ide> instead of a single axis or all the axes as before.
<ide> def amin(a, axis=None, out=None, keepdims=np._NoValue):
<ide> Axis or axes along which to operate. By default, flattened input is
<ide> used.
<ide>
<del> .. versionadded: 1.7.0
<add> .. versionadded:: 1.7.0
<ide>
<ide> If this is a tuple of ints, the minimum is selected over multiple axes,
<ide> instead of a single axis or all the axes as before.
<ide> def mean(a, axis=None, dtype=None, out=None, keepdims=np._NoValue):
<ide> Axis or axes along which the means are computed. The default is to
<ide> compute the mean of the flattened array.
<ide>
<del> .. versionadded: 1.7.0
<add> .. versionadded:: 1.7.0
<ide>
<ide> If this is a tuple of ints, a mean is performed over multiple axes,
<ide> instead of a single axis or all the axes as before.
<ide> def std(a, axis=None, dtype=None, out=None, ddof=0, keepdims=np._NoValue):
<ide> Axis or axes along which the standard deviation is computed. The
<ide> default is to compute the standard deviation of the flattened array.
<ide>
<del> .. versionadded: 1.7.0
<add> .. versionadded:: 1.7.0
<ide>
<ide> If this is a tuple of ints, a standard deviation is performed over
<ide> multiple axes, instead of a single axis or all the axes as before.
<ide> def var(a, axis=None, dtype=None, out=None, ddof=0, keepdims=np._NoValue):
<ide> Axis or axes along which the variance is computed. The default is to
<ide> compute the variance of the flattened array.
<ide>
<del> .. versionadded: 1.7.0
<add> .. versionadded:: 1.7.0
<ide>
<ide> If this is a tuple of ints, a variance is performed over multiple axes,
<ide> instead of a single axis or all the axes as before.
<ide><path>numpy/linalg/linalg.py
<ide> def slogdet(a):
<ide> Broadcasting rules apply, see the `numpy.linalg` documentation for
<ide> details.
<ide>
<del> .. versionadded:: 1.6.0.
<add> .. versionadded:: 1.6.0
<ide>
<ide> The determinant is computed via LU factorization using the LAPACK
<ide> routine z/dgetrf. | 2 |
Python | Python | fix image resizing in preprocessing/image | 4302d8060d06b8a19f2d16b5565ffdabf82277dd | <ide><path>keras/preprocessing/image.py
<ide> def load_img(path, grayscale=False, target_size=None):
<ide> else: # Ensure 3 channel even when loaded image is grayscale
<ide> img = img.convert('RGB')
<ide> if target_size:
<del> img = img.resize(target_size)
<add> img = img.resize((target_size[1], target_size[0]))
<ide> return img
<ide>
<ide> | 1 |
PHP | PHP | fix issue with logging scopes | 72f4d4fac09645c99c8443ec995b67a11eb1eeea | <ide><path>lib/Cake/Log/CakeLog.php
<ide> public static function write($type, $message, $scope = array()) {
<ide> $logged = false;
<ide> foreach (self::$_Collection->enabled() as $streamName) {
<ide> $logger = self::$_Collection->{$streamName};
<del> $types = null;
<del> $scopes = array();
<add> $types = $scopes = $config = array();
<ide> if ($logger instanceof BaseLog) {
<ide> $config = $logger->config();
<del> if (isset($config['types'])) {
<del> $types = $config['types'];
<del> }
<del> if (isset($config['scopes'])) {
<del> $scopes = $config['scopes'];
<del> }
<ide> }
<del> if (is_string($scope)) {
<del> $inScope = in_array($scope, $scopes);
<del> } else {
<del> $intersect = array_intersect($scope, $scopes);
<del> $inScope = !empty($intersect);
<add> if (isset($config['types'])) {
<add> $types = $config['types'];
<ide> }
<del> if (empty($types) || in_array($type, $types) || in_array($type, $scopes) && $inScope) {
<add> if (isset($config['scopes'])) {
<add> $scopes = $config['scopes'];
<add> }
<add> $inScope = (count(array_intersect((array)$scope, $scopes)) > 0);
<add> $correctLevel = in_array($type, $types);
<add>
<add> if (
<add> // No config is a catch all (bc mode)
<add> (empty($types) && empty($scopes)) ||
<add> // BC layer for mixing scope & level
<add> (in_array($type, $scopes)) ||
<add> // no scopes, but has level
<add> (empty($scopes) && $correctLevel) ||
<add> // exact scope + level
<add> ($correctLevel && $inScope)
<add> ) {
<ide> $logger->write($type, $message);
<ide> $logged = true;
<ide> }
<ide><path>lib/Cake/Test/Case/Log/CakeLogTest.php
<ide> protected function _deleteLogs() {
<ide>
<ide> /**
<ide> * test backward compatible scoped logging
<add> *
<add> * @return void
<ide> */
<ide> public function testScopedLoggingBC() {
<del> $this->_deleteLogs();
<del>
<ide> $this->_resetLogConfig();
<add>
<ide> CakeLog::config('shops', array(
<ide> 'engine' => 'FileLog',
<ide> 'types' => array('info', 'notice', 'warning'),
<ide> 'scopes' => array('transactions', 'orders'),
<ide> 'file' => 'shops',
<del> ));
<add> ));
<add> $this->_deleteLogs();
<ide>
<ide> CakeLog::write('info', 'info message');
<ide> $this->assertFalse(file_exists(LOGS . 'error.log'));
<del> $this->assertTrue(file_exists(LOGS . 'shops.log'));
<ide> $this->assertTrue(file_exists(LOGS . 'debug.log'));
<ide>
<ide> $this->_deleteLogs();
<ide> public function testScopedLoggingBC() {
<ide>
<ide> CakeLog::write('warning', 'warning message');
<ide> $this->assertTrue(file_exists(LOGS . 'error.log'));
<del> $this->assertTrue(file_exists(LOGS . 'shops.log'));
<ide> $this->assertFalse(file_exists(LOGS . 'debug.log'));
<ide>
<ide> $this->_deleteLogs();
<ide>
<ide> CakeLog::drop('shops');
<ide> }
<ide>
<add>
<add> public function testScopedLoggingExclusive() {
<add> $this->_deleteLogs();
<add>
<add> CakeLog::config('shops', array(
<add> 'engine' => 'FileLog',
<add> 'types' => array('info', 'notice', 'warning'),
<add> 'scopes' => array('transactions', 'orders'),
<add> 'file' => 'shops.log',
<add> ));
<add> CakeLog::config('eggs', array(
<add> 'engine' => 'FileLog',
<add> 'types' => array('info', 'notice', 'warning'),
<add> 'scopes' => array('eggs'),
<add> 'file' => 'eggs.log',
<add> ));
<add>
<add> CakeLog::write('info', 'transactions message', 'transactions');
<add> $this->assertFalse(file_exists(LOGS . 'eggs.log'));
<add> $this->assertTrue(file_exists(LOGS . 'shops.log'));
<add>
<add> $this->_deleteLogs();
<add>
<add> CakeLog::write('info', 'eggs message', 'eggs');
<add> $this->assertTrue(file_exists(LOGS . 'eggs.log'));
<add> $this->assertFalse(file_exists(LOGS . 'shops.log'));
<add> }
<add>
<ide> /**
<ide> * test scoped logging
<ide> *
<ide> * @return void
<ide> */
<ide> public function testScopedLogging() {
<del> if (file_exists(LOGS . 'shops.log')) {
<del> unlink(LOGS . 'shops.log');
<del> }
<del> if (file_exists(LOGS . 'error.log')) {
<del> unlink(LOGS . 'error.log');
<del> }
<del> if (file_exists(LOGS . 'debug.log')) {
<del> unlink(LOGS . 'debug.log');
<del> }
<del>
<ide> $this->_resetLogConfig();
<add> $this->_deleteLogs();
<ide> CakeLog::config('shops', array(
<ide> 'engine' => 'FileLog',
<ide> 'types' => array('info', 'notice', 'warning'),
<ide> 'scopes' => array('transactions', 'orders'),
<del> 'file' => 'shops',
<del> ));
<add> 'file' => 'shops.log',
<add> ));
<ide>
<ide> CakeLog::write('info', 'info message', 'transactions');
<ide> $this->assertFalse(file_exists(LOGS . 'error.log')); | 2 |
Javascript | Javascript | make curve creategeometry 2d/3d friendly | c9e88e32a7e6a8a03b2d5a619ee7cd61942aa772 | <ide><path>src/extras/core/CurvePath.js
<ide> THREE.CurvePath.prototype.createGeometry = function( points ) {
<ide>
<ide> for ( var i = 0; i < points.length; i ++ ) {
<ide>
<del> geometry.vertices.push( new THREE.Vector3( points[ i ].x, points[ i ].y, points[ i ].z ) );
<add> geometry.vertices.push( new THREE.Vector3( points[ i ].x, points[ i ].y, points[ i ].z || 0) );
<ide>
<ide> }
<ide> | 1 |
Python | Python | require timetable class be registered via plugin | be7efb1d30929a7f742f5b7735a3d6fbadadd352 | <ide><path>airflow/plugins_manager.py
<ide> from airflow import settings
<ide> from airflow.utils.entry_points import entry_points_with_dist
<ide> from airflow.utils.file import find_path_from_directory
<add>from airflow.utils.module_loading import as_importable_string
<ide>
<ide> if TYPE_CHECKING:
<ide> from airflow.hooks.base import BaseHook
<add> from airflow.timetables.base import Timetable
<ide>
<ide> log = logging.getLogger(__name__)
<ide>
<ide> global_operator_extra_links: Optional[List[Any]] = None
<ide> operator_extra_links: Optional[List[Any]] = None
<ide> registered_operator_link_classes: Optional[Dict[str, Type]] = None
<add>timetable_classes: Optional[Dict[str, Type["Timetable"]]] = None
<ide> """Mapping of class names to class of OperatorLinks registered by plugins.
<ide>
<ide> Used by the DAG serialization code to only allow specific classes to be created
<ide> class AirflowPlugin:
<ide> # buttons.
<ide> operator_extra_links: List[Any] = []
<ide>
<add> # A list of timetable classes that can be used for DAG scheduling.
<add> timetables: List[Type["Timetable"]] = []
<add>
<ide> @classmethod
<ide> def validate(cls):
<ide> """Validates that plugin has a name."""
<ide> def initialize_extra_operators_links_plugins():
<ide> )
<ide>
<ide>
<add>def initialize_timetables_plugins():
<add> """Collect timetable classes registered by plugins."""
<add> global timetable_classes
<add>
<add> if timetable_classes is not None:
<add> return
<add>
<add> ensure_plugins_loaded()
<add>
<add> if plugins is None:
<add> raise AirflowPluginException("Can't load plugins.")
<add>
<add> log.debug("Initialize extra timetables plugins")
<add>
<add> timetable_classes = {
<add> as_importable_string(timetable_class): timetable_class
<add> for plugin in plugins
<add> for timetable_class in plugin.timetables
<add> }
<add>
<add>
<ide> def integrate_executor_plugins() -> None:
<ide> """Integrate executor plugins to the context."""
<ide> global plugins
<ide><path>airflow/serialization/serialized_objects.py
<ide> import logging
<ide> from dataclasses import dataclass
<ide> from inspect import Parameter, signature
<del>from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Set, Union
<add>from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Set, Type, Union
<ide>
<ide> import cattr
<ide> import pendulum
<ide> from dateutil import relativedelta
<del>
<del>try:
<del> from functools import cache
<del>except ImportError:
<del> from functools import lru_cache
<del>
<del> cache = lru_cache(maxsize=None)
<ide> from pendulum.tz.timezone import FixedTimezone, Timezone
<ide>
<add>from airflow.compat.functools import cache
<ide> from airflow.configuration import conf
<ide> from airflow.exceptions import AirflowException, SerializationError
<ide> from airflow.models.baseoperator import BaseOperator, BaseOperatorLink
<ide> def decode_timezone(var: Union[str, int]) -> Timezone:
<ide> return pendulum.timezone(var)
<ide>
<ide>
<del>def encode_timetable(var: Timetable) -> Dict[str, Any]:
<add>def _get_registered_timetable(importable_string: str) -> Optional[Type[Timetable]]:
<add> from airflow import plugins_manager
<add>
<add> if importable_string.startswith("airflow.timetables."):
<add> return import_string(importable_string)
<add> plugins_manager.initialize_timetables_plugins()
<add> return plugins_manager.timetable_classes.get(importable_string)
<add>
<add>
<add>class _TimetableNotRegistered(ValueError):
<add> def __init__(self, type_string: str) -> None:
<add> self.type_string = type_string
<add>
<add> def __str__(self) -> str:
<add> return f"Timetable class {self.type_string!r} is not registered"
<add>
<add>
<add>def _encode_timetable(var: Timetable) -> Dict[str, Any]:
<ide> """Encode a timetable instance.
<ide>
<ide> This delegates most of the serialization work to the type, so the behavior
<ide> can be completely controlled by a custom subclass.
<ide> """
<del> return {"type": as_importable_string(type(var)), "value": var.serialize()}
<add> timetable_class = type(var)
<add> importable_string = as_importable_string(timetable_class)
<add> if _get_registered_timetable(importable_string) != timetable_class:
<add> raise _TimetableNotRegistered(importable_string)
<add> return {"__type": importable_string, "__var": var.serialize()}
<ide>
<ide>
<del>def decode_timetable(var: Dict[str, Any]) -> Timetable:
<add>def _decode_timetable(var: Dict[str, Any]) -> Timetable:
<ide> """Decode a previously serialized timetable.
<ide>
<ide> Most of the deserialization logic is delegated to the actual type, which
<ide> we import from string.
<ide> """
<del> return import_string(var["type"]).deserialize(var["value"])
<add> importable_string = var["__type"]
<add> timetable_class = _get_registered_timetable(importable_string)
<add> if timetable_class is None:
<add> raise _TimetableNotRegistered(importable_string)
<add> return timetable_class.deserialize(var["__var"])
<ide>
<ide>
<ide> class BaseSerialization:
<ide> def serialize_to_json(
<ide> if key in decorated_fields:
<ide> serialized_object[key] = cls._serialize(value)
<ide> elif key == "timetable":
<del> serialized_object[key] = encode_timetable(value)
<add> serialized_object[key] = _encode_timetable(value)
<ide> else:
<ide> value = cls._serialize(value)
<ide> if isinstance(value, dict) and "__type" in value:
<ide> def serialize_dag(cls, dag: DAG) -> dict:
<ide> return serialize_dag
<ide> except SerializationError:
<ide> raise
<del> except Exception:
<del> raise SerializationError(f'Failed to serialize dag {dag.dag_id!r}')
<add> except Exception as e:
<add> raise SerializationError(f'Failed to serialize DAG {dag.dag_id!r}: {e}')
<ide>
<ide> @classmethod
<ide> def deserialize_dag(cls, encoded_dag: Dict[str, Any]) -> 'SerializedDAG':
<ide> def deserialize_dag(cls, encoded_dag: Dict[str, Any]) -> 'SerializedDAG':
<ide> # Value structure matches exactly
<ide> pass
<ide> elif k == "timetable":
<del> v = decode_timetable(v)
<add> v = _decode_timetable(v)
<ide> elif k in cls._decorated_fields:
<ide> v = cls._deserialize(v)
<ide> # else use v as it is
<ide><path>tests/serialization/test_dag_serialization.py
<ide> import importlib.util
<ide> import multiprocessing
<ide> import os
<del>import unittest
<ide> from datetime import datetime, timedelta, timezone
<ide> from glob import glob
<ide> from unittest import mock
<ide> import pytest
<ide> from dateutil.relativedelta import FR, relativedelta
<ide> from kubernetes.client import models as k8s
<del>from parameterized import parameterized
<ide>
<add>from airflow.exceptions import SerializationError
<ide> from airflow.hooks.base import BaseHook
<ide> from airflow.kubernetes.pod_generator import PodGenerator
<ide> from airflow.models import DAG, Connection, DagBag, TaskInstance
<ide> os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir)
<ide> )
<ide>
<add>CUSTOM_TIMETABLE_SERIALIZED = {
<add> "__type": "tests.test_utils.timetables.CustomSerializationTimetable",
<add> "__var": {"value": "foo"},
<add>}
<add>
<ide>
<ide> def make_example_dags(module_path):
<ide> """Loads DAGs from a module for test."""
<ide> def serialize_subprocess(queue, dag_folder):
<ide> queue.put(None)
<ide>
<ide>
<del>class TestStringifiedDAGs(unittest.TestCase):
<add>@pytest.fixture()
<add>def timetable_plugin(monkeypatch):
<add> """Patch plugins manager to always and only return our custom timetable."""
<add> from airflow import plugins_manager
<add>
<add> monkeypatch.setattr(plugins_manager, "initialize_timetables_plugins", lambda: None)
<add> monkeypatch.setattr(
<add> plugins_manager,
<add> "timetable_classes",
<add> {"tests.test_utils.timetables.CustomSerializationTimetable": CustomSerializationTimetable},
<add> )
<add>
<add>
<add>class TestStringifiedDAGs:
<ide> """Unit tests for stringified DAGs."""
<ide>
<del> def setUp(self):
<del> super().setUp()
<add> def setup_method(self):
<add> self.backup_base_hook_get_connection = BaseHook.get_connection
<ide> BaseHook.get_connection = mock.Mock(
<ide> return_value=Connection(
<ide> extra=(
<ide> def setUp(self):
<ide> )
<ide> self.maxDiff = None
<ide>
<add> def teardown_method(self):
<add> BaseHook.get_connection = self.backup_base_hook_get_connection
<add>
<ide> def test_serialization(self):
<ide> """Serialization and deserialization should work for every DAG and Operator."""
<ide> dags = collect_dags()
<ide> def test_serialization(self):
<ide> # Compares with the ground truth of JSON string.
<ide> self.validate_serialized_dag(serialized_dags['simple_dag'], serialized_simple_dag_ground_truth)
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> "timetable, serialized_timetable",
<ide> [
<ide> (
<ide> cron_timetable("0 0 * * *"),
<ide> {
<del> "type": "airflow.timetables.interval.CronDataIntervalTimetable",
<del> "value": {"expression": "0 0 * * *", "timezone": "UTC"},
<add> "__type": "airflow.timetables.interval.CronDataIntervalTimetable",
<add> "__var": {"expression": "0 0 * * *", "timezone": "UTC"},
<ide> },
<ide> ),
<ide> (
<ide> CustomSerializationTimetable("foo"),
<del> {
<del> "type": "tests.test_utils.timetables.CustomSerializationTimetable",
<del> "value": {"value": "foo"},
<del> },
<add> CUSTOM_TIMETABLE_SERIALIZED,
<ide> ),
<ide> ],
<ide> )
<add> @pytest.mark.usefixtures("timetable_plugin")
<ide> def test_dag_serialization_to_timetable(self, timetable, serialized_timetable):
<ide> """Verify a timetable-backed schedule_interval is excluded in serialization."""
<ide> dag = get_timetable_based_simple_dag(timetable)
<ide> def test_dag_serialization_to_timetable(self, timetable, serialized_timetable):
<ide>
<ide> self.validate_serialized_dag(serialized_dag, expected)
<ide>
<add> def test_dag_serialization_unregistered_custom_timetable(self):
<add> """Verify serialization fails without timetable registration."""
<add> dag = get_timetable_based_simple_dag(CustomSerializationTimetable("bar"))
<add> with pytest.raises(SerializationError) as ctx:
<add> SerializedDAG.to_dict(dag)
<add>
<add> message = (
<add> "Failed to serialize DAG 'simple_dag': Timetable class "
<add> "'tests.test_utils.timetables.CustomSerializationTimetable' "
<add> "is not registered"
<add> )
<add> assert str(ctx.value) == message
<add>
<ide> def validate_serialized_dag(self, json_dag, ground_truth_dag):
<ide> """Verify serialized DAGs match the ground truth."""
<ide> assert json_dag['dag']['fileloc'].split('/')[-1] == 'test_dag_serialization.py'
<ide> def test_roundtrip_provider_example_dags(self):
<ide> serialized_dag = SerializedDAG.from_json(SerializedDAG.to_json(dag))
<ide> self.validate_deserialized_dag(serialized_dag, dag)
<ide>
<del> @parameterized.expand([(cron_timetable("0 0 * * *"),), (CustomSerializationTimetable("foo"),)])
<add> @pytest.mark.parametrize(
<add> "timetable",
<add> [cron_timetable("0 0 * * *"), CustomSerializationTimetable("foo")],
<add> )
<add> @pytest.mark.usefixtures("timetable_plugin")
<ide> def test_dag_roundtrip_from_timetable(self, timetable):
<ide> """Verify a timetable-backed serialization can be deserialized."""
<ide> dag = get_timetable_based_simple_dag(timetable)
<ide> def validate_deserialized_task(
<ide> else:
<ide> assert serialized_task.subdag is None
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> "dag_start_date, task_start_date, expected_task_start_date",
<ide> [
<ide> (datetime(2019, 8, 1, tzinfo=timezone.utc), None, datetime(2019, 8, 1, tzinfo=timezone.utc)),
<ide> (
<ide> def validate_deserialized_task(
<ide> datetime(2019, 8, 1, tzinfo=timezone.utc),
<ide> ),
<ide> (pendulum.datetime(2019, 8, 1, tz='UTC'), None, pendulum.datetime(2019, 8, 1, tz='UTC')),
<del> ]
<add> ],
<ide> )
<ide> def test_deserialization_start_date(self, dag_start_date, task_start_date, expected_task_start_date):
<ide> dag = DAG(dag_id='simple_dag', start_date=dag_start_date)
<ide> def test_deserialization_with_dag_context(self):
<ide> # should not raise RuntimeError: dictionary changed size during iteration
<ide> SerializedDAG.to_dict(dag)
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> "dag_end_date, task_end_date, expected_task_end_date",
<ide> [
<ide> (datetime(2019, 8, 1, tzinfo=timezone.utc), None, datetime(2019, 8, 1, tzinfo=timezone.utc)),
<ide> (
<ide> def test_deserialization_with_dag_context(self):
<ide> datetime(2019, 7, 30, tzinfo=timezone.utc),
<ide> datetime(2019, 7, 30, tzinfo=timezone.utc),
<ide> ),
<del> ]
<add> ],
<ide> )
<ide> def test_deserialization_end_date(self, dag_end_date, task_end_date, expected_task_end_date):
<ide> dag = DAG(dag_id='simple_dag', start_date=datetime(2019, 8, 1), end_date=dag_end_date)
<ide> def test_deserialization_end_date(self, dag_end_date, task_end_date, expected_ta
<ide> simple_task = dag.task_dict["simple_task"]
<ide> assert simple_task.end_date == expected_task_end_date
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> "serialized_timetable, expected_timetable",
<ide> [
<del> ({"type": "airflow.timetables.simple.NullTimetable", "value": {}}, NullTimetable()),
<add> ({"__type": "airflow.timetables.simple.NullTimetable", "__var": {}}, NullTimetable()),
<ide> (
<ide> {
<del> "type": "airflow.timetables.interval.CronDataIntervalTimetable",
<del> "value": {"expression": "@weekly", "timezone": "UTC"},
<add> "__type": "airflow.timetables.interval.CronDataIntervalTimetable",
<add> "__var": {"expression": "@weekly", "timezone": "UTC"},
<ide> },
<ide> cron_timetable("0 0 * * 0"),
<ide> ),
<del> ({"type": "airflow.timetables.simple.OnceTimetable", "value": {}}, OnceTimetable()),
<add> ({"__type": "airflow.timetables.simple.OnceTimetable", "__var": {}}, OnceTimetable()),
<ide> (
<ide> {
<del> "type": "airflow.timetables.interval.DeltaDataIntervalTimetable",
<del> "value": {"delta": 86400.0},
<add> "__type": "airflow.timetables.interval.DeltaDataIntervalTimetable",
<add> "__var": {"delta": 86400.0},
<ide> },
<ide> delta_timetable(timedelta(days=1)),
<ide> ),
<del> ]
<add> (CUSTOM_TIMETABLE_SERIALIZED, CustomSerializationTimetable("foo")),
<add> ],
<ide> )
<add> @pytest.mark.usefixtures("timetable_plugin")
<ide> def test_deserialization_timetable(
<ide> self,
<ide> serialized_timetable,
<ide> def test_deserialization_timetable(
<ide> dag = SerializedDAG.from_dict(serialized)
<ide> assert dag.timetable == expected_timetable
<ide>
<del> @parameterized.expand(
<add> def test_deserialization_timetable_unregistered(self):
<add> serialized = {
<add> "__version": 1,
<add> "dag": {
<add> "default_args": {"__type": "dict", "__var": {}},
<add> "_dag_id": "simple_dag",
<add> "fileloc": __file__,
<add> "tasks": [],
<add> "timezone": "UTC",
<add> "timetable": CUSTOM_TIMETABLE_SERIALIZED,
<add> },
<add> }
<add> SerializedDAG.validate_schema(serialized)
<add> with pytest.raises(ValueError) as ctx:
<add> SerializedDAG.from_dict(serialized)
<add> message = (
<add> "Timetable class "
<add> "'tests.test_utils.timetables.CustomSerializationTimetable' "
<add> "is not registered"
<add> )
<add> assert str(ctx.value) == message
<add>
<add> @pytest.mark.parametrize(
<add> "serialized_schedule_interval, expected_timetable",
<ide> [
<ide> (None, NullTimetable()),
<ide> ("@weekly", cron_timetable("0 0 * * 0")),
<ide> def test_deserialization_timetable(
<ide> {"__type": "timedelta", "__var": 86400.0},
<ide> delta_timetable(timedelta(days=1)),
<ide> ),
<del> ]
<add> ],
<ide> )
<ide> def test_deserialization_schedule_interval(
<ide> self,
<ide> def test_deserialization_schedule_interval(
<ide> dag = SerializedDAG.from_dict(serialized)
<ide> assert dag.timetable == expected_timetable
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> "val, expected",
<ide> [
<ide> (relativedelta(days=-1), {"__type": "relativedelta", "__var": {"days": -1}}),
<ide> (relativedelta(month=1, days=-1), {"__type": "relativedelta", "__var": {"month": 1, "days": -1}}),
<ide> # Every friday
<ide> (relativedelta(weekday=FR), {"__type": "relativedelta", "__var": {"weekday": [4]}}),
<ide> # Every second friday
<ide> (relativedelta(weekday=FR(2)), {"__type": "relativedelta", "__var": {"weekday": [4, 2]}}),
<del> ]
<add> ],
<ide> )
<ide> def test_roundtrip_relativedelta(self, val, expected):
<ide> serialized = SerializedDAG._serialize(val)
<ide> def test_roundtrip_relativedelta(self, val, expected):
<ide> round_tripped = SerializedDAG._deserialize(serialized)
<ide> assert val == round_tripped
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> "val, expected_val",
<ide> [
<ide> (None, {}),
<ide> ({"param_1": "value_1"}, {"param_1": "value_1"}),
<del> ]
<add> ],
<ide> )
<ide> def test_dag_params_roundtrip(self, val, expected_val):
<ide> """
<ide> def test_dag_params_roundtrip(self, val, expected_val):
<ide> assert expected_val == deserialized_dag.params
<ide> assert expected_val == deserialized_simple_task.params
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> "val, expected_val",
<ide> [
<ide> (None, {}),
<ide> ({"param_1": "value_1"}, {"param_1": "value_1"}),
<del> ]
<add> ],
<ide> )
<ide> def test_task_params_roundtrip(self, val, expected_val):
<ide> """
<ide> def test_extra_serialized_field_and_operator_links(self):
<ide> google_link_from_plugin = simple_task.get_extra_links(test_date, GoogleLink.name)
<ide> assert "https://www.google.com" == google_link_from_plugin
<ide>
<del> def test_extra_operator_links_logs_error_for_non_registered_extra_links(self):
<add> def test_extra_operator_links_logs_error_for_non_registered_extra_links(self, caplog):
<ide> """
<ide> Assert OperatorLinks not registered via Plugins and if it is not an inbuilt Operator Link,
<ide> it can still deserialize the DAG (does not error) but just logs an error
<ide> def execute(self, context):
<ide>
<ide> serialized_dag = SerializedDAG.to_dict(dag)
<ide>
<del> with self.assertLogs("airflow.serialization.serialized_objects", level="ERROR") as log_output:
<add> with caplog.at_level("ERROR", logger="airflow.serialization.serialized_objects"):
<ide> SerializedDAG.from_dict(serialized_dag)
<del> received_logs = log_output.output[0]
<del> expected_err_msg = (
<del> "Operator Link class 'tests.serialization.test_dag_serialization.TaskStateLink' "
<del> "not registered"
<del> )
<del> assert expected_err_msg in received_logs
<add>
<add> expected_err_msg = (
<add> "Operator Link class 'tests.serialization.test_dag_serialization.TaskStateLink' not registered"
<add> )
<add> assert expected_err_msg in caplog.text
<ide>
<ide> def test_extra_serialized_field_and_multiple_operator_links(self):
<ide> """
<ide> def __eq__(self, other):
<ide> def __ne__(self, other):
<ide> return not self.__eq__(other)
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> "templated_field, expected_field",
<ide> [
<ide> (None, None),
<ide> ([], []),
<ide> def __ne__(self, other):
<ide> "'nested2': ClassWithCustomAttributes({'att3': '{{ task.task_id }}', 'att4': "
<ide> "'{{ task.task_id }}', 'template_fields': ['att3']}), 'template_fields': ['nested1']})",
<ide> ),
<del> ]
<add> ],
<ide> )
<ide> def test_templated_fields_exist_in_serialized_dag(self, templated_field, expected_field):
<ide> """
<ide> def test_edge_info_serialization(self):
<ide>
<ide> assert serialized_dag.edge_info == dag.edge_info
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> "mode, expect_custom_deps",
<ide> [
<ide> ("poke", False),
<ide> ("reschedule", True),
<del> ]
<add> ],
<ide> )
<ide> def test_serialize_sensor(self, mode, expect_custom_deps):
<ide> from airflow.sensors.base import BaseSensorOperator
<ide> def poke(self, context):
<ide>
<ide> assert op.deps == serialized_op.deps
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> "passed_success_callback, expected_value",
<ide> [
<ide> ({"on_success_callback": lambda x: print("hi")}, True),
<ide> ({}, False),
<del> ]
<add> ],
<ide> )
<ide> def test_dag_on_success_callback_roundtrip(self, passed_success_callback, expected_value):
<ide> """
<ide> def test_dag_on_success_callback_roundtrip(self, passed_success_callback, expect
<ide>
<ide> assert deserialized_dag.has_on_success_callback is expected_value
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> "passed_failure_callback, expected_value",
<ide> [
<ide> ({"on_failure_callback": lambda x: print("hi")}, True),
<ide> ({}, False),
<del> ]
<add> ],
<ide> )
<ide> def test_dag_on_failure_callback_roundtrip(self, passed_failure_callback, expected_value):
<ide> """
<ide> def test_dag_on_failure_callback_roundtrip(self, passed_failure_callback, expect
<ide>
<ide> assert deserialized_dag.has_on_failure_callback is expected_value
<ide>
<del> @parameterized.expand(
<add> @pytest.mark.parametrize(
<add> "object_to_serialized, expected_output",
<ide> [
<ide> (
<ide> ['task_1', 'task_5', 'task_2', 'task_4'],
<ide> def test_dag_on_failure_callback_roundtrip(self, passed_failure_callback, expect
<ide> ('task_1', 'task_5', 'task_2', 3, ["x", "y"]),
<ide> ['task_1', 'task_5', 'task_2', 3, ["x", "y"]],
<ide> ),
<del> ]
<add> ],
<ide> )
<ide> def test_serialized_objects_are_sorted(self, object_to_serialized, expected_output):
<ide> """Test Serialized Sets are sorted while list and tuple preserve order"""
<ide><path>tests/test_utils/timetables.py
<ide> def __init__(self, value: str):
<ide> def deserialize(cls, data):
<ide> return cls(data["value"])
<ide>
<add> def __eq__(self, other) -> bool:
<add> """Only for testing purposes."""
<add> if not isinstance(other, CustomSerializationTimetable):
<add> return False
<add> return self.value == other.value
<add>
<ide> def serialize(self):
<ide> return {"value": self.value}
<ide> | 4 |
Go | Go | add tests for 'images' subcommand | 3dba4022adbd1f21a8b2ec4a6035cacfe143b380 | <ide><path>commands.go
<ide> func (srv *Server) CmdImages(stdin io.ReadCloser, stdout io.Writer, args ...stri
<ide> return nil
<ide> }
<ide>
<del> fmt.Fprintf(stdout, "digraph G {\n")
<add> fmt.Fprintf(stdout, "digraph docker {\n")
<ide>
<ide> var parentImage *Image
<ide> var err error
<ide><path>commands_test.go
<ide> func cmdWait(srv *Server, container *Container) error {
<ide> return closeWrap(stdout, stdoutPipe)
<ide> }
<ide>
<add>func cmdImages(srv *Server, args ...string) (string, error) {
<add> stdout, stdoutPipe := io.Pipe()
<add>
<add> go func() {
<add> if err := srv.CmdImages(nil, stdoutPipe, args...); err != nil {
<add> return
<add> }
<add>
<add> // force the pipe closed, so that the code below gets an EOF
<add> stdoutPipe.Close()
<add> }()
<add>
<add> output, err := ioutil.ReadAll(stdout)
<add> if err != nil {
<add> return "", err
<add> }
<add>
<add> // Cleanup pipes
<add> return string(output), closeWrap(stdout, stdoutPipe)
<add>}
<add>
<add>// TestImages checks that 'docker images' displays information correctly
<add>func TestImages(t *testing.T) {
<add>
<add> runtime, err := newTestRuntime()
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> defer nuke(runtime)
<add>
<add> srv := &Server{runtime: runtime}
<add>
<add> output, err := cmdImages(srv)
<add>
<add> if !strings.Contains(output, "REPOSITORY") {
<add> t.Fatal("'images' should have a header")
<add> }
<add> if !strings.Contains(output, "docker-ut") {
<add> t.Fatal("'images' should show the docker-ut image")
<add> }
<add> if !strings.Contains(output, "e9aa60c60128") {
<add> t.Fatal("'images' should show the docker-ut image id")
<add> }
<add>
<add> output, err = cmdImages(srv, "-q")
<add>
<add> if strings.Contains(output, "REPOSITORY") {
<add> t.Fatal("'images -q' should not have a header")
<add> }
<add> if strings.Contains(output, "docker-ut") {
<add> t.Fatal("'images' should not show the docker-ut image name")
<add> }
<add> if !strings.Contains(output, "e9aa60c60128") {
<add> t.Fatal("'images' should show the docker-ut image id")
<add> }
<add>
<add> output, err = cmdImages(srv, "-viz")
<add>
<add> if !strings.HasPrefix(output, "digraph docker {") {
<add> t.Fatal("'images -v' should start with the dot header")
<add> }
<add> if !strings.HasSuffix(output, "}\n") {
<add> t.Fatal("'images -v' should end with a '}'")
<add> }
<add> if !strings.Contains(output, "base -> \"e9aa60c60128\" [style=invis]") {
<add> t.Fatal("'images -v' should have the docker-ut image id node")
<add> }
<add>
<add> // todo: add checks for -a
<add>}
<add>
<ide> // TestRunHostname checks that 'docker run -h' correctly sets a custom hostname
<ide> func TestRunHostname(t *testing.T) {
<ide> runtime, err := newTestRuntime() | 2 |
Ruby | Ruby | assign a new session_id to session options hash | 54a0b01f760354ee8002d136b322d6ea429b67f6 | <ide><path>actionpack/lib/action_dispatch/request/session.rb
<ide> def options
<ide> def destroy
<ide> clear
<ide> options = self.options || {}
<del> @by.send(:destroy_session, @env, options[:id], options)
<del> options[:id] = nil
<add> new_sid = @by.send(:destroy_session, @env, options[:id], options)
<add> options[:id] = new_sid # Reset session id with a new value or nil
<ide> @loaded = false
<ide> end
<ide> | 1 |
Text | Text | update filtering docs | 286cf57a8d22aafd51054a40a5cf8a58edfc8226 | <ide><path>docs/api-guide/filtering.md
<ide> Note that you can use both an overridden `.get_queryset()` and generic filtering
<ide> """
<ide> model = Product
<ide> serializer_class = ProductSerializer
<del> filter_class = ProductFilter
<add> filterset_class = ProductFilter
<ide>
<ide> def get_queryset(self):
<ide> user = self.request.user
<ide> A complete example using both `DjangoObjectPermissionsFilter` and `DjangoObjectP
<ide> **permissions.py**:
<ide>
<ide> class CustomObjectPermissions(permissions.DjangoObjectPermissions):
<del> """
<del> Similar to `DjangoObjectPermissions`, but adding 'view' permissions.
<del> """
<add> """
<add> Similar to `DjangoObjectPermissions`, but adding 'view' permissions.
<add> """
<ide> perms_map = {
<ide> 'GET': ['%(app_label)s.view_%(model_name)s'],
<ide> 'OPTIONS': ['%(app_label)s.view_%(model_name)s'],
<ide> A complete example using both `DjangoObjectPermissionsFilter` and `DjangoObjectP
<ide> **views.py**:
<ide>
<ide> class EventViewSet(viewsets.ModelViewSet):
<del> """
<del> Viewset that only lists events if user has 'view' permissions, and only
<del> allows operations on individual events if user has appropriate 'view', 'add',
<del> 'change' or 'delete' permissions.
<del> """
<add> """
<add> Viewset that only lists events if user has 'view' permissions, and only
<add> allows operations on individual events if user has appropriate 'view', 'add',
<add> 'change' or 'delete' permissions.
<add> """
<ide> queryset = Event.objects.all()
<ide> serializer_class = EventSerializer
<ide> filter_backends = (filters.DjangoObjectPermissionsFilter,) | 1 |
Javascript | Javascript | implement bound partial semantics | f529ab6b85fc279cfd3eb54c31b5686c96494304 | <ide><path>packages/ember-htmlbars/lib/env.js
<ide> import updateSelf from "ember-htmlbars/hooks/update-self";
<ide> import getRoot from "ember-htmlbars/hooks/get-root";
<ide> import getChild from "ember-htmlbars/hooks/get-child";
<ide> import getValue from "ember-htmlbars/hooks/get-value";
<del>import cleanup from "ember-htmlbars/hooks/cleanup";
<add>import cleanupRenderNode from "ember-htmlbars/hooks/cleanup-render-node";
<add>import destroyRenderNode from "ember-htmlbars/hooks/destroy-render-node";
<ide> import classify from "ember-htmlbars/hooks/classify";
<ide> import component from "ember-htmlbars/hooks/component";
<ide> import lookupHelper from "ember-htmlbars/hooks/lookup-helper";
<ide> merge(emberHooks, {
<ide> getValue: getValue,
<ide> subexpr: subexpr,
<ide> concat: concat,
<del> cleanup: cleanup,
<add> cleanupRenderNode: cleanupRenderNode,
<add> destroyRenderNode: destroyRenderNode,
<ide> classify: classify,
<ide> component: component,
<ide> lookupHelper: lookupHelper,
<ide><path>packages/ember-htmlbars/lib/hooks/cleanup-render-node.js
<add>/**
<add>@module ember
<add>@submodule ember-htmlbars
<add>*/
<add>
<add>export default function cleanupRenderNode(renderNode) {
<add> var state = renderNode.state;
<add> if (!state) { return; }
<add>
<add> if (state.view) {
<add> var view = state.view;
<add> view.destroy();
<add> }
<add>
<add> if (state.toDestroy) {
<add> var toDestroy = state.toDestroy;
<add>
<add> for (var i=0, l=toDestroy.length; i<l; i++) {
<add> toDestroy[i].destroy();
<add> }
<add>
<add> state.toDestroy = [];
<add> }
<add>}
<ide><path>packages/ember-htmlbars/lib/hooks/cleanup.js
<del>/**
<del>@module ember
<del>@submodule ember-htmlbars
<del>*/
<del>
<del>export default function cleanup(renderNode) {
<del> var state = renderNode.state;
<del> if (!state) { return; }
<del>
<del> if (state.view) {
<del> var view = state.view;
<del> view.destroy();
<del> }
<del>
<del> var i, l;
<del>
<del> if (state.toDestroy) {
<del> var toDestroy = state.toDestroy;
<del>
<del> for (i=0, l=toDestroy.length; i<l; i++) {
<del> toDestroy[i].destroy();
<del> }
<del>
<del> state.toDestroy = null;
<del> }
<del>
<del> var unsubscribers = state.unsubscribers;
<del> if (!unsubscribers) { return; }
<del>
<del> for (i=0, l=unsubscribers.length; i<l; i++) {
<del> unsubscribers[i]();
<del> }
<del>}
<ide><path>packages/ember-htmlbars/lib/hooks/destroy-render-node.js
<add>/**
<add>@module ember
<add>@submodule ember-htmlbars
<add>*/
<add>
<add>export default function destroyRenderNode(renderNode) {
<add> var state = renderNode.state;
<add> if (!state) { return; }
<add>
<add> var unsubscribers = state.unsubscribers;
<add> if (!unsubscribers) { return; }
<add>
<add> for (var i=0, l=unsubscribers.length; i<l; i++) {
<add> unsubscribers[i]();
<add> }
<add>}
<ide><path>packages/ember-htmlbars/lib/keywords/partial.js
<ide> import lookupPartial from "ember-views/system/lookup_partial";
<ide> import { internal } from "htmlbars-runtime";
<ide>
<del>export default function partialKeyword(morph, env, scope, params, hash, template, inverse, visitor) {
<del> var found = lookupPartial(env, env.hooks.getValue(params[0])).raw;
<add>export default {
<add> setupState: function(state, env, scope, params, hash) {
<add> state.lastPartialName = state.partialName;
<add> state.partialName = env.hooks.getValue(params[0]);
<add> },
<ide>
<del> internal.hostBlock(morph, env, scope, found, null, null, visitor, function(options) {
<del> options.templates.template.yield();
<del> });
<add> isStable: function(state, env) {
<add> return state.lastPartialName === state.partialName;
<add> },
<ide>
<del> return true;
<del>}
<add> render: function(renderNode, env, scope, params, hash, template, inverse, visitor) {
<add> var state = renderNode.state;
<add> if (!state.partialName) { return true; }
<add> var found = lookupPartial(env, state.partialName);
<add> if (!found) { return true; }
<add>
<add> internal.hostBlock(renderNode, env, scope, found.raw, null, null, visitor, function(options) {
<add> options.templates.template.yield();
<add> });
<add> }
<add>};
<ide><path>packages/ember-htmlbars/tests/compat/make_bound_helper_test.js
<ide> import {
<ide> } from 'ember-runtime/system/string';
<ide>
<ide> import EmberHandlebars from "ember-htmlbars/compat";
<add>import { deprecation as eachDeprecation } from "ember-htmlbars/helpers/each";
<ide>
<ide> var compile, helpers, helper;
<ide> compile = EmberHandlebars.compile;
<ide> QUnit.module("ember-htmlbars: compat - makeBoundHelper", {
<ide> });
<ide>
<ide> QUnit.test("primitives should work correctly [DEPRECATED]", function() {
<del> expectDeprecation('Using the context switching form of {{each}} is deprecated. Please use the keyword form (`{{#each foo in bar}}`) instead.');
<add> expectDeprecation(eachDeprecation);
<ide> expectDeprecation('Using the context switching form of `{{with}}` is deprecated. Please use the keyword form (`{{with foo as bar}}`) instead.');
<ide>
<ide> view = EmberView.create({
<ide><path>packages/ember-views/lib/system/lookup_partial.js
<ide> import Ember from "ember-metal/core"; // Ember.assert
<ide> import EmberError from "ember-metal/error";
<ide>
<ide> export default function lookupPartial(env, templateName) {
<add> if (templateName == null) { return; }
<add>
<ide> var nameParts = templateName.split("/");
<ide> var lastPart = nameParts[nameParts.length - 1];
<ide> | 7 |
Javascript | Javascript | remove support for inline anonymous templates | bb149dcbb7df91866fce10e6dbec78c3e439d0ee | <ide><path>packages/ember-handlebars/lib/loader.js
<ide> Ember.Handlebars.bootstrap = function(ctx) {
<ide> // Get the name of the script, used by Ember.View's templateName property.
<ide> // First look for data-template-name attribute, then fall back to its
<ide> // id if no name is found.
<del> templateName = script.attr('data-template-name') || script.attr('id'),
<del> template = compile(script.html()),
<del> view, viewPath, elementId, options;
<add> templateName = script.attr('data-template-name') || script.attr('id') || 'application',
<add> template = compile(script.html());
<ide>
<del> if (templateName) {
<del> // For templates which have a name, we save them and then remove them from the DOM
<del> Ember.TEMPLATES[templateName] = template;
<add> // For templates which have a name, we save them and then remove them from the DOM
<add> Ember.TEMPLATES[templateName] = template;
<ide>
<del> // Remove script tag from DOM
<del> script.remove();
<del> } else {
<del> if (script.parents('head').length !== 0) {
<del> // don't allow inline templates in the head
<del> throw new Ember.Error("Template found in <head> without a name specified. " +
<del> "Please provide a data-template-name attribute.\n" +
<del> script.html());
<del> }
<del>
<del> // For templates which will be evaluated inline in the HTML document, instantiates a new
<del> // view, and replaces the script tag holding the template with the new
<del> // view's DOM representation.
<del> //
<del> // Users can optionally specify a custom view subclass to use by setting the
<del> // data-view attribute of the script tag.
<del> viewPath = script.attr('data-view');
<del> view = viewPath ? Ember.get(viewPath) : Ember.View;
<del>
<del> // Get the id of the script, used by Ember.View's elementId property,
<del> // Look for data-element-id attribute.
<del> elementId = script.attr('data-element-id');
<del>
<del> options = { template: template };
<del> if (elementId) { options.elementId = elementId; }
<del>
<del> view = view.create(options);
<del>
<del> view._insertElementLater(function() {
<del> script.replaceWith(this.$());
<del>
<del> // Avoid memory leak in IE
<del> script = null;
<del> });
<del> }
<add> // Remove script tag from DOM
<add> script.remove();
<ide> });
<ide> };
<ide>
<ide> function bootstrap() {
<ide> from the DOM after processing.
<ide> */
<ide>
<del>Ember.$(document).ready(bootstrap);
<ide> Ember.onLoad('application', bootstrap);
<ide><path>packages/ember-handlebars/tests/loader_test.js
<ide> module("test Ember.Handlebars.bootstrap", {
<ide> }
<ide> });
<ide>
<del>test('template with data-template-name should add a new template to Ember.TEMPLATES', function() {
<del> Ember.$('#qunit-fixture').html('<script type="text/x-handlebars" data-template-name="funkyTemplate" >{{Tobias.firstName}} {{Tobias.lastName}}</script>');
<del>
<add>function checkTemplate(templateName) {
<ide> Ember.run(function() {
<ide> Ember.Handlebars.bootstrap(Ember.$('#qunit-fixture'));
<del> Tobias = Ember.Object.create({
<del> firstName: 'Tobias',
<del> lastName: 'Fünke'
<del> });
<ide> });
<del>
<del> ok(Ember.TEMPLATES['funkyTemplate'], 'template with name funkyTemplate available');
<del> equal(Ember.$('#qunit-fixture').text(), '', 'no template content is added');
<del>});
<del>
<del>test('template with id instead of data-template-name should add a new template to Ember.TEMPLATES', function() {
<del> Ember.$('#qunit-fixture').html('<script type="text/x-handlebars" id="funkyTemplate" >{{Tobias.firstName}} takes {{Tobias.drug}}</script>');
<del>
<del> Ember.run(function() {
<del> Ember.Handlebars.bootstrap(Ember.$('#qunit-fixture'));
<del> Tobias = Ember.Object.create({
<add> var template = Ember.TEMPLATES[templateName];
<add> ok(template, 'template is available on Ember.TEMPLATES');
<add> equal(Ember.$('#qunit-fixture script').length, 0, 'script removed');
<add> var view = Ember.View.create({
<add> template: template,
<add> context: {
<ide> firstName: 'Tobias',
<ide> drug: 'teamocil'
<del> });
<add> }
<ide> });
<del>
<del> ok(Ember.TEMPLATES['funkyTemplate'], 'template with name funkyTemplate available');
<del> equal(Ember.$('#qunit-fixture').text(), '', 'no template content is added');
<del>});
<del>
<del>test('inline template should be added', function() {
<del> Ember.$('#qunit-fixture').html('<script type="text/x-handlebars" >{{Tobias.firstName}} {{Tobias.lastName}}</script>');
<del>
<ide> Ember.run(function() {
<del> Ember.Handlebars.bootstrap(Ember.$('#qunit-fixture'));
<del> Tobias = Ember.Object.create({
<del> firstName: 'Tobias',
<del> lastName: 'Fünke'
<del> });
<add> view.createElement();
<ide> });
<del>
<del> equal(Ember.$('#qunit-fixture').text(), 'Tobias Fünke', 'template is rendered');
<del>});
<del>
<del>test('template with data-element-id should add an id attribute to the view', function() {
<del> Ember.$('#qunit-fixture').html('<script type="text/x-handlebars" data-element-id="application">Hello World !</script>');
<del>
<add> equal(view.$().text(), 'Tobias takes teamocil', 'template works');
<ide> Ember.run(function() {
<del> Ember.Handlebars.bootstrap(Ember.$('#qunit-fixture'));
<add> view.destroy();
<ide> });
<add>}
<ide>
<del> equal(Ember.$('#qunit-fixture #application').text(), 'Hello World !', 'view exists with id');
<add>test('template with data-template-name should add a new template to Ember.TEMPLATES', function() {
<add> Ember.$('#qunit-fixture').html('<script type="text/x-handlebars" data-template-name="funkyTemplate">{{firstName}} takes {{drug}}</script>');
<add>
<add> checkTemplate('funkyTemplate');
<ide> });
<ide>
<del>test('template without data-element-id should still get an attribute', function() {
<del> Ember.$('#qunit-fixture').html('<script type="text/x-handlebars">Hello World!</script>');
<add>test('template with id instead of data-template-name should add a new template to Ember.TEMPLATES', function() {
<add> Ember.$('#qunit-fixture').html('<script type="text/x-handlebars" id="funkyTemplate" >{{firstName}} takes {{drug}}</script>');
<ide>
<del> Ember.run(function() {
<del> Ember.Handlebars.bootstrap(Ember.$('#qunit-fixture'));
<del> });
<add> checkTemplate('funkyTemplate');
<add>});
<add>
<add>test('template without data-template-name or id should default to application', function() {
<add> Ember.$('#qunit-fixture').html('<script type="text/x-handlebars">{{firstName}} takes {{drug}}</script>');
<ide>
<del> var id = Ember.$('#qunit-fixture .ember-view').attr('id');
<del> ok(id && /^ember\d+$/.test(id), "has standard Ember id");
<add> checkTemplate('application');
<ide> });
<ide>
<ide> test('template with type text/x-raw-handlebars should be parsed', function() { | 2 |
Ruby | Ruby | fix removal of old mirrors | 3ebd7df62d0b05a8a010bec74793cc4e688a2fc8 | <ide><path>Library/Homebrew/dev-cmd/bump-formula-pr.rb
<ide> def bump_formula_pr
<ide> replacement_pairs << [/^ revision \d+\n(\n( head "))?/m, "\\2"]
<ide> end
<ide>
<del> replacement_pairs << [/(^ mirror .*\n)?/, ""] if requested_spec == :stable
<add> replacement_pairs += formula_spec.mirrors.map do |mirror|
<add> [/ +mirror \"#{mirror}\"\n/m, ""]
<add> end
<ide>
<ide> replacement_pairs += if new_url_hash
<ide> [ | 1 |
Python | Python | use dbapihook as base for jdbchook | 7ea51e619e365e6193937790c24d150f6f811e17 | <ide><path>airflow/hooks/jdbc_hook.py
<ide> import logging
<ide> import jaydebeapi
<ide>
<del>from airflow.hooks.base_hook import BaseHook
<add>from airflow.hooks.dbapi_hook import DbApiHook
<ide>
<del>class JdbcHook(BaseHook):
<add>class JdbcHook(DbApiHook):
<ide> """
<ide> General hook for jdbc db access.
<ide>
<ide> If a connection id is specified, host, port, schema, username and password will be taken from the predefined connection.
<ide> Raises an airflow error if the given connection id doesn't exist.
<ide> Otherwise host, port, schema, username and password can be specified on the fly.
<ide>
<add>
<add>
<ide> :param jdbc_url: jdbc connection url
<ide> :type jdbc_url: string
<ide> :param jdbc_driver_name: jdbc driver name
<ide> class JdbcHook(BaseHook):
<ide> :type sql: string or string pointing to a template file. File must have
<ide> a '.sql' extensions.
<ide> """
<add>
<add>
<add> conn_name_attr = 'jdbc_conn_id'
<add> default_conn_name = 'jdbc_default'
<add> supports_autocommit = True
<add>
<ide> def __init__(
<del> self, jdbc_driver_name = None, jdbc_driver_loc = None,host=None, login=None,
<del> psw=None, db=None, port=None, extra=None, conn_id=None):
<del>
<del> if (conn_id is None):
<del> self.host = host
<del> self.login = login
<del> self.psw = psw
<del> #self.db = db
<del> #self.port = port
<del> self.extra = extra
<del> self.jdbc_driver_loc = jdbc_driver_loc
<del> self.jdbc_driver_name = jdbc_driver_name
<del> else:
<del> conn = self.get_connection(conn_id)
<del> self.host = conn.host
<del> self.login = conn.login
<del> self.psw = conn.password
<del> #self.db = conn.schema
<del> #self.port = conn.port
<del> self.extra = conn.extra
<del> self.jdbc_driver_loc = conn.extra_dejson.get('jdbc_drv_path')
<del> self.jdbc_driver_name = conn.extra_dejson.get('jdbc_drv_clsname')
<add> self, *args, **kwargs):
<add>
<add> super(JdbcHook,self).__init__(*args,**kwargs)
<add>
<add> #conn_id = getattr(self, self.conn_name_attr)
<add> #if (conn_id is None):
<add> # self.host = host
<add> # self.login = login
<add> # self.psw = psw
<add> # #self.db = db
<add> # #self.port = port
<add> # self.extra = extra
<add> # self.jdbc_driver_loc = jdbc_driver_loc
<add> # self.jdbc_driver_name = jdbc_driver_name
<add> #else:
<add> conn = self.get_connection(getattr(self, self.conn_name_attr))
<add> self.host = conn.host
<add> self.login = conn.login
<add> self.psw = conn.password
<add> #self.db = conn.schema
<add> #self.port = conn.port
<add> self.extra = conn.extra
<add> self.jdbc_driver_loc = conn.extra_dejson.get('jdbc_drv_path')
<add> self.jdbc_driver_name = conn.extra_dejson.get('jdbc_drv_clsname')
<ide>
<ide>
<ide> #self.jdbc_url = jdbc_url.format(self.host, self.port, self.db, self.extra)
<ide> def get_conn(self):
<ide> self.jdbc_driver_loc,)
<ide> return conn
<ide>
<del> def get_records(self, sql, autocommit=False):
<del> '''
<del> Executes the sql and returns a set of records.
<del> '''
<del> conn = self.get_conn()
<del> conn.jconn.autocommit = autocommit
<del> cur = conn.cursor()
<del> cur.execute(sql)
<del> rows = [] if not cur._rs else cur.fetchall()
<del> cur.close()
<del> conn.close()
<del> return rows
<del>
<del> def get_pandas_df(self, sql):
<del> '''
<del> Executes the sql and returns a pandas dataframe
<del> '''
<del> import pandas.io.sql as psql
<del> conn = self.get_conn()
<del> df = psql.read_sql(sql, con=conn)
<del> conn.close()
<del> return df
<del>
<del> def run(self, sql, autocommit=False):
<add> def run(self, sql, autocommit=False, parameters=None):
<add> """
<add> Runs a command
<add> """
<ide> conn = self.get_conn()
<del> conn.jconn.autocommit = autocommit
<add> if self.supports_autocommit:
<add> conn.jconn.autocommit = autocommit
<ide> cur = conn.cursor()
<ide> cur.execute(sql)
<ide> conn.commit()
<ide><path>airflow/models.py
<ide> def get_hook(self):
<ide> elif self.conn_type == 'sqlite':
<ide> return hooks.SqliteHook(sqlite_conn_id=self.conn_id)
<ide> elif self.conn_type == 'jdbc':
<del> return hooks.JdbcHook(conn_id=self.conn_id)
<add> return hooks.JdbcHook(jdbc_conn_id=self.conn_id)
<ide> except:
<ide> return None
<ide>
<ide><path>airflow/operators/jdbc_operator.py
<ide> class JdbcOperator(BaseOperator):
<ide>
<ide> @apply_defaults
<ide> def __init__(
<del> self, sql, jdbc_driver_name=None, jdbc_driver_loc=None,
<del> conn_id='jdbc_default', autocommit=False,
<add> self, sql,
<add> jdbc_conn_id='jdbc_default', autocommit=False,
<ide> *args, **kwargs):
<ide> super(JdbcOperator, self).__init__(*args, **kwargs)
<ide>
<del> self.jdbc_driver_name=jdbc_driver_name
<del> self.jdbc_driver_loc=jdbc_driver_loc
<ide> self.sql = sql
<del> self.conn_id = conn_id
<add> self.jdbc_conn_id = jdbc_conn_id
<ide> self.autocommit = autocommit
<ide>
<ide> def execute(self, context):
<ide> logging.info('Executing: ' + self.sql)
<del> self.hook = JdbcHook(conn_id=self.conn_id,jdbc_driver_loc=self.jdbc_driver_loc, jdbc_driver_name=self.jdbc_driver_name)
<add> self.hook = JdbcHook(jdbc_conn_id=self.jdbc_conn_id)
<ide> for row in self.hook.get_records(self.sql, self.autocommit):
<ide> logging.info('Result: ' + ','.join(map(str,row)) )
<ide>\ No newline at end of file | 3 |
Python | Python | achieve compatibility with tensorflow 1.0rc1 | 1de4bf1b5989f76e377f0b8022b41773a354ba99 | <ide><path>keras/backend/tensorflow_backend.py
<ide> # Change its value via `manual_variable_initialization(value)`.
<ide> _MANUAL_VAR_INIT = False
<ide>
<add># These two integers contain the tensorflow version for coping with API breaks.
<add>tf_major_version = int(tf.__version__.split('.')[0])
<add>tf_minor_version = int(tf.__version__.split('.')[1])
<add>
<ide>
<ide> def clear_session():
<ide> """Destroys the current TF graph and creates a new one.
<ide> def variable(value, dtype=None, name=None):
<ide> sparse_coo = value.tocoo()
<ide> indices = np.concatenate((np.expand_dims(sparse_coo.row, 1),
<ide> np.expand_dims(sparse_coo.col, 1)), 1)
<del> v = tf.SparseTensor(indices=indices,
<del> values=sparse_coo.data,
<del> shape=sparse_coo.shape)
<add> if tf_major_version >= 1:
<add> v = tf.SparseTensor(indices=indices,
<add> values=sparse_coo.data,
<add> dense_shape=sparse_coo.shape)
<add> else:
<add> v = tf.SparseTensor(indices=indices,
<add> values=sparse_coo.data,
<add> shape=sparse_coo.shape)
<ide> v._dims = len(sparse_coo.shape)
<ide> v._keras_shape = sparse_coo.shape
<ide> v._uses_learning_phase = False
<ide> def concatenate(tensors, axis=-1):
<ide> if py_all([is_sparse(x) for x in tensors]):
<ide> return tf.sparse_concat(axis, tensors)
<ide> else:
<del> try:
<del> return tf.concat_v2([to_dense(x) for x in tensors], axis)
<del> except AttributeError:
<del> return tf.concat(axis, [to_dense(x) for x in tensors])
<add> if tf_major_version >= 1:
<add> return tf.concat([to_dense(x) for x in tensors], axis)
<add> else:
<add> try:
<add> return tf.concat_v2([to_dense(x) for x in tensors], axis)
<add> except AttributeError:
<add> return tf.concat(axis, [to_dense(x) for x in tensors])
<ide>
<ide>
<ide> def reshape(x, shape):
<ide> def ctc_decode(y_pred, input_length, greedy=True, beam_width=100,
<ide> sequence_length=input_length, beam_width=beam_width,
<ide> top_paths=top_paths)
<ide>
<del> decoded_dense = [tf.sparse_to_dense(st.indices, st.shape, st.values, default_value=-1)
<del> for st in decoded]
<add> if tf_major_version >= 1:
<add> decoded_dense = [tf.sparse_to_dense(st.indices, st.dense_shape, st.values, default_value=-1)
<add> for st in decoded]
<add> else:
<add> decoded_dense = [tf.sparse_to_dense(st.indices, st.shape, st.values, default_value=-1)
<add> for st in decoded]
<ide>
<ide> return (decoded_dense, log_prob)
<ide> | 1 |
Java | Java | reimplement "skiplast" operator | fe9d383bcb989045316d4c2add4a5196cf3c55d4 | <ide><path>rxjava-core/src/main/java/rx/Observable.java
<ide> public final Observable<T> skip(long time, TimeUnit unit, Scheduler scheduler) {
<ide> * @see <a href="http://msdn.microsoft.com/en-us/library/hh211750.aspx">MSDN: Observable.SkipLast</a>
<ide> */
<ide> public final Observable<T> skipLast(int count) {
<del> return create(OperationSkipLast.skipLast(this, count));
<add> return lift(new OperationSkipLast<T>(count));
<ide> }
<ide>
<ide> /**
<ide><path>rxjava-core/src/main/java/rx/operators/OperationSkipLast.java
<ide> /**
<ide> * Copyright 2014 Netflix, Inc.
<del> *
<add> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> * You may obtain a copy of the License at
<del> *
<add> *
<ide> * http://www.apache.org/licenses/LICENSE-2.0
<del> *
<add> *
<ide> * Unless required by applicable law or agreed to in writing, software
<ide> * distributed under the License is distributed on an "AS IS" BASIS,
<ide> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<ide> import java.util.LinkedList;
<ide> import java.util.List;
<ide> import java.util.concurrent.TimeUnit;
<del>import java.util.concurrent.locks.ReentrantLock;
<ide>
<ide> import rx.Observable;
<ide> import rx.Observable.OnSubscribeFunc;
<add>import rx.Observable.Operator;
<ide> import rx.Observer;
<ide> import rx.Scheduler;
<ide> import rx.Subscriber;
<ide> /**
<ide> * Bypasses a specified number of elements at the end of an observable sequence.
<ide> */
<del>public class OperationSkipLast {
<add>public class OperationSkipLast<T> implements Operator<T, T> {
<ide>
<del> /**
<del> * Bypasses a specified number of elements at the end of an observable
<del> * sequence.
<del> * <p>
<del> * This operator accumulates a queue with a length enough to store the first
<del> * count elements. As more elements are received, elements are taken from
<del> * the front of the queue and produced on the result sequence. This causes
<del> * elements to be delayed.
<del> *
<del> * @param source
<del> * the source sequence.
<del> * @param count
<del> * number of elements to bypass at the end of the source
<del> * sequence.
<del> * @return An observable sequence containing the source sequence elements
<del> * except for the bypassed ones at the end.
<del> *
<del> * @throws IndexOutOfBoundsException
<del> * count is less than zero.
<del> */
<del> public static <T> OnSubscribeFunc<T> skipLast(
<del> Observable<? extends T> source, int count) {
<del> return new SkipLast<T>(source, count);
<del> }
<add> private final int count;
<ide>
<del> private static class SkipLast<T> implements OnSubscribeFunc<T> {
<del> private final int count;
<del> private final Observable<? extends T> source;
<del>
<del> private SkipLast(Observable<? extends T> source, int count) {
<del> this.count = count;
<del> this.source = source;
<add> public OperationSkipLast(int count) {
<add> if (count < 0) {
<add> throw new IndexOutOfBoundsException("count could not be negative");
<ide> }
<add> this.count = count;
<add> }
<ide>
<del> public Subscription onSubscribe(final Observer<? super T> observer) {
<del> if (count < 0) {
<del> throw new IndexOutOfBoundsException(
<del> "count could not be negative");
<del> }
<del> final SafeObservableSubscription subscription = new SafeObservableSubscription();
<del> return subscription.wrap(source.unsafeSubscribe(new Subscriber<T>() {
<del>
<del> private final ReentrantLock lock = new ReentrantLock();
<add> @Override
<add> public Subscriber<? super T> call(final Subscriber<? super T> subscriber) {
<add> return new Subscriber<T>(subscriber) {
<add> /**
<add> * Store the last count elements until now.
<add> */
<add> private final Deque<T> deque = new LinkedList<T>();
<ide>
<del> /**
<del> * Store the last count elements until now.
<del> */
<del> private final Deque<T> deque = new LinkedList<T>();
<add> @Override
<add> public void onCompleted() {
<add> subscriber.onCompleted();
<add> }
<ide>
<del> @Override
<del> public void onCompleted() {
<del> observer.onCompleted();
<del> }
<add> @Override
<add> public void onError(Throwable e) {
<add> subscriber.onError(e);
<add> }
<ide>
<del> @Override
<del> public void onError(Throwable e) {
<del> observer.onError(e);
<add> @Override
<add> public void onNext(T value) {
<add> if (count == 0) {
<add> // If count == 0, we do not need to put value into deque
<add> // and remove it at once. We can emit the value
<add> // directly.
<add> subscriber.onNext(value);
<add> return;
<ide> }
<del>
<del> @Override
<del> public void onNext(T value) {
<del> if (count == 0) {
<del> // If count == 0, we do not need to put value into deque
<del> // and remove it at once. We can emit the value
<del> // directly.
<del> try {
<del> observer.onNext(value);
<del> } catch (Throwable ex) {
<del> observer.onError(ex);
<del> subscription.unsubscribe();
<del> }
<del> return;
<del> }
<del> lock.lock();
<del> try {
<del> deque.offerLast(value);
<del> if (deque.size() > count) {
<del> // Now deque has count + 1 elements, so the first
<del> // element in the deque definitely does not belong
<del> // to the last count elements of the source
<del> // sequence. We can emit it now.
<del> observer.onNext(deque.removeFirst());
<del> }
<del> } catch (Throwable ex) {
<del> observer.onError(ex);
<del> subscription.unsubscribe();
<del> } finally {
<del> lock.unlock();
<del> }
<add> deque.offerLast(value);
<add> if (deque.size() > count) {
<add> // Now deque has count + 1 elements, so the first
<add> // element in the deque definitely does not belong
<add> // to the last count elements of the source
<add> // sequence. We can emit it now.
<add> subscriber.onNext(deque.removeFirst());
<ide> }
<add> }
<ide>
<del> }));
<del> }
<add> };
<ide> }
<ide>
<ide> /**
<ide> * Skip delivering values in the time window before the values.
<del> *
<add> *
<ide> * @param <T>
<ide> * the result value type
<ide> */
<ide><path>rxjava-core/src/test/java/rx/operators/OperationSkipLastTest.java
<ide> import static org.mockito.Mockito.never;
<ide> import static org.mockito.Mockito.times;
<ide> import static org.mockito.Mockito.verify;
<del>import static rx.operators.OperationSkipLast.skipLast;
<ide>
<add>import java.util.Arrays;
<ide> import java.util.concurrent.TimeUnit;
<ide>
<ide> import org.junit.Test;
<ide> public class OperationSkipLastTest {
<ide>
<ide> @Test
<ide> public void testSkipLastEmpty() {
<del> Observable<String> w = Observable.empty();
<del> Observable<String> observable = Observable.create(skipLast(w, 2));
<add> Observable<String> observable = Observable.<String>empty().skipLast(2);
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> Observer<String> observer = mock(Observer.class);
<ide> public void testSkipLastEmpty() {
<ide>
<ide> @Test
<ide> public void testSkipLast1() {
<del> Observable<String> w = Observable.from("one", "two", "three");
<del> Observable<String> observable = Observable.create(skipLast(w, 2));
<add> Observable<String> observable = Observable.from(Arrays.asList("one", "two", "three")).skipLast(2);
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> Observer<String> observer = mock(Observer.class);
<ide> public void testSkipLast1() {
<ide>
<ide> @Test
<ide> public void testSkipLast2() {
<del> Observable<String> w = Observable.from("one", "two");
<del> Observable<String> observable = Observable.create(skipLast(w, 2));
<add> Observable<String> observable = Observable.from(Arrays.asList("one", "two")).skipLast(2);
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> Observer<String> observer = mock(Observer.class);
<ide> public void testSkipLast2() {
<ide> @Test
<ide> public void testSkipLastWithZeroCount() {
<ide> Observable<String> w = Observable.from("one", "two");
<del> Observable<String> observable = Observable.create(skipLast(w, 0));
<add> Observable<String> observable = w.skipLast(0);
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> Observer<String> observer = mock(Observer.class);
<ide> public void testSkipLastWithZeroCount() {
<ide>
<ide> @Test
<ide> public void testSkipLastWithNull() {
<del> Observable<String> w = Observable.from("one", null, "two");
<del> Observable<String> observable = Observable.create(skipLast(w, 1));
<add> Observable<String> observable = Observable.from(Arrays.asList("one", null, "two")).skipLast(1);
<ide>
<ide> @SuppressWarnings("unchecked")
<ide> Observer<String> observer = mock(Observer.class);
<ide> public void testSkipLastWithNull() {
<ide> verify(observer, times(1)).onCompleted();
<ide> }
<ide>
<del> @Test
<add> @Test(expected = IndexOutOfBoundsException.class)
<ide> public void testSkipLastWithNegativeCount() {
<del> Observable<String> w = Observable.from("one");
<del> Observable<String> observable = Observable.create(skipLast(w, -1));
<del>
<del> @SuppressWarnings("unchecked")
<del> Observer<String> observer = mock(Observer.class);
<del> observable.subscribe(observer);
<del> verify(observer, never()).onNext(any(String.class));
<del> verify(observer, times(1)).onError(
<del> any(IndexOutOfBoundsException.class));
<del> verify(observer, never()).onCompleted();
<add> Observable.from("one").skipLast(-1);
<ide> }
<ide>
<ide> @Test | 3 |
Text | Text | add missing documentation to cli/import.md | 3101941120db54dde89933d76d3ea23c4bde0f7c | <ide><path>docs/reference/commandline/import.md
<ide> weight=1
<ide>
<ide> # import
<ide>
<del> Usage: docker import URL|- [REPOSITORY[:TAG]]
<add> Usage: docker import file|URL|- [REPOSITORY[:TAG]]
<ide>
<ide> Create an empty filesystem image and import the contents of the
<ide> tarball (.tar, .tar.gz, .tgz, .bzip, .tar.xz, .txz) into it, then
<ide> optionally tag it.
<ide>
<ide> -c, --change=[] Apply specified Dockerfile instructions while importing the image
<ide>
<del>URLs must start with `http` and point to a single file archive (.tar,
<del>.tar.gz, .tgz, .bzip, .tar.xz, or .txz) containing a root filesystem. If
<del>you would like to import from a local directory or archive, you can use
<del>the `-` parameter to take the data from `STDIN`.
<add>You can specify a `URL` or `-` (dash) to take data directly from `STDIN`. The
<add>`URL` can point to an archive (.tar, .tar.gz, .tgz, .bzip, .tar.xz, or .txz)
<add>containing a fileystem or to an individual file on the Docker host. If you
<add>specify an archive, Docker untars it in the container relative to the `/`
<add>(root). If you specify an individual file, you must specify the full path within
<add>the host. To import from a remote location, specify a `URI` that begins with the
<add>`http://` or `https://` protocol.
<ide>
<ide> The `--change` option will apply `Dockerfile` instructions to the image
<ide> that is created.
<ide> Import to docker via pipe and `STDIN`.
<ide>
<ide> $ cat exampleimage.tgz | docker import - exampleimagelocal:new
<ide>
<add>Import to docker from a local archive.
<add>
<add> $ docker import /path/to/exampleimage.tgz
<add>
<ide> **Import from a local directory:**
<ide>
<ide> $ sudo tar -c . | docker import - exampleimagedir | 1 |
Go | Go | remove extraneous mount aliases | 39a3700c01142713e88ff954e86b5dc38174511b | <ide><path>api/client/service/opts.go
<ide> func (m *MountOpt) Set(value string) error {
<ide> key := strings.ToLower(parts[0])
<ide>
<ide> if len(parts) == 1 {
<del> if key == "readonly" || key == "ro" {
<add> switch key {
<add> case "readonly", "ro":
<ide> mount.ReadOnly = true
<ide> continue
<del> }
<del>
<del> if key == "volume-nocopy" {
<add> case "volume-nocopy":
<ide> volumeOptions().NoCopy = true
<ide> continue
<ide> }
<ide> func (m *MountOpt) Set(value string) error {
<ide> switch key {
<ide> case "type":
<ide> mount.Type = swarm.MountType(strings.ToLower(value))
<del> case "source", "name", "src":
<add> case "source", "src":
<ide> mount.Source = value
<del> case "target", "dst", "dest", "destination", "path":
<add> case "target", "dst", "destination":
<ide> mount.Target = value
<ide> case "readonly", "ro":
<del> ro, err := strconv.ParseBool(value)
<add> mount.ReadOnly, err = strconv.ParseBool(value)
<ide> if err != nil {
<del> return fmt.Errorf("invalid value for readonly: %s", value)
<add> return fmt.Errorf("invalid value for %s: %s", key, value)
<ide> }
<del> mount.ReadOnly = ro
<ide> case "bind-propagation":
<ide> bindOptions().Propagation = swarm.MountPropagation(strings.ToLower(value))
<ide> case "volume-nocopy":
<ide><path>api/client/service/opts_test.go
<ide> func TestMountOptSetNoError(t *testing.T) {
<ide> // tests several aliases that should have same result.
<ide> "type=bind,target=/target,source=/source",
<ide> "type=bind,src=/source,dst=/target",
<del> "type=bind,name=/source,dst=/target",
<del> "type=bind,name=/source,path=/target",
<add> "type=bind,source=/source,dst=/target",
<add> "type=bind,src=/source,target=/target",
<ide> } {
<ide> var mount MountOpt
<ide> | 2 |
PHP | PHP | apply fixes from styleci | 7133f119d28e2767cb8ae5ea5cc74cc166c892cf | <ide><path>src/Illuminate/Validation/ValidationException.php
<ide> namespace Illuminate\Validation;
<ide>
<ide> use Exception;
<del>use Illuminate\Support\Facades\Validator as ValidatorFacade;
<ide> use Illuminate\Support\Arr;
<add>use Illuminate\Support\Facades\Validator as ValidatorFacade;
<ide>
<ide> class ValidationException extends Exception
<ide> { | 1 |
Ruby | Ruby | modernise the grc formula somewhat | ceef7729e0e3913aff3babb02a7f760de1257a76 | <ide><path>Library/Homebrew/formula.rb
<ide> def info; prefix+'share'+'info' end
<ide> def include; prefix+'include' end
<ide> def share; prefix+'share' end
<ide>
<del> # generally we don't want these to be inside the keg
<add> # generally we don't want var stuff inside the keg
<ide> def var; HOMEBREW_PREFIX+'var' end
<add> # configuration needs to be preserved past upgrades
<ide> def etc; HOMEBREW_PREFIX+'etc' end
<del>
<add>
<ide> # reimplement if we don't autodetect the download strategy you require
<ide> def download_strategy
<ide> case url | 1 |
Python | Python | fix scipy intersphinx link | 0b64e2a91483b80b86ab8a44534e09c015fdea88 | <ide><path>doc/source/conf.py
<ide> def setup(app):
<ide> intersphinx_mapping = {
<ide> 'neps': ('https://numpy.org/neps', None),
<ide> 'python': ('https://docs.python.org/3', None),
<del> 'scipy': ('https://docs.scipy.org/doc/scipy/reference', None),
<add> 'scipy': ('https://docs.scipy.org/doc/scipy', None),
<ide> 'matplotlib': ('https://matplotlib.org/stable', None),
<ide> 'imageio': ('https://imageio.readthedocs.io/en/stable', None),
<ide> 'skimage': ('https://scikit-image.org/docs/stable', None), | 1 |
Go | Go | forbid login of a null-string username | 755df347fbaf6a02e6bdc2d5b7339aade2dbade0 | <ide><path>api/client/login.go
<ide> func (cli *DockerCli) configureAuth(flUser, flPassword, flEmail, serverAddress s
<ide> if !ok {
<ide> authconfig = types.AuthConfig{}
<ide> }
<add> authconfig.Username = strings.TrimSpace(authconfig.Username)
<ide>
<del> if flUser == "" {
<add> if flUser = strings.TrimSpace(flUser); flUser == "" {
<ide> cli.promptWithDefault("Username", authconfig.Username)
<ide> flUser = readInput(cli.in, cli.out)
<ide> flUser = strings.TrimSpace(flUser)
<ide> func (cli *DockerCli) configureAuth(flUser, flPassword, flEmail, serverAddress s
<ide> }
<ide> }
<ide>
<add> if flUser == "" {
<add> return authconfig, fmt.Errorf("Error: Non-null Username Required")
<add> }
<add>
<ide> if flPassword == "" {
<ide> oldState, err := term.SaveState(cli.inFd)
<ide> if err != nil {
<ide> func (cli *DockerCli) configureAuth(flUser, flPassword, flEmail, serverAddress s
<ide>
<ide> term.RestoreTerminal(cli.inFd, oldState)
<ide> if flPassword == "" {
<del> return authconfig, fmt.Errorf("Error : Password Required")
<add> return authconfig, fmt.Errorf("Error: Password Required")
<ide> }
<ide> }
<ide> | 1 |
Text | Text | update missed title | d542b9a9b90be9d4712d54f25735183a54a47324 | <ide><path>curriculum/challenges/english/07-scientific-computing-with-python/lectures-python-for-everybody/python-dictionaries.english.md
<ide> ---
<ide> id: 5e7b9f090b6c005b0e76f067
<del>title: Dictionaries A
<add>title: Python Dictionaries
<ide> challengeType: 11
<ide> isRequired: true
<ide> videoId: dnzvfimrRMg | 1 |
Python | Python | remove debugging code | 2ad48898682e6c8f043e5c2317ccd30a3094eeb4 | <ide><path>test.py
<ide> def main(args):
<ide> masterMode = (args[0] == '-m')
<ide> manifestFile = args[0] if not masterMode else manifestFile
<ide>
<del>
<del>
<del> masterMode = True
<del>
<del>
<del>
<ide> setUp(manifestFile, masterMode)
<ide>
<ide> server = HTTPServer(('127.0.0.1', 8080), PDFTestHandler) | 1 |
Javascript | Javascript | add tests for root-scope-used activation hook | e0041c3c24988574b3ca69b5b862af35c91887e5 | <ide><path>spec/workspace-spec.js
<ide> describe('Workspace', () => {
<ide> })
<ide> })
<ide>
<add> describe('the root-scope-used hook', () => {
<add> it('fires when opening a file or changing the grammar of an open file', async () => {
<add> await atom.packages.activatePackage('language-javascript')
<add> await atom.packages.activatePackage('language-coffee-script')
<add>
<add> const observeTextEditorsSpy = jasmine.createSpy('observeTextEditors')
<add> const javascriptGrammarUsed = jasmine.createSpy('javascript')
<add> const coffeeScriptGrammarUsed = jasmine.createSpy('coffeescript')
<add>
<add> atom.packages.triggerDeferredActivationHooks()
<add> atom.packages.onDidTriggerActivationHook('source.js:root-scope-used', () => {
<add> atom.workspace.observeTextEditors(observeTextEditorsSpy)
<add> javascriptGrammarUsed()
<add> })
<add> atom.packages.onDidTriggerActivationHook('source.coffee:root-scope-used', coffeeScriptGrammarUsed)
<add>
<add> expect(javascriptGrammarUsed).not.toHaveBeenCalled()
<add> expect(observeTextEditorsSpy).not.toHaveBeenCalled()
<add> const editor = await atom.workspace.open('sample.js', {autoIndent: false})
<add> expect(javascriptGrammarUsed).toHaveBeenCalled()
<add> expect(observeTextEditorsSpy.callCount).toBe(1)
<add>
<add> expect(coffeeScriptGrammarUsed).not.toHaveBeenCalled()
<add> atom.grammars.assignLanguageMode(editor, 'source.coffee')
<add> expect(coffeeScriptGrammarUsed).toHaveBeenCalled()
<add> })
<add> })
<add>
<ide> describe('::reopenItem()', () => {
<ide> it("opens the uri associated with the last closed pane that isn't currently open", () => {
<ide> const pane = workspace.getActivePane() | 1 |
Ruby | Ruby | fix syntaxerror in the api doc | ec0050edf357dec43ac41d167f23242aa064b5ff | <ide><path>actionpack/lib/action_controller/metal/redirecting.rb
<ide> def redirect_back(fallback_location:, allow_other_host: true, **args)
<ide> # subject to browser security settings and user preferences. If the request
<ide> # is missing this header, the <tt>fallback_location</tt> will be used.
<ide> #
<del> # redirect_back_or_to { action: "show", id: 5 }
<add> # redirect_back_or_to({ action: "show", id: 5 })
<ide> # redirect_back_or_to @post
<ide> # redirect_back_or_to "http://www.rubyonrails.org"
<ide> # redirect_back_or_to "/images/screenshot.jpg" | 1 |
PHP | PHP | add subselect to query builder | 2a72b471d59cad8701a729fd99cf4b972005e8cf | <ide><path>src/Illuminate/Database/Query/Builder.php
<ide> public function select($columns = array('*'))
<ide> * Add a new "raw" select expression to the query.
<ide> *
<ide> * @param string $expression
<add> * @param array $bindings
<add> * @return \Illuminate\Database\Query\Builder|static
<add> */
<add> public function selectRaw($expression, array $bindings = array())
<add> {
<add> $this->addSelect(new Expression($expression));
<add>
<add> if ($bindings)
<add> {
<add> $this->addBinding($bindings, 'select');
<add> }
<add>
<add> return $this;
<add> }
<add>
<add> /**
<add> * Add a subselect expression to the query.
<add> *
<add> * @param \Closure|\Illuminate\Database\Query\Builder|string $query
<add> * @param string $as
<ide> * @return \Illuminate\Database\Query\Builder|static
<ide> */
<del> public function selectRaw($expression)
<add> public function selectSub($query, $as)
<ide> {
<del> return $this->select(new Expression($expression));
<add> if ($query instanceof Closure)
<add> {
<add> $callback = $query;
<add> $query = $this->newQuery();
<add> $callback($query);
<add> }
<add>
<add> if ($query instanceof Builder)
<add> {
<add> $bindings = $query->getBindings();
<add> $query = $query->toSql();
<add> }
<add> elseif (is_string($query))
<add> {
<add> $bindings = [];
<add> }
<add> else
<add> {
<add> $type = is_object($query) ? get_class($query) : gettype($query);
<add> $message = "Argument #1 passed to selectSub must be an SQL string, query builder or closure, {$type} given";
<add> throw new \InvalidArgumentException($message);
<add> }
<add>
<add> $as = $this->grammar->wrap($as);
<add>
<add> $query = '(' . $query . ') as ' . $as;
<add>
<add> return $this->selectRaw($query, $bindings);
<ide> }
<ide>
<ide> /**
<ide><path>tests/Database/DatabaseQueryBuilderTest.php
<ide> public function testMergeBuildersBindingOrder()
<ide> }
<ide>
<ide>
<add> public function testSubSelect()
<add> {
<add> $expectedSql = 'select "foo", "bar", (select "baz" from "two" where "subkey" = ?) as "sub" from "one" where "key" = ?';
<add> $expectedBindings = ['subval', 'val'];
<add>
<add> $builder = $this->getPostgresBuilder();
<add> $builder->from('one')->select(['foo', 'bar'])->where('key', '=', 'val');
<add> $builder->selectSub(function($query) { $query->from('two')->select('baz')->where('subkey', '=', 'subval'); }, 'sub');
<add> $this->assertEquals($expectedSql, $builder->toSql());
<add> $this->assertEquals($expectedBindings, $builder->getBindings());
<add>
<add> $builder = $this->getPostgresBuilder();
<add> $builder->from('one')->select(['foo', 'bar'])->where('key', '=', 'val');
<add> $subBuilder = $this->getPostgresBuilder();
<add> $subBuilder->from('two')->select('baz')->where('subkey', '=', 'subval');
<add> $builder->selectSub($subBuilder, 'sub');
<add> $this->assertEquals($expectedSql, $builder->toSql());
<add> $this->assertEquals($expectedBindings, $builder->getBindings());
<add> }
<add>
<add>
<ide> protected function getBuilder()
<ide> {
<ide> $grammar = new Illuminate\Database\Query\Grammars\Grammar; | 2 |
Text | Text | add sticky footer faq | fbf7e979dde46a6ef0874aa7707eb1213a517f43 | <ide><path>README.md
<ide> When I first created this project I didn't have any experience with Handlebars.
<ide> ### Why do you have all routes in app.js?
<ide> For the sake of simplicity. While there might be a better approach, such as passing `app` context to each controller as outlined in this [blog](http://timstermatic.github.io/blog/2013/08/17/a-simple-mvc-framework-with-node-and-express/), I find such style to be confusing for beginners. It took me a long time to grasp the concept of `exports` and `module.exports`, let alone having a global `app` reference in other files. That to me is a backward thinking. The `app.js` is the "center of the universe", it should be the one referencing models, routes, controllers, etc. When working solo I actually prefer to have everything in `app.js` as is the case with this REST API server for [ember-sass-express-starter's app.js file](https://github.com/sahat/ember-sass-express-starter/blob/master/app.js). That makes things so much simpler!
<ide>
<add>### I don't need a sticky footer, can I delete it?
<add>Absolutely. But unlike a regular footer there is a bit more work involved. First, delete `#wrap` and `#footer` *ID*s from **styles.less**. Next delete `#wrap` and `#footer` from **layout.jade**. If no element is specified before the class or id, Jade assumes it's a `div` element. Don't forget to indent everything under `#wrap` to the left once, since this project uses two spaces per block indentation.
<ide>
<ide> TODO
<ide> ---- | 1 |
PHP | PHP | update factory find model logic | d5cf953ff4bd7db92ebdbfe14d914bbdf13e64d8 | <ide><path>src/Illuminate/Database/Eloquent/Factories/Factory.php
<ide> public function newModel(array $attributes = [])
<ide> public function modelName()
<ide> {
<ide> $resolver = static::$modelNameResolver ?: function (self $factory) {
<del> return 'App\\'.Str::replaceLast('Factory', '', class_basename($factory));
<add> $factoryBasename = Str::replaceLast('Factory', '', class_basename($factory));
<add>
<add> return class_exists('App\\Models\\'.$factoryBasename)
<add> ? 'App\\Models\\'.$factoryBasename
<add> : 'App\\'.$factoryBasename;
<ide> };
<ide>
<ide> return $this->model ?: $resolver($this); | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.