column        dtype           stats
content_type  stringclasses   8 distinct values
main_lang     stringclasses   7 distinct values
message       stringlengths   1 to 50 characters
sha           stringlengths   40 characters (fixed)
patch         stringlengths   52 to 962k characters
file_count    int64           1 to 300
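Each record below lists its content_type, main_lang, message, sha, patch and file_count in that order. As a minimal sketch of how rows with this schema could be inspected (loading through the Hugging Face `datasets` library is an assumption, and the JSON Lines path "commits.jsonl" is a placeholder, not part of this dump):

from datasets import load_dataset

# Minimal sketch: load commit records matching the schema above.
# "commits.jsonl" is a placeholder path; any JSON Lines file whose rows
# carry the six fields listed in the schema would work the same way.
ds = load_dataset("json", data_files="commits.jsonl", split="train")

# Keep Python commits that touch exactly one file.
python_single_file = ds.filter(
    lambda row: row["main_lang"] == "Python" and row["file_count"] == 1
)

for row in python_single_file:
    # `sha` is the 40-character commit hash; `patch` marks context, added
    # and deleted lines with <ide>, <add> and <del> prefixes.
    print(row["sha"], row["message"])

The same filter pattern extends to the other columns, for example selecting rows by content_type or bounding patch length.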
Python
Python
assert exception msg
ce8b3d398f84eb88959fb188e8e0c90dcef65ec0
<ide><path>keras/utils/conv_utils.py <ide> def normalize_tuple(value, n, name, cmp=None): <ide> <ide> unqualified_values = [v for v in value_tuple if cmp is not None and cmp(v)] <ide> if len(unqualified_values) > 0: <del> error_msg += f' that does not satisfy the requirement.' <add> error_msg += (f' including {unqualified_values}' <add> f' that does not satisfy the requirement.') <ide> raise ValueError(error_msg) <ide> return value_tuple <ide> <ide><path>keras/utils/conv_utils_test.py <ide> def test_normalize_tuple(self): <ide> self.assertEqual((3, -1, 3), <ide> conv_utils.normalize_tuple((3, -1, 3), n=3, name='negative_size')) <ide> <del> with self.assertRaises(ValueError): <add> with self.assertRaises(ValueError) as ctx: <ide> conv_utils.normalize_tuple((2, 1), n=3, name='strides', cmp=lambda x: x < 0) <add> self.assertTrue('The `strides` argument must be a tuple of 3' in str(ctx.exception)) <ide> <del> with self.assertRaises(ValueError): <del> conv_utils.normalize_tuple(None, n=3, name='strides', cmp=lambda x: x < 0) <add> with self.assertRaises(ValueError) as ctx: <add> conv_utils.normalize_tuple(None, n=3, name='kernel_size', cmp=lambda x: x <= 0) <add> self.assertTrue('The `kernel_size` argument must be a tuple of 3' in str(ctx.exception)) <ide> <del> with self.assertRaises(ValueError): <del> conv_utils.normalize_tuple(-4, n=3, name='pool_size', cmp=lambda x: x <= 0) <add> with self.assertRaises(ValueError) as ctx: <add> conv_utils.normalize_tuple(-4, n=3, name='strides', cmp=lambda x: x < 0) <add> self.assertTrue('that does not satisfy the requirement' in str(ctx.exception)) <ide> <del> with self.assertRaises(ValueError): <add> with self.assertRaises(ValueError) as ctx: <ide> conv_utils.normalize_tuple((0, 1, 2), n=3, name='pool_size', cmp=lambda x: x <= 0) <add> self.assertTrue('that does not satisfy the requirement' in str(ctx.exception)) <ide> <ide> def test_normalize_data_format(self): <ide> self.assertEqual('channels_last',
2
Python
Python
fix french lemmatization
8bd85fd9d51b8961e6d264554908e1f1b80d8e02
<ide><path>spacy/lang/fr/lemmatizer/lemmatizer.py <ide> def lemmatize(string, index, exceptions, rules): <ide> if not forms: <ide> forms.extend(oov_forms) <ide> if not forms and string in LOOKUP.keys(): <del> forms.append(LOOKUP[string]) <add> forms.append(LOOKUP[string][0]) <ide> if not forms: <ide> forms.append(string) <ide> return list(set(forms)) <ide><path>spacy/tests/regression/test_issue3178.py <add>from __future__ import unicode_literals <add>import pytest <add>import spacy <add> <add> <add>@pytest.mark.models('fr') <add>def test_issue1959(FR): <add> texts = ['Je suis la mauvaise herbe', "Me, myself and moi"] <add> for text in texts: <add> FR(text)
2
Ruby
Ruby
fix cellar any handling
831d034303bf972f6b4a872d5abdf6acb3439656
<ide><path>Library/Homebrew/software_spec.rb <ide> def root_url(var = nil, specs = {}) <ide> end <ide> <ide> def compatible_locations? <del> compatible_cellar = cellar == :any || <del> cellar == :any_skip_relocation || <del> cellar == HOMEBREW_CELLAR.to_s <add> # this looks like it should check prefix and repository too but to be <add> # `cellar :any` actually requires no references to the cellar, prefix or <add> # repository. <add> return true if [:any, :any_skip_relocation].include?(cellar) <ide> <add> compatible_cellar = cellar == HOMEBREW_CELLAR.to_s <ide> compatible_prefix = prefix == HOMEBREW_PREFIX.to_s <ide> <ide> # Only check the repository matches if the prefix is the default. <ide> # This is because the bottle DSL does not allow setting a custom repository <ide> # but does allow setting a custom prefix. <del> compatible_repository = if prefix == Homebrew::DEFAULT_PREFIX <add> compatible_repository = if Homebrew.default_prefix?(prefix) <ide> repository == HOMEBREW_REPOSITORY.to_s <ide> else <ide> true
1
Text
Text
add v3.24.0-beta.2 to changelog
1ae3f13e5be1dc949045e7061b924fc0fa3ec47a
<ide><path>CHANGELOG.md <ide> # Ember Changelog <ide> <add>### v3.24.0-beta.2 (November 24, 2020) <add> <add>- [#19282](https://github.com/emberjs/ember.js/pull/19282) [BUGFIX] Issue deprecations (instead of assertions) for tracked mutation in constructor during rendering <add> <ide> ### v3.24.0-beta.1 (November 16, 2020) <ide> <ide> - [#19224](https://github.com/emberjs/ember.js/pull/19224) [FEATURE] Add `{{page-title}}` helper to route template blueprints to implement [RFC #0654](https://github.com/emberjs/rfcs/blob/master/text/0645-add-ember-page-title-addon.md).
1
Ruby
Ruby
simplify cxxstdlib_check implementation
95aef5511091dc666beae7c5bdb79df926374554
<ide><path>Library/Homebrew/compat/formula.rb <ide> def std_cmake_parameters <ide> "-DCMAKE_INSTALL_PREFIX='#{prefix}' -DCMAKE_BUILD_TYPE=None -DCMAKE_FIND_FRAMEWORK=LAST -Wno-dev" <ide> end <ide> <del> def cxxstdlib <del> self.class.cxxstdlib <del> end <del> <ide> def cxxstdlib_check check_type <ide> self.class.cxxstdlib_check check_type <ide> end <ide><path>Library/Homebrew/formula.rb <ide> def skip_clean? path <ide> end <ide> <ide> def skip_cxxstdlib_check? <del> self.class.cxxstdlib.include?(:skip) <add> false <ide> end <ide> <ide> def require_universal_deps? <ide> def keg_only reason, explanation="" <ide> @keg_only_reason = KegOnlyReason.new(reason, explanation) <ide> end <ide> <del> # Flag for marking whether this formula needs C++ standard library <del> # compatibility check <del> def cxxstdlib <del> @cxxstdlib ||= Set.new <del> end <del> <del> # Explicitly request changing C++ standard library compatibility check <del> # settings. Use with caution! <add> # Pass :skip to this method to disable post-install stdlib checking <ide> def cxxstdlib_check check_type <del> cxxstdlib << check_type <add> define_method(:skip_cxxstdlib_check?) { true } if check_type == :skip <ide> end <ide> <ide> # For Apple compilers, this should be in the format:
2
Java
Java
move builder methods to versionresourceresolver
ae48b5f7f22ca967ced6f206b30af38b492bf68b
<ide><path>spring-webmvc/src/main/java/org/springframework/web/servlet/config/annotation/ResourceHandlerRegistration.java <ide> package org.springframework.web.servlet.config.annotation; <ide> <ide> import java.util.ArrayList; <del>import java.util.HashMap; <ide> import java.util.List; <del>import java.util.Map; <ide> <ide> import org.springframework.cache.Cache; <ide> import org.springframework.cache.concurrent.ConcurrentMapCache; <ide> public class ResourceHandlerRegistration { <ide> <ide> private List<ResourceResolver> customResolvers = new ArrayList<ResourceResolver>(); <ide> <del> private VersionResourceResolver versionResolver; <del> <ide> private List<ResourceTransformer> customTransformers = new ArrayList<ResourceTransformer>(); <ide> <add> private boolean hasVersionResolver; <add> <add> private boolean hasCssLinkTransformer; <add> <ide> private boolean isDevMode = false; <ide> <ide> private Cache resourceCache; <ide> public ResourceHandlerRegistration addResourceLocations(String...resourceLocatio <ide> public ResourceHandlerRegistration addResolver(ResourceResolver resolver) { <ide> Assert.notNull(resolver, "The provided ResourceResolver should not be null"); <ide> this.customResolvers.add(resolver); <add> if (resolver instanceof VersionResourceResolver) { <add> this.hasVersionResolver = true; <add> } <ide> return this; <ide> } <ide> <ide> public ResourceHandlerRegistration addResolver(ResourceResolver resolver) { <ide> public ResourceHandlerRegistration addTransformer(ResourceTransformer transformer) { <ide> Assert.notNull(transformer, "The provided ResourceTransformer should not be null"); <ide> this.customTransformers.add(transformer); <del> return this; <del> } <del> <del> /** <del> * Apply Resource Versioning on the matching resources using a {@link FixedVersionStrategy}. <del> * <p>This strategy uses that fixed version string and adds it as a prefix in the resource path, <del> * e.g. {@code fixedversion/js/main.js}.</p> <del> * <p>There are many ways to get a version string for your application:</p> <del> * <ul> <del> * <li>create a string using the current date, a source of random numbers at runtime</li> <del> * <li>fetch a version string from a property source or an Env variable, using SpEL or @Value</li> <del> * </ul> <del> * <p>Note that a {@link CssLinkResourceTransformer} will be automatically registered to <del> * support versioned resources in CSS files.</p> <del> * @param fixedVersion a version string <del> * @param pathPatterns one or more resource URL path patterns <del> * @return the same {@link ResourceHandlerRegistration} instance for chained method invocation <del> * @see VersionResourceResolver <del> * @see FixedVersionStrategy <del> * @since 4.1 <del> */ <del> public ResourceHandlerRegistration addFixedVersionStrategy(String fixedVersion, String... pathPatterns) { <del> addVersionStrategy(new FixedVersionStrategy(fixedVersion), pathPatterns); <del> return this; <del> } <del> <del> /** <del> * Apply Resource Versioning on the matching resources using a {@link ContentVersionStrategy}. <del> * <p>This strategy uses the content of the Resource to create a String hash and adds it <del> * in the resource filename, e.g. 
{@code css/main-e36d2e05253c6c7085a91522ce43a0b4.css}.</p> <del> * <p>Note that a {@link CssLinkResourceTransformer} will be automatically registered to <del> * support versioned resources in CSS files.</p> <del> * @param pathPatterns one or more resource URL path patterns <del> * @return the same {@link ResourceHandlerRegistration} instance for chained method invocation <del> * @see VersionResourceResolver <del> * @see ContentVersionStrategy <del> * @since 4.1 <del> */ <del> public ResourceHandlerRegistration addContentVersionStrategy(String... pathPatterns) { <del> addVersionStrategy(new ContentVersionStrategy(), pathPatterns); <del> return this; <del> } <del> <del> <del> /** <del> * Apply Resource Versioning on the matching resources; this will update resources' URLs to include <del> * a version string calculated by a {@link VersionStrategy}. This is often used for cache busting. <del> * <p>Note that a {@link CssLinkResourceTransformer} will be automatically registered to <del> * support versioned resources in CSS files.</p> <del> * @param strategy the versioning strategy to use <del> * @param pathPatterns one or more resource URL path patterns <del> * @return the same {@link ResourceHandlerRegistration} instance for chained method invocation <del> * @see VersionResourceResolver <del> * @see VersionStrategy <del> * @since 4.1 <del> */ <del> public ResourceHandlerRegistration addVersionStrategy(VersionStrategy strategy, String... pathPatterns) { <del> if (this.versionResolver == null) { <del> this.versionResolver = new VersionResourceResolver(); <del> this.customResolvers.add(this.versionResolver); <del> this.customTransformers.add(new CssLinkResourceTransformer()); <del> } <del> for(String pattern : pathPatterns) { <del> this.versionResolver.getVersionStrategyMap().put(pattern, strategy); <add> if (transformer instanceof CssLinkResourceTransformer) { <add> this.hasCssLinkTransformer = true; <ide> } <ide> return this; <ide> } <ide> protected List<ResourceTransformer> getResourceTransformers() { <ide> } <ide> List<ResourceTransformer> transformers = new ArrayList<ResourceTransformer>(); <ide> ResourceTransformer first = this.customTransformers.get(0); <del> if (!ClassUtils.isAssignable(CachingResourceTransformer.class, first.getClass()) && !this.isDevMode) { <del> transformers.add(new CachingResourceTransformer(getDefaultResourceCache())); <add> ResourceTransformer cachingTransformer = null; <add> if (!this.isDevMode) { <add> if (ClassUtils.isAssignable(CachingResourceTransformer.class, first.getClass())) { <add> cachingTransformer = first; <add> } <add> else { <add> cachingTransformer = new CachingResourceTransformer(getDefaultResourceCache()); <add> transformers.add(cachingTransformer); <add> } <ide> } <ide> transformers.addAll(this.customTransformers); <add> if (this.hasVersionResolver && !this.hasCssLinkTransformer) { <add> transformers.add(cachingTransformer != null ? 1 : 0, new CssLinkResourceTransformer()); <add> } <ide> return transformers; <ide> } <ide> <ide><path>spring-webmvc/src/main/java/org/springframework/web/servlet/resource/VersionResourceResolver.java <ide> import org.springframework.util.StringUtils; <ide> <ide> /** <del> * A {@code ResourceResolver} that resolves request paths containing a version <del> * string, i.e. version information about the resource being requested. <del> * This resolver can be useful to set up HTTP caching strategies by changing <del> * resources' URLs as they are updated. 
<add> * Resolves request paths containing a version string that can be used as part <add> * of an HTTP caching strategy in which a resource is cached with a far future <add> * date (e.g. 1 year) and cached until the version, and therefore the URL, is <add> * changed. <ide> * <del> * <p>Because resource versioning depends on the resource types, this {@code ResourceResolver} <del> * needs to be configured with at least one {@link VersionStrategy}. The process of matching <del> * and generating version strings is delegated to the {@code VersionStrategy}. <add> * <p>Different versioning strategies exist and this resolver must be configured <add> * with one or more such strategies along with path mappings to indicate which <add> * strategy applies to which resources. <ide> * <del> * <p>When resolving resources, this resolver will first delegate to the chain to locate <del> * an existing resource and then attempt to extract a version string from the request path <del> * and then find a resource that matches that version. <add> * <p>{@code ContentVersionStrategy} is a good default choice except in cases <add> * where it cannot be used. Most notably the {@code ContentVersionStrategy} <add> * cannot be combined with JavaScript module loaders. For such cases the <add> * {@code FixedVersionStrategy} is a better choice. <ide> * <del> * <p>When resolving URLs, this resolver will, if necessary, add a version string in the <del> * request path. <add> * <p>Note that using this resolver to serve CSS files means the <add> * {@link CssLinkResourceTransformer} should also be used in order to modify <add> * links within CSS files to also contain versions. <ide> * <ide> * @author Brian Clozel <add> * @author Rossen Stoyanchev <ide> * @since 4.1 <ide> * @see VersionStrategy <ide> */ <ide> public void setStrategyMap(Map<String, VersionStrategy> map) { <ide> /** <ide> * Return the map with version strategies keyed by path pattern. <ide> */ <del> public Map<String, VersionStrategy> getVersionStrategyMap() { <add> public Map<String, VersionStrategy> getStrategyMap() { <ide> return this.versionStrategyMap; <ide> } <ide> <add> /** <add> * Insert a content-based version in resource URLs that match the given path <add> * patterns. The version is computed from the content of the file, e.g. <add> * {@code "css/main-e36d2e05253c6c7085a91522ce43a0b4.css"}. This is a good <add> * default strategy to use except when it cannot be, for example when using <add> * JavaScript module loaders, use {@link #addFixedVersionStrategy} instead <add> * for serving JavaScript files. <add> * @param pathPatterns one or more resource URL path patterns <add> * @return the current instance for chained method invocation <add> * @see ContentVersionStrategy <add> */ <add> public VersionResourceResolver addContentVersionStrategy(String... pathPatterns) { <add> addVersionStrategy(new ContentVersionStrategy(), pathPatterns); <add> return this; <add> } <add> <add> /** <add> * Insert a fixed, prefix-based version in resource URLs that match the given <add> * path patterns, e.g. {@code "{version}/js/main.js"}. This is useful (vs <add> * content-based versions) when using JavaScript module loaders. <add> * <p>The version may be a random number, the current date, fetched from a <add> * git commit sha, a property file, environment variable, and set with SpEL <add> * expressions in the configuration (e.g. see {@code @Value} in Java config). 
<add> * @param version a version string <add> * @param pathPatterns one or more resource URL path patterns <add> * @return the current instance for chained method invocation <add> * @see FixedVersionStrategy <add> */ <add> public VersionResourceResolver addFixedVersionStrategy(String version, String... pathPatterns) { <add> addVersionStrategy(new FixedVersionStrategy(version), pathPatterns); <add> return this; <add> } <add> <add> /** <add> * Register a custom VersionStrategy to apply to resource URLs that match the <add> * given path patterns. <add> * @param strategy the custom strategy <add> * @param pathPatterns one or more resource URL path patterns <add> * @return the current instance for chained method invocation <add> * @see VersionStrategy <add> */ <add> public VersionResourceResolver addVersionStrategy(VersionStrategy strategy, String... pathPatterns) { <add> for(String pattern : pathPatterns) { <add> getStrategyMap().put(pattern, strategy); <add> } <add> return this; <add> } <add> <add> <ide> @Override <ide> protected Resource resolveResourceInternal(HttpServletRequest request, String requestPath, <ide> List<? extends Resource> locations, ResourceResolverChain chain) { <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/config/annotation/ResourceHandlerRegistryTests.java <ide> public void noCacheResourceChain() throws Exception { <ide> <ide> @Test <ide> public void versionResourceChain() throws Exception { <del> this.registration <del> .addTransformer(new AppCacheManifestTransfomer()) <add> VersionResourceResolver versionResolver = new VersionResourceResolver() <ide> .addFixedVersionStrategy("fixed", "/**/*.js") <ide> .addContentVersionStrategy("/**"); <ide> <add> this.registration.addResolver(versionResolver).addTransformer(new AppCacheManifestTransfomer()); <add> <ide> ResourceHttpRequestHandler handler = getHandler("/resources/**"); <ide> List<ResourceResolver> resolvers = handler.getResourceResolvers(); <ide> assertThat(resolvers.toString(), resolvers, Matchers.hasSize(3)); <ide> public void versionResourceChain() throws Exception { <ide> List<ResourceTransformer> transformers = handler.getResourceTransformers(); <ide> assertThat(transformers, Matchers.hasSize(3)); <ide> assertThat(transformers.get(0), Matchers.instanceOf(CachingResourceTransformer.class)); <del> assertThat(transformers.get(1), Matchers.instanceOf(AppCacheManifestTransfomer.class)); <del> assertThat(transformers.get(2), Matchers.instanceOf(CssLinkResourceTransformer.class)); <add> assertThat(transformers.get(1), Matchers.instanceOf(CssLinkResourceTransformer.class)); <add> assertThat(transformers.get(2), Matchers.instanceOf(AppCacheManifestTransfomer.class)); <ide> } <ide> <ide> @Test <ide> public void customResourceChain() throws Exception { <add> VersionResourceResolver versionResolver = new VersionResourceResolver() <add> .addFixedVersionStrategy("fixed", "/**/*.js") <add> .addContentVersionStrategy("/**"); <add> <ide> CachingResourceResolver cachingResolver = Mockito.mock(CachingResourceResolver.class); <ide> CachingResourceTransformer cachingTransformer = Mockito.mock(CachingResourceTransformer.class); <ide> this.registration <del> .addTransformer(cachingTransformer) <del> .addTransformer(new AppCacheManifestTransfomer()) <ide> .addResolver(cachingResolver) <del> .addFixedVersionStrategy("fixed", "/**/*.js") <del> .addContentVersionStrategy("/**") <add> .addResolver(versionResolver) <ide> .addResolver(new CustomPathResourceResolver()) <add> .addTransformer(cachingTransformer) <add> 
.addTransformer(new AppCacheManifestTransfomer()) <ide> .setCachePeriod(3600); <ide> <ide> ResourceHttpRequestHandler handler = getHandler("/resources/**"); <ide> public void customResourceChain() throws Exception { <ide> List<ResourceTransformer> transformers = handler.getResourceTransformers(); <ide> assertThat(transformers, Matchers.hasSize(3)); <ide> assertThat(transformers.get(0), Matchers.equalTo(cachingTransformer)); <del> assertThat(transformers.get(1), Matchers.instanceOf(AppCacheManifestTransfomer.class)); <del> assertThat(transformers.get(2), Matchers.instanceOf(CssLinkResourceTransformer.class)); <add> assertThat(transformers.get(1), Matchers.instanceOf(CssLinkResourceTransformer.class)); <add> assertThat(transformers.get(2), Matchers.instanceOf(AppCacheManifestTransfomer.class)); <ide> } <ide> <ide> private ResourceHttpRequestHandler getHandler(String pathPattern) { <ide><path>spring-webmvc/src/test/java/org/springframework/web/servlet/resource/ResourceUrlProviderJavaConfigTests.java <ide> static class WebConfig extends WebMvcConfigurationSupport { <ide> public void addResourceHandlers(ResourceHandlerRegistry registry) { <ide> registry.addResourceHandler("/resources/**") <ide> .addResourceLocations("classpath:org/springframework/web/servlet/resource/test/") <del> .addContentVersionStrategy("/**"); <add> .addResolver(new VersionResourceResolver().addContentVersionStrategy("/**")); <ide> } <ide> } <ide>
4
Python
Python
fix description of dtype default in linspace
68ebc2bad1ad6a99e9939eff205a1a6d936cf4fd
<ide><path>numpy/core/function_base.py <ide> def linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None, <ide> If True, return (`samples`, `step`), where `step` is the spacing <ide> between samples. <ide> dtype : dtype, optional <del> The type of the output array. If `dtype` is not given, infer the data <del> type from the other input arguments. <add> The type of the output array. If `dtype` is not given, the data type <add> is inferred from `start` and `stop`. The inferred dtype will <add> never be an integer; `float` is chosen even if the arguments would <add> produce an array of integers. <ide> <ide> .. versionadded:: 1.9.0 <ide>
1
Go
Go
check typos and fix in daemon directory
cf4f5b47710b3875767f997fc2eb3386cb0b66c0
<ide><path>daemon/links/links_test.go <ide> func TestLinkPortRangeEnv(t *testing.T) { <ide> if env["DOCKER_ENV_PASSWORD"] != "gordon" { <ide> t.Fatalf("Expected gordon, got %s", env["DOCKER_ENV_PASSWORD"]) <ide> } <del> for i := range []int{6379, 6380, 6381} { <add> for _, i := range []int{6379, 6380, 6381} { <ide> tcpaddr := fmt.Sprintf("DOCKER_PORT_%d_TCP_ADDR", i) <del> tcpport := fmt.Sprintf("DOCKER_PORT_%d_TCP+PORT", i) <del> tcpproto := fmt.Sprintf("DOCKER_PORT_%d_TCP+PROTO", i) <add> tcpport := fmt.Sprintf("DOCKER_PORT_%d_TCP_PORT", i) <add> tcpproto := fmt.Sprintf("DOCKER_PORT_%d_TCP_PROTO", i) <ide> tcp := fmt.Sprintf("DOCKER_PORT_%d_TCP", i) <del> if env[tcpaddr] == "172.0.17.2" { <add> if env[tcpaddr] != "172.0.17.2" { <ide> t.Fatalf("Expected env %s = 172.0.17.2, got %s", tcpaddr, env[tcpaddr]) <ide> } <del> if env[tcpport] == fmt.Sprintf("%d", i) { <add> if env[tcpport] != fmt.Sprintf("%d", i) { <ide> t.Fatalf("Expected env %s = %d, got %s", tcpport, i, env[tcpport]) <ide> } <del> if env[tcpproto] == "tcp" { <add> if env[tcpproto] != "tcp" { <ide> t.Fatalf("Expected env %s = tcp, got %s", tcpproto, env[tcpproto]) <ide> } <del> if env[tcp] == fmt.Sprintf("tcp://172.0.17.2:%d", i) { <add> if env[tcp] != fmt.Sprintf("tcp://172.0.17.2:%d", i) { <ide> t.Fatalf("Expected env %s = tcp://172.0.17.2:%d, got %s", tcp, i, env[tcp]) <ide> } <ide> } <ide><path>daemon/logger/splunk/splunk_test.go <ide> func TestRawFormatWithLabels(t *testing.T) { <ide> t.Fatal(err) <ide> } else { <ide> if event != "containeriid a=b notjson" { <del> t.Fatalf("Unexpected event in message 1 %v", event) <add> t.Fatalf("Unexpected event in message 2 %v", event) <ide> } <ide> } <ide> <ide> func TestRawFormatWithoutTag(t *testing.T) { <ide> t.Fatal(err) <ide> } else { <ide> if event != "notjson" { <del> t.Fatalf("Unexpected event in message 1 %v", event) <add> t.Fatalf("Unexpected event in message 2 %v", event) <ide> } <ide> } <ide>
2
Ruby
Ruby
remove space in -isysroot
32744e174651c8892381ec27fbc640c61fdaf5f3
<ide><path>Library/Homebrew/extend/os/mac/extend/ENV/std.rb <ide> def remove_macosxsdk(version = MacOS.version) <ide> return unless (sdk = MacOS.sdk_path_if_needed(version)) <ide> <ide> delete("SDKROOT") <del> remove_from_cflags "-isysroot #{sdk}" <del> remove "CPPFLAGS", "-isysroot #{sdk}" <del> remove "LDFLAGS", "-isysroot #{sdk}" <add> remove_from_cflags "-isysroot#{sdk}" <add> remove "CPPFLAGS", "-isysroot#{sdk}" <add> remove "LDFLAGS", "-isysroot#{sdk}" <ide> if HOMEBREW_PREFIX.to_s == "/usr/local" <ide> delete("CMAKE_PREFIX_PATH") <ide> else <ide> def macosxsdk(version = MacOS.version) <ide> # Tell clang/gcc where system include's are: <ide> append_path "CPATH", "#{sdk}/usr/include" <ide> # The -isysroot is needed, too, because of the Frameworks <del> append_to_cflags "-isysroot #{sdk}" <del> append "CPPFLAGS", "-isysroot #{sdk}" <add> append_to_cflags "-isysroot#{sdk}" <add> append "CPPFLAGS", "-isysroot#{sdk}" <ide> # And the linker needs to find sdk/usr/lib <del> append "LDFLAGS", "-isysroot #{sdk}" <add> append "LDFLAGS", "-isysroot#{sdk}" <ide> # Needed to build cmake itself and perhaps some cmake projects: <ide> append_path "CMAKE_PREFIX_PATH", "#{sdk}/usr" <ide> append_path "CMAKE_FRAMEWORK_PATH", "#{sdk}/System/Library/Frameworks"
1
Java
Java
use real yoganodes in fabricuimanagertest
864cc00a6107fd8144f1c4ecaf5b35260d00cf7d
<ide><path>ReactAndroid/src/test/java/com/facebook/react/fabric/FabricUIManagerTest.java <ide> import com.facebook.react.fabric.FabricUIManager; <ide> import com.facebook.react.uimanager.ReactShadowNode; <ide> import com.facebook.react.uimanager.ReactShadowNodeImpl; <del>import com.facebook.react.uimanager.ReactYogaConfigProvider; <ide> import com.facebook.react.uimanager.Spacing; <ide> import com.facebook.react.uimanager.ThemedReactContext; <ide> import com.facebook.react.uimanager.ViewManager; <ide> import com.facebook.react.uimanager.ViewManagerRegistry; <del>import com.facebook.react.uimanager.YogaNodePool; <ide> import com.facebook.react.views.text.ReactRawTextManager; <ide> import com.facebook.react.views.text.ReactRawTextShadowNode; <ide> import com.facebook.react.views.text.ReactTextViewManager; <ide> import com.facebook.react.views.view.ReactViewManager; <ide> import com.facebook.testing.robolectric.v3.WithTestDefaultsRunner; <del>import com.facebook.yoga.YogaConfig; <del>import com.facebook.yoga.YogaMeasureFunction; <del>import com.facebook.yoga.YogaNode; <ide> import java.util.ArrayList; <ide> import java.util.Arrays; <ide> import java.util.Collections; <ide> import org.robolectric.RuntimeEnvironment; <ide> <ide> /** Tests {@link FabricUIManager} */ <del>@PrepareForTest({YogaNodePool.class, ReactYogaConfigProvider.class}) <ide> @RunWith(WithTestDefaultsRunner.class) <ide> public class FabricUIManagerTest { <ide> <ide> private FabricUIManager mFabricUIManager; <ide> private ThemedReactContext mThemedReactContext; <ide> private int mNextReactTag; <del> private MockYogaNodePool mMockYogaNodePool; <del> <del> private YogaMeasureFunction mLastYogaMeasureFunction; <ide> <ide> @Before <ide> public void setUp() throws Exception { <ide> public void setUp() throws Exception { <ide> ViewManagerRegistry viewManagerRegistry = new ViewManagerRegistry(viewManagers); <ide> <ide> mFabricUIManager = new FabricUIManager(reactContext, viewManagerRegistry); <del> <del> // Hack around Yoga until the UnsatisfiedLinkErrors are fixed t14964130 <del> PowerMockito.mockStatic(YogaNodePool.class, ReactYogaConfigProvider.class); <del> mMockYogaNodePool = new MockYogaNodePool(); <del> PowerMockito.when(YogaNodePool.get()).thenReturn(mMockYogaNodePool); <del> PowerMockito.when(ReactYogaConfigProvider.get()) <del> .thenAnswer( <del> new Answer<Object>() { <del> @Override <del> public Object answer(InvocationOnMock invocation) { <del> return mock(YogaConfig.class); <del> } <del> }); <ide> } <ide> <ide> @Test <ide> public void testCompleteRoot() { <ide> } <ide> <ide> /** <del> * Tests that cloned text nodes will reassign their yoga nodes' measure functions. <del> * <del> * <p>TODO(T26729515): Currently this tests the wrong implementation. It assumes that yoga nodes <del> * are reused across clones and simply checks the most recently assigned measure functions of the <del> * shared yoga node. When yoga node cloning is implemented, this needs to be changed to mock each <del> * cloned shadow nodes' yoga nodes and make the same assertions on each of their measure <del> * functions. 
<add> * Tests that cloned text nodes will not share measure functions <ide> */ <ide> @Test <ide> public void testTextMutableClone() { <ide> ReactRootView rootView = <ide> new ReactRootView(RuntimeEnvironment.application.getApplicationContext()); <ide> int rootTag = mFabricUIManager.addRootView(rootView); <ide> <del> final YogaNode yogaNode = mock(YogaNode.class); <del> <del> doAnswer( <del> new Answer() { <del> @Override <del> public Object answer(InvocationOnMock invocation) { <del> when(yogaNode.isMeasureDefined()).thenReturn(true); <del> when(yogaNode.clone()).thenReturn(yogaNode); <del> when(yogaNode.cloneWithNewChildren()).thenReturn(yogaNode); <del> mLastYogaMeasureFunction = (YogaMeasureFunction) invocation.getArguments()[0]; <del> return null; <del> } <del> }) <del> .when(yogaNode) <del> .setMeasureFunction(any(YogaMeasureFunction.class)); <del> <del> mMockYogaNodePool.add(yogaNode); <del> <ide> ReactShadowNode text = <ide> mFabricUIManager.createNode(0, ReactTextViewManager.REACT_CLASS, rootTag, null); <del> YogaMeasureFunction textMeasureFunction = mLastYogaMeasureFunction; <ide> assertThat(text.isMeasureDefined()).isTrue(); <ide> <ide> ReactShadowNode textCopy = text.mutableCopy(); <del> YogaMeasureFunction textCopyMeasureFunction = mLastYogaMeasureFunction; <ide> assertThat(textCopy.isMeasureDefined()).isTrue(); <del> assertThat(textCopyMeasureFunction).isNotSameAs(textMeasureFunction); <ide> <del> ReactShadowNode textCopyWithNewChildren = text.mutableCopyWithNewChildren(); <del> YogaMeasureFunction textCopyWithNewChildrenMeasureFunction = mLastYogaMeasureFunction; <del> assertThat(textCopyWithNewChildren.isMeasureDefined()).isTrue(); <del> assertThat(textCopyWithNewChildrenMeasureFunction).isNotSameAs(textMeasureFunction); <del> assertThat(textCopyWithNewChildrenMeasureFunction).isNotSameAs(textCopyMeasureFunction); <add> textCopy.setStyleWidth(200); <add> text.onBeforeLayout(); <add> text.calculateLayout(); <add> textCopy.onBeforeLayout(); <add> textCopy.calculateLayout(); <add> <add> assertThat(text.getLayoutWidth()).isNotEqualTo(textCopy.getLayoutWidth()); <ide> } <ide> <ide> /** <ide> private void assertSameFields(ReactShadowNode node1, ReactShadowNode node2) { <ide> assertThat(node1.getLayoutX()).isEqualTo(node2.getLayoutX()); <ide> assertThat(node1.getLayoutY()).isEqualTo(node2.getLayoutY()); <ide> for (int spacingType = Spacing.LEFT; spacingType <= Spacing.ALL; spacingType++) { <del> assertThat(node1.getPadding(spacingType)).isEqualTo(node2.getPadding(spacingType)); <ide> assertThat(node1.getStylePadding(spacingType)).isEqualTo(node2.getStylePadding(spacingType)); <ide> } <ide> assertThat(node1.getStyleWidth()).isEqualTo(node2.getStyleWidth()); <ide> private ReactShadowNode createViewNode() { <ide> node.setThemedContext(mThemedReactContext); <ide> return node; <ide> } <del> <del> private static class MockYogaNodePool extends ClearableSynchronizedPool<YogaNode> { <del> <del> private List<YogaNode> mMockYogaNodes; <del> <del> public MockYogaNodePool() { <del> super(1024); <del> mMockYogaNodes = new LinkedList<>(); <del> } <del> <del> public void add(YogaNode... 
nodes) { <del> Collections.addAll(mMockYogaNodes, nodes); <del> } <del> <del> @Override <del> public synchronized YogaNode acquire() { <del> if (!mMockYogaNodes.isEmpty()) { <del> return mMockYogaNodes.remove(0); <del> } <del> return createMockYogaNode(); <del> } <del> <del> private static YogaNode createMockYogaNode() { <del> final YogaNode yogaNode = mock(YogaNode.class); <del> when(yogaNode.clone()).thenReturn(yogaNode); <del> when(yogaNode.cloneWithNewChildren()).thenReturn(yogaNode); <del> doAnswer( <del> new Answer() { <del> @Override <del> public Object answer(InvocationOnMock invocation) { <del> when(yogaNode.isMeasureDefined()).thenReturn(true); <del> return null; <del> } <del> }) <del> .when(yogaNode) <del> .setMeasureFunction(any(YogaMeasureFunction.class)); <del> return yogaNode; <del> } <del> } <ide> }
1
Javascript
Javascript
reduce usage of public util
4c4e4f4bda7d4ca70ff8e9b8a2efbe1cac27abee
<ide><path>lib/_http_agent.js <ide> 'use strict'; <ide> <ide> const net = require('net'); <del>const util = require('util'); <ide> const EventEmitter = require('events'); <del>const debug = util.debuglog('http'); <add>const debug = require('internal/util/debuglog').debuglog('http'); <ide> const { async_id_symbol } = require('internal/async_hooks').symbols; <ide> <ide> // New Agent code. <ide><path>lib/_http_common.js <ide> const { <ide> readStop <ide> } = incoming; <ide> <del>const debug = require('util').debuglog('http'); <add>const debug = require('internal/util/debuglog').debuglog('http'); <ide> <ide> const kIncomingMessage = Symbol('IncomingMessage'); <ide> const kOnHeaders = HTTPParser.kOnHeaders | 0; <ide><path>lib/_http_outgoing.js <ide> <ide> const assert = require('internal/assert'); <ide> const Stream = require('stream'); <del>const util = require('util'); <ide> const internalUtil = require('internal/util'); <ide> const { outHeadersKey, utcDate } = require('internal/http'); <ide> const { Buffer } = require('buffer'); <ide> Object.setPrototypeOf(OutgoingMessage, Stream); <ide> <ide> <ide> Object.defineProperty(OutgoingMessage.prototype, '_headers', { <del> get: util.deprecate(function() { <add> get: internalUtil.deprecate(function() { <ide> return this.getHeaders(); <ide> }, 'OutgoingMessage.prototype._headers is deprecated', 'DEP0066'), <del> set: util.deprecate(function(val) { <add> set: internalUtil.deprecate(function(val) { <ide> if (val == null) { <ide> this[outHeadersKey] = null; <ide> } else if (typeof val === 'object') { <ide> Object.defineProperty(OutgoingMessage.prototype, '_headers', { <ide> }); <ide> <ide> Object.defineProperty(OutgoingMessage.prototype, '_headerNames', { <del> get: util.deprecate(function() { <add> get: internalUtil.deprecate(function() { <ide> const headers = this[outHeadersKey]; <ide> if (headers !== null) { <ide> const out = Object.create(null); <ide> Object.defineProperty(OutgoingMessage.prototype, '_headerNames', { <ide> } <ide> return null; <ide> }, 'OutgoingMessage.prototype._headerNames is deprecated', 'DEP0066'), <del> set: util.deprecate(function(val) { <add> set: internalUtil.deprecate(function(val) { <ide> if (typeof val === 'object' && val !== null) { <ide> const headers = this[outHeadersKey]; <ide> if (!headers) <ide><path>lib/_http_server.js <ide> <ide> 'use strict'; <ide> <del>const util = require('util'); <ide> const net = require('net'); <ide> const assert = require('internal/assert'); <ide> const { <ide> function Server(options, requestListener) { <ide> this.maxHeadersCount = null; <ide> this.headersTimeout = 40 * 1000; // 40 seconds <ide> } <del>util.inherits(Server, net.Server); <add>Object.setPrototypeOf(Server.prototype, net.Server.prototype); <add>Object.setPrototypeOf(Server, net.Server); <ide> <ide> <ide> Server.prototype.setTimeout = function setTimeout(msecs, callback) {
4
Javascript
Javascript
remove "pass" from progress plugin
96d91631f6cd88cb9093a9001e33cbd7e39633c6
<ide><path>lib/ProgressPlugin.js <ide> class ProgressPlugin { <ide> }; <ide> const numberOfHooks = Object.keys(hooks).length; <ide> Object.keys(hooks).forEach((name, idx) => { <del> let pass = 0; <ide> const title = hooks[name]; <ide> const percentage = idx / numberOfHooks * 0.25 + 0.7; <ide> compilation.hooks[name].intercept({ <ide> name: "ProgressPlugin", <ide> context: true, <ide> call: () => { <del> if(pass++ > 0) <del> handler(percentage, title, `pass ${pass}`); <del> else <del> handler(percentage, title); <add> handler(percentage, title); <ide> }, <ide> tap: (context, tap) => { <ide> if(context) { <del> context.reportProgress = (...args) => { <del> if(pass > 1) <del> handler(percentage, title, `pass ${pass}`, tap.name, ...args); <del> else <del> handler(percentage, title, tap.name, ...args); <add> // p is percentage from 0 to 1 <add> // args is any number of messages in a hierarchical matter <add> context.reportProgress = (p, ...args) => { <add> handler(percentage, title, tap.name, ...args); <ide> }; <ide> } <del> if(pass > 1) <del> handler(percentage, title, `pass ${pass}`, tap.name); <del> else <del> handler(percentage, title, tap.name); <add> handler(percentage, title, tap.name); <ide> } <ide> }); <ide> }); <ide> class ProgressPlugin { <ide> }, <ide> tap: (context, tap) => { <ide> if(context) { <del> context.reportProgress = (...args) => { <add> context.reportProgress = (p, ...args) => { <ide> handler(0.95, "emitting", tap.name, ...args); <ide> }; <ide> }
1
Javascript
Javascript
handle webcrypto generatekey() usages edge case
84db3e7b06979a388a65d8ebce2571554c2dadd6
<ide><path>lib/internal/crypto/webcrypto.js <ide> async function generateKey( <ide> algorithm = normalizeAlgorithm(algorithm); <ide> validateBoolean(extractable, 'extractable'); <ide> validateArray(keyUsages, 'keyUsages'); <del> if (keyUsages.length === 0) { <del> throw lazyDOMException( <del> 'Usages cannot be empty when creating a key', <del> 'SyntaxError'); <del> } <add> let result; <add> let resultType; <ide> switch (algorithm.name) { <ide> case 'RSASSA-PKCS1-v1_5': <ide> // Fall through <ide> case 'RSA-PSS': <ide> // Fall through <ide> case 'RSA-OAEP': <del> return lazyRequire('internal/crypto/rsa') <add> resultType = 'CryptoKeyPair'; <add> result = await lazyRequire('internal/crypto/rsa') <ide> .rsaKeyGenerate(algorithm, extractable, keyUsages); <add> break; <ide> case 'Ed25519': <ide> // Fall through <ide> case 'Ed448': <ide> // Fall through <ide> case 'X25519': <ide> // Fall through <ide> case 'X448': <del> return lazyRequire('internal/crypto/cfrg') <add> resultType = 'CryptoKeyPair'; <add> result = await lazyRequire('internal/crypto/cfrg') <ide> .cfrgGenerateKey(algorithm, extractable, keyUsages); <add> break; <ide> case 'ECDSA': <ide> // Fall through <ide> case 'ECDH': <del> return lazyRequire('internal/crypto/ec') <add> resultType = 'CryptoKeyPair'; <add> result = await lazyRequire('internal/crypto/ec') <ide> .ecGenerateKey(algorithm, extractable, keyUsages); <add> break; <ide> case 'HMAC': <del> return lazyRequire('internal/crypto/mac') <add> resultType = 'CryptoKey'; <add> result = await lazyRequire('internal/crypto/mac') <ide> .hmacGenerateKey(algorithm, extractable, keyUsages); <add> break; <ide> case 'AES-CTR': <ide> // Fall through <ide> case 'AES-CBC': <ide> // Fall through <ide> case 'AES-GCM': <ide> // Fall through <ide> case 'AES-KW': <del> return lazyRequire('internal/crypto/aes') <add> resultType = 'CryptoKey'; <add> result = await lazyRequire('internal/crypto/aes') <ide> .aesGenerateKey(algorithm, extractable, keyUsages); <add> break; <ide> default: <ide> throw lazyDOMException('Unrecognized name.'); <ide> } <add> <add> if ( <add> (resultType === 'CryptoKey' && <add> (result.type === 'secret' || result.type === 'private') && <add> result.usages.length === 0) || <add> (resultType === 'CryptoKeyPair' && result.privateKey.usages.length === 0) <add> ) { <add> throw lazyDOMException( <add> 'Usages cannot be empty when creating a key.', <add> 'SyntaxError'); <add> } <add> <add> return result; <ide> } <ide> <ide> async function deriveBits(algorithm, baseKey, length) { <ide><path>test/parallel/test-webcrypto-derivebits-ecdh.js <ide> async function prepareKeys() { <ide> { <ide> name: 'ECDSA', <ide> namedCurve: 'P-521' <del> }, false, ['verify']); <add> }, false, ['sign', 'verify']); <ide> <ide> await assert.rejects(subtle.deriveBits({ <ide> name: 'ECDH', <ide><path>test/parallel/test-webcrypto-derivekey-ecdh.js <ide> async function prepareKeys() { <ide> namedCurve: 'P-521' <ide> }, <ide> false, <del> ['verify']); <add> ['sign', 'verify']); <ide> <ide> await assert.rejects( <ide> subtle.deriveKey( <ide><path>test/parallel/test-webcrypto-keygen.js <ide> const allUsages = [ <ide> const vectors = { <ide> 'AES-CTR': { <ide> algorithm: { length: 256 }, <add> result: 'CryptoKey', <ide> usages: [ <ide> 'encrypt', <ide> 'decrypt', <ide> 'wrapKey', <ide> 'unwrapKey', <ide> ], <del> mandatoryUsages: [] <ide> }, <ide> 'AES-CBC': { <ide> algorithm: { length: 256 }, <add> result: 'CryptoKey', <ide> usages: [ <ide> 'encrypt', <ide> 'decrypt', <ide> 'wrapKey', <ide> 'unwrapKey', <ide> ], 
<del> mandatoryUsages: [] <ide> }, <ide> 'AES-GCM': { <ide> algorithm: { length: 256 }, <add> result: 'CryptoKey', <ide> usages: [ <ide> 'encrypt', <ide> 'decrypt', <ide> 'wrapKey', <ide> 'unwrapKey', <ide> ], <del> mandatoryUsages: [] <ide> }, <ide> 'AES-KW': { <ide> algorithm: { length: 256 }, <add> result: 'CryptoKey', <ide> usages: [ <ide> 'wrapKey', <ide> 'unwrapKey', <ide> ], <del> mandatoryUsages: [] <ide> }, <ide> 'HMAC': { <ide> algorithm: { length: 256, hash: 'SHA-256' }, <add> result: 'CryptoKey', <ide> usages: [ <ide> 'sign', <ide> 'verify', <ide> ], <del> mandatoryUsages: [] <ide> }, <ide> 'RSASSA-PKCS1-v1_5': { <ide> algorithm: { <ide> modulusLength: 1024, <ide> publicExponent: new Uint8Array([1, 0, 1]), <ide> hash: 'SHA-256' <ide> }, <add> result: 'CryptoKeyPair', <ide> usages: [ <ide> 'sign', <ide> 'verify', <ide> ], <del> mandatoryUsages: ['sign'], <ide> }, <ide> 'RSA-PSS': { <ide> algorithm: { <ide> modulusLength: 1024, <ide> publicExponent: new Uint8Array([1, 0, 1]), <ide> hash: 'SHA-256' <ide> }, <add> result: 'CryptoKeyPair', <ide> usages: [ <ide> 'sign', <ide> 'verify', <ide> ], <del> mandatoryUsages: ['sign'] <ide> }, <ide> 'RSA-OAEP': { <ide> algorithm: { <ide> modulusLength: 1024, <ide> publicExponent: new Uint8Array([1, 0, 1]), <ide> hash: 'SHA-256' <ide> }, <add> result: 'CryptoKeyPair', <ide> usages: [ <ide> 'encrypt', <ide> 'decrypt', <ide> 'wrapKey', <ide> 'unwrapKey', <ide> ], <del> mandatoryUsages: [ <del> 'decrypt', <del> 'unwrapKey', <del> ] <ide> }, <ide> 'ECDSA': { <ide> algorithm: { namedCurve: 'P-521' }, <add> result: 'CryptoKeyPair', <ide> usages: [ <ide> 'sign', <ide> 'verify', <ide> ], <del> mandatoryUsages: ['sign'] <ide> }, <ide> 'ECDH': { <ide> algorithm: { namedCurve: 'P-521' }, <add> result: 'CryptoKeyPair', <ide> usages: [ <ide> 'deriveKey', <ide> 'deriveBits', <ide> ], <del> mandatoryUsages: [ <del> 'deriveKey', <del> 'deriveBits', <del> ] <ide> }, <ide> 'Ed25519': { <add> result: 'CryptoKeyPair', <ide> usages: [ <ide> 'sign', <ide> 'verify', <ide> ], <del> mandatoryUsages: ['sign'] <ide> }, <ide> 'Ed448': { <add> result: 'CryptoKeyPair', <ide> usages: [ <ide> 'sign', <ide> 'verify', <ide> ], <del> mandatoryUsages: ['sign'] <ide> }, <ide> 'X25519': { <add> result: 'CryptoKeyPair', <ide> usages: [ <ide> 'deriveKey', <ide> 'deriveBits', <ide> ], <del> mandatoryUsages: [ <del> 'deriveKey', <del> 'deriveBits', <del> ] <ide> }, <ide> 'X448': { <add> result: 'CryptoKeyPair', <ide> usages: [ <ide> 'deriveKey', <ide> 'deriveBits', <ide> ], <del> mandatoryUsages: [ <del> 'deriveKey', <del> 'deriveBits', <del> ] <ide> }, <ide> }; <ide> <ide> const vectors = { <ide> []), <ide> { message: /Usages cannot be empty/ }); <ide> <add> // For CryptoKeyPair results the private key <add> // usages must not be empty. <add> // - ECDH(-like) algorithm key pairs only have private key usages <add> // - Signing algorithm key pairs may pass a non-empty array but <add> // with only a public key usage <add> if ( <add> vectors[name].result === 'CryptoKeyPair' && <add> vectors[name].usages.includes('verify') <add> ) { <add> await assert.rejects( <add> subtle.generateKey( <add> { <add> name, ...vectors[name].algorithm <add> }, <add> true, <add> ['verify']), <add> { message: /Usages cannot be empty/ }); <add> } <add> <ide> const invalidUsages = []; <ide> allUsages.forEach((usage) => { <ide> if (!vectors[name].usages.includes(usage))
4
Python
Python
fix method documentation of function sort()
050181e8d660d981d8f4a6b870d1859bf617a414
<ide><path>numpy/ma/core.py <ide> def argsort(a, axis=np._NoValue, kind=None, order=None, endwith=True, fill_value <ide> argsort.__doc__ = MaskedArray.argsort.__doc__ <ide> <ide> def sort(a, axis=-1, kind=None, order=None, endwith=True, fill_value=None): <del> "Function version of the eponymous method." <add> """ <add> Return a sorted copy of the masked array. <add> <add> Equivalent to creating a copy of the array <add> and applying the MaskedArray ``sort()`` method. <add> <add> Refer to ``MaskedArray.sort`` for the full documentation <add> <add> See Also <add> -------- <add> MaskedArray.sort : equivalent method <add> """ <ide> a = np.array(a, copy=True, subok=True) <ide> if axis is None: <ide> a = a.flatten() <ide> def sort(a, axis=-1, kind=None, order=None, endwith=True, fill_value=None): <ide> else: <ide> a.sort(axis=axis, kind=kind, order=order) <ide> return a <del>sort.__doc__ = MaskedArray.sort.__doc__ <ide> <ide> <ide> def compressed(x):
1
Text
Text
fix broken link to 1.7 hub api
09742bcd692b2594800631e5c5c16d76dcf0e9f2
<ide><path>docs/reference/api/docker-io_api.md <ide> weight = 99 <ide> <ide> # Docker Hub API <ide> <del>This API is deprecated as of 1.7. To view the old version, see the [Docker Hub API](docker-io_api.md) in the 1.7 documentation. <add>This API is deprecated as of 1.7. To view the old version, see the [Docker Hub <add>API](https://docs.docker.com/v1.7/docker/reference/api/docker-io_api/) in the 1.7 documentation. <ide>
1
PHP
PHP
apply styleci fixes
acfdc3e5b90a43eeede56f496356009fa87bdee8
<ide><path>src/Illuminate/Foundation/Validation/ValidationException.php <ide> <ide> class ValidationException extends Exception <ide> { <del> /** <del> * The validator instance. <del> * <del> * @var \Illuminate\Validation\Validator <del> */ <del> public $validator; <add> /** <add> * The validator instance. <add> * <add> * @var \Illuminate\Validation\Validator <add> */ <add> public $validator; <ide> <del> /** <del> * The recommended response to send to the client. <del> * <del> * @var \Illuminate\Http\Response|null <del> */ <del> public $response; <add> /** <add> * The recommended response to send to the client. <add> * <add> * @var \Illuminate\Http\Response|null <add> */ <add> public $response; <ide> <del> /** <del> * Create a new exception instance. <del> * <del> * @param \Illuminate\Validation\Validator $validator <del> * @param \Illuminate\Http\Response $response <del> * @return void <del> */ <del> public function __construct($validator, $response = null) <del> { <del> parent::__construct("The given data failed to pass validation."); <add> /** <add> * Create a new exception instance. <add> * <add> * @param \Illuminate\Validation\Validator $validator <add> * @param \Illuminate\Http\Response $response <add> * @return void <add> */ <add> public function __construct($validator, $response = null) <add> { <add> parent::__construct('The given data failed to pass validation.'); <ide> <del> $this->response = $response; <del> $this->validator = $validator; <del> } <add> $this->response = $response; <add> $this->validator = $validator; <add> } <ide> }
1
PHP
PHP
remove an invalid option for json_encode
11f4590a55e8b9b6c921bff6cb4ebd25c7680585
<ide><path>src/View/JsonView.php <ide> protected function _serialize($serialize) <ide> $data = $this->_dataToSerialize($serialize); <ide> <ide> $jsonOptions = JSON_HEX_TAG | JSON_HEX_APOS | JSON_HEX_AMP | JSON_HEX_QUOT | <del> JSON_ERROR_INF_OR_NAN | JSON_PARTIAL_OUTPUT_ON_ERROR; <add> JSON_PARTIAL_OUTPUT_ON_ERROR; <ide> <ide> if (isset($this->viewVars['_jsonOptions'])) { <ide> if ($this->viewVars['_jsonOptions'] === false) {
1
Javascript
Javascript
add getstats again - remove breaking change
b863851ce969317688799b754131e3546206c7ad
<ide><path>lib/Compilation.js <ide> const ModuleTemplate = require("./ModuleTemplate"); <ide> const Dependency = require("./Dependency"); <ide> const ChunkRenderError = require("./ChunkRenderError"); <ide> const CachedSource = require("webpack-sources").CachedSource; <add>const Stats = require("./Stats"); <ide> <ide> function byId(a, b) { <ide> if(a.id < b.id) return -1; <ide> class Compilation extends Tapable { <ide> this.dependencyTemplates = new Map(); <ide> } <ide> <add> getStats() { <add> return new Stats(this); <add> } <add> <ide> templatesPlugin(name, fn) { <ide> this.mainTemplate.plugin(name, fn); <ide> this.chunkTemplate.plugin(name, fn);
1
Ruby
Ruby
fix indentation issuing warning
9e7bcaa9b394277c3549d706b68b7c9372432638
<ide><path>actionpack/lib/action_dispatch/journey/nfa/transition_table.rb <ide> def inverted <ide> <ide> @inverted <ide> end <del> end <add> end <ide> end <ide> end <ide> end
1
Javascript
Javascript
log an error message to console as well
e8e2370ed102ae6994739b178744fcf37266b31d
<ide><path>src/auto-update-manager.js <ide> export default class AutoUpdateManager { <ide> this.emitter.emit('update-not-available') <ide> }), <ide> applicationDelegate.onUpdateError((message) => { <add> console.error(message) <ide> this.emitter.emit('update-error', message) <ide> }) <ide> )
1
Text
Text
update explamples to spanish
d3d02afa8d65944ee0330764ed891cbb22aa723a
<ide><path>guide/spanish/ruby/ruby-comments/index.md <ide> --- <ide> title: Ruby Comments <del>localeTitle: Rubí Comentarios <add>localeTitle: Ruby Comentarios <ide> --- <del># Rubí Comentarios <add># Ruby Comentarios <ide> <ide> Los comentarios son líneas de anotación dentro del código que se ignoran en tiempo de ejecución (lo que significa que son visibles dentro del código fuente pero no se imprimen cuando ejecuta el programa). <ide> <ide> En Ruby, un comentario de una sola línea comienza con el carácter `#` y se extiende hasta el final de la línea. El comentario puede estar en su propia línea o siguiendo el código. <ide> <ide> ```Ruby <del>puts "Learning to code is fun!" <add>puts "¡Aprender a escribir código es divertido!" <ide> <del> # I am a single line comment. <add> # Soy un comentario de una sola línea. <ide> <del> puts "Ruby is a great language!" # Me too - I am a trailing comment. <add> puts "¡Ruby es un gran lenguaje!" # Yo también - Soy un código de una línea. <ide> ``` <ide> <ide> Cuando se ejecuta, el programa anterior produce lo siguiente: <ide> ``` <del>Learning to code is fun! <del> Ruby is a great language! <add>¡Aprender a escribir código es divertido! <add>¡Ruby es un gran lenguaje! <ide> ``` <ide> <ide> Puede hacer comentarios de varias líneas colocando los comentarios entre `=begin` y `=end` . `=begin` y `=end` deben comenzar al principio de la línea y `=end` debe estar en una línea propia. <ide> <ide> ```ruby <ide> =begin <del> I am a multi-line comment <del> and I can be as long as I please. <del> See, still going! <add> Soy un comentario multi línea <add> y puedo ser tan largo como se me plazca. <add> ¡Ves, todavía sigo! <ide> =end <ide> <del> puts "Hello World!" <add> puts "¡Hola mundo!" <ide> <del> =begin It's ok to start the comment on the same <del> line as "=begin" (though it's more readable if <del> you don't) but you can't put a space or any <del> text before "=begin" or "=end" and you can't put <del> anything on the same line after "=end". <add> =begin Está bien empezar el comentario en la misma <add> línea que "=begin" (aunque es más comprensible si <add> no lo haces) pero no puedes poner un espacio o <add> texto antes que "=begin" o "=end" y no puedes poner <add> nada nada en la misma línea después de "=end". <ide> =end <ide> ``` <ide> <ide> Cuando se ejecuta, el programa anterior produce lo siguiente: <ide> ``` <del>Hello World! <add>¡Hola mundo! <ide> <del>``` <ide>\ No newline at end of file <add>```
1
Go
Go
fix some output information for container test
92ee5a5d3aaefe2fc3b800de67d2ff0acd6a8aae
<ide><path>container/memory_store_test.go <ide> func TestListContainers(t *testing.T) { <ide> t.Fatalf("expected list size 2, got %v", len(list)) <ide> } <ide> if list[0].ID != "id2" { <del> t.Fatalf("expected older container to be first, got %v", list[0].ID) <add> t.Fatalf("expected id2, got %v", list[0].ID) <ide> } <ide> } <ide> <ide> func TestApplyAllContainer(t *testing.T) { <ide> t.Fatal("expected container to not be nil") <ide> } <ide> if cont.ID != "newID" { <del> t.Fatalf("expected newID, got %v", cont) <add> t.Fatalf("expected newID, got %v", cont.ID) <ide> } <ide> } <ide><path>container/state_test.go <ide> func TestStateTimeoutWait(t *testing.T) { <ide> }() <ide> select { <ide> case <-time.After(200 * time.Millisecond): <del> t.Fatal("Stop callback doesn't fire in 100 milliseconds") <add> t.Fatal("Stop callback doesn't fire in 200 milliseconds") <ide> case <-stopped: <ide> t.Log("Stop callback fired") <ide> }
2
Ruby
Ruby
add `tap#contents` methods
785750ee6364b14e47dfac352372cb51a83ee9c9
<ide><path>Library/Homebrew/cmd/tap-info.rb <ide> def print_tap_info(taps) <ide> if tap.installed? <ide> info += tap.pinned? ? "pinned" : "unpinned" <ide> info += ", private" if tap.private? <del> if (formula_count = tap.formula_files.size).positive? <del> info += ", #{Formatter.pluralize(formula_count, "formula")}" <add> if (contents = tap.contents).empty? <add> info += ", no commands/casks/formulae" <add> else <add> info += ", #{contents.join(", ")}" <ide> end <del> if (command_count = tap.command_files.size).positive? <del> info += ", #{Formatter.pluralize(command_count, "command")}" <del> end <del> info += ", no formulae/commands" if (formula_count + command_count).zero? <ide> info += "\n#{tap.path} (#{tap.path.abv})" <ide> info += "\nFrom: #{tap.remote.nil? ? "N/A" : tap.remote}" <ide> else <ide><path>Library/Homebrew/tap.rb <ide> def install(options = {}) <ide> <ide> link_completions_and_manpages <ide> <del> casks = Formatter.pluralize(cask_files.count, "cask") <del> formulae = Formatter.pluralize(formula_files.count, "formula") <del> puts "Tapped #{formulae} and #{casks} (#{path.abv})." unless quiet <add> formatted_contents = Formatter.enumeration(*contents)&.prepend(" ") <add> puts "Tapped#{formatted_contents} (#{path.abv})." unless quiet <ide> Descriptions.cache_formulae(formula_names) <ide> <ide> return if options[:clone_target] <ide> def uninstall <ide> puts "Untapping #{name}..." <ide> <ide> abv = path.abv <del> casks = Formatter.pluralize(cask_files.count, "cask") <del> formulae = Formatter.pluralize(formula_files.count, "formula") <add> formatted_contents = Formatter.enumeration(*contents)&.prepend(" ") <ide> <ide> unpin if pinned? <ide> Descriptions.uncache_formulae(formula_names) <ide> Utils::Link.unlink_manpages(path) <ide> Utils::Link.unlink_completions(path) <ide> path.rmtree <ide> path.parent.rmdir_if_possible <del> puts "Untapped #{formulae} and #{casks} (#{abv})." <add> puts "Untapped#{formatted_contents} (#{abv})." <ide> clear_cache <ide> end <ide> <ide> def cask_dir <ide> @cask_dir ||= path/"Casks" <ide> end <ide> <add> def contents <add> contents = [] <add> <add> if (command_count = command_files.count).positive? <add> contents << Formatter.pluralize(command_count, "command") <add> end <add> <add> if (cask_count = cask_files.count).positive? <add> contents << Formatter.pluralize(cask_count, "cask") <add> end <add> <add> if (formula_count = formula_files.count).positive? <add> contents << Formatter.pluralize(formula_count, "formula") <add> end <add> <add> contents <add> end <add> <ide> # an array of all {Formula} files of this {Tap}. <ide> def formula_files <ide> @formula_files ||= if formula_dir.directory? <ide> def alias_reverse_table <ide> <ide> # an array of all commands files of this {Tap}. <ide> def command_files <del> @command_files ||= Pathname.glob("#{path}/cmd/brew-*").select(&:executable?) <add> @command_files ||= Pathname.glob("#{path}/cmd/brew{,cask}-*") <add> .select { |file| file.executable? || file.extname == ".rb" } <ide> end <ide> <ide> # path to the pin record for this {Tap}.
2
Text
Text
fix broken link in collaborator_guide.md
51d8fd3d4e69ec602f038c8ecae6c08c5dad8d40
<ide><path>COLLABORATOR_GUIDE.md <ide> Collaborators or additional evidence that the issue has relevance, the <ide> issue may be closed. Remember that issues can always be re-opened if <ide> necessary. <ide> <del>[**See "Who to CC in issues"**](./onboarding-extras.md#who-to-cc-in-issues) <add>[**See "Who to CC in issues"**](./doc/onboarding-extras.md#who-to-cc-in-issues) <ide> <ide> ## Accepting Modifications <ide>
1
PHP
PHP
deprecate the read side of join() and from()
a1ab1b5c62810e9872717560dc24f479dcaaa245
<ide><path>src/Database/Query.php <ide> public function modifier($modifiers, $overwrite = false) <ide> public function from($tables = [], $overwrite = false) <ide> { <ide> if (empty($tables)) { <add> deprecationWarning('Using Query::from() to read state is deprecated. Use clause("from") instead.'); <add> <ide> return $this->_parts['from']; <ide> } <ide> <ide> public function from($tables = [], $overwrite = false) <ide> public function join($tables = null, $types = [], $overwrite = false) <ide> { <ide> if ($tables === null) { <add> deprecationWarning('Using Query::join() to read state is deprecated. Use clause("join") instead.'); <add> <ide> return $this->_parts['join']; <ide> } <ide> <ide><path>src/ORM/Association/BelongsToMany.php <ide> public function find($type = null, array $options = []) <ide> protected function _appendJunctionJoin($query, $conditions) <ide> { <ide> $name = $this->_junctionAssociationName(); <del> $joins = $query->join(); <add> $joins = $query->clause('join'); <ide> $matching = [ <ide> $name => [ <ide> 'table' => $this->junction()->getTable(), <ide><path>tests/TestCase/Database/QueryTest.php <ide> public function testRemoveJoin() <ide> 'type' => 'INNER', <ide> 'conditions' => ['articles.author_id = authors.id'] <ide> ]]); <del> $this->assertArrayHasKey('authors', $query->join()); <add> $this->assertArrayHasKey('authors', $query->clause('join')); <ide> <ide> $this->assertSame($query, $query->removeJoin('authors')); <del> $this->assertArrayNotHasKey('authors', $query->join()); <add> $this->assertArrayNotHasKey('authors', $query->clause('join')); <add> } <add> <add> /** <add> * Test join read mode <add> * <add> * @deprecated <add> * @return void <add> */ <add> public function testJoinReadMode() <add> { <add> $this->loadFixtures('Articles'); <add> $query = new Query($this->connection); <add> $query->select(['id', 'title']) <add> ->from('articles') <add> ->join(['authors' => [ <add> 'type' => 'INNER', <add> 'conditions' => ['articles.author_id = authors.id'] <add> ]]); <add> <add> $this->deprecated(function () use ($query) { <add> $this->assertArrayHasKey('authors', $query->join()); <add> }); <ide> } <ide> <ide> /**
3
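The CakePHP change keeps `from()`/`join()` working as combined getters/setters but emits a deprecation warning on the read path, steering callers toward `clause()`. A rough Python sketch of that pattern, not the actual CakePHP code; the names `Query`, `from_`, and `clause` are illustrative stand-ins:

```python
import warnings

warnings.simplefilter("always", DeprecationWarning)  # make the warning visible when run as a script

class Query:
    """Toy stand-in for the ORM query object; from_() mirrors the combined getter/setter."""

    def __init__(self):
        self._parts = {"from": []}

    def from_(self, tables=None):
        if tables is None:
            # Read mode: still works, but now warns and points at the replacement API.
            warnings.warn(
                'Using Query.from_() to read state is deprecated. Use clause("from") instead.',
                DeprecationWarning,
                stacklevel=2,
            )
            return self._parts["from"]
        self._parts["from"] = list(tables)  # write mode stays supported
        return self

    def clause(self, name):
        return self._parts[name]

q = Query().from_(["articles"])
print(q.clause("from"))  # preferred read path: ['articles']
print(q.from_())         # deprecated read path: warns, then returns ['articles']
```

The internal call site in `BelongsToMany` is switched to the non-deprecated accessor in the same commit, which is the usual companion step when soft-deprecating a read mode.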
Text
Text
fix initial example in entityruler api docs
d2c474cbb76c3f680660d60da866903ce4893756
<ide><path>website/docs/api/entityruler.md <ide> be a token pattern (list) or a phrase pattern (string). For example: <ide> > <ide> > ```python <ide> > # Construction via create_pipe <del>> ruler = nlp.create_pipe("entityruler") <add>> ruler = nlp.create_pipe("entity_ruler") <ide> > <ide> > # Construction from class <ide> > from spacy.pipeline import EntityRuler
1
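The one-character docs fix matters because the pipe factory is registered as `"entity_ruler"`, not `"entityruler"`. A small usage sketch, assuming spaCy v2.x (the API current when this commit landed; spaCy v3 adds the component via `nlp.add_pipe("entity_ruler")` instead):

```python
import spacy

nlp = spacy.blank("en")

# Construction via create_pipe -- the registered factory name is "entity_ruler"
ruler = nlp.create_pipe("entity_ruler")
ruler.add_patterns([{"label": "ORG", "pattern": "Apple"}])
nlp.add_pipe(ruler)

doc = nlp("Apple is opening its first big office in San Francisco.")
print([(ent.text, ent.label_) for ent in doc.ents])  # [('Apple', 'ORG')]
```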
Java
Java
fix warnings and polish localsessionfactorybuilder
5360bd899d68bf01ec916c429296735b34e172c3
<ide><path>org.springframework.orm/src/main/java/org/springframework/orm/hibernate4/LocalSessionFactoryBuilder.java <ide> * adding {@link SpringSessionContext} as a default and providing convenient ways <ide> * to specify a DataSource and an application class loader. <ide> * <del> * <p>This is designed for programmatic use, e.g. in <code>@Bean</code> factory methods. <add> * <p>This is designed for programmatic use, e.g. in {@code @Bean} factory methods. <ide> * Consider using {@link LocalSessionFactoryBean} for XML bean definition files. <ide> * <ide> * @author Juergen Hoeller <ide> * @since 3.1 <ide> * @see LocalSessionFactoryBean <ide> */ <add>@SuppressWarnings("serial") <ide> public class LocalSessionFactoryBuilder extends Configuration { <ide> <ide> private static final String RESOURCE_PATTERN = "/**/*.class"; <ide> public LocalSessionFactoryBuilder(DataSource dataSource, ResourceLoader resource <ide> * @see #addAnnotatedClass <ide> * @see #scanPackages <ide> */ <del> public LocalSessionFactoryBuilder addAnnotatedClasses(Class... annotatedClasses) { <add> public LocalSessionFactoryBuilder addAnnotatedClasses(Class<?>... annotatedClasses) { <ide> for (Class<?> annotatedClass : annotatedClasses) { <ide> ReflectionUtils.invokeMethod(addAnnotatedClassMethod, this, annotatedClass); <ide> } <ide> public LocalSessionFactoryBuilder addPackages(String... annotatedPackages) { <ide> <ide> /** <ide> * Perform Spring-based scanning for entity classes, registering them <del> * as annotated classes with this <code>Configuration</code>. <add> * as annotated classes with this {@code Configuration}. <ide> * @param packagesToScan one or more Java package names <ide> * @throws HibernateException if scanning fails for any reason <ide> */ <ide> private boolean matchesFilter(MetadataReader reader, MetadataReaderFactory reade <ide> <ide> <ide> /** <del> * Build the <code>SessionFactory</code>. <add> * Build the {@code SessionFactory}. <ide> */ <ide> @Override <ide> @SuppressWarnings("deprecation")
1
Javascript
Javascript
upgrade dedupeplugin to es6
771c6b287702bd15a7cce8dd77aed119f76f0dd6
<ide><path>lib/optimize/DedupePlugin.js <ide> MIT License http://www.opensource.org/licenses/mit-license.php <ide> Author Tobias Koppers @sokra <ide> */ <del>function DedupePlugin() {} <del>module.exports = DedupePlugin; <add>"use strict"; <add> <add>class DedupePlugin { <add> apply(compiler) { <add> compiler.plugin("compilation", (compilation) => { <add> compilation.warnings.push(new Error("DedupePlugin: This plugin was removed from webpack. remove it from configuration.")); <add> }); <add> } <add>} <ide> <del>DedupePlugin.prototype.apply = function(compiler) { <del> compiler.plugin("compilation", function(compilation) { <del> compilation.warnings.push(new Error("DedupePlugin: This plugin was removed from webpack. remove it from configuration.")); <del> }); <del>}; <add>module.exports = DedupePlugin;
1
Ruby
Ruby
use array arithmetic rather than create sets
6e14feb978434802e7a46b26d99d64e31f545fe2
<ide><path>activerecord/lib/active_record/associations/association_collection.rb <ide> def replace(other_array) <ide> other_array.each { |val| raise_on_type_mismatch(val) } <ide> <ide> load_target <del> other = other_array.size < 100 ? other_array : other_array.to_set <del> current = @target.size < 100 ? @target : @target.to_set <ide> <ide> transaction do <del> delete(@target.select { |v| !other.include?(v) }) <del> concat(other_array.select { |v| !current.include?(v) }) <add> delete(@target - other_array) <add> concat(other_array - @target) <ide> end <ide> end <ide>
1
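The Rails change leans on Ruby's `Array#-` to do the filtering directly instead of first materializing sets. A Python sketch of the before/after logic, with illustrative variable names; Python lists have no `-` operator, so a comprehension plays the role of Ruby's array difference:

```python
current = ["a", "b", "c"]   # records currently in the association target
desired = ["b", "c", "d"]   # records the caller wants after replace()

# Old approach in spirit: build explicit lookup structures first.
desired_lookup = set(desired)
current_lookup = set(current)
to_delete_old = [v for v in current if v not in desired_lookup]
to_add_old = [v for v in desired if v not in current_lookup]

# New approach in spirit: plain "array arithmetic" -- current minus desired,
# desired minus current (Ruby: @target - other_array, other_array - @target).
to_delete = [v for v in current if v not in desired]
to_add = [v for v in desired if v not in current]

assert to_delete == to_delete_old == ["a"]
assert to_add == to_add_old == ["d"]
```

Ruby's `Array#-` performs the membership filtering itself, which is why the hand-rolled set conversion and the size-based heuristic in the old code could be dropped without changing the result.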
Python
Python
apply `raise` fixes. closes
3655b732bd08022dab8498b44191d6c4049bc7a8
<ide><path>doc/numpybook/comparison/weave/filter.py <ide> <ide> def filter(a): <ide> if a.ndim != 2: <del> raise ValueError, "a must be 2-d" <add> raise ValueError("a must be 2-d") <ide> code = r""" <ide> int i,j; <ide> for(i=1;i<Na[0]-1;i++) { <ide><path>doc/numpybook/comparison/weave/inline.py <ide> <ide> def example1(a): <ide> if not isinstance(a, list): <del> raise ValueError, "argument must be a list" <add> raise ValueError("argument must be a list") <ide> code = r""" <ide> int i; <ide> py::tuple results(2); <ide> def example1(a): <ide> <ide> def arr(a): <ide> if a.ndim != 2: <del> raise ValueError, "a must be 2-d" <add> raise ValueError("a must be 2-d") <ide> code = r""" <ide> int i,j; <ide> for(i=1;i<Na[0]-1;i++) { <ide><path>numpy/core/code_generators/generate_numpy_api.py <ide> def do_generate_api(targets, sources): <ide> multiarray_api_dict[name] = TypeApi(name, index, 'PyTypeObject', api_name) <ide> <ide> if len(multiarray_api_dict) != len(multiarray_api_index): <del> raise AssertionError, "Multiarray API size mismatch %d %d" % \ <del> (len(multiarray_api_dict), len(multiarray_api_index)) <add> raise AssertionError("Multiarray API size mismatch %d %d" % <add> (len(multiarray_api_dict), len(multiarray_api_index))) <ide> <ide> extension_list = [] <ide> for name, index in genapi.order_dict(multiarray_api_index): <ide><path>numpy/core/records.py <ide> def __setattr__(self, attr, val): <ide> fielddict = ndarray.__getattribute__(self, 'dtype').fields or {} <ide> if attr not in fielddict: <ide> exctype, value = sys.exc_info()[:2] <del> raise exctype, value <add> raise exctype(value) <ide> else: <ide> fielddict = ndarray.__getattribute__(self, 'dtype').fields or {} <ide> if attr not in fielddict: <ide><path>numpy/f2py/auxfuncs.py <ide> def __init__(self,mess): <ide> self.mess = mess <ide> def __call__(self,var): <ide> mess = '\n\n var = %s\n Message: %s\n' % (var,self.mess) <del> raise F2PYError,mess <add> raise F2PYError(mess) <ide> <ide> def l_and(*f): <ide> l,l2='lambda v',[] <ide><path>numpy/lib/polynomial.py <ide> def polyfit(x, y, deg, rcond=None, full=False, w=None, cov=False): <ide> if w is not None: <ide> w = NX.asarray(w) + 0.0 <ide> if w.ndim != 1: <del> raise TypeError, "expected a 1-d array for weights" <add> raise TypeError("expected a 1-d array for weights") <ide> if w.shape[0] != y.shape[0] : <del> raise TypeError, "expected w and y to have the same length" <add> raise TypeError("expected w and y to have the same length") <ide> lhs *= w[:, NX.newaxis] <ide> if rhs.ndim == 2: <ide> rhs *= w[:, NX.newaxis] <ide><path>numpy/ma/extras.py <ide> def polyfit(x, y, deg, rcond=None, full=False, w=None, cov=False): <ide> if w is not None: <ide> w = asarray(w) <ide> if w.ndim != 1: <del> raise TypeError, "expected a 1-d array for weights" <add> raise TypeError("expected a 1-d array for weights") <ide> if w.shape[0] != y.shape[0] : <del> raise TypeError, "expected w and y to have the same length" <add> raise TypeError("expected w and y to have the same length") <ide> m = mask_or(m, getmask(w)) <ide> <ide> if m is not nomask: <ide><path>numpy/ma/mrecords.py <ide> def __setattr__(self, attr, val): <ide> optinfo = ndarray.__getattribute__(self, '_optinfo') or {} <ide> if not (attr in fielddict or attr in optinfo): <ide> exctype, value = sys.exc_info()[:2] <del> raise exctype, value <add> raise exctype(value) <ide> else: <ide> # Get the list of names ...... 
<ide> fielddict = ndarray.__getattribute__(self, 'dtype').fields or {} <ide><path>numpy/oldnumeric/arrayfns.py <ide> def array_set(vals1, indices, vals2): <ide> vals1 = asarray(vals1) <ide> vals2 = asarray(vals2) <ide> if vals1.ndim != vals2.ndim or vals1.ndim < 1: <del> raise error, "vals1 and vals2 must have same number of dimensions (>=1)" <add> raise error("vals1 and vals2 must have same number of dimensions (>=1)") <ide> vals1[indices] = vals2 <ide> <ide> from numpy import digitize <ide> def interp(y, x, z, typ=None): <ide> if typ == 'f': <ide> return res.astype('f') <ide> <del> raise error, "incompatible typecode" <add> raise error("incompatible typecode") <ide> <ide> def nz(x): <ide> x = asarray(x,dtype=np.ubyte) <ide><path>numpy/oldnumeric/random_array.py <ide> def test(): <ide> mt.set_state(obj) <ide> obj2 = mt.get_state() <ide> if (obj2[1] - obj[1]).any(): <del> raise SystemExit, "Failed seed test." <add> raise SystemExit("Failed seed test.") <ide> print "First random number is", random() <ide> print "Average of 10000 random numbers is", np.sum(random(10000),axis=0)/10000. <ide> x = random([10,1000]) <ide> if len(x.shape) != 2 or x.shape[0] != 10 or x.shape[1] != 1000: <del> raise SystemExit, "random returned wrong shape" <add> raise SystemExit("random returned wrong shape") <ide> x.shape = (10000,) <ide> print "Average of 100 by 100 random numbers is", np.sum(x,axis=0)/10000. <ide> y = uniform(0.5,0.6, (1000,10)) <ide> if len(y.shape) !=2 or y.shape[0] != 1000 or y.shape[1] != 10: <del> raise SystemExit, "uniform returned wrong shape" <add> raise SystemExit("uniform returned wrong shape") <ide> y.shape = (10000,) <ide> if np.minimum.reduce(y) <= 0.5 or np.maximum.reduce(y) >= 0.6: <del> raise SystemExit, "uniform returned out of desired range" <add> raise SystemExit("uniform returned out of desired range") <ide> print "randint(1, 10, shape=[50])" <ide> print randint(1, 10, shape=[50]) <ide> print "permutation(10)", permutation(10) <ide> def test(): <ide> s = 3.0 <ide> x = normal(2.0, s, [10, 1000]) <ide> if len(x.shape) != 2 or x.shape[0] != 10 or x.shape[1] != 1000: <del> raise SystemExit, "standard_normal returned wrong shape" <add> raise SystemExit("standard_normal returned wrong shape") <ide> x.shape = (10000,) <ide> mean_var_test(x, "normally distributed numbers with mean 2 and variance %f"%(s**2,), 2, s**2, 0) <ide> x = exponential(3, 10000) <ide> mean_var_test(x, "random numbers exponentially distributed with mean %f"%(s,), s, s**2, 2) <ide> x = multivariate_normal(np.array([10,20]), np.array(([1,2],[2,4]))) <ide> print "\nA multivariate normal", x <del> if x.shape != (2,): raise SystemExit, "multivariate_normal returned wrong shape" <add> if x.shape != (2,): raise SystemExit("multivariate_normal returned wrong shape") <ide> x = multivariate_normal(np.array([10,20]), np.array([[1,2],[2,4]]), [4,3]) <ide> print "A 4x3x2 array containing multivariate normals" <ide> print x <del> if x.shape != (4,3,2): raise SystemExit, "multivariate_normal returned wrong shape" <add> if x.shape != (4,3,2): raise SystemExit("multivariate_normal returned wrong shape") <ide> x = multivariate_normal(np.array([-100,0,100]), np.array([[3,2,1],[2,2,1],[1,1,1]]), 10000) <ide> x_mean = np.sum(x,axis=0)/10000. 
<ide> print "Average of 10000 multivariate normals with mean [-100,0,100]" <ide><path>numpy/oldnumeric/rng.py <ide> def _sample(self, rng, n): <ide> class ExponentialDistribution(Distribution): <ide> def __init__(self, lambda_): <ide> if (lambda_ <= 0): <del> raise error, "parameter must be positive" <add> raise error("parameter must be positive") <ide> Distribution.__init__(self, 'exponential', lambda_) <ide> <ide> def density(x): <ide> def __init__(self, m, s): <ide> m = float(m) <ide> s = float(s) <ide> if (s <= 0): <del> raise error, "standard deviation must be positive" <add> raise error("standard deviation must be positive") <ide> Distribution.__init__(self, 'lognormal', m, s) <ide> sn = math.log(1.0+s*s/(m*m)); <ide> self._mn = math.log(m)-0.5*sn <ide> def __init__(self, m, s): <ide> m = float(m) <ide> s = float(s) <ide> if (s <= 0): <del> raise error, "standard deviation must be positive" <add> raise error("standard deviation must be positive") <ide> Distribution.__init__(self, 'normal', m, s) <ide> self._fac = 1.0/math.sqrt(2*math.pi)/s <ide> <ide> def __init__(self, a, b): <ide> b = float(b) <ide> width = b-a <ide> if (width <=0): <del> raise error, "width of uniform distribution must be > 0" <add> raise error("width of uniform distribution must be > 0") <ide> Distribution.__init__(self, 'uniform', a, b) <ide> self._fac = 1.0/width <ide> <ide> def __init__(self, seed, dist=None): <ide> if dist is None: <ide> dist = default_distribution <ide> if not isinstance(dist, Distribution): <del> raise error, "Not a distribution object" <add> raise error("Not a distribution object") <ide> self._dist = dist <ide> <ide> def ranf(self): <ide><path>tools/osxbuild/install_and_test.py <ide> def color_print(msg): <ide> pkg = fn <ide> break <ide> if pkg is None: <del> raise IOError, 'Package is not found in directory %s' % distdir <add> raise IOError('Package is not found in directory %s' % distdir) <ide> <ide> pkgpath = os.path.abspath(os.path.join(SRC_DIR, DIST_DIR, pkg)) <ide> color_print('Installing package: %s' % pkgpath)
12
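All of these hunks are the same mechanical Python 2 → 3 change: the comma form of `raise` is a syntax error under Python 3, and re-raising a captured exception type needs the call form as well. A tiny sketch of both spellings (`check_2d` is a made-up wrapper for illustration):

```python
def check_2d(ndim):
    if ndim != 2:
        # Python 2-only spelling (SyntaxError on Python 3):
        #     raise ValueError, "a must be 2-d"
        # Portable spelling applied throughout the patch:
        raise ValueError("a must be 2-d")

try:
    check_2d(1)
except ValueError as exc:
    # The patch applies the same rewrite to re-raises, e.g. in records.py:
    #     raise exctype, value  ->  raise exctype(value)
    print(f"caught: {exc}")
```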
Text
Text
add quotes for event names + fix similar nits
9c8857d9461210185e7272a15a1a2f5b75b31faa
<ide><path>doc/api/assert.md <ide> added: v0.1.21 <ide> changes: <ide> - version: REPLACEME <ide> pr-url: https://github.com/nodejs/node/pull/18418 <del> description: Calling `assert.fail` with more than one argument is deprecated <del> and emits a warning. <add> description: Calling `assert.fail()` with more than one argument is <add> deprecated and emits a warning. <ide> --> <ide> * `actual` {any} <ide> * `expected` {any} <ide><path>doc/api/async_hooks.md <ide> const async_hooks = require('async_hooks'); <ide> ## Terminology <ide> <ide> An asynchronous resource represents an object with an associated callback. <del>This callback may be called multiple times, for example, the `connection` event <del>in `net.createServer`, or just a single time like in `fs.open`. A resource <del>can also be closed before the callback is called. AsyncHook does not <add>This callback may be called multiple times, for example, the `'connection'` <add>event in `net.createServer()`, or just a single time like in `fs.open()`. <add>A resource can also be closed before the callback is called. AsyncHook does not <ide> explicitly distinguish between these different cases but will represent them <ide> as the abstract concept that is a resource. <ide> <ide> const asyncHook = async_hooks.createHook(new MyAddedCallbacks()); <ide> <ide> If any `AsyncHook` callbacks throw, the application will print the stack trace <ide> and exit. The exit path does follow that of an uncaught exception, but <del>all `uncaughtException` listeners are removed, thus forcing the process to <add>all `'uncaughtException'` listeners are removed, thus forcing the process to <ide> exit. The `'exit'` callbacks will still be called unless the application is run <ide> with `--abort-on-uncaught-exception`, in which case a stack trace will be <ide> printed and the application exits, leaving a core file. <ide><path>doc/api/child_process.md <ide> pipes between the parent and child. The value is one of the following: <ide> between parent and child. A [`ChildProcess`][] may have at most *one* IPC stdio <ide> file descriptor. Setting this option enables the [`subprocess.send()`][] <ide> method. If the child is a Node.js process, the presence of an IPC channel <del> will enable [`process.send()`][], [`process.disconnect()`][], <del> [`process.on('disconnect')`][], and [`process.on('message')`] within the <del> child. <add> will enable [`process.send()`][] and [`process.disconnect()`][] methods, <add> as well as [`'disconnect'`][] and [`'message'`][] events within the child. <ide> <ide> Accessing the IPC channel fd in any way other than [`process.send()`][] <ide> or using the IPC channel with a child process that is not a Node.js instance <ide> spawn('prg', [], { stdio: ['pipe', null, null, null, 'pipe'] }); <ide> *It is worth noting that when an IPC channel is established between the <ide> parent and child processes, and the child is a Node.js process, the child <ide> is launched with the IPC channel unreferenced (using `unref()`) until the <del>child registers an event handler for the [`process.on('disconnect')`][] event <del>or the [`process.on('message')`][] event. This allows the child to exit <add>child registers an event handler for the [`'disconnect'`][] event <add>or the [`'message'`][] event. 
This allows the child to exit <ide> normally without the process being held open by the open IPC channel.* <ide> <ide> See also: [`child_process.exec()`][] and [`child_process.fork()`][] <ide> changes: <ide> When an IPC channel has been established between the parent and child ( <ide> i.e. when using [`child_process.fork()`][]), the `subprocess.send()` method can <ide> be used to send messages to the child process. When the child process is a <del>Node.js instance, these messages can be received via the <del>[`process.on('message')`][] event. <add>Node.js instance, these messages can be received via the [`'message'`][] event. <ide> <ide> The message goes through serialization and parsing. The resulting <ide> message might not be the same as what is originally sent. <ide> allows the child to send messages back to the parent. <ide> <ide> There is a special case when sending a `{cmd: 'NODE_foo'}` message. Messages <ide> containing a `NODE_` prefix in the `cmd` property are reserved for use within <del>Node.js core and will not be emitted in the child's [`process.on('message')`][] <add>Node.js core and will not be emitted in the child's [`'message'`][] <ide> event. Rather, such messages are emitted using the <del>`process.on('internalMessage')` event and are consumed internally by Node.js. <add>`'internalMessage'` event and are consumed internally by Node.js. <ide> Applications should avoid using such messages or listening for <ide> `'internalMessage'` events as it is subject to change without notice. <ide> <ide> The optional `sendHandle` argument that may be passed to `subprocess.send()` is <ide> for passing a TCP server or socket object to the child process. The child will <ide> receive the object as the second argument passed to the callback function <del>registered on the [`process.on('message')`][] event. Any data that is received <add>registered on the [`'message'`][] event. Any data that is received <ide> and buffered in the socket will not be sent to the child. <ide> <ide> The optional `callback` is a function that is invoked after the message is <ide> the same requirement. Thus, in `child_process` functions where a shell can be <ide> spawned, `'cmd.exe'` is used as a fallback if `process.env.ComSpec` is <ide> unavailable. <ide> <add>[`'disconnect'`]: process.html#process_event_disconnect <ide> [`'error'`]: #child_process_event_error <ide> [`'exit'`]: #child_process_event_exit <add>[`'message'`]: process.html#process_event_message <ide> [`ChildProcess`]: #child_process_child_process <ide> [`Error`]: errors.html#errors_class_error <ide> [`EventEmitter`]: events.html#events_class_eventemitter <ide> unavailable. <ide> [`process.disconnect()`]: process.html#process_process_disconnect <ide> [`process.env`]: process.html#process_process_env <ide> [`process.execPath`]: process.html#process_process_execpath <del>[`process.on('disconnect')`]: process.html#process_event_disconnect <del>[`process.on('message')`]: process.html#process_event_message <ide> [`process.send()`]: process.html#process_process_send_message_sendhandle_options_callback <ide> [`stdio`]: #child_process_options_stdio <ide> [`util.promisify()`]: util.html#util_util_promisify_original <ide><path>doc/api/cluster.md <ide> added: v0.7.0 <ide> * `message` {Object} <ide> * `handle` {undefined|Object} <ide> <del>Similar to the `cluster.on('message')` event, but specific to this worker. <add>Similar to the `'message'` event of `cluster`, but specific to this worker. <ide> <ide> Within a worker, `process.on('message')` may also be used. 
<ide> <ide> added: v0.7.9 <ide> <ide> Emitted after the worker IPC channel has disconnected. This can occur when a <ide> worker exits gracefully, is killed, or is disconnected manually (such as with <del>worker.disconnect()). <add>`worker.disconnect()`). <ide> <ide> There may be a delay between the `'disconnect'` and `'exit'` events. These <ide> events can be used to detect if the process is stuck in a cleanup or if there <ide> cluster.on('exit', (worker, code, signal) => { <ide> }); <ide> ``` <ide> <del>See [child_process event: 'exit'][]. <add>See [child_process event: `'exit'`][]. <ide> <ide> ## Event: 'fork' <ide> <!-- YAML <ide> changes: <ide> <ide> Emitted when the cluster master receives a message from any worker. <ide> <del>See [child_process event: 'message'][]. <add>See [child_process event: `'message'`][]. <ide> <ide> Before Node.js v6.0, this event emitted only the message and the handle, <ide> but not the worker object, contrary to what the documentation stated. <ide> added: v0.7.0 <ide> After forking a new worker, the worker should respond with an online message. <ide> When the master receives an online message it will emit this event. <ide> The difference between `'fork'` and `'online'` is that fork is emitted when the <del>master forks a worker, and 'online' is emitted when the worker is running. <add>master forks a worker, and `'online'` is emitted when the worker is running. <ide> <ide> ```js <ide> cluster.on('online', (worker) => { <ide> socket.on('data', (id) => { <ide> [`server.close()`]: net.html#net_event_close <ide> [`worker.exitedAfterDisconnect`]: #cluster_worker_exitedafterdisconnect <ide> [Child Process module]: child_process.html#child_process_child_process_fork_modulepath_args_options <del>[child_process event: 'exit']: child_process.html#child_process_event_exit <del>[child_process event: 'message']: child_process.html#child_process_event_message <add>[child_process event: `'exit'`]: child_process.html#child_process_event_exit <add>[child_process event: `'message'`]: child_process.html#child_process_event_message <ide> [`cluster.settings`]: #cluster_cluster_settings <ide><path>doc/api/console.md <ide> added: v8.0.0 <ide> * `label` {string} <ide> <ide> This method does not display anything unless used in the inspector. The <del>`console.timeStamp()` method adds an event with the label `label` to the <add>`console.timeStamp()` method adds an event with the label `'label'` to the <ide> **Timeline** panel of the inspector. <ide> <ide> ### console.timeline([label]) <ide><path>doc/api/deprecations.md <ide> API instead. <ide> <ide> Type: End-of-Life <ide> <del>`runInAsyncIdScope` doesn't emit the `before` or `after` event and can thus <add>`runInAsyncIdScope` doesn't emit the `'before'` or `'after'` event and can thus <ide> cause a lot of issues. See https://github.com/nodejs/node/issues/14328 for more <ide> details. <ide> <ide><path>doc/api/dgram.md <ide> and port can be retrieved using [`socket.address().address`][] and <ide> added: v0.1.99 <ide> --> <ide> <del>* `type` {string} - Either 'udp4' or 'udp6'. <add>* `type` {string} - Either `'udp4'` or `'udp6'`. <ide> * `callback` {Function} - Attached as a listener to `'message'` events. <ide> * Returns: {dgram.Socket} <ide> <ide> Creates a `dgram.Socket` object of the specified `type`. The `type` argument <del>can be either `udp4` or `udp6`. An optional `callback` function can be passed <del>which is added as a listener for `'message'` events. <add>can be either `'udp4'` or `'udp6'`. 
An optional `callback` function can be <add>passed which is added as a listener for `'message'` events. <ide> <ide> Once the socket is created, calling [`socket.bind()`][] will instruct the <ide> socket to begin listening for datagram messages. When `address` and `port` are <ide><path>doc/api/documentation.md <ide> failures or behavior changes when API modifications occur. To help avoid such <ide> surprises, `Experimental` features may require a command-line flag to <ide> explicitly enable them, or may cause a process warning to be emitted. <ide> By default, such warnings are printed to [`stderr`][] and may be handled by <del>attaching a listener to the [`process.on('warning')`][] event. <add>attaching a listener to the [`'warning'`][] event. <ide> <ide> ## JSON Output <ide> <!-- YAML <ide> relative to Linux and macOS. For an example of the subtle ways in which it's <ide> sometimes impossible to replace Unix syscall semantics on Windows, see [Node <ide> issue 4760](https://github.com/nodejs/node/issues/4760). <ide> <del>[submit an issue]: https://github.com/nodejs/node/issues/new <del>[the contributing guide]: https://github.com/nodejs/node/blob/master/CONTRIBUTING.md <add>[`'warning'`]: process.html#process_event_warning <ide> [`stderr`]: process.html#process_process_stderr <del>[`process.on('warning')`]: process.html#process_event_warning <ide> [`fs.open()`]: fs.html#fs_fs_open_path_flags_mode_callback <ide> [`fs.lchown()`]: fs.html#fs_fs_lchown_path_uid_gid_callback <add>[submit an issue]: https://github.com/nodejs/node/issues/new <add>[the contributing guide]: https://github.com/nodejs/node/blob/master/CONTRIBUTING.md <ide><path>doc/api/domain.md <ide> binding. <ide> <ide> This also works with timers that are returned from [`setInterval()`][] and <ide> [`setTimeout()`][]. If their callback function throws, it will be caught by <del>the domain 'error' handler. <add>the domain `'error'` handler. <ide> <ide> If the Timer or EventEmitter was already bound to a domain, it is removed <ide> from that one, and bound to this one instead. <ide> d.on('error', (er) => { <ide> <ide> ### domain.enter() <ide> <del>The `enter` method is plumbing used by the `run`, `bind`, and `intercept` <del>methods to set the active domain. It sets `domain.active` and `process.domain` <del>to the domain, and implicitly pushes the domain onto the domain stack managed <del>by the domain module (see [`domain.exit()`][] for details on the domain stack). <del>The call to `enter` delimits the beginning of a chain of asynchronous calls and <del>I/O operations bound to a domain. <add>The `enter()` method is plumbing used by the `run()`, `bind()`, and <add>`intercept()` methods to set the active domain. It sets `domain.active` and <add>`process.domain` to the domain, and implicitly pushes the domain onto the domain <add>stack managed by the domain module (see [`domain.exit()`][] for details on the <add>domain stack). The call to `enter()` delimits the beginning of a chain of <add>asynchronous calls and I/O operations bound to a domain. <ide> <del>Calling `enter` changes only the active domain, and does not alter the domain <del>itself. `enter` and `exit` can be called an arbitrary number of times on a <add>Calling `enter()` changes only the active domain, and does not alter the domain <add>itself. `enter()` and `exit()` can be called an arbitrary number of times on a <ide> single domain. <ide> <ide> ### domain.exit() <ide> <del>The `exit` method exits the current domain, popping it off the domain stack. 
<add>The `exit()` method exits the current domain, popping it off the domain stack. <ide> Any time execution is going to switch to the context of a different chain of <ide> asynchronous calls, it's important to ensure that the current domain is exited. <del>The call to `exit` delimits either the end of or an interruption to the chain <add>The call to `exit()` delimits either the end of or an interruption to the chain <ide> of asynchronous calls and I/O operations bound to a domain. <ide> <ide> If there are multiple, nested domains bound to the current execution context, <del>`exit` will exit any domains nested within this domain. <add>`exit()` will exit any domains nested within this domain. <ide> <del>Calling `exit` changes only the active domain, and does not alter the domain <del>itself. `enter` and `exit` can be called an arbitrary number of times on a <add>Calling `exit()` changes only the active domain, and does not alter the domain <add>itself. `enter()` and `exit()` can be called an arbitrary number of times on a <ide> single domain. <ide> <ide> ### domain.intercept(callback) <ide> d2.run(() => { <ide> ``` <ide> <ide> Note that domains will not interfere with the error handling mechanisms for <del>Promises, i.e. no `error` event will be emitted for unhandled Promise <add>Promises, i.e. no `'error'` event will be emitted for unhandled Promise <ide> rejections. <ide> <ide> [`Error`]: errors.html#errors_class_error <ide><path>doc/api/errors.md <ide> For *all* [`EventEmitter`][] objects, if an `'error'` event handler is not <ide> provided, the error will be thrown, causing the Node.js process to report an <ide> unhandled exception and crash unless either: The [`domain`][domains] module is <ide> used appropriately or a handler has been registered for the <del>[`process.on('uncaughtException')`][] event. <add>[`'uncaughtException'`][] event. <ide> <ide> ```js <ide> const EventEmitter = require('events'); <ide> An attempt was made to use a readable stream that did not implement <ide> <a id="ERR_STREAM_UNSHIFT_AFTER_END_EVENT"></a> <ide> ### ERR_STREAM_UNSHIFT_AFTER_END_EVENT <ide> <del>An attempt was made to call [`stream.unshift()`][] after the `end` event was <add>An attempt was made to call [`stream.unshift()`][] after the `'end'` event was <ide> emitted. <ide> <ide> <a id="ERR_STREAM_WRAP"></a> <ide> meaning of the error depends on the specific function. <ide> <ide> Creation of a [`zlib`][] object failed due to incorrect configuration. <ide> <add>[`'uncaughtException'`]: process.html#process_event_uncaughtexception <ide> [`--force-fips`]: cli.html#cli_force_fips <ide> [`child_process`]: child_process.html <ide> [`cipher.getAuthTag()`]: crypto.html#crypto_cipher_getauthtag <ide> Creation of a [`zlib`][] object failed due to incorrect configuration. 
<ide> [`net`]: net.html <ide> [`new URL(input)`]: url.html#url_constructor_new_url_input_base <ide> [`new URLSearchParams(iterable)`]: url.html#url_constructor_new_urlsearchparams_iterable <del>[`process.on('uncaughtException')`]: process.html#process_event_uncaughtexception <ide> [`process.send()`]: process.html#process_process_send_message_sendhandle_options_callback <ide> [`process.setUncaughtExceptionCaptureCallback()`]: process.html#process_process_setuncaughtexceptioncapturecallback_fn <ide> [`require('crypto').setEngine()`]: crypto.html#crypto_crypto_setengine_engine_flags <ide><path>doc/api/events.md <ide> server.on('connection', callback); <ide> server.removeListener('connection', callback); <ide> ``` <ide> <del>`removeListener` will remove, at most, one instance of a listener from the <add>`removeListener()` will remove, at most, one instance of a listener from the <ide> listener array. If any single listener has been added multiple times to the <del>listener array for the specified `eventName`, then `removeListener` must be <add>listener array for the specified `eventName`, then `removeListener()` must be <ide> called multiple times to remove each instance. <ide> <ide> Note that once an event has been emitted, all listeners attached to it at the <ide> added: v9.4.0 <ide> - Returns: {Function[]} <ide> <ide> Returns a copy of the array of listeners for the event named `eventName`, <del>including any wrappers (such as those created by `.once`). <add>including any wrappers (such as those created by `.once()`). <ide> <ide> ```js <ide> const emitter = new EventEmitter(); <ide> logFnWrapper.listener(); <ide> logFnWrapper(); <ide> <ide> emitter.on('log', () => console.log('log persistently')); <del>// will return a new Array with a single function bound by `on` above <add>// will return a new Array with a single function bound by `.on()` above <ide> const newListeners = emitter.rawListeners('log'); <ide> <ide> // logs "log persistently" twice <ide><path>doc/api/fs.md <ide> support. If `filename` is provided, it will be provided as a `Buffer` if <ide> `filename` will be a UTF-8 string. <ide> <ide> ```js <del>// Example when handled through fs.watch listener <add>// Example when handled through fs.watch() listener <ide> fs.watch('./tmp', { encoding: 'buffer' }, (eventType, filename) => { <ide> if (filename) { <ide> console.log(filename); <ide> to [`net.Socket`][]. <ide> If `autoClose` is false, then the file descriptor won't be closed, even if <ide> there's an error. It is the application's responsibility to close it and make <ide> sure there's no file descriptor leak. If `autoClose` is set to true (default <del>behavior), on `error` or `end` the file descriptor will be closed <add>behavior), on `'error'` or `'end'` the file descriptor will be closed <ide> automatically. <ide> <ide> `mode` sets the file mode (permission and sticky bits), but only if the <ide> than replacing it may require a `flags` mode of `r+` rather than the <ide> default mode `w`. The `encoding` can be any one of those accepted by <ide> [`Buffer`][]. <ide> <del>If `autoClose` is set to true (default behavior) on `error` or `end` <add>If `autoClose` is set to true (default behavior) on `'error'` or `'finish'` <ide> the file descriptor will be closed automatically. If `autoClose` is false, <ide> then the file descriptor won't be closed, even if there's an error. 
<ide> It is the application's responsibility to close it and make sure there's no <ide><path>doc/api/http.md <ide> added: v0.5.9 <ide> <ide> * `timeout` {number} Milliseconds before a request times out. <ide> * `callback` {Function} Optional function to be called when a timeout occurs. <del> Same as binding to the `timeout` event. <add> Same as binding to the `'timeout'` event. <ide> * Returns: {http.ClientRequest} <ide> <ide> Once a socket is assigned to this request and is connected <ide> changes: <ide> pr-url: https://github.com/nodejs/node/pull/4557 <ide> description: The default action of calling `.destroy()` on the `socket` <ide> will no longer take place if there are listeners attached <del> for `clientError`. <add> for `'clientError'`. <ide> - version: v9.4.0 <ide> pr-url: https://github.com/nodejs/node/pull/17672 <ide> description: The rawPacket is the current buffer that just parsed. Adding <del> this buffer to the error object of clientError event is to make <del> it possible that developers can log the broken packet. <add> this buffer to the error object of `'clientError'` event is to <add> make it possible that developers can log the broken packet. <ide> --> <ide> <ide> * `exception` {Error} <ide> There are a few special headers that should be noted. <ide> <ide> * Sending an 'Expect' header will immediately send the request headers. <ide> Usually, when sending 'Expect: 100-continue', both a timeout and a listener <del> for the `continue` event should be set. See RFC2616 Section 8.2.3 for more <add> for the `'continue'` event should be set. See RFC2616 Section 8.2.3 for more <ide> information. <ide> <ide> * Sending an Authorization header will override using the `auth` option <ide> const req = http.request(options, (res) => { <ide> In a successful request, the following events will be emitted in the following <ide> order: <ide> <del>* `socket` <del>* `response` <del> * `data` any number of times, on the `res` object <del> (`data` will not be emitted at all if the response body is empty, for <add>* `'socket'` <add>* `'response'` <add> * `'data'` any number of times, on the `res` object <add> (`'data'` will not be emitted at all if the response body is empty, for <ide> instance, in most redirects) <del> * `end` on the `res` object <del>* `close` <add> * `'end'` on the `res` object <add>* `'close'` <ide> <ide> In the case of a connection error, the following events will be emitted: <ide> <del>* `socket` <del>* `error` <del>* `close` <add>* `'socket'` <add>* `'error'` <add>* `'close'` <ide> <ide> If `req.abort()` is called before the connection succeeds, the following events <ide> will be emitted in the following order: <ide> <del>* `socket` <add>* `'socket'` <ide> * (`req.abort()` called here) <del>* `abort` <del>* `close` <del>* `error` with an error with message `Error: socket hang up` and code <del> `ECONNRESET` <add>* `'abort'` <add>* `'close'` <add>* `'error'` with an error with message `'Error: socket hang up'` and code <add> `'ECONNRESET'` <ide> <ide> If `req.abort()` is called after the response is received, the following events <ide> will be emitted in the following order: <ide> <del>* `socket` <del>* `response` <del> * `data` any number of times, on the `res` object <add>* `'socket'` <add>* `'response'` <add> * `'data'` any number of times, on the `res` object <ide> * (`req.abort()` called here) <del>* `abort` <del>* `close` <del> * `aborted` on the `res` object <del> * `end` on the `res` object <del> * `close` on the `res` object <del> <del>Note that setting the 
`timeout` option or using the `setTimeout` function will <del>not abort the request or do anything besides add a `timeout` event. <add>* `'abort'` <add>* `'close'` <add> * `'aborted'` on the `res` object <add> * `'end'` on the `res` object <add> * `'close'` on the `res` object <add> <add>Note that setting the `timeout` option or using the `setTimeout()` function will <add>not abort the request or do anything besides add a `'timeout'` event. <ide> <ide> [`'checkContinue'`]: #http_event_checkcontinue <ide> [`'request'`]: #http_event_request <ide><path>doc/api/http2.md <ide> compatibility with the existing [HTTP/1][] module API. However, <ide> the [Compatibility API][] is. <ide> <ide> The `http2` Core API is much more symmetric between client and server than the <del>`http` API. For instance, most events, like `error`, `connect` and `stream`, can <del>be emitted either by client-side code or server-side code. <add>`http` API. For instance, most events, like `'error'`, `'connect'` and <add>`'stream'`, can be emitted either by client-side code or server-side code. <ide> <ide> ### Server-side example <ide> <ide> If the `payload` argument is not specified, the default payload will be the <ide> added: v9.4.0 <ide> --> <ide> <del>Calls [`ref()`][`net.Socket.prototype.ref`] on this `Http2Session` <add>Calls [`ref()`][`net.Socket.prototype.ref()`] on this `Http2Session` <ide> instance's underlying [`net.Socket`]. <ide> <ide> #### http2session.remoteSettings <ide> client. <ide> added: v9.4.0 <ide> --> <ide> <del>Calls [`unref()`][`net.Socket.prototype.unref`] on this `Http2Session` <add>Calls [`unref()`][`net.Socket.prototype.unref()`] on this `Http2Session` <ide> instance's underlying [`net.Socket`]. <ide> <ide> ### Class: ServerHttp2Session <ide> added: v8.4.0 <ide> --> <ide> <ide> The `'timeout'` event is emitted after no activity is received for this <del>`'Http2Stream'` within the number of milliseconds set using <add>`Http2Stream` within the number of milliseconds set using <ide> `http2stream.setTimeout()`. <ide> <ide> #### Event: 'trailers' <ide> following additional properties: <ide> [`http2stream.pushStream()`]: #http2_http2stream_pushstream_headers_options_callback <ide> [`net.Server.close()`]: net.html#net_server_close_callback <ide> [`net.Socket`]: net.html#net_class_net_socket <del>[`net.Socket.prototype.ref`]: net.html#net_socket_ref <del>[`net.Socket.prototype.unref`]: net.html#net_socket_unref <add>[`net.Socket.prototype.ref()`]: net.html#net_socket_ref <add>[`net.Socket.prototype.unref()`]: net.html#net_socket_unref <ide> [`net.connect()`]: net.html#net_net_connect <ide> [`request.socket.getPeerCertificate()`]: tls.html#tls_tlssocket_getpeercertificate_detailed <ide> [`response.end()`]: #http2_response_end_data_encoding_callback <ide><path>doc/api/https.md <ide> added: v0.3.4 <ide> --> <ide> - `options` {Object} Accepts `options` from [`tls.createServer()`][], <ide> [`tls.createSecureContext()`][] and [`http.createServer()`][]. <del>- `requestListener` {Function} A listener to be added to the `request` event. <add>- `requestListener` {Function} A listener to be added to the `'request'` event. <ide> <ide> Example: <ide> <ide><path>doc/api/inspector.md <ide> added: v8.0.0 <ide> Emitted when an inspector notification is received that has its method field set <ide> to the `<inspector-protocol-method>` value. 
<ide> <del>The following snippet installs a listener on the [`Debugger.paused`][] <add>The following snippet installs a listener on the [`'Debugger.paused'`][] <ide> event, and prints the reason for program suspension whenever program <ide> execution is suspended (through breakpoints, for example): <ide> <ide> session.post('Profiler.enable', () => { <ide> ``` <ide> <ide> <del>[`session.connect()`]: #inspector_session_connect <del>[`Debugger.paused`]: https://chromedevtools.github.io/devtools-protocol/v8/Debugger/#event-paused <add>[`'Debugger.paused'`]: https://chromedevtools.github.io/devtools-protocol/v8/Debugger#event-paused <ide> [`EventEmitter`]: events.html#events_class_eventemitter <add>[`session.connect()`]: #inspector_session_connect <ide> [Chrome DevTools Protocol Viewer]: https://chromedevtools.github.io/devtools-protocol/v8/ <ide> [CPU Profiler]: https://chromedevtools.github.io/devtools-protocol/v8/Profiler <ide><path>doc/api/modules.md <ide> added: v8.9.0 <ide> * Returns: {string[]|null} <ide> <ide> Returns an array containing the paths searched during resolution of `request` or <del>null if the `request` string references a core module, for example `http` or <add>`null` if the `request` string references a core module, for example `http` or <ide> `fs`. <ide> <ide> ## The `module` Object <ide><path>doc/api/n-api.md <ide> napi_status napi_fatal_exception(napi_env env, napi_value err); <ide> ``` <ide> <ide> - `[in] env`: The environment that the API is invoked under. <del>- `[in] err`: The error you want to pass to `uncaughtException`. <add>- `[in] err`: The error you want to pass to `'uncaughtException'`. <ide> <del>Trigger an `uncaughtException` in JavaScript. Useful if an async <add>Trigger an `'uncaughtException'` in JavaScript. Useful if an async <ide> callback throws an exception with no way to recover. <ide> <ide> ### Fatal Errors <ide><path>doc/api/net.md <ide> added: v0.1.90 <ide> * Returns: {net.Server} <ide> <ide> Stops the server from accepting new connections and keeps existing <del>connections. This function is asynchronous, the server is finally <del>closed when all connections are ended and the server emits a [`'close'`][] event. <add>connections. This function is asynchronous, the server is finally closed <add>when all connections are ended and the server emits a [`'close'`][] event. <ide> The optional `callback` will be called once the `'close'` event occurs. Unlike <ide> that event, it will be called with an Error as its only argument if the server <ide> was not open when it was closed. <ide> on Linux. The default value of this parameter is 511 (not 512). <ide> <ide> All [`net.Socket`][] are set to `SO_REUSEADDR` (See [socket(7)][] for details). <ide> <del>The `server.listen()` method can be called again if and only if there was an error <del>during the first `server.listen()` call or `server.close()` has been called. <del>Otherwise, an `ERR_SERVER_ALREADY_LISTEN` error will be thrown. <add>The `server.listen()` method can be called again if and only if there was an <add>error during the first `server.listen()` call or `server.close()` has been <add>called. Otherwise, an `ERR_SERVER_ALREADY_LISTEN` error will be thrown. <ide> <ide> One of the most common errors raised when listening is `EADDRINUSE`. 
<ide> This happens when another server is already listening on the requested <ide> added: v0.9.1 <ide> <ide> * Returns: {net.Server} <ide> <del>Opposite of `unref`, calling `ref` on a previously `unref`d server will *not* <del>let the program exit if it's the only server left (the default behavior). If <del>the server is `ref`d calling `ref` again will have no effect. <add>Opposite of `unref()`, calling `ref()` on a previously `unref`ed server will <add>*not* let the program exit if it's the only server left (the default behavior). <add>If the server is `ref`ed calling `ref()` again will have no effect. <ide> <ide> ### server.unref() <ide> <!-- YAML <ide> added: v0.9.1 <ide> <ide> * Returns: {net.Server} <ide> <del>Calling `unref` on a server will allow the program to exit if this is the only <del>active server in the event system. If the server is already `unref`d calling <del>`unref` again will have no effect. <add>Calling `unref()` on a server will allow the program to exit if this is the only <add>active server in the event system. If the server is already `unref`ed calling <add>`unref()` again will have no effect. <ide> <ide> ## Class: net.Socket <ide> <!-- YAML <ide> endpoint, depending on what it [`connect()`][`socket.connect()`] to. <ide> added: v0.1.90 <ide> --> <ide> <del>* `had_error` {boolean} `true` if the socket had a transmission error. <add>* `hadError` {boolean} `true` if the socket had a transmission error. <ide> <del>Emitted once the socket is fully closed. The argument `had_error` is a boolean <add>Emitted once the socket is fully closed. The argument `hadError` is a boolean <ide> which says if the socket was closed due to a transmission error. <ide> <ide> ### Event: 'connect' <ide> See [`net.createConnection()`][]. <ide> added: v0.1.90 <ide> --> <ide> <del>* {Buffer} <add>* {Buffer|string} <ide> <ide> Emitted when data is received. The argument `data` will be a `Buffer` or <ide> `String`. Encoding of data is set by [`socket.setEncoding()`][]. <ide> added: v6.1.0 <ide> If `true` - <ide> [`socket.connect(options[, connectListener])`][`socket.connect(options)`] <ide> was called and haven't yet finished. Will be set to `false` before emitting <del>`connect` event and/or calling <add>`'connect'` event and/or calling <ide> [`socket.connect(options[, connectListener])`][`socket.connect(options)`]'s <ide> callback. <ide> <ide> added: v0.9.1 <ide> <ide> * Returns: {net.Socket} The socket itself. <ide> <del>Opposite of `unref`, calling `ref` on a previously `unref`d socket will *not* <del>let the program exit if it's the only socket left (the default behavior). If <del>the socket is `ref`d calling `ref` again will have no effect. <add>Opposite of `unref()`, calling `ref()` on a previously `unref`ed socket will <add>*not* let the program exit if it's the only socket left (the default behavior). <add>If the socket is `ref`ed calling `ref` again will have no effect. <ide> <ide> ### socket.remoteAddress <ide> <!-- YAML <ide> added: v0.9.1 <ide> <ide> * Returns: {net.Socket} The socket itself. <ide> <del>Calling `unref` on a socket will allow the program to exit if this is the only <del>active socket in the event system. If the socket is already `unref`d calling <del>`unref` again will have no effect. <add>Calling `unref()` on a socket will allow the program to exit if this is the only <add>active socket in the event system. If the socket is already `unref`ed calling <add>`unref()` again will have no effect. 
<ide> <ide> ### socket.write(data[, encoding][, callback]) <ide> <!-- YAML <ide><path>doc/api/process.md <ide> down the process. **It is not safe to resume normal operation after <ide> `'uncaughtException'`.** <ide> <ide> To restart a crashed application in a more reliable way, whether <del>`uncaughtException` is emitted or not, an external monitor should be employed <add>`'uncaughtException'` is emitted or not, an external monitor should be employed <ide> in a separate process to detect application failures and recover or restart as <ide> needed. <ide> <ide> lead to sub-optimal application performance, bugs, or security vulnerabilities. <ide> The listener function is called with a single `warning` argument whose value is <ide> an `Error` object. There are three key properties that describe the warning: <ide> <del>* `name` {string} The name of the warning (currently `Warning` by default). <add>* `name` {string} The name of the warning (currently `'Warning'` by default). <ide> * `message` {string} A system-provided description of the warning. <ide> * `stack` {string} A stack trace to the location in the code where the warning <ide> was issued. <ide> Using the `--no-deprecation` command line flag will suppress all reporting <ide> of the custom deprecation. <ide> <ide> The `*-deprecation` command line flags only affect warnings that use the name <del>`DeprecationWarning`. <add>`'DeprecationWarning'`. <ide> <ide> #### Emitting custom warnings <ide> <ide> custom or application-specific warnings. <ide> <ide> Signal events will be emitted when the Node.js process receives a signal. Please <ide> refer to signal(7) for a listing of standard POSIX signal names such as <del>`SIGINT`, `SIGHUP`, etc. <add>`'SIGINT'`, `'SIGHUP'`, etc. <ide> <ide> The signal handler will receive the signal's name (`'SIGINT'`, <ide> `'SIGTERM'`, etc.) as the first argument. <ide> process.on('SIGINT', handle); <ide> process.on('SIGTERM', handle); <ide> ``` <ide> <del>* `SIGUSR1` is reserved by Node.js to start the [debugger][]. It's possible to <add>* `'SIGUSR1'` is reserved by Node.js to start the [debugger][]. It's possible to <ide> install a listener but doing so might interfere with the debugger. <del>* `SIGTERM` and `SIGINT` have default handlers on non-Windows platforms that <add>* `'SIGTERM'` and `'SIGINT'` have default handlers on non-Windows platforms that <ide> reset the terminal mode before exiting with code `128 + signal number`. If one <ide> of these signals has a listener installed, its default behavior will be <ide> removed (Node.js will no longer exit). <del>* `SIGPIPE` is ignored by default. It can have a listener installed. <del>* `SIGHUP` is generated on Windows when the console window is closed, and on <add>* `'SIGPIPE'` is ignored by default. It can have a listener installed. <add>* `'SIGHUP'` is generated on Windows when the console window is closed, and on <ide> other platforms under various similar conditions, see signal(7). It can have a <ide> listener installed, however Node.js will be unconditionally terminated by <ide> Windows about 10 seconds later. On non-Windows platforms, the default <ide> behavior of `SIGHUP` is to terminate Node.js, but once a listener has been <ide> installed its default behavior will be removed. <del>* `SIGTERM` is not supported on Windows, it can be listened on. <del>* `SIGINT` from the terminal is supported on all platforms, and can usually be <add>* `'SIGTERM'` is not supported on Windows, it can be listened on. 
<add>* `'SIGINT'` from the terminal is supported on all platforms, and can usually be <ide> generated with `<Ctrl>+C` (though this may be configurable). It is not <ide> generated when terminal raw mode is enabled. <del>* `SIGBREAK` is delivered on Windows when `<Ctrl>+<Break>` is pressed, on <add>* `'SIGBREAK'` is delivered on Windows when `<Ctrl>+<Break>` is pressed, on <ide> non-Windows platforms it can be listened on, but there is no way to send or <ide> generate it. <del>* `SIGWINCH` is delivered when the console has been resized. On Windows, this <add>* `'SIGWINCH'` is delivered when the console has been resized. On Windows, this <ide> will only happen on write to the console when the cursor is being moved, or <ide> when a readable tty is used in raw mode. <del>* `SIGKILL` cannot have a listener installed, it will unconditionally terminate <del> Node.js on all platforms. <del>* `SIGSTOP` cannot have a listener installed. <del>* `SIGBUS`, `SIGFPE`, `SIGSEGV` and `SIGILL`, when not raised artificially <del> using kill(2), inherently leave the process in a state from which it is not <del> safe to attempt to call JS listeners. Doing so might lead to the process <del> hanging in an endless loop, since listeners attached using `process.on()` are <del> called asynchronously and therefore unable to correct the underlying problem. <add>* `'SIGKILL'` cannot have a listener installed, it will unconditionally <add> terminate Node.js on all platforms. <add>* `'SIGSTOP'` cannot have a listener installed. <add>* `'SIGBUS'`, `'SIGFPE'`, `'SIGSEGV'` and `'SIGILL'`, when not raised <add> artificially using kill(2), inherently leave the process in a state from <add> which it is not safe to attempt to call JS listeners. Doing so might lead to <add> the process hanging in an endless loop, since listeners attached using <add> `process.on()` are called asynchronously and therefore unable to correct the <add> underlying problem. <ide> <ide> Windows does not support sending signals, but Node.js offers some emulation <ide> with [`process.kill()`][], and [`subprocess.kill()`][]. Sending signal `0` can <ide> added: v8.0.0 <ide> <ide> The `process.emitWarning()` method can be used to emit custom or application <ide> specific process warnings. These can be listened for by adding a handler to the <del>[`process.on('warning')`][process_warning] event. <add>[`'warning'`][process_warning] event. <ide> <ide> ```js <ide> // Emit a warning with a code and additional detail. <ide> process.emitWarning('Something happened!', { <ide> <ide> In this example, an `Error` object is generated internally by <ide> `process.emitWarning()` and passed through to the <del>[`process.on('warning')`][process_warning] event. <add>[`'warning'`][process_warning] handler. <ide> <ide> ```js <ide> process.on('warning', (warning) => { <ide> added: v6.0.0 <ide> <ide> The `process.emitWarning()` method can be used to emit custom or application <ide> specific process warnings. These can be listened for by adding a handler to the <del>[`process.on('warning')`][process_warning] event. <add>[`'warning'`][process_warning] event. <ide> <ide> ```js <ide> // Emit a warning using a string. <ide> process.emitWarning('Something happened!', 'CustomWarning', 'WARN001'); <ide> ``` <ide> <ide> In each of the previous examples, an `Error` object is generated internally by <del>`process.emitWarning()` and passed through to the <del>[`process.on('warning')`][process_warning] event. 
<add>`process.emitWarning()` and passed through to the [`'warning'`][process_warning] <add>handler. <ide> <ide> ```js <ide> process.on('warning', (warning) => { <ide> process.on('warning', (warning) => { <ide> ``` <ide> <ide> If `warning` is passed as an `Error` object, it will be passed through to the <del>`process.on('warning')` event handler unmodified (and the optional `type`, <add>`'warning'` event handler unmodified (and the optional `type`, <ide> `code` and `ctor` arguments will be ignored): <ide> <ide> ```js <ide> Note that while process warnings use `Error` objects, the process warning <ide> mechanism is **not** a replacement for normal error handling mechanisms. <ide> <ide> The following additional handling is implemented if the warning `type` is <del>`DeprecationWarning`: <add>`'DeprecationWarning'`: <ide> <ide> * If the `--throw-deprecation` command-line flag is used, the deprecation <ide> warning is thrown as an exception rather than being emitted as an event. <ide> added: v0.8.0 <ide> <ide> The `process.noDeprecation` property indicates whether the `--no-deprecation` <ide> flag is set on the current Node.js process. See the documentation for <del>the [`warning` event][process_warning] and the <del>[`emitWarning` method][process_emit_warning] for more information about this <add>the [`'warning'` event][process_warning] and the <add>[`emitWarning()` method][process_emit_warning] for more information about this <ide> flag's behavior. <ide> <ide> ## process.pid <ide> The `process.setUncaughtExceptionCapture` function sets a function that will <ide> be invoked when an uncaught exception occurs, which will receive the exception <ide> value itself as its first argument. <ide> <del>If such a function is set, the [`process.on('uncaughtException')`][] event will <add>If such a function is set, the [`'uncaughtException'`][] event will <ide> not be emitted. If `--abort-on-uncaught-exception` was passed from the <ide> command line or set through [`v8.setFlagsFromString()`][], the process will <ide> not abort. <ide> added: v0.9.12 <ide> <ide> The `process.throwDeprecation` property indicates whether the <ide> `--throw-deprecation` flag is set on the current Node.js process. See the <del>documentation for the [`warning` event][process_warning] and the <del>[`emitWarning` method][process_emit_warning] for more information about this <add>documentation for the [`'warning'` event][process_warning] and the <add>[`emitWarning()` method][process_emit_warning] for more information about this <ide> flag's behavior. <ide> <ide> ## process.title <ide> added: v0.8.0 <ide> <ide> The `process.traceDeprecation` property indicates whether the <ide> `--trace-deprecation` flag is set on the current Node.js process. See the <del>documentation for the [`warning` event][process_warning] and the <del>[`emitWarning` method][process_emit_warning] for more information about this <add>documentation for the [`'warning'` event][process_warning] and the <add>[`emitWarning()` method][process_emit_warning] for more information about this <ide> flag's behavior. 
<ide> <ide> ## process.umask([mask]) <ide> cases: <ide> [`process.exit()`]: #process_process_exit_code <ide> [`process.exitCode`]: #process_process_exitcode <ide> [`process.kill()`]: #process_process_kill_pid_signal <del>[`process.on('uncaughtException')`]: process.html#process_event_uncaughtexception <ide> [`process.setUncaughtExceptionCaptureCallback()`]: process.html#process_process_setuncaughtexceptioncapturecallback_fn <ide> [`promise.catch()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/catch <ide> [`require()`]: globals.html#globals_require <ide><path>doc/api/readline.md <ide> The `'close'` event is emitted when one of the following occur: <ide> * The `input` stream receives its `'end'` event; <ide> * The `input` stream receives `<ctrl>-D` to signal end-of-transmission (EOT); <ide> * The `input` stream receives `<ctrl>-C` to signal `SIGINT` and there is no <del> `SIGINT` event listener registered on the `readline.Interface` instance. <add> `'SIGINT'` event listener registered on the `readline.Interface` instance. <ide> <ide> The listener function is called without passing any arguments. <ide> <ide> added: v0.7.5 <ide> The `'pause'` event is emitted when one of the following occur: <ide> <ide> * The `input` stream is paused. <del>* The `input` stream is not paused and receives the `SIGCONT` event. (See <del> events [`SIGTSTP`][] and [`SIGCONT`][]) <add>* The `input` stream is not paused and receives the `'SIGCONT'` event. (See <add> events [`'SIGTSTP'`][] and [`'SIGCONT'`][]) <ide> <ide> The listener function is called without passing any arguments. <ide> <ide> added: v0.7.5 <ide> --> <ide> <ide> The `'SIGTSTP'` event is emitted when the `input` stream receives a `<ctrl>-Z` <del>input, typically known as `SIGTSTP`. If there are no `SIGTSTP` event listeners <add>input, typically known as `SIGTSTP`. If there are no `'SIGTSTP'` event listeners <ide> registered when the `input` stream receives a `SIGTSTP`, the Node.js process <ide> will be sent to the background. <ide> <del>When the program is resumed using fg(1p), the `'pause'` and `SIGCONT` events <add>When the program is resumed using fg(1p), the `'pause'` and `'SIGCONT'` events <ide> will be emitted. These can be used to resume the `input` stream. <ide> <ide> The `'pause'` and `'SIGCONT'` events will not be emitted if the `input` was <ide> rl.on('line', (line) => { <ide> }); <ide> ``` <ide> <del>[`SIGCONT`]: readline.html#readline_event_sigcont <del>[`SIGTSTP`]: readline.html#readline_event_sigtstp <add>[`'SIGCONT'`]: readline.html#readline_event_sigcont <add>[`'SIGTSTP'`]: readline.html#readline_event_sigtstp <ide> [`process.stdin`]: process.html#process_process_stdin <ide> [`process.stdout`]: process.html#process_process_stdout <ide> [Readable]: stream.html#stream_readable_streams <ide><path>doc/api/stream.md <ide> const server = http.createServer((req, res) => { <ide> body += chunk; <ide> }); <ide> <del> // the end event indicates that the entire body has been received <add> // the 'end' event indicates that the entire body has been received <ide> req.on('end', () => { <ide> try { <ide> const data = JSON.parse(body); <ide> added: v8.0.0 <ide> <ide> * Returns: {this} <ide> <del>Destroy the stream, and emit the passed `error` and a `close` event. <add>Destroy the stream, and emit the passed `'error'` and a `'close'` event. <ide> After this call, the writable stream has ended and subsequent calls <del>to `write` / `end` will give an `ERR_STREAM_DESTROYED` error. 
<add>to `write()` / `end()` will give an `ERR_STREAM_DESTROYED` error. <ide> Implementors should not override this method, <del>but instead implement [`writable._destroy`][writable-_destroy]. <add>but instead implement [`writable._destroy()`][writable-_destroy]. <ide> <ide> ##### writable.end([chunk][, encoding][, callback]) <ide> <!-- YAML <ide> changes: <ide> - version: REPLACEME <ide> pr-url: https://github.com/nodejs/node/pull/17979 <ide> description: > <del> 'readable' is always emitted in the next tick after <del> .push() is called <add> The `'readable'` is always emitted in the next tick after `.push()` <add> is called <ide> - version: REPLACEME <ide> pr-url: https://github.com/nodejs/node/pull/18994 <del> description: Using 'readable' requires calling .read(). <add> description: Using `'readable'` requires calling `.read()`. <ide> --> <ide> <ide> The `'readable'` event is emitted when there is data available to be read from <ide> added: v8.0.0 <ide> * `error` {Error} Error which will be passed as payload in `'error'` event <ide> * Returns: {this} <ide> <del>Destroy the stream, and emit `'error'` and `close`. After this call, the <add>Destroy the stream, and emit `'error'` and `'close'`. After this call, the <ide> readable stream will release any internal resources and subsequent calls <del>to `push` will be ignored. <add>to `push()` will be ignored. <ide> Implementors should not override this method, but instead implement <del>[`readable._destroy`][readable-_destroy]. <add>[`readable._destroy()`][readable-_destroy]. <ide> <ide> ##### readable.isPaused() <ide> <!-- YAML <ide> added: v0.9.4 <ide> changes: <ide> - version: REPLACEME <ide> pr-url: https://github.com/nodejs/node/pull/18994 <del> description: Resume has no effect if there is a 'readable' event listening <add> description: The `resume()` has no effect if there is a `'readable'` event <add> listening. <ide> --> <ide> <ide> * Returns: {this} <ide> function parseHeader(stream, callback) { <ide> const remaining = split.join('\n\n'); <ide> const buf = Buffer.from(remaining, 'utf8'); <ide> stream.removeListener('error', callback); <del> // remove the readable listener before unshifting <add> // remove the 'readable' listener before unshifting <ide> stream.removeListener('readable', onReadable); <ide> if (buf.length) <ide> stream.unshift(buf); <ide> added: v8.0.0 <ide> Destroy the stream, and emit `'error'`. After this call, the <ide> transform stream would release any internal resources. <ide> implementors should not override this method, but instead implement <del>[`readable._destroy`][readable-_destroy]. <del>The default implementation of `_destroy` for `Transform` also emit `'close'`. <add>[`readable._destroy()`][readable-_destroy]. <add>The default implementation of `_destroy()` for `Transform` also emit `'close'`. <ide> <ide> ## API for Stream Implementers <ide> <ide> changes: <ide> - version: REPLACEME <ide> pr-url: https://github.com/nodejs/node/pull/18438 <ide> description: > <del> Add `emitClose` option to specify if `close` is emitted on destroy <add> Add `emitClose` option to specify if `'close'` is emitted on destroy <ide> --> <ide> <ide> * `options` {Object} <ide> changes: <ide> it becomes possible to write JavaScript values other than string, <ide> `Buffer` or `Uint8Array` if supported by the stream implementation. <ide> **Default:** `false`. 
<del> * `emitClose` {boolean} Whether or not the stream should emit `close` <add> * `emitClose` {boolean} Whether or not the stream should emit `'close'` <ide> after it has been destroyed. **Default:** `true`. <ide> * `write` {Function} Implementation for the <ide> [`stream._write()`][stream-_write] method. <ide> by child classes, and if so, will be called by the internal Writable <ide> class methods only. <ide> <ide> This optional function will be called before the stream closes, delaying the <del>`finish` event until `callback` is called. This is useful to close resources <add>`'finish'` event until `callback` is called. This is useful to close resources <ide> or write buffered data before a stream ends. <ide> <ide> #### Errors While Writing <ide> For example, consider the following code: <ide> // WARNING! BROKEN! <ide> net.createServer((socket) => { <ide> <del> // we add an 'end' method, but never consume the data <add> // we add an 'end' listener, but never consume the data <ide> socket.on('end', () => { <ide> // It will never get here. <ide> socket.end('The message was received but was not processed.\n'); <ide><path>doc/api/timers.md <ide> When called, requests that the Node.js event loop *not* exit so long as the <ide> `Immediate` is active. Calling `immediate.ref()` multiple times will have no <ide> effect. <ide> <del>By default, all `Immediate` objects are "ref'd", making it normally unnecessary <add>By default, all `Immediate` objects are "ref'ed", making it normally unnecessary <ide> to call `immediate.ref()` unless `immediate.unref()` had been called previously. <ide> <ide> Returns a reference to the `Immediate`. <ide> added: v0.9.1 <ide> When called, requests that the Node.js event loop *not* exit so long as the <ide> `Timeout` is active. Calling `timeout.ref()` multiple times will have no effect. <ide> <del>By default, all `Timeout` objects are "ref'd", making it normally unnecessary <add>By default, all `Timeout` objects are "ref'ed", making it normally unnecessary <ide> to call `timeout.ref()` unless `timeout.unref()` had been called previously. <ide> <ide> Returns a reference to the `Timeout`. <ide><path>doc/api/tls.md <ide> The typical flow of an OCSP Request is as follows: <ide> listener if registered. <ide> 3. Server extracts the OCSP URL from either the `certificate` or `issuer` and <ide> performs an [OCSP request] to the CA. <del>4. Server receives `OCSPResponse` from the CA and sends it back to the client <add>4. Server receives `'OCSPResponse'` from the CA and sends it back to the client <ide> via the `callback` argument <ide> 5. Client validates the response and either destroys the socket or performs a <ide> handshake. <ide> deprecated: v0.11.3 <ide> The `'secure'` event is emitted by the `SecurePair` object once a secure <ide> connection has been established. <ide> <del>As with checking for the server [`secureConnection`](#tls_event_secureconnection) <add>As with checking for the server <add>[`'secureConnection'`](#tls_event_secureconnection) <ide> event, `pair.cleartext.authorized` should be inspected to confirm whether the <ide> certificate used is properly authorized. <ide> <ide><path>doc/api/util.md <ide> exports.obsoleteFunction = util.deprecate(() => { <ide> ``` <ide> <ide> When called, `util.deprecate()` will return a function that will emit a <del>`DeprecationWarning` using the `process.on('warning')` event. The warning will <add>`DeprecationWarning` using the [`'warning'`][] event. 
The warning will <ide> be emitted and printed to `stderr` the first time the returned function is <ide> called. After the warning is emitted, the wrapped function is called without <ide> emitting a warning. <ide> deprecated: v0.11.3 <ide> Deprecated predecessor of `console.log`. <ide> <ide> [`'uncaughtException'`]: process.html#process_event_uncaughtexception <add>[`'warning'`]: process.html#process_event_warning <ide> [`Array.isArray()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/isArray <ide> [`ArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer <ide> [`ArrayBuffer.isView()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/isView <ide><path>doc/api/zlib.md <ide> changes: <ide> - version: v6.0.0 <ide> pr-url: https://github.com/nodejs/node/pull/5883 <ide> description: Trailing garbage at the end of the input stream will now <del> result in an `error` event. <add> result in an `'error'` event. <ide> - version: v5.9.0 <ide> pr-url: https://github.com/nodejs/node/pull/5120 <ide> description: Multiple concatenated gzip file members are supported now. <ide> - version: v5.0.0 <ide> pr-url: https://github.com/nodejs/node/pull/2595 <del> description: A truncated input stream will now result in an `error` event. <add> description: A truncated input stream will now result in an `'error'` event. <ide> --> <ide> <ide> Decompress a gzip stream. <ide> added: v0.5.8 <ide> changes: <ide> - version: v5.0.0 <ide> pr-url: https://github.com/nodejs/node/pull/2595 <del> description: A truncated input stream will now result in an `error` event. <add> description: A truncated input stream will now result in an `'error'` event. <ide> --> <ide> <ide> Decompress a deflate stream. <ide> changes: <ide> description: Custom dictionaries are now supported by `InflateRaw`. <ide> - version: v5.0.0 <ide> pr-url: https://github.com/nodejs/node/pull/2595 <del> description: A truncated input stream will now result in an `error` event. <add> description: A truncated input stream will now result in an `'error'` event. <ide> --> <ide> <ide> Decompress a raw deflate stream.
26
Javascript
Javascript
simplify report uncaught exception logic
dc1d331403feb3b9d1ca1a2bd306ca7428414ed8
<ide><path>lib/internal/process/execution.js <ide> function createFatalException() { <ide> if (er == null || er.domain == null) { <ide> try { <ide> const report = internalBinding('report'); <del> if (report != null) { <del> if (require('internal/options').getOptionValue( <del> '--experimental-report')) { <del> const config = {}; <del> report.syncConfig(config, false); <del> if (Array.isArray(config.events) && <del> config.events.includes('exception')) { <del> if (er) { <del> report.onUnCaughtException(er.stack); <del> } else { <del> report.onUnCaughtException(undefined); <del> } <del> } <add> if (report != null && <add> require('internal/options') <add> .getOptionValue('--experimental-report')) { <add> const config = {}; <add> report.syncConfig(config, false); <add> if (Array.isArray(config.events) && <add> config.events.includes('exception')) { <add> report.onUnCaughtException(er ? er.stack : undefined); <ide> } <ide> } <ide> } catch {} // NOOP, node_report unavailable.
1
Javascript
Javascript
remove material.skinning from webglshadowmap
42e26f906d1f25a701a345fd24bf9b209446fac4
<ide><path>src/renderers/webgl/WebGLShadowMap.js <ide> function WebGLShadowMap( _renderer, _lights, _objects, capabilities ) { <ide> <ide> var depthMaterial = depthMaterialTemplate.clone(); <ide> depthMaterial.morphTargets = useMorphing; <del> depthMaterial.skinning = useSkinning; <ide> <ide> _depthMaterials[ i ] = depthMaterial; <ide> <ide> function WebGLShadowMap( _renderer, _lights, _objects, capabilities ) { <ide> vertexShader: distanceShader.vertexShader, <ide> fragmentShader: distanceShader.fragmentShader, <ide> morphTargets: useMorphing, <del> skinning: useSkinning, <ide> clipping: true <ide> } ); <ide>
1
PHP
PHP
update docblocks for deletemany()
f235093c8715ff9ce2b0f19dec2f8ff69b6367fb
<ide><path>src/ORM/Table.php <ide> public function delete(EntityInterface $entity, $options = []): bool <ide> * <ide> * @param \Cake\Datasource\EntityInterface[]|\Cake\Datasource\ResultSetInterface $entities Entities to delete. <ide> * @param array|\ArrayAccess $options Options used when calling Table::save() for each entity. <del> * @return bool|\Cake\Datasource\EntityInterface[]|\Cake\Datasource\ResultSetInterface False on failure, entities list on success. <add> * @return bool|\Cake\Datasource\EntityInterface[]|\Cake\Datasource\ResultSetInterface <add> * False on failure, entities list on success. <ide> * @throws \Exception <add> * @see \Cake\ORM\Table::delete() for options and events related to this method. <ide> */ <ide> public function deleteMany(iterable $entities, $options = []) <ide> { <ide> public function deleteMany(iterable $entities, $options = []) <ide> * @return \Cake\Datasource\EntityInterface[]|\Cake\Datasource\ResultSetInterface Entities list. <ide> * @throws \Exception <ide> * @throws \Cake\ORM\Exception\PersistenceFailedException <add> * @see \Cake\ORM\Table::delete() for options and events related to this method. <ide> */ <ide> public function deleteManyOrFail(iterable $entities, $options = []): iterable <ide> {
1
Python
Python
extend http extra_options to livyhook and operator
70731073d0509ac44777624c03cd9eeae71e6fea
<ide><path>airflow/providers/apache/livy/hooks/livy.py <ide> class LivyHook(HttpHook, LoggingMixin): <ide> conn_type = 'livy' <ide> hook_name = 'Apache Livy' <ide> <del> def __init__(self, livy_conn_id: str = default_conn_name) -> None: <add> def __init__( <add> self, livy_conn_id: str = default_conn_name, extra_options: Optional[Dict[str, Any]] = None <add> ) -> None: <ide> super().__init__(http_conn_id=livy_conn_id) <add> self.extra_options = extra_options or {} <ide> <ide> def get_conn(self, headers: Optional[Dict[str, Any]] = None) -> Any: <ide> """ <ide> def run_method( <ide> method: str = 'GET', <ide> data: Optional[Any] = None, <ide> headers: Optional[Dict[str, Any]] = None, <del> extra_options: Optional[Dict[Any, Any]] = None, <ide> ) -> Any: <ide> """ <ide> Wrapper for HttpHook, allows to change method on the same HttpHook <ide> def run_method( <ide> :type data: dict <ide> :param headers: headers <ide> :type headers: dict <del> :param extra_options: extra options <del> :type extra_options: dict <ide> :return: http response <ide> :rtype: requests.Response <ide> """ <ide> if method not in ('GET', 'POST', 'PUT', 'DELETE', 'HEAD'): <ide> raise ValueError(f"Invalid http method '{method}'") <del> if extra_options is None: <del> extra_options = {'check_response': False} <add> if not self.extra_options: <add> self.extra_options = {'check_response': False} <ide> <ide> back_method = self.method <ide> self.method = method <ide> try: <del> result = self.run(endpoint, data, headers, extra_options) <add> result = self.run(endpoint, data, headers, self.extra_options) <ide> finally: <ide> self.method = back_method <ide> return result <ide><path>airflow/providers/apache/livy/operators/livy.py <ide> class LivyOperator(BaseOperator): <ide> :type livy_conn_id: str <ide> :param polling_interval: time in seconds between polling for job completion. Don't poll for values >=0 <ide> :type polling_interval: int <add> :type extra_options: A dictionary of options, where key is string and value <add> depends on the option that's being modified. <ide> """ <ide> <ide> template_fields = ('spark_params',) <ide> def __init__( <ide> proxy_user: Optional[str] = None, <ide> livy_conn_id: str = 'livy_default', <ide> polling_interval: int = 0, <add> extra_options: Optional[Dict[str, Any]] = None, <ide> **kwargs: Any, <ide> ) -> None: <ide> # pylint: disable-msg=too-many-arguments <ide> def __init__( <ide> <ide> self._livy_conn_id = livy_conn_id <ide> self._polling_interval = polling_interval <add> self._extra_options = extra_options or {} <ide> <ide> self._livy_hook: Optional[LivyHook] = None <ide> self._batch_id: Union[int, str] <ide> def get_hook(self) -> LivyHook: <ide> :rtype: LivyHook <ide> """ <ide> if self._livy_hook is None or not isinstance(self._livy_hook, LivyHook): <del> self._livy_hook = LivyHook(livy_conn_id=self._livy_conn_id) <add> self._livy_hook = LivyHook(livy_conn_id=self._livy_conn_id, extra_options=self._extra_options) <ide> return self._livy_hook <ide> <ide> def execute(self, context: Dict[Any, Any]) -> Any: <ide><path>airflow/providers/apache/livy/sensors/livy.py <ide> class LivySensor(BaseSensorOperator): <ide> :type livy_conn_id: str <ide> :param batch_id: identifier of the monitored batch <ide> :type batch_id: Union[int, str] <add> :type extra_options: A dictionary of options, where key is string and value <add> depends on the option that's being modified. 
<ide> """ <ide> <ide> template_fields = ('batch_id',) <ide> <ide> @apply_defaults <ide> def __init__( <del> self, *, batch_id: Union[int, str], livy_conn_id: str = 'livy_default', **kwargs: Any <add> self, <add> *, <add> batch_id: Union[int, str], <add> livy_conn_id: str = 'livy_default', <add> extra_options: Optional[Dict[str, Any]] = None, <add> **kwargs: Any, <ide> ) -> None: <ide> super().__init__(**kwargs) <add> self.batch_id = batch_id <ide> self._livy_conn_id = livy_conn_id <del> self._batch_id = batch_id <ide> self._livy_hook: Optional[LivyHook] = None <add> self._extra_options = extra_options or {} <ide> <ide> def get_hook(self) -> LivyHook: <ide> """ <ide> def get_hook(self) -> LivyHook: <ide> :rtype: LivyHook <ide> """ <ide> if self._livy_hook is None or not isinstance(self._livy_hook, LivyHook): <del> self._livy_hook = LivyHook(livy_conn_id=self._livy_conn_id) <add> self._livy_hook = LivyHook(livy_conn_id=self._livy_conn_id, extra_options=self._extra_options) <ide> return self._livy_hook <ide> <ide> def poke(self, context: Dict[Any, Any]) -> bool: <del> batch_id = self._batch_id <add> batch_id = self.batch_id <ide> <ide> status = self.get_hook().get_batch_state(batch_id) <ide> return status in self.get_hook().TERMINAL_STATES <ide><path>tests/providers/apache/livy/operators/test_livy.py <ide> def test_execution(self, mock_post, mock_get): <ide> assert call_args == {'file': 'sparkapp'} <ide> mock_get.assert_called_once_with(BATCH_ID) <ide> <add> @patch('airflow.providers.apache.livy.operators.livy.LivyHook.post_batch') <add> def test_execution_with_extra_options(self, mock_post): <add> extra_options = {'check_response': True} <add> task = LivyOperator( <add> file='sparkapp', dag=self.dag, task_id='livy_example', extra_options=extra_options <add> ) <add> <add> task.execute(context={}) <add> <add> assert task.get_hook().extra_options == extra_options <add> <ide> @patch('airflow.providers.apache.livy.operators.livy.LivyHook.delete_batch') <ide> @patch('airflow.providers.apache.livy.operators.livy.LivyHook.post_batch', return_value=BATCH_ID) <ide> def test_deletion(self, mock_post, mock_delete):
4
Python
Python
check submodules on sdist
4cd72742079c6eba4ec0803975c43b779545b537
<ide><path>setup.py <ide> def configuration(parent_package='',top_path=None): <ide> <ide> return config <ide> <add>def check_submodules(): <add> """ verify that the submodules are checked out and clean <add> use `git submodule update --init`; on failure <add> """ <add> if not os.path.exists('.git'): <add> return <add> with open('.gitmodules') as f: <add> for l in f: <add> if 'path' in l: <add> p = l.split('=')[-1].strip() <add> if not os.path.exists(p): <add> raise ValueError('Submodule %s missing' % p) <add> <add> <add> proc = subprocess.Popen(['git', 'submodule', 'status'], <add> stdout=subprocess.PIPE) <add> status, _ = proc.communicate() <add> status = status.decode("ascii", "replace") <add> for line in status.splitlines(): <add> if line.startswith('-') or line.startswith('+'): <add> raise ValueError('Submodule not clean: %s' % line) <add> <add>from distutils.command.sdist import sdist <add>class sdist_checked(sdist): <add> """ check submodules on sdist to prevent incomplete tarballs """ <add> def run(self): <add> check_submodules() <add> sdist.run(self) <ide> <ide> def setup_package(): <ide> src_path = os.path.dirname(os.path.abspath(sys.argv[0])) <ide> def setup_package(): <ide> classifiers=[_f for _f in CLASSIFIERS.split('\n') if _f], <ide> platforms = ["Windows", "Linux", "Solaris", "Mac OS-X", "Unix"], <ide> test_suite='nose.collector', <add> cmdclass={"sdist": sdist_checked}, <ide> ) <ide> <ide> # Run build
1
PHP
PHP
fix cs error
8606d5e421258cdbdcb93a6e3394b560c5c29232
<ide><path>src/Utility/Hash.php <ide> public static function sort(array $data, string $path, $dir = 'asc', $type = 're <ide> $dir = strtolower($dir); <ide> } <ide> if (!in_array($dir, [\SORT_ASC, \SORT_DESC], true)) { <del> $dir = ($dir === 'asc') ? \SORT_ASC : \SORT_DESC; <add> $dir = $dir === 'asc' ? \SORT_ASC : \SORT_DESC; <ide> } <ide> <ide> $ignoreCase = false;
1
Text
Text
fix code block
78a1bc3a1c510e08fd1aff81f7d6f957863ae070
<ide><path>curriculum/challenges/arabic/01-responsive-web-design/applied-accessibility/improve-accessibility-of-audio-content-with-the-audio-element.arabic.md <ide> localeTitle: تحسين إمكانية الوصول إلى المحتوى الص <ide> --- <ide> <ide> ## Description <del><section id="description"> يمنح عنصر HTML5 <code>audio</code> معنىً دلاليًا عندما يلف محتوى دفق الصوت أو الصوت في الترميز. يحتاج المحتوى الصوتي أيضًا إلى بديل نصي ليكون متاحًا للأشخاص الصم أو ضعاف السمع. يمكن إجراء ذلك باستخدام نص مجاور على الصفحة أو رابط إلى نص. تدعم علامة <code>audio</code> سمة <code>controls</code> . يعرض هذا التشغيل الافتراضي للمتصفح والإيقاف المؤقت وعناصر التحكم الأخرى ، ويدعم وظيفة لوحة المفاتيح. هذه سمة منطقية ، بمعنى أنها لا تحتاج إلى قيمة ، حيث يؤدي وجودها على العلامة إلى تشغيل الإعداد. إليك مثال على ذلك: <blockquote style=";text-align:right;direction:rtl"> &lt;audio id = &quot;meowClip&quot; controls&gt; <br> &lt;source src = &quot;audio / meow.mp3&quot; type = &quot;audio / mpeg&quot; /&gt; <br> &lt;source src = &quot;audio / meow.ogg&quot; type = &quot;audio / ogg&quot; /&gt; <br> &lt;/ الصوت&gt; <br></blockquote> <strong>ملحوظة</strong> <br> عادةً ما يحتوي محتوى الوسائط المتعددة على مكونات مرئية وسمعية. يحتاج إلى تسميات توضيحية متزامنة وملف نصي حتى يتمكن المستخدمون الذين يعانون من إعاقات بصرية و / أو سمعية من الوصول إليه. بشكل عام ، لا يتحمل مطور الويب مسؤولية إنشاء التسميات التوضيحية أو النص ، ولكن يجب أن يعرف كيفية تضمينها. </section> <add><section id="description"> يمنح عنصر HTML5 <code>audio</code> معنىً دلاليًا عندما يلف محتوى دفق الصوت أو الصوت في الترميز. يحتاج المحتوى الصوتي أيضًا إلى بديل نصي ليكون متاحًا للأشخاص الصم أو ضعاف السمع. يمكن إجراء ذلك باستخدام نص مجاور على الصفحة أو رابط إلى نص. تدعم علامة <code>audio</code> سمة <code>controls</code> . يعرض هذا التشغيل الافتراضي للمتصفح والإيقاف المؤقت وعناصر التحكم الأخرى ، ويدعم وظيفة لوحة المفاتيح. هذه سمة منطقية ، بمعنى أنها لا تحتاج إلى قيمة ، حيث يؤدي وجودها على العلامة إلى تشغيل الإعداد. إليك مثال على ذلك: <add> <add>```html <add><audio id="meowClip" controls> <add> <source src="audio/meow.mp3" type="audio/mpeg" /> <add> <source src="audio/meow.ogg" type="audio/ogg" /> <add></audio> <add>``` <add> <add><strong>ملحوظة</strong> <br> عادةً ما يحتوي محتوى الوسائط المتعددة على مكونات مرئية وسمعية. يحتاج إلى تسميات توضيحية متزامنة وملف نصي حتى يتمكن المستخدمون الذين يعانون من إعاقات بصرية و / أو سمعية من الوصول إليه. بشكل عام ، لا يتحمل مطور الويب مسؤولية إنشاء التسميات التوضيحية أو النص ، ولكن يجب أن يعرف كيفية تضمينها. </section> <ide> <ide> ## Instructions <ide> <section id="instructions"> حان الوقت للاستراحة من Camper Cat ومقابلة زميل Zperiax (zersiax) ، وهو بطل إمكانية الوصول ومستخدم قارئ الشاشة. للاستماع إلى مقطع قارئ الشاشة الخاص به أثناء العمل ، أضف عنصرًا <code>audio</code> بعد <code>p</code> . قم بتضمين سمة <code>controls</code> . ثم ضع علامة <code>source</code> داخل علامات <code>audio</code> مع تعيين سمة <code>src</code> إلى &quot;https://s3.amazonaws.com/freecodecamp/screen-reader.mp3&quot; <code>type</code> تعيين السمة على &quot;audio / mpeg&quot;. <strong>ملحوظة</strong> <br> قد يبدو المقطع الصوتي سريعًا ويصعب فهمه ، ولكن هذه سرعة عادية لمستخدمي قارئ الشاشة. </section>
1
Text
Text
add docs about local_assigns on guides
24bcfed15dceb16641896d4b1a875e043d863221
<ide><path>guides/source/layouts_and_rendering.md <ide> You can also pass local variables into partials, making them even more powerful <ide> <ide> Although the same partial will be rendered into both views, Action View's submit helper will return "Create Zone" for the new action and "Update Zone" for the edit action. <ide> <add>To pass a local variable to a partial in only specific cases use the `local_assigns`. <add> <add>* `index.html.erb` <add> <add> ```erb <add> <%= render user.articles %> <add> ``` <add> <add>* `show.html.erb` <add> <add> ```erb <add> <%= render article, full: true %> <add> ``` <add> <add>* `_articles.html.erb` <add> <add> ```erb <add> <%= content_tag_for :article, article do |article| %> <add> <h2><%= article.title %></h2> <add> <add> <% if local_assigns[:full] %> <add> <%= simple_format article.body %> <add> <% else %> <add> <%= truncate article.body %> <add> <% end %> <add> <% end %> <add> ``` <add> <add>This way it is possible to use the partial without the need to declare all local variables. <add> <ide> Every partial also has a local variable with the same name as the partial (minus the underscore). You can pass an object in to this local variable via the `:object` option: <ide> <ide> ```erb
1
Javascript
Javascript
remove old bundler code from server class
e9393f694d8f0d0190a3576fd65a65f747f8cce2
<ide><path>local-cli/server/util/__tests__/getInverseDependencies-test.js <del>/** <del> * Copyright (c) 2015-present, Facebook, Inc. <del> * All rights reserved. <del> * <del> * This source code is licensed under the BSD-style license found in the <del> * LICENSE file in the root directory of this source tree. An additional grant <del> * of patent rights can be found in the PATENTS file in the same directory. <del> * <del> * @emails oncall+javascript_foundation <del> */ <del>'use strict'; <del> <del>jest.dontMock('../getInverseDependencies'); <del> <del>const getInverseDependencies = require('../getInverseDependencies'); <del> <del>describe('getInverseDependencies', () => { <del> it('', () => { <del> const module1 = createModule('module1', ['module2', 'module3']); <del> const module2 = createModule('module2', ['module3', 'module4']); <del> const module3 = createModule('module3', ['module4']); <del> const module4 = createModule('module4', []); <del> <del> const modulePairs = { <del> 'module1': [['module2', module2], ['module3', module3]], <del> 'module2': [['module3', module3], ['module4', module4]], <del> 'module3': [['module4', module4]], <del> 'module4': [], <del> }; <del> <del> const resolutionResponse = { <del> dependencies: [module1, module2, module3, module4], <del> getResolvedDependencyPairs: module => { <del> return modulePairs[module.hash()]; <del> }, <del> }; <del> <del> const dependencies = getInverseDependencies(resolutionResponse); <del> const actual = // jest can't compare maps and sets <del> Array.from(dependencies.entries()) <del> .map(([key, value]) => [key, Array.from(value)]); <del> <del> expect(actual).toEqual([ <del> [module2, [module1]], <del> [module3, [module1, module2]], <del> [module4, [module2, module3]], <del> ]); <del> }); <del>}); <del> <del>function createModule(name, dependencies) { <del> return { <del> hash: () => name, <del> getName: () => Promise.resolve(name), <del> getDependencies: () => Promise.resolve(dependencies), <del> }; <del>} <ide><path>local-cli/server/util/attachHMRServer.js <del>/** <del> * Copyright (c) 2015-present, Facebook, Inc. <del> * All rights reserved. <del> * <del> * This source code is licensed under the BSD-style license found in the <del> * LICENSE file in the root directory of this source tree. An additional grant <del> * of patent rights can be found in the PATENTS file in the same directory. <del> * <del> * @flow <del> */ <del> <del>'use strict'; <del> <del>const getInverseDependencies = require('./getInverseDependencies'); <del>const querystring = require('querystring'); <del>const url = require('url'); <del> <del>import type {ResolutionResponse} from './getInverseDependencies'; <del>import type {Server as HTTPServer} from 'http'; <del>import type {Server as HTTPSServer} from 'https'; <del>import type {Client as WebSocketClient} from 'ws'; <del> <del>const blacklist = [ <del> 'Libraries/Utilities/HMRClient.js', <del>]; <del> <del>type HMRBundle = { <del> getModulesIdsAndCode(): Array<{id: string, code: string}>, <del> getSourceMappingURLs(): Array<mixed>, <del> getSourceURLs(): Array<mixed>, <del> isEmpty(): boolean, <del>}; <del> <del>type DependencyOptions = {| <del> +dev: boolean, <del> +entryFile: string, <del> +hot: boolean, <del> +minify: boolean, <del> +platform: ?string, <del> +recursive: boolean, <del> +rootEntryFile: string, <del> +bundlingOptions?: Object, <del>|}; <del> <del>/** <del> * This is a subset of the actual `metro-bundler`'s `Server` class, <del> * without all the stuff we don't need to know about. 
This allows us to use <del> * `attachHMRServer` with different versions of `metro-bundler` as long as <del> * this specific contract is maintained. <del> */ <del>type PackagerServer<TModule> = { <del> buildBundleForHMR( <del> options: {platform: ?string}, <del> host: string, <del> port: number, <del> ): Promise<HMRBundle>, <del> getDependencies(options: DependencyOptions): Promise<ResolutionResponse<TModule>>, <del> getModuleForPath(entryFile: string): Promise<TModule>, <del> getShallowDependencies(options: DependencyOptions): Promise<Array<string>>, <del> setHMRFileChangeListener(listener: ?(type: string, filePath: string) => mixed): void, <del>}; <del> <del>type HMROptions<TModule> = { <del> httpServer: HTTPServer | HTTPSServer, <del> packagerServer: PackagerServer<TModule>, <del> path: string, <del>}; <del> <del>type Moduleish = { <del> getName(): string, <del> isAsset(): boolean, <del> isJSON(): boolean, <del> path: string, <del>}; <del> <del>/** <del> * Attaches a WebSocket based connection to the Packager to expose <del> * Hot Module Replacement updates to the simulator. <del> */ <del>function attachHMRServer<TModule: Moduleish>( <del> {httpServer, path, packagerServer}: HMROptions<TModule>, <del>) { <del> type Client = {| <del> ws: WebSocketClient, <del> platform: string, <del> bundleEntry: string, <del> dependenciesCache: Array<string>, <del> dependenciesModulesCache: {[mixed]: TModule}, <del> shallowDependencies: {[string]: Array<string>}, <del> inverseDependenciesCache: mixed, <del> |}; <del> <del> const clients: Set<Client> = new Set(); <del> <del> function disconnect(client: Client) { <del> clients.delete(client); <del> <del> // If there are no clients connected, stop listenig for file changes <del> if (clients.size === 0) { <del> packagerServer.setHMRFileChangeListener(null); <del> } <del> } <del> <del> // For the give platform and entry file, returns a promise with: <del> // - The full list of dependencies. <del> // - The shallow dependencies each file on the dependency list has <del> // - Inverse shallow dependencies map <del> async function getDependencies(platform: string, bundleEntry: string): Promise<{ <del> dependenciesCache: Array<string>, <del> dependenciesModulesCache: {[mixed]: TModule}, <del> shallowDependencies: {[string]: Array<string>}, <del> inverseDependenciesCache: mixed, <del> /* $FlowFixMe(>=0.54.0 site=react_native_fb,react_native_oss) This comment <del> * suppresses an error found when Flow v0.54 was deployed. To see the error <del> * delete this comment and run Flow. 
*/ <del> resolutionResponse: ResolutionResponse<TModule>, <del> }> { <del> const response = await packagerServer.getDependencies({ <del> dev: true, <del> entryFile: bundleEntry, <del> rootEntryFile: bundleEntry, <del> hot: true, <del> minify: false, <del> platform: platform, <del> recursive: true, <del> }); <del> <del> /* $FlowFixMe: getModuleId might be null */ <del> const {getModuleId}: {getModuleId: () => number} = response; <del> <del> // for each dependency builds the object: <del> // `{path: '/a/b/c.js', deps: ['modA', 'modB', ...]}` <del> const deps: Array<{ <del> path: string, <del> name?: string, <del> deps: Array<string>, <del> }> = await Promise.all(response.dependencies.map(async (dep: TModule) => { <del> const depName = dep.getName(); <del> <del> if (dep.isAsset() || dep.isJSON()) { <del> return {path: dep.path, deps: []}; <del> } <del> const dependencies = await packagerServer.getShallowDependencies({ <del> dev: true, <del> entryFile: dep.path, <del> rootEntryFile: bundleEntry, <del> hot: true, <del> minify: false, <del> platform: platform, <del> recursive: true, <del> bundlingOptions: response.options, <del> }); <del> <del> return { <del> path: dep.path, <del> name: depName, <del> deps: dependencies, <del> }; <del> })); <del> <del> // list with all the dependencies' filenames the bundle entry has <del> const dependenciesCache = response.dependencies.map(dep => dep.path); <del> <del> // map from module name to path <del> const moduleToFilenameCache = Object.create(null); <del> deps.forEach(dep => { <del> /* $FlowFixMe: `name` could be null, but `deps` would be as well. */ <del> moduleToFilenameCache[dep.name] = dep.path; <del> }); <del> <del> // map that indicates the shallow dependency each file included on the <del> // bundle has <del> const shallowDependencies = Object.create(null); <del> deps.forEach(dep => { <del> shallowDependencies[dep.path] = dep.deps; <del> }); <del> <del> // map from module name to the modules' dependencies the bundle entry <del> // has <del> const dependenciesModulesCache = Object.create(null); <del> response.dependencies.forEach(dep => { <del> dependenciesModulesCache[getModuleId(dep)] = dep; <del> }); <del> <del> const inverseDependenciesCache = Object.create(null); <del> const inverseDependencies = getInverseDependencies(response); <del> for (const [module, dependents] of inverseDependencies) { <del> inverseDependenciesCache[getModuleId(module)] = <del> Array.from(dependents).map(getModuleId); <del> } <del> <del> /* $FlowFixMe(>=0.56.0 site=react_native_oss) This comment suppresses an <del> * error found when Flow v0.56 was deployed. To see the error delete this <del> * comment and run Flow. */ <del> /* $FlowFixMe(>=0.56.0 site=react_native_fb,react_native_oss) This comment <del> * suppresses an error found when Flow v0.56 was deployed. To see the error <del> * delete this comment and run Flow. 
*/ <del> return { <del> dependenciesCache, <del> dependenciesModulesCache, <del> shallowDependencies, <del> inverseDependenciesCache, <del> resolutionResponse: response, <del> }; <del> } <del> <del> async function prepareResponse( <del> client: Client, <del> filename: string, <del> ): Promise<?Object> { <del> try { <del> const bundle = await generateBundle(client, filename); <del> <del> if (!bundle || bundle.isEmpty()) { <del> return; <del> } <del> <del> return { <del> type: 'update', <del> body: { <del> modules: bundle.getModulesIdsAndCode(), <del> inverseDependencies: client.inverseDependenciesCache, <del> sourceURLs: bundle.getSourceURLs(), <del> sourceMappingURLs: bundle.getSourceMappingURLs(), <del> }, <del> }; <del> } catch (error) { <del> // send errors to the client instead of killing packager server <del> let body; <del> if (error.type === 'TransformError' || <del> error.type === 'NotFoundError' || <del> error.type === 'UnableToResolveError') { <del> body = { <del> type: error.type, <del> description: error.description, <del> filename: error.filename, <del> lineNumber: error.lineNumber, <del> }; <del> } else { <del> console.error(error.stack || error); <del> body = { <del> type: 'InternalError', <del> description: 'react-packager has encountered an internal error, ' + <del> 'please check your terminal error output for more details', <del> }; <del> } <del> <del> return {type: 'error', body}; <del> } <del> } <del> <del> async function generateBundle( <del> client: Client, <del> filename: string, <del> ): Promise<?HMRBundle> { <del> // If the main file is an asset, do not generate a bundle. <del> const moduleToUpdate = await packagerServer.getModuleForPath(filename); <del> if (moduleToUpdate.isAsset()) { <del> return; <del> } <del> <del> const deps = await packagerServer.getShallowDependencies({ <del> dev: true, <del> minify: false, <del> entryFile: filename, <del> rootEntryFile: client.bundleEntry, <del> hot: true, <del> platform: client.platform, <del> recursive: true, <del> }); <del> <del> // if the file dependencies have change we need to invalidate the <del> // dependencies caches because the list of files we need to send <del> // to the client may have changed <del> const oldDependencies = client.shallowDependencies[filename]; <del> <del> let resolutionResponse; <del> <del> if (arrayEquals(deps, oldDependencies)) { <del> // Need to create a resolution response to pass to the bundler <del> // to process requires after transform. By providing a <del> // specific response we can compute a non recursive one which <del> // is the least we need and improve performance. 
<del> const response = await packagerServer.getDependencies({ <del> dev: true, <del> entryFile: filename, <del> rootEntryFile: client.bundleEntry, <del> hot: true, <del> minify: false, <del> platform: client.platform, <del> recursive: true, <del> }); <del> <del> resolutionResponse = await response.copy({ <del> dependencies: [moduleToUpdate]}, <del> ); <del> } else { <del> // if there're new dependencies compare the full list of <del> // dependencies we used to have with the one we now have <del> const { <del> dependenciesCache: depsCache, <del> dependenciesModulesCache: depsModulesCache, <del> shallowDependencies: shallowDeps, <del> inverseDependenciesCache: inverseDepsCache, <del> resolutionResponse: myResolutionReponse, <del> } = await getDependencies(client.platform, client.bundleEntry); <del> <del> // build list of modules for which we'll send HMR updates <del> const modulesToUpdate = [moduleToUpdate]; <del> Object.keys(depsModulesCache).forEach(module => { <del> if (!client.dependenciesModulesCache[module]) { <del> modulesToUpdate.push(depsModulesCache[module]); <del> } <del> }); <del> <del> // Need to send modules to the client in an order it can <del> // process them: if a new dependency graph was uncovered <del> // because a new dependency was added, the file that was <del> // changed, which is the root of the dependency tree that <del> // will be sent, needs to be the last module that gets <del> // processed. Reversing the new modules makes sense <del> // because we get them through the resolver which returns <del> // a BFS ordered list. <del> modulesToUpdate.reverse(); <del> <del> // invalidate caches <del> client.dependenciesCache = depsCache; <del> client.dependenciesModulesCache = depsModulesCache; <del> client.shallowDependencies = shallowDeps; <del> client.inverseDependenciesCache = inverseDepsCache; <del> <del> resolutionResponse = await myResolutionReponse.copy({ <del> dependencies: modulesToUpdate, <del> }); <del> } <del> <del> // make sure the file was modified is part of the bundle <del> if (!client.shallowDependencies[filename]) { <del> return; <del> } <del> <del> const httpServerAddress = httpServer.address(); <del> <del> // Sanitize the value from the HTTP server <del> let packagerHost = 'localhost'; <del> if (httpServer.address().address && <del> httpServer.address().address !== '::' && <del> httpServer.address().address !== '') { <del> packagerHost = httpServerAddress.address; <del> } <del> <del> const bundle: HMRBundle = await packagerServer.buildBundleForHMR( <del> { <del> entryFile: client.bundleEntry, <del> platform: client.platform, <del> resolutionResponse, <del> }, <del> packagerHost, <del> httpServerAddress.port, <del> ); <del> <del> return bundle; <del> } <del> <del> function handleFileChange( <del> type: string, <del> filename: string, <del> ): void { <del> clients.forEach( <del> client => sendFileChangeToClient(client, type, filename), <del> ); <del> } <del> <del> async function sendFileChangeToClient( <del> client: Client, <del> type: string, <del> filename: string, <del> ): Promise<mixed> { <del> const blacklisted = blacklist.find( <del> blacklistedPath => filename.indexOf(blacklistedPath) !== -1, <del> ); <del> if (blacklisted) { <del> return; <del> } <del> <del> if (clients.has(client)) { <del> client.ws.send(JSON.stringify({type: 'update-start'})); <del> } <del> <del> if (type !== 'delete') { <del> const response = await prepareResponse(client, filename); <del> <del> if (response && clients.has(client)) { <del> 
client.ws.send(JSON.stringify(response)); <del> } <del> } <del> <del> if (clients.has(client)) { <del> client.ws.send(JSON.stringify({type: 'update-done'})); <del> } <del> } <del> <del> /* $FlowFixMe(>=0.54.0 site=react_native_oss) This comment suppresses an <del> * error found when Flow v0.54 was deployed. To see the error delete this <del> * comment and run Flow. */ <del> const WebSocketServer = require('ws').Server; <del> const wss = new WebSocketServer({ <del> server: httpServer, <del> path: path, <del> }); <del> <del> wss.on('connection', async ws => { <del> /* $FlowFixMe: url might be null */ <del> const params = querystring.parse(url.parse(ws.upgradeReq.url).query); <del> <del> const { <del> dependenciesCache, <del> dependenciesModulesCache, <del> shallowDependencies, <del> inverseDependenciesCache, <del> } = await getDependencies(params.platform, params.bundleEntry); <del> <del> const client = { <del> ws, <del> platform: params.platform, <del> bundleEntry: params.bundleEntry, <del> dependenciesCache, <del> dependenciesModulesCache, <del> shallowDependencies, <del> inverseDependenciesCache, <del> }; <del> clients.add(client); <del> <del> // If this is the first client connecting, start listening to file changes <del> if (clients.size === 1) { <del> packagerServer.setHMRFileChangeListener(handleFileChange); <del> } <del> <del> client.ws.on('error', e => { <del> console.error('[Hot Module Replacement] Unexpected error', e); <del> disconnect(client); <del> }); <del> <del> client.ws.on('close', () => disconnect(client)); <del> }); <del>} <del> <del>function arrayEquals<T>(arrayA: Array<T>, arrayB: Array<T>): boolean { <del> arrayA = arrayA || []; <del> arrayB = arrayB || []; <del> return ( <del> arrayA.length === arrayB.length && <del> arrayA.every((element, index) => { <del> return element === arrayB[index]; <del> }) <del> ); <del>} <del> <del>module.exports = attachHMRServer; <ide><path>local-cli/server/util/getInverseDependencies.js <del>/** <del> * Copyright (c) 2015-present, Facebook, Inc. <del> * All rights reserved. <del> * <del> * This source code is licensed under the BSD-style license found in the <del> * LICENSE file in the root directory of this source tree. An additional grant <del> * of patent rights can be found in the PATENTS file in the same directory. <del> * <del> * @flow <del> * @format <del> */ <del> <del>'use strict'; <del> <del>/** <del> * This is a subset of the actual `metro-bundler`'s `ResolutionResponse` class, <del> * without all the stuff we don't need to know about. This allows us to use <del> * `getInverseDependencies` with different versions of `metro-bundler`. <del> */ <del>export type ResolutionResponse<TModule> = { <del> copy(data: { <del> dependencies?: Array<TModule>, <del> mainModuleId?: number, <del> mocks?: mixed, <del> }): ResolutionResponse<TModule>, <del> dependencies: Array<TModule>, <del> getResolvedDependencyPairs( <del> module: TModule, <del> ): $ReadOnlyArray<[string, TModule]>, <del> options: Object, <del>}; <del> <del>function resolveModuleRequires<TModule>( <del> resolutionResponse: ResolutionResponse<TModule>, <del> module: TModule, <del>): Array<TModule> { <del> const pairs = resolutionResponse.getResolvedDependencyPairs(module); <del> return pairs ? 
pairs.map(([, dependencyModule]) => dependencyModule) : []; <del>} <del> <del>function getModuleDependents<TModule>( <del> cache: Map<TModule, Set<TModule>>, <del> module: TModule, <del>): Set<TModule> { <del> let dependents = cache.get(module); <del> if (!dependents) { <del> dependents = new Set(); <del> cache.set(module, dependents); <del> } <del> return dependents; <del>} <del> <del>/** <del> * Returns an object that indicates in which module each module is required. <del> */ <del>function getInverseDependencies<TModule>( <del> resolutionResponse: ResolutionResponse<TModule>, <del>): Map<TModule, Set<TModule>> { <del> const cache = new Map(); <del> <del> resolutionResponse.dependencies.forEach(module => { <del> resolveModuleRequires(resolutionResponse, module).forEach(dependency => { <del> getModuleDependents(cache, dependency).add(module); <del> }); <del> }); <del> <del> return cache; <del>} <del> <del>module.exports = getInverseDependencies;
3
Python
Python
keep input types with lambda layers
328df7303425c2e3bf4274bb1eb0a2040bab5d8c
<ide><path>keras/layers/core.py <ide> from ..utils.generic_utils import func_load <ide> from ..utils.generic_utils import deserialize_keras_object <ide> from ..utils.generic_utils import has_arg <del>from ..utils import conv_utils <ide> from ..legacy import interfaces <ide> <ide> <ide> def __init__(self, function, output_shape=None, <ide> mask=None, arguments=None, **kwargs): <ide> super(Lambda, self).__init__(**kwargs) <ide> self.function = function <add> self._input_dtypes = None <ide> self.arguments = arguments if arguments else {} <ide> if mask is not None: <ide> self.supports_masking = True <ide> def compute_output_shape(self, input_shape): <ide> # With TensorFlow or CNTK, we can infer the output shape directly: <ide> if K.backend() in ('tensorflow', 'cntk'): <ide> if isinstance(input_shape, list): <del> xs = [K.placeholder(shape=shape) for shape in input_shape] <add> xs = [K.placeholder(shape=shape, dtype=dtype) <add> for shape, dtype in zip(input_shape, self._input_dtypes)] <ide> x = self.call(xs) <ide> else: <del> x = K.placeholder(shape=input_shape) <add> x = K.placeholder(shape=input_shape, dtype=self._input_dtypes) <ide> x = self.call(x) <ide> if isinstance(x, list): <ide> return [K.int_shape(x_elem) for x_elem in x] <ide> def call(self, inputs, mask=None): <ide> arguments = self.arguments <ide> if has_arg(self.function, 'mask'): <ide> arguments['mask'] = mask <add> if isinstance(inputs, list): <add> self._input_dtypes = [K.dtype(x) for x in inputs] <add> else: <add> self._input_dtypes = K.dtype(inputs) <ide> return self.function(inputs, **arguments) <ide> <ide> def compute_mask(self, inputs, mask=None): <ide><path>tests/keras/layers/core_test.py <ide> def output_shape(input_shape): <ide> <ide> test_multiple_outputs_no_mask() <ide> <add> def test_dtypes(): <add> def func(x): <add> if K.dtype(x) != 'float16': <add> raise TypeError('x dtype is not float16, it is', K.dtype(x)) <add> return x <add> <add> i = layers.Input(shape=(3, 2, 1), dtype='float16') <add> o = layers.Lambda(func) <add> _ = o(i) <add> assert o._input_dtypes == 'float16' <add> test_dtypes() <add> <ide> # test serialization with function <ide> def f(x): <ide> return x + 1
2
Ruby
Ruby
add missing require to test_env
826064880e8c1b0b2153b2157294bba9b8eaaf5e
<ide><path>Library/Homebrew/test/test_ENV.rb <ide> require 'testing_env' <ide> require 'utils' <add>require 'hardware' <ide> require 'extend/ENV' <ide> ENV.extend(HomebrewEnvExtension) <ide>
1
Python
Python
add codecarbon integration
037e466b1056b49f29e731c4ebe15889c227a63c
<ide><path>setup.py <ide> _deps = [ <ide> "Pillow", <ide> "black==21.4b0", <add> "codecarbon==1.2.0", <ide> "cookiecutter==1.7.2", <ide> "dataclasses", <ide> "datasets", <ide> def run(self): <ide> extras["speech"] = deps_list("soundfile", "torchaudio") <ide> extras["vision"] = deps_list("Pillow") <ide> extras["timm"] = deps_list("timm") <add>extras["codecarbon"] = deps_list("codecarbon") <ide> <ide> extras["sentencepiece"] = deps_list("sentencepiece", "protobuf") <ide> extras["testing"] = ( <ide> def run(self): <ide> + extras["vision"] <ide> + extras["integrations"] <ide> + extras["timm"] <add> + extras["codecarbon"] <ide> ) <ide> <ide> extras["docs_specific"] = deps_list( <ide><path>src/transformers/dependency_versions_table.py <ide> deps = { <ide> "Pillow": "Pillow", <ide> "black": "black==21.4b0", <add> "codecarbon": "codecarbon==1.2.0", <ide> "cookiecutter": "cookiecutter==1.7.2", <ide> "dataclasses": "dataclasses", <ide> "datasets": "datasets", <ide><path>src/transformers/integrations.py <ide> def is_neptune_available(): <ide> return importlib.util.find_spec("neptune") is not None <ide> <ide> <add>def is_codecarbon_available(): <add> return importlib.util.find_spec("codecarbon") is not None <add> <add> <ide> def hp_params(trial): <ide> if is_optuna_available(): <ide> import optuna <ide> def get_available_reporting_integrations(): <ide> integrations.append("tensorboard") <ide> if is_wandb_available(): <ide> integrations.append("wandb") <add> if is_codecarbon_available(): <add> integrations.append("codecarbon") <ide> return integrations <ide> <ide> <ide> def __del__(self): <ide> pass <ide> <ide> <add>class CodeCarbonCallback(TrainerCallback): <add> """ <add> A :class:`~transformers.TrainerCallback` that tracks the CO2 emission of training. <add> """ <add> <add> def __init__(self): <add> assert ( <add> is_codecarbon_available() <add> ), "CodeCarbonCallback requires `codecarbon` to be installed. Run `pip install codecarbon`." <add> import codecarbon <add> <add> self._codecarbon = codecarbon <add> self.tracker = None <add> <add> def on_init_end(self, args, state, control, **kwargs): <add> if self.tracker is None and state.is_local_process_zero: <add> # CodeCarbon will automatically handle environment variables for configuration <add> self.tracker = self._codecarbon.EmissionsTracker(output_dir=args.output_dir) <add> <add> def on_train_begin(self, args, state, control, model=None, **kwargs): <add> if self.tracker and state.is_local_process_zero: <add> self.tracker.start() <add> <add> def on_train_end(self, args, state, control, **kwargs): <add> if self.tracker and state.is_local_process_zero: <add> self.tracker.stop() <add> <add> <ide> INTEGRATION_TO_CALLBACK = { <ide> "azure_ml": AzureMLCallback, <ide> "comet_ml": CometCallback, <ide> "mlflow": MLflowCallback, <ide> "neptune": NeptuneCallback, <ide> "tensorboard": TensorBoardCallback, <ide> "wandb": WandbCallback, <add> "codecarbon": CodeCarbonCallback, <ide> } <ide> <ide>
3
Javascript
Javascript
fix warning message for using react.dom.* as type
810582d114f65b3f86301d53f34235b68d6c7f1e
<ide><path>src/core/ReactLegacyElement.js <ide> function warnForNonLegacyFactory(type) { <ide> warning( <ide> false, <ide> 'Do not pass React.DOM.' + type.type + ' to JSX or createFactory. ' + <del> 'Use the string "' + type + '" instead.' <add> 'Use the string "' + type.type + '" instead.' <ide> ); <ide> } <ide>
1
Javascript
Javascript
fix viewpager warning
e3b6104810f34e68439234be22a36f804c2ca26c
<ide><path>Libraries/Components/ViewPager/ViewPagerAndroid.android.js <ide> var ViewPagerAndroid = React.createClass({ <ide> }], <ide> collapsable: false, <ide> }; <del> if (child.type && child.type.displayName && (child.type.displayName !== 'View')) { <add> if (child.type && <add> child.type.displayName && <add> (child.type.displayName !== 'RCTView') && <add> (child.type.displayName !== 'View')) { <ide> console.warn('Each ViewPager child must be a <View>. Was ' + child.type.displayName); <ide> } <ide> return ReactElement.createElement(child.type, newProps);
1
Text
Text
fix typo in mkdir example
33f80a864156a77545134503398536c5702b73d7
<ide><path>doc/api/fs.md <ide> import { mkdir } from 'node:fs/promises'; <ide> <ide> try { <ide> const projectFolder = new URL('./test/project/', import.meta.url); <del> const createDir = await mkdir(path, { recursive: true }); <add> const createDir = await mkdir(projectFolder, { recursive: true }); <ide> <ide> console.log(`created ${createDir}`); <ide> } catch (err) {
1
Java
Java
lower the priority for logging events in fabric
b2b2caa3444db4d21566ef918add7235f709538d
<ide><path>ReactAndroid/src/main/java/com/facebook/react/fabric/FabricUIManager.java <ide> public long createEventTarget(int reactTag) { <ide> long context = mJSContext.get(); <ide> long eventTarget = mBinding.createEventTarget(context, instanceHandle); <ide> if (DEBUG) { <del> Log.e( <add> Log.d( <ide> TAG, <ide> "Created EventTarget: " + eventTarget + " for tag: " + reactTag + " with instanceHandle: " + instanceHandle); <ide> } <ide> public void releaseEventHandler(long eventHandlerPointer) { <ide> @Override <ide> public void invoke(long eventTarget, String name, WritableMap params) { <ide> if (DEBUG) { <del> Log.e( <add> Log.d( <ide> TAG, <ide> "Dispatching event for target: " + eventTarget); <ide> }
1
Javascript
Javascript
add $q.always() method
6605adf6d96cee2ef53dfad24e99d325df732cab
<ide><path>src/ng/q.js <ide> * This method *returns a new promise* which is resolved or rejected via the return value of the <ide> * `successCallback` or `errorCallback`. <ide> * <add> * - `always(callback)` – allows you to observe either the fulfillment or rejection of a promise, <add> * but to do so without modifying the final value. This is useful to release resources or do some <add> * clean-up that needs to be done whether the promise was rejected or resolved. See the [full <add> * specification](https://github.com/kriskowal/q/wiki/API-Reference#promisefinallycallback) for <add> * more information. <ide> * <ide> * # Chaining promises <ide> * <ide> function qFactory(nextTick, exceptionHandler) { <ide> } <ide> <ide> return result.promise; <add> }, <add> always: function(callback) { <add> <add> function makePromise(value, resolved) { <add> var result = defer(); <add> if (resolved) { <add> result.resolve(value); <add> } else { <add> result.reject(value); <add> } <add> return result.promise; <add> } <add> <add> function handleCallback(value, isResolved) { <add> var callbackOutput = null; <add> try { <add> callbackOutput = (callback ||defaultCallback)(); <add> } catch(e) { <add> return makePromise(e, false); <add> } <add> if (callbackOutput && callbackOutput.then) { <add> return callbackOutput.then(function() { <add> return makePromise(value, isResolved); <add> }, function(error) { <add> return makePromise(error, false); <add> }); <add> } else { <add> return makePromise(value, isResolved); <add> } <add> } <add> <add> return this.then(function(value) { <add> return handleCallback(value, true); <add> }, function(error) { <add> return handleCallback(error, false); <add> }); <ide> } <ide> } <ide> }; <ide><path>test/ng/qSpec.js <ide> describe('q', function() { <ide> it('should have a then method', function() { <ide> expect(typeof promise.then).toBe('function'); <ide> }); <add> <add> it('should have a always method', function() { <add> expect(typeof promise.always).toBe('function'); <add> }); <ide> <ide> <ide> describe('then', function() { <ide> describe('q', function() { <ide> expect(log).toEqual(['error(oops!)']); <ide> }); <ide> }); <add> <add> <add> describe('always', function() { <add> <add> it('should not take an argument', <add> function() { <add> promise.always(success(1)) <add> syncResolve(deferred, 'foo'); <add> expect(logStr()).toBe('success1()'); <add> }); <add> <add> describe("when the promise is fulfilled", function () { <add> <add> it('should call the callback', <add> function() { <add> promise.then(success(1)) <add> .always(success(2)) <add> syncResolve(deferred, 'foo'); <add> expect(logStr()).toBe('success1(foo); success2()'); <add> }); <add> <add> it('should fulfill with the original value', <add> function() { <add> promise.always(success(1)) <add> .then(success(2), error(2)) <add> syncResolve(deferred, 'foo'); <add> expect(logStr()).toBe('success1(); success2(foo)'); <add> }); <add> <add> describe("when the callback returns a promise", function() { <add> <add> describe("that is fulfilled", function() { <add> it("should fulfill with the original reason after that promise resolves", <add> function () { <add> var returnedDef = defer() <add> returnedDef.resolve('bar'); <add> promise.always(success(1, returnedDef.promise)) <add> .then(success(2)) <add> syncResolve(deferred, 'foo'); <add> expect(logStr()).toBe('success1(); success2(foo)'); <add> }); <add> }); <add> <add> describe("that is rejected", function() { <add> it("should reject with this new rejection reason", <add> function 
() { <add> var returnedDef = defer() <add> returnedDef.reject('bar'); <add> promise.always(success(1, returnedDef.promise)) <add> .then(success(2), error(1)) <add> syncResolve(deferred, 'foo'); <add> expect(logStr()).toBe('success1(); error1(bar)'); <add> }); <add> }); <add> <add> }); <add> <add> describe("when the callback throws an exception", function() { <add> it("should reject with this new exception", function() { <add> promise.always(error(1, "exception", true)) <add> .then(success(1), error(2)) <add> syncResolve(deferred, 'foo'); <add> expect(logStr()).toBe('error1(); error2(exception)'); <add> }); <add> }); <add> <add> }); <add> <add> <add> describe("when the promise is rejected", function () { <add> <add> it("should call the callback", function () { <add> promise.always(success(1)) <add> .then(success(2), error(1)) <add> syncReject(deferred, 'foo'); <add> expect(logStr()).toBe('success1(); error1(foo)'); <add> }); <add> <add> it('should reject with the original reason', function() { <add> promise.always(success(1), "hello") <add> .then(success(2), error(2)) <add> syncReject(deferred, 'original'); <add> expect(logStr()).toBe('success1(); error2(original)'); <add> }); <add> <add> describe("when the callback returns a promise", function() { <add> <add> describe("that is fulfilled", function() { <add> <add> it("should reject with the original reason after that promise resolves", function () { <add> var returnedDef = defer() <add> returnedDef.resolve('bar'); <add> promise.always(success(1, returnedDef.promise)) <add> .then(success(2), error(2)) <add> syncReject(deferred, 'original'); <add> expect(logStr()).toBe('success1(); error2(original)'); <add> }); <add> <add> }); <add> <add> describe("that is rejected", function () { <add> <add> it("should reject with the new reason", function() { <add> var returnedDef = defer() <add> returnedDef.reject('bar'); <add> promise.always(success(1, returnedDef.promise)) <add> .then(success(2), error(1)) <add> syncResolve(deferred, 'foo'); <add> expect(logStr()).toBe('success1(); error1(bar)'); <add> }); <add> <add> }); <add> <add> }); <add> <add> describe("when the callback throws an exception", function() { <add> <add> it("should reject with this new exception", function() { <add> promise.always(error(1, "exception", true)) <add> .then(success(1), error(2)) <add> syncResolve(deferred, 'foo'); <add> expect(logStr()).toBe('error1(); error2(exception)'); <add> }); <add> <add> }); <add> <add> }); <add> }); <ide> }); <ide> }); <ide>
2
Text
Text
fix coffee typo
e190c27d10034d4c4ee286839351876bb4ec8dc2
<ide><path>README.md <ide> Just like shipping containers, Standard Containers define a set of STANDARD OPER <ide> <ide> ### 2. CONTENT-AGNOSTIC <ide> <del>Just like shipping containers, Standard Containers are CONTENT-AGNOSTIC: all standard operations have the same effect regardless of the contents. A shipping container will be stacked in exactly the same way whether it contains Vietnamese powder coffe or spare Maserati parts. Similarly, Standard Containers are started or uploaded in the same way whether they contain a postgres database, a php application with its dependencies and application server, or Java build artifacts. <add>Just like shipping containers, Standard Containers are CONTENT-AGNOSTIC: all standard operations have the same effect regardless of the contents. A shipping container will be stacked in exactly the same way whether it contains Vietnamese powder coffee or spare Maserati parts. Similarly, Standard Containers are started or uploaded in the same way whether they contain a postgres database, a php application with its dependencies and application server, or Java build artifacts. <ide> <ide> <ide> ### 3. INFRASTRUCTURE-AGNOSTIC
1
Python
Python
add comments to explain why we use model.call
71c98d5de8d68313c2cf9a0d851943926f4553fb
<ide><path>official/vision/beta/serving/detection.py <ide> def serve(self, images: tf.Tensor): <ide> <ide> input_image_shape = image_info[:, 1, :] <ide> <add> # To overcome a keras.Model limitation when saving a model with layers that <add> # have multiple inputs, we use `model.call` here to trigger the forward <add> # path. Note that this disables some keras magic that happens in `__call__`. <ide> detections = self.model.call( <ide> images=images, <ide> image_shape=input_image_shape,
1
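A minimal sketch of the serving pattern the comment above describes: routing the forward pass through `model.call(...)` so an exported signature can pass several tensors while sidestepping the Keras saving limitation mentioned in the patch. The names below (`ToyDetector`, `DetectionModule`, `serve`, the 64x64 input shape) are illustrative stand-ins, not part of the official/vision codebase.

```python
import tensorflow as tf


class ToyDetector(tf.keras.Model):
  """Stand-in model whose call() takes more than one tensor."""

  def call(self, images, image_shape):
    # A trivial "detection" head: one score per image plus the echoed shapes.
    scores = tf.reduce_mean(images, axis=[1, 2, 3])
    return {"scores": scores, "image_shape": image_shape}


class DetectionModule(tf.Module):
  """Wraps the model with an explicit serving signature."""

  def __init__(self, model):
    super().__init__()
    self.model = model

  @tf.function(input_signature=[
      tf.TensorSpec([None, 64, 64, 3], tf.float32),
      tf.TensorSpec([None, 2], tf.float32),
  ])
  def serve(self, images, image_shape):
    # Call model.call directly, skipping the Keras __call__ conveniences,
    # in the spirit of the change above.
    return self.model.call(images=images, image_shape=image_shape)


module = DetectionModule(ToyDetector())
outputs = module.serve(tf.zeros([1, 64, 64, 3]), tf.constant([[64.0, 64.0]]))
```

If such a module were exported with `tf.saved_model.save(module, export_dir, signatures={"serving_default": module.serve})`, both tensors would stay explicit signature arguments; going through `model.call` simply avoids the extra input handling Keras performs in `__call__`.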
PHP
PHP
apply fixes from styleci
fdc1e61d901ec6f660b505630e34f292495497bb
<ide><path>src/Illuminate/Database/Eloquent/Relations/Relation.php <ide> public function touch() <ide> <ide> if (! $model::isIgnoringTouch()) { <ide> $this->rawUpdate([ <del> $model->getUpdatedAtColumn() => $model->freshTimestampString() <add> $model->getUpdatedAtColumn() => $model->freshTimestampString(), <ide> ]); <ide> } <ide> }
1
Javascript
Javascript
enable export mangling for system.js externals
c94aea82ccb4490a10c6e638c7b19a6f15327f9c
<ide><path>lib/Dependency.js <ide> /** <ide> * @typedef {Object} ExportsSpec <ide> * @property {(string | ExportSpec)[] | true | null} exports exported names, true for unknown exports or null for no exports <add> * @property {boolean=} canMangle can the export be renamed (defaults to true) <ide> * @property {Module[]=} dependencies module on which the result depends on <ide> */ <ide> <ide><path>lib/ExternalModule.js <ide> const { OriginalSource, RawSource } = require("webpack-sources"); <ide> const Module = require("./Module"); <ide> const RuntimeGlobals = require("./RuntimeGlobals"); <ide> const Template = require("./Template"); <add>const StaticExportsDependency = require("./dependencies/StaticExportsDependency"); <ide> const makeSerializable = require("./util/makeSerializable"); <ide> const propertyAccess = require("./util/propertyAccess"); <ide> <ide> class ExternalModule extends Module { <ide> * @returns {void} <ide> */ <ide> build(options, compilation, resolver, fs, callback) { <del> this.buildMeta = {}; <add> this.buildMeta = { <add> exportsType: undefined <add> }; <ide> this.buildInfo = { <ide> strict: true <ide> }; <add> this.clearDependenciesAndBlocks(); <add> if (this.externalType === "system") { <add> this.buildMeta.exportsType = "namespace"; <add> this.addDependency(new StaticExportsDependency(true, true)); <add> } <ide> callback(); <ide> } <ide> <ide> class ExternalModule extends Module { <ide> hash.update( <ide> JSON.stringify(Boolean(this.isOptional(chunkGraph.moduleGraph))) <ide> ); <add> if (this.externalType === "system") { <add> const exportsInfo = chunkGraph.moduleGraph.getExportsInfo(this); <add> for (const exportInfo of exportsInfo.orderedExports) { <add> hash.update(exportInfo.name); <add> hash.update(exportInfo.getUsedName() || ""); <add> } <add> } <ide> super.updateHash(hash, chunkGraph); <ide> } <ide> <ide><path>lib/FlagDependencyExportsPlugin.js <ide> class FlagDependencyExportsPlugin { <ide> const exportDesc = dep.getExports(moduleGraph); <ide> if (!exportDesc) return; <ide> const exports = exportDesc.exports; <add> const canMangle = exportDesc.canMangle; <ide> const exportDeps = exportDesc.dependencies; <ide> if (exports === true) { <ide> // unknown exports <del> if (exportsInfo.setUnknownExportsProvided()) { <add> if (exportsInfo.setUnknownExportsProvided(canMangle)) { <ide> changed = true; <ide> } <ide> } else if (Array.isArray(exports)) { <ide> class FlagDependencyExportsPlugin { <ide> exportInfo.provided = true; <ide> changed = true; <ide> } <add> if ( <add> canMangle === false && <add> exportInfo.canMangleProvide !== false <add> ) { <add> exportInfo.canMangleProvide = false; <add> changed = true; <add> } <ide> } else { <ide> const exportInfo = exportsInfo.getExportInfo( <ide> exportNameOrSpec.name <ide> class FlagDependencyExportsPlugin { <ide> exportInfo.provided = true; <ide> changed = true; <ide> } <del> if (exportNameOrSpec.canMangle === false) { <del> if (exportInfo.canMangleProvide !== false) { <del> exportInfo.canMangleProvide = false; <del> changed = true; <del> } <add> if ( <add> exportInfo.canMangleProvide !== false && <add> (exportNameOrSpec.canMangle === false || <add> (canMangle === false && <add> exportNameOrSpec.canMangle === undefined)) <add> ) { <add> exportInfo.canMangleProvide = false; <add> changed = true; <ide> } <ide> if (exportNameOrSpec.exports) { <ide> const nestedExportsInfo = exportInfo.createNestedExportsInfo(); <ide><path>lib/ModuleGraph.js <ide> class ExportsInfo { <ide> this._redirectTo = undefined; <ide> } <ide> <add> /** 
<add> * @returns {Iterable<ExportInfo>} all owned exports in any order <add> */ <ide> get ownedExports() { <ide> return this._exports.values(); <ide> } <ide> <add> /** <add> * @returns {Iterable<ExportInfo>} all exports in any order <add> */ <ide> get exports() { <ide> if (this._redirectTo) { <ide> const map = new Map(this._redirectTo._exports); <ide> class ExportsInfo { <ide> return this._exports.values(); <ide> } <ide> <add> /** <add> * @returns {Iterable<ExportInfo>} all exports in order <add> */ <ide> get orderedExports() { <ide> if (!this._exportsAreOrdered) { <ide> this._sortExports(); <ide> class ExportsInfo { <ide> return this._exports.values(); <ide> } <ide> <add> /** <add> * @returns {ExportInfo} the export info of unlisted exports <add> */ <ide> get otherExportsInfo() { <ide> if (this._redirectTo) return this._redirectTo.otherExportsInfo; <ide> return this._otherExportsInfo; <ide> class ExportsInfo { <ide> } <ide> <ide> /** <add> * @param {boolean=} canMangle true, if exports can still be mangled (defaults to false) <ide> * @returns {boolean} true, if this call changed something <ide> */ <del> setUnknownExportsProvided() { <add> setUnknownExportsProvided(canMangle) { <ide> let changed = false; <ide> for (const exportInfo of this._exports.values()) { <ide> if (exportInfo.provided !== true && exportInfo.provided !== null) { <ide> exportInfo.provided = null; <ide> changed = true; <ide> } <del> if (exportInfo.canMangleProvide !== false) { <add> if (!canMangle && exportInfo.canMangleProvide !== false) { <ide> exportInfo.canMangleProvide = false; <ide> changed = true; <ide> } <ide> class ExportsInfo { <ide> this._otherExportsInfo.provided = null; <ide> changed = true; <ide> } <del> if (this._otherExportsInfo.canMangleProvide !== false) { <add> if (!canMangle && this._otherExportsInfo.canMangleProvide !== false) { <ide> this._otherExportsInfo.canMangleProvide = false; <ide> changed = true; <ide> } <ide><path>lib/dependencies/HarmonyExportImportedSpecifierDependency.js <ide> class HarmonyExportImportedSpecifierDependency extends HarmonyImportDependency { <ide> case "dynamic-reexport": <ide> return { <ide> exports: true, <add> canMangle: false, <ide> // TODO: consider passing `ignored` from `dynamic-reexport` <ide> dependencies: [moduleGraph.getModule(this)] <ide> }; <ide><path>lib/dependencies/StaticExportsDependency.js <ide> class StaticExportsDependency extends NullDependency { <ide> * @returns {ExportsSpec | undefined} export names <ide> */ <ide> getExports(moduleGraph) { <del> if (!this.canMangle && this.exports !== true) { <del> return { <del> exports: this.exports.map(name => ({ <del> name, <del> canMangle: false <del> })), <del> dependencies: undefined <del> }; <del> } else { <del> return { <del> exports: this.exports, <del> dependencies: undefined <del> }; <del> } <add> return { <add> exports: this.exports, <add> canMangle: this.canMangle, <add> dependencies: undefined <add> }; <ide> } <ide> <ide> /** <ide><path>lib/library/SystemLibraryPlugin.js <ide> <ide> const { ConcatSource } = require("webpack-sources"); <ide> const ExternalModule = require("../ExternalModule"); <add>const { UsageState } = require("../ModuleGraph"); <ide> const Template = require("../Template"); <add>const propertyAccess = require("../util/propertyAccess"); <ide> const AbstractLibraryPlugin = require("./AbstractLibraryPlugin"); <ide> <ide> /** @typedef {import("webpack-sources").Source} Source */ <ide> class SystemLibraryPlugin extends AbstractLibraryPlugin { <ide> * @param {LibraryContext<T>} 
libraryContext context <ide> * @returns {Source} source with library export <ide> */ <del> render(source, { chunkGraph, chunk }, { options, compilation }) { <add> render(source, { chunkGraph, moduleGraph, chunk }, { options, compilation }) { <ide> const modules = chunkGraph <ide> .getChunkModules(chunk) <ide> .filter(m => m instanceof ExternalModule); <ide> class SystemLibraryPlugin extends AbstractLibraryPlugin { <ide> ); <ide> <ide> // Declaring variables for the internal variable names for the webpack externals <del> const externalVarDeclarations = <del> externalWebpackNames.length > 0 <del> ? `var ${externalWebpackNames.join(", ")};` <del> : ""; <add> const externalVarDeclarations = externalWebpackNames <add> .map(name => `var ${name} = {};`) <add> .join("\n"); <add> <add> // Define __esModule flag on all internal variables and helpers <add> const externalVarInitialization = []; <ide> <ide> // The system.register format requires an array of setter functions for externals. <ide> const setters = <ide> class SystemLibraryPlugin extends AbstractLibraryPlugin { <ide> : Template.asString([ <ide> "setters: [", <ide> Template.indent( <del> externalWebpackNames <del> .map(external => <del> Template.asString([ <add> externals <add> .map((module, i) => { <add> const external = externalWebpackNames[i]; <add> const exportsInfo = moduleGraph.getExportsInfo(module); <add> const otherUnused = <add> exportsInfo.otherExportsInfo.used === UsageState.Unused; <add> const instructions = []; <add> const handledNames = []; <add> for (const exportInfo of exportsInfo.orderedExports) { <add> const used = exportInfo.getUsedName(); <add> if (used) { <add> if (otherUnused || used !== exportInfo.name) { <add> instructions.push( <add> `${external}${propertyAccess([ <add> used <add> ])} = module${propertyAccess([exportInfo.name])};` <add> ); <add> handledNames.push(exportInfo.name); <add> } <add> } else { <add> handledNames.push(exportInfo.name); <add> } <add> } <add> if (!otherUnused) { <add> externalVarInitialization.push( <add> `Object.defineProperty(${external}, "__esModule", { value: true });` <add> ); <add> if (handledNames.length > 0) { <add> const name = `${external}handledNames`; <add> externalVarInitialization.push( <add> `var ${name} = ${JSON.stringify(handledNames)};` <add> ); <add> instructions.push( <add> Template.asString([ <add> "Object.keys(module).forEach(function(key) {", <add> Template.indent([ <add> `if(${name}.indexOf(key) >= 0)`, <add> Template.indent(`${external}[key] = module[key];`) <add> ]), <add> "});" <add> ]) <add> ); <add> } else { <add> instructions.push( <add> Template.asString([ <add> "Object.keys(module).forEach(function(key) {", <add> Template.indent([`${external}[key] = module[key];`]), <add> "});" <add> ]) <add> ); <add> } <add> } <add> if (instructions.length === 0) return "undefined"; <add> return Template.asString([ <ide> "function(module) {", <del> Template.indent(`${external} = {__esModule: true};`), <del> Template.indent([ <del> "for (var key in module) {", <del> Template.indent("defineGetter(key, module[key]);"), <del> "}", <del> "function defineGetter(key, value) {", <del> Template.indent([ <del> `Object.defineProperty(${external}, key, {get: function() {return value;}, enumerable: true});` <del> ]), <del> "}" <del> ]), <add> Template.indent(instructions), <ide> "}" <del> ]) <del> ) <add> ]); <add> }) <ide> .join(",\n") <ide> ), <ide> "]," <ide> class SystemLibraryPlugin extends AbstractLibraryPlugin { <ide> `System.register(${name}${systemDependencies}, 
function(${dynamicExport}) {`, <ide> Template.indent([ <ide> externalVarDeclarations, <add> Template.asString(externalVarInitialization), <ide> "return {", <ide> Template.indent([ <ide> setters, <ide> "execute: function() {", <ide> Template.indent(`${dynamicExport}(`) <ide> ]) <del> ]) <del> ]) + "\n", <add> ]), <add> "" <add> ]), <ide> source, <del> "\n" + <del> Template.asString([ <del> Template.indent([ <del> Template.indent([Template.indent([");"]), "}"]), <del> "};" <del> ]), <del> "})" <del> ]) <add> Template.asString([ <add> "", <add> Template.indent([ <add> Template.indent([Template.indent([");"]), "}"]), <add> "};" <add> ]), <add> "})" <add> ]) <ide> ); <ide> } <ide> <ide><path>lib/wasm/WebAssemblyParser.js <ide> class WebAssemblyParser extends Parser { <ide> } <ide> }); <ide> <del> state.module.addDependency(new StaticExportsDependency(exports, true)); <add> state.module.addDependency(new StaticExportsDependency(exports, false)); <ide> <ide> return state; <ide> } <ide><path>test/configCases/externals/externals-system/index.js <del>import external3Default, { namedThing } from 'external3'; <add>import external3Default, { namedThing } from "external3"; <add>import "external4"; <ide> <ide> /* This test verifies that webpack externals are properly indicated as dependencies to System. <ide> * Also that when System provides the external variables to webpack that the variables get plumbed <ide><path>test/configCases/externals/externals-system/test.config.js <ide> module.exports = { <ide> beforeExecute: () => { <ide> System.init({ <ide> external1: { <del> default: "the external1 value", <add> default: "the external1 value" <ide> }, <ide> external2: { <del> default: "the external2 value", <add> default: "the external2 value" <ide> }, <ide> external3: { <ide> default: "the external3 default export", <ide> namedThing: "the external3 named export" <add> }, <add> external4: { <add> default: "the external4 default export", <add> namedThing: "the external4 named export" <ide> } <ide> }); <ide> }, <ide><path>test/configCases/externals/externals-system/webpack.config.js <ide> module.exports = { <ide> externals: { <ide> external1: "external1", <ide> external2: "external2", <del> external3: "external3" <add> external3: "external3", <add> external4: "external4" <ide> } <ide> }; <ide><path>test/helpers/fakeSystem.js <ide> const System = { <ide> m.executed = true; <ide> for (let i = 0; i < m.deps.length; i++) { <ide> const dep = m.deps[i]; <add> const setters = m.mod.setters[i]; <ide> System.ensureExecuted(dep); <del> m.mod.setters[i](System.registry[dep].exports); <add> if (setters) setters(System.registry[dep].exports); <ide> } <ide> m.mod.execute(); <ide> }
12
PHP
PHP
add limit bindings for having between + tests
c6b8168e6cbbe339fdc3af5ccdded545779965df
<ide><path>src/Illuminate/Database/Query/Builder.php <ide> public function havingBetween($column, array $values, $boolean = 'and', $not = f <ide> <ide> $this->havings[] = compact('type', 'column', 'values', 'boolean', 'not'); <ide> <del> $this->addBinding($this->cleanBindings($values), 'having'); <add> $this->addBinding(array_slice($this->cleanBindings(Arr::flatten($values)), 0, 2), 'having'); <ide> <ide> return $this; <ide> } <ide><path>tests/Database/DatabaseQueryBuilderTest.php <ide> public function testWhereBetweens() <ide> $this->assertSame('select * from "users" where "id" between ? and ?', $builder->toSql()); <ide> $this->assertEquals([0 => 1, 1 => 2], $builder->getBindings()); <ide> <add> $builder = $this->getBuilder(); <add> $builder->select('*')->from('users')->whereBetween('id', [[1, 2, 3]]); <add> $this->assertSame('select * from "users" where "id" between ? and ?', $builder->toSql()); <add> $this->assertEquals([0 => 1, 1 => 2], $builder->getBindings()); <add> <add> $builder = $this->getBuilder(); <add> $builder->select('*')->from('users')->whereBetween('id', [[1], [2, 3]]); <add> $this->assertSame('select * from "users" where "id" between ? and ?', $builder->toSql()); <add> $this->assertEquals([0 => 1, 1 => 2], $builder->getBindings()); <add> <ide> $builder = $this->getBuilder(); <ide> $builder->select('*')->from('users')->whereNotBetween('id', [1, 2]); <ide> $this->assertSame('select * from "users" where "id" not between ? and ?', $builder->toSql()); <ide> public function testHavings() <ide> $builder = $this->getBuilder(); <ide> $builder->select(['category', new Raw('count(*) as "total"')])->from('item')->where('department', '=', 'popular')->groupBy('category')->having('total', '>', 3); <ide> $this->assertSame('select "category", count(*) as "total" from "item" where "department" = ? group by "category" having "total" > ?', $builder->toSql()); <add> } <ide> <add> public function testHavingBetweens() <add> { <ide> $builder = $this->getBuilder(); <del> $builder->select('*')->from('users')->havingBetween('last_login_date', ['2018-11-16', '2018-12-16']); <del> $this->assertSame('select * from "users" having "last_login_date" between ? and ?', $builder->toSql()); <add> $builder->select('*')->from('users')->havingBetween('id', [1, 2, 3]); <add> $this->assertSame('select * from "users" having "id" between ? and ?', $builder->toSql()); <add> $this->assertEquals([0 => 1, 1 => 2], $builder->getBindings()); <add> <add> $builder = $this->getBuilder(); <add> $builder->select('*')->from('users')->havingBetween('id', [[1, 2], [3, 4]]); <add> $this->assertSame('select * from "users" having "id" between ? and ?', $builder->toSql()); <add> $this->assertEquals([0 => 1, 1 => 2], $builder->getBindings()); <ide> } <ide> <ide> public function testHavingShortcut()
2
Java
Java
improve compose() generics
a3ccbf906bed6565829fe3bdd8659443549a71e3
<ide><path>src/main/java/io/reactivex/Flowable.java <ide> public final <U> Single<U> collectInto(final U initialItem, BiConsumer<? super U <ide> * @return the source Publisher, transformed by the transformer function <ide> * @see <a href="https://github.com/ReactiveX/RxJava/wiki/Implementing-Your-Own-Operators">RxJava wiki: Implementing Your Own Operators</a> <ide> */ <add> @SuppressWarnings("unchecked") <ide> @CheckReturnValue <ide> @BackpressureSupport(BackpressureKind.PASS_THROUGH) <ide> @SchedulerSupport(SchedulerSupport.NONE) <del> public final <R> Flowable<R> compose(FlowableTransformer<T, R> composer) { <del> return fromPublisher(composer.apply(this)); <add> public final <R> Flowable<R> compose(FlowableTransformer<? super T, ? extends R> composer) { <add> return fromPublisher(((FlowableTransformer<T, R>) composer).apply(this)); <ide> } <ide> <ide> /** <ide><path>src/main/java/io/reactivex/Maybe.java <ide> public final <U> Maybe<U> cast(final Class<? extends U> clazz) { <ide> * @return a Maybe, transformed by the transformer function <ide> * @see <a href="https://github.com/ReactiveX/RxJava/wiki/Implementing-Your-Own-Operators">RxJava wiki: Implementing Your Own Operators</a> <ide> */ <add> @SuppressWarnings("unchecked") <ide> @CheckReturnValue <ide> @SchedulerSupport(SchedulerSupport.NONE) <del> public final <R> Maybe<R> compose(MaybeTransformer<T, R> transformer) { <del> return wrap(transformer.apply(this)); <add> public final <R> Maybe<R> compose(MaybeTransformer<? super T, ? extends R> transformer) { <add> return wrap(((MaybeTransformer<T, R>) transformer).apply(this)); <ide> } <ide> <ide> /** <ide><path>src/main/java/io/reactivex/Observable.java <ide> public final <U> Single<U> collectInto(final U initialValue, BiConsumer<? super <ide> * @return the source ObservableSource, transformed by the transformer function <ide> * @see <a href="https://github.com/ReactiveX/RxJava/wiki/Implementing-Your-Own-Operators">RxJava wiki: Implementing Your Own Operators</a> <ide> */ <add> @SuppressWarnings("unchecked") <ide> @CheckReturnValue <ide> @SchedulerSupport(SchedulerSupport.NONE) <del> public final <R> Observable<R> compose(ObservableTransformer<T, R> composer) { <del> return wrap(composer.apply(this)); <add> public final <R> Observable<R> compose(ObservableTransformer<? super T, ? extends R> composer) { <add> return wrap(((ObservableTransformer<T, R>) composer).apply(this)); <ide> } <ide> <ide> /** <ide><path>src/main/java/io/reactivex/Single.java <ide> public final Single<T> hide() { <ide> * @return the source Single, transformed by the transformer function <ide> * @see <a href="https://github.com/ReactiveX/RxJava/wiki/Implementing-Your-Own-Operators">RxJava wiki: Implementing Your Own Operators</a> <ide> */ <add> @SuppressWarnings("unchecked") <ide> @CheckReturnValue <ide> @SchedulerSupport(SchedulerSupport.NONE) <del> public final <R> Single<R> compose(SingleTransformer<T, R> transformer) { <del> return wrap(transformer.apply(this)); <add> public final <R> Single<R> compose(SingleTransformer<? super T, ? 
extends R> transformer) { <add> return wrap(((SingleTransformer<T, R>) transformer).apply(this)); <ide> } <ide> <ide> /** <ide><path>src/test/java/io/reactivex/TransformerTest.java <ide> <ide> package io.reactivex; <ide> <del>import static org.junit.Assert.*; <del> <add>import io.reactivex.exceptions.TestException; <ide> import org.junit.Test; <ide> import org.reactivestreams.Publisher; <ide> <del>import io.reactivex.exceptions.TestException; <add>import static org.junit.Assert.*; <ide> <ide> public class TransformerTest { <ide> <ide> public Maybe<Integer> apply(Maybe<Integer> v) { <ide> } <ide> <ide> @Test <del> public void completabeTransformerThrows() { <add> public void completableTransformerThrows() { <ide> try { <ide> Completable.complete().compose(new CompletableTransformer() { <ide> @Override <ide> public Completable apply(Completable v) { <ide> assertEquals("Forced failure", ex.getMessage()); <ide> } <ide> } <add> <add> // Test demos for signature generics in compose() methods. Just needs to compile. <add> <add> @Test <add> public void observableGenericsSignatureTest() { <add> A<String, Integer> a = new A<String, Integer>() { }; <add> <add> Observable.just(a).compose(TransformerTest.<String>testObservableTransformerCreator()); <add> } <add> <add> @Test <add> public void singleGenericsSignatureTest() { <add> A<String, Integer> a = new A<String, Integer>() { }; <add> <add> Single.just(a).compose(TransformerTest.<String>testSingleTransformerCreator()); <add> } <add> <add> @Test <add> public void maybeGenericsSignatureTest() { <add> A<String, Integer> a = new A<String, Integer>() { }; <add> <add> Maybe.just(a).compose(TransformerTest.<String>testMaybeTransformerCreator()); <add> } <add> <add> @Test <add> public void flowableGenericsSignatureTest() { <add> A<String, Integer> a = new A<String, Integer>() { }; <add> <add> Flowable.just(a).compose(TransformerTest.<String>testFlowableTransformerCreator()); <add> } <add> <add> interface A<T, R> {} <add> interface B<T> {} <add> <add> private static <T> ObservableTransformer<A<T, ?>, B<T>> testObservableTransformerCreator() { <add> return new ObservableTransformer<A<T, ?>, B<T>>() { <add> @Override <add> public ObservableSource<B<T>> apply(Observable<A<T, ?>> a) { <add> return Observable.empty(); <add> } <add> }; <add> } <add> <add> private static <T> SingleTransformer<A<T, ?>, B<T>> testSingleTransformerCreator() { <add> return new SingleTransformer<A<T, ?>, B<T>>() { <add> @Override <add> public SingleSource<B<T>> apply(Single<A<T, ?>> a) { <add> return Single.never(); <add> } <add> }; <add> } <add> <add> private static <T> MaybeTransformer<A<T, ?>, B<T>> testMaybeTransformerCreator() { <add> return new MaybeTransformer<A<T, ?>, B<T>>() { <add> @Override <add> public MaybeSource<B<T>> apply(Maybe<A<T, ?>> a) { <add> return Maybe.empty(); <add> } <add> }; <add> } <add> <add> private static <T> FlowableTransformer<A<T, ?>, B<T>> testFlowableTransformerCreator() { <add> return new FlowableTransformer<A<T, ?>, B<T>>() { <add> @Override <add> public Publisher<B<T>> apply(Flowable<A<T, ?>> a) { <add> return Flowable.empty(); <add> } <add> }; <add> } <ide> }
5
Ruby
Ruby
introduce block form of mkdir
554d5a2670fbc75279dbc0a13b3c9dc94fbef7e6
<ide><path>Library/Homebrew/formula.rb <ide> def var; HOMEBREW_PREFIX+'var' end <ide> def plist_name; 'homebrew.mxcl.'+name end <ide> def plist_path; prefix+(plist_name+'.plist') end <ide> <add> # A version of mkdir that also changes to that folder in a block <add> def mkdir name, &block <add> FileUtils.mkdir name <add> if block_given? <add> FileUtils.chdir name do <add> yield <add> end <add> end <add> end <add> <ide> # Use the @spec_to_use to detect the download strategy. <ide> # Can be overriden to force a custom download strategy <ide> def download_strategy
1
Text
Text
add example links
bbaf908bb236e421a52b984048fc64cd725eaa2c
<ide><path>docs/upgrading/upgrading-your-ui-theme.md <ide> atom-text-editor::shadow .highlight.my-linter { <ide> } <ide> ``` <ide> <add>Check out the [find-and-replace](https://github.com/atom/find-and-replace/blob/master/stylesheets/find-and-replace.less#L10) package for another example of using `::shadow` to pierce the shadow DOM. <add> <ide> #### /deep/ <ide> <ide> The `/deep/` combinator overrides *all* shadow boundaries, making it useful for rules you want to apply globally such as scrollbar styling. Here's a snippet containing scrollbar styling for the Atom Dark UI theme before shadow DOM support: <ide> my-ui-theme/ <ide> index.atom-text-editor.less # loaded in the text editor shadow DOM <ide> ``` <ide> <add>Check out this [style sheet](https://github.com/atom/decoration-example/blob/master/stylesheets/decoration-example.atom-text-editor.less) from the decoration-example package for an example of context-targeting. <add> <ide> Inside a context-targeted style sheet, there's no need to use the `::shadow` or `/deep/` expressions. If you want to refer to the element containing the shadow root, you can use the `::host` pseudo-element. <ide> <del>During the transition phase, style sheets targeting the `atom-text-editor` context will *also* be loaded globally, but that will change once the option to disable the shadow DOM is removed. <add>During the transition phase, style sheets targeting the `atom-text-editor` context will *also* be loaded globally. Make sure you update your selectors in a way that maintains compatibility with the shadow DOM being disabled. That means if you use a `::host` pseudo-element, you should also include the same style rule that matches against `atom-text-editor`. <ide> <ide> [shadow-dom-101]: http://www.html5rocks.com/en/tutorials/webcomponents/shadowdom <ide> [shadow-dom-201]: http://www.html5rocks.com/en/tutorials/webcomponents/shadowdom-201/
1
Python
Python
fix full_pod_spec for k8spodoperator
221f809c1b4e4b78d5a437d012aa7daffd8410a4
<ide><path>airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py <ide> def create_pod_request_obj(self) -> k8s.V1Pod: <ide> if self.pod_template_file: <ide> self.log.debug("Pod template file found, will parse for base pod") <ide> pod_template = pod_generator.PodGenerator.deserialize_model_file(self.pod_template_file) <add> if self.full_pod_spec: <add> pod_template = PodGenerator.reconcile_pods(pod_template, self.full_pod_spec) <add> elif self.full_pod_spec: <add> pod_template = self.full_pod_spec <ide> else: <ide> pod_template = k8s.V1Pod(metadata=k8s.V1ObjectMeta(name="name")) <ide> <ide><path>kubernetes_tests/test_kubernetes_pod_operator.py <ide> def test_pod_template_file_with_overrides_system(self): <ide> self.assertEqual(k.pod.spec.containers[0].env, [k8s.V1EnvVar(name="env_name", value="value")]) <ide> self.assertDictEqual(result, {"hello": "world"}) <ide> <add> def test_pod_template_file_with_full_pod_spec(self): <add> fixture = sys.path[0] + '/tests/kubernetes/basic_pod.yaml' <add> pod_spec = k8s.V1Pod( <add> metadata=k8s.V1ObjectMeta( <add> labels={"foo": "bar", "fizz": "buzz"}, <add> ), <add> spec=k8s.V1PodSpec( <add> containers=[ <add> k8s.V1Container( <add> name="base", <add> env=[k8s.V1EnvVar(name="env_name", value="value")], <add> ) <add> ] <add> ), <add> ) <add> k = KubernetesPodOperator( <add> task_id="task" + self.get_current_task_name(), <add> in_cluster=False, <add> pod_template_file=fixture, <add> full_pod_spec=pod_spec, <add> do_xcom_push=True, <add> ) <add> <add> context = create_context(k) <add> result = k.execute(context) <add> self.assertIsNotNone(result) <add> self.assertEqual(k.pod.metadata.labels, {'fizz': 'buzz', 'foo': 'bar'}) <add> self.assertEqual(k.pod.spec.containers[0].env, [k8s.V1EnvVar(name="env_name", value="value")]) <add> self.assertDictEqual(result, {"hello": "world"}) <add> <add> def test_full_pod_spec(self): <add> pod_spec = k8s.V1Pod( <add> metadata=k8s.V1ObjectMeta( <add> labels={"foo": "bar", "fizz": "buzz"}, namespace="default", name="test-pod" <add> ), <add> spec=k8s.V1PodSpec( <add> containers=[ <add> k8s.V1Container( <add> name="base", <add> image="perl", <add> command=["/bin/bash"], <add> args=["-c", 'echo {\\"hello\\" : \\"world\\"} | cat > /airflow/xcom/return.json'], <add> env=[k8s.V1EnvVar(name="env_name", value="value")], <add> ) <add> ], <add> restart_policy="Never", <add> ), <add> ) <add> k = KubernetesPodOperator( <add> task_id="task" + self.get_current_task_name(), <add> in_cluster=False, <add> full_pod_spec=pod_spec, <add> do_xcom_push=True, <add> ) <add> <add> context = create_context(k) <add> result = k.execute(context) <add> self.assertIsNotNone(result) <add> self.assertEqual(k.pod.metadata.labels, {'fizz': 'buzz', 'foo': 'bar'}) <add> self.assertEqual(k.pod.spec.containers[0].env, [k8s.V1EnvVar(name="env_name", value="value")]) <add> self.assertDictEqual(result, {"hello": "world"}) <add> <ide> def test_init_container(self): <ide> # GIVEN <ide> volume_mounts = [
2
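A sketch mirroring the new `test_pod_template_file_with_full_pod_spec` test above: `pod_template_file` supplies the base pod and `full_pod_spec` is reconciled on top of it. The template path, labels, and `task_id` below are placeholders rather than values from the Airflow repository.

```python
from kubernetes.client import models as k8s

from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator

# Extra settings to merge on top of whatever the template file declares.
pod_override = k8s.V1Pod(
    metadata=k8s.V1ObjectMeta(labels={"team": "data-eng"}),
    spec=k8s.V1PodSpec(
        containers=[
            k8s.V1Container(
                name="base",
                env=[k8s.V1EnvVar(name="ENV_NAME", value="value")],
            )
        ]
    ),
)

run_in_pod = KubernetesPodOperator(
    task_id="template_plus_full_spec",
    pod_template_file="/path/to/basic_pod.yaml",  # hypothetical template location
    full_pod_spec=pod_override,
    do_xcom_push=True,
)
```

As in the test, the operator builds its pod request from the template first and then merges the override, so the extra labels and the environment variable both end up on the final pod.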
Text
Text
add security non-events
6f06e98f57fe4564003d5b2adbe4ef2fcfbe8df8
<ide><path>docs/security/non-events.md <add><!--[metadata]> <add>+++ <add>title = "Docker Security Non-events" <add>description = "Review of security vulnerabilities Docker mitigated" <add>keywords = ["Docker, Docker documentation, security, security non-events"] <add>[menu.main] <add>parent = "smn_secure_docker" <add>weight =-99 <add>+++ <add><![end-metadata]--> <add> <add># Docker Security Non-events <add> <add>This page lists security vulnerabilities which Docker mitigated, such that <add>processes run in Docker containers were never vulnerable to the bug—even before <add>it was fixed. This assumes containers are run without adding extra capabilities <add>or not run as `--privileged`. <add> <add>The list below is not even remotely complete. Rather, it is a sample of the few <add>bugs we've actually noticed to have attracted security review and publicly <add>disclosed vulnerabilities. In all likelihood, the bugs that haven't been <add>reported far outnumber those that have. Luckily, since Docker's approach to <add>secure by default through apparmor, seccomp, and dropping capabilities, it <add>likely mitigates unknown bugs just as well as it does known ones. <add> <add>Bugs mitigated: <add> <add>* [CVE-2013-1956](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2013-1956), <add>[1957](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2013-1957), <add>[1958](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2013-1958), <add>[1959](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2013-1959), <add>[1979](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2013-1979), <add>[CVE-2014-4014](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2014-4014), <add>[5206](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2014-5206), <add>[5207](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2014-5207), <add>[7970](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2014-7970), <add>[7975](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2014-7975), <add>[CVE-2015-2925](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2015-2925), <add>[8543](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2015-8543), <add>[CVE-2016-3134](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-3134), <add>[3135](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-3135), etc.: <add>The introduction of unprivileged user namespaces lead to a huge increase in the <add>attack surface available to unprivileged users by giving such users legitimate <add>access to previously root-only system calls like `mount()`. All of these CVEs <add>are examples of security vulnerabilities due to introduction of user namespaces. <add>Docker can use user namespaces to set up containers, but then disallows the <add>process inside the container from creating its own nested namespaces through the <add>default seccomp profile, rendering these vulnerabilities unexploitable. <add>* [CVE-2014-0181](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2014-0181), <add>[CVE-2015-3339](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2015-3339): <add>These are bugs that require the presence of a setuid binary. Docker disables <add>setuid binaries inside containers via the `NO_NEW_PRIVS` process flag and <add>other mechanisms. <add>* [CVE-2014-4699](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2014-4699): <add>A bug in `ptrace()` could allow privilege escalation. Docker disables `ptrace()` <add>inside the container using apparmor, seccomp and by dropping `CAP_PTRACE`. <add>Three times the layers of protection there! 
<add>* [CVE-2014-9529](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2014-9529): <add>A series of crafted `keyctl()` calls could cause kernel DoS / memory corruption. <add>Docker disables `keyctl()` inside containers using seccomp. <add>* [CVE-2015-3214](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2015-3214), <add>[4036](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2015-4036): These are <add>bugs in common virtualization drivers which could allow a guest OS user to <add>execute code on the host OS. Exploiting them requires access to virtualization <add>devices in the guest. Docker hides direct access to these devices when run <add>without `--privileged`. Interestingly, these seem to be cases where containers <add>are "more secure" than a VM, going against common wisdom that VMs are <add>"more secure" than containers. <add>* [CVE-2016-0728](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-0728): <add>Use-after-free caused by crafted `keyctl()` calls could lead to privilege <add>escalation. Docker disables `keyctl()` inside containers using the default <add>seccomp profile. <add>* [CVE-2016-2383](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2016-2383): <add>A bug in eBPF -- the special in-kernel DSL used to express things like seccomp <add>filters -- allowed arbitrary reads of kernel memory. The `bpf()` system call <add>is blocked inside Docker containers using (ironically) seccomp. <add> <add>Bugs *not* mitigated: <add> <add>* [CVE-2015-3290](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2015-3290), <add>[5157](https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2015-5157): Bugs in <add>the kernel's non-maskable interrupt handling allowed privilege escalation. <add>Can be exploited in Docker containers because the `modify_ldt()` system call is <add>not currently blocked using seccomp.
1
Javascript
Javascript
fix test-process-uptime.js test
3c0dd8196aeb2352a16ab41c296165747e8d52f3
<ide><path>test/pummel/test-process-uptime.js <add>// Copyright Joyent, Inc. and other Node contributors. <add>// <add>// Permission is hereby granted, free of charge, to any person obtaining a <add>// copy of this software and associated documentation files (the <add>// "Software"), to deal in the Software without restriction, including <add>// without limitation the rights to use, copy, modify, merge, publish, <add>// distribute, sublicense, and/or sell copies of the Software, and to permit <add>// persons to whom the Software is furnished to do so, subject to the <add>// following conditions: <add>// <add>// The above copyright notice and this permission notice shall be included <add>// in all copies or substantial portions of the Software. <add>// <add>// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS <add>// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF <add>// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN <add>// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, <add>// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR <add>// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE <add>// USE OR OTHER DEALINGS IN THE SOFTWARE. <add> <add>var assert = require('assert'); <add> <add>console.error(process.uptime()); <add>assert.ok(process.uptime() <= 0.9); <add> <add>setTimeout(function() { <add> var uptime = process.uptime(); <add> // some wiggle room to account for timer <add> // granularity, processor speed, and scheduling <add> assert.ok(uptime >= 2); <add> assert.ok(uptime <= 3); <add>}, 2000); <ide><path>test/simple/test-process-uptime.js <del>var assert = require('assert'); <del> <del>assert.equal(process.uptime(), 0); <del> <del>setTimeout(function() { <del> var uptime = process.uptime(); <del> // some wiggle room to account for timer <del> // granularity, processor speed, and scheduling <del> assert.ok(uptime >= 2); <del> assert.ok(uptime <= 3); <del>}, 2000);
2
Ruby
Ruby
remove an extra comment [ci skip]
718d3b0bc53bb5da4e5fc32d1a27f2119e6c747c
<ide><path>actionmailer/lib/action_mailer/base.rb <ide> def register_observer(observer) <ide> # Register an Interceptor which will be called before mail is sent. <ide> # Either a class, string or symbol can be passed in as the Interceptor. <ide> # If a string or symbol is passed in it will be camelized and constantized. <del> # it will be <tt>constantize</tt>d. <ide> def register_interceptor(interceptor) <ide> delivery_interceptor = case interceptor <ide> when String, Symbol
1
Python
Python
support custom ops for video ssl
465b85da09bc03135dea8957f335791d04ad4732
<ide><path>official/vision/beta/projects/video_ssl/ops/video_ssl_preprocess_ops.py <add># Lint as: python3 <add># Copyright 2021 The TensorFlow Authors. All Rights Reserved. <add># <add># Licensed under the Apache License, Version 2.0 (the "License"); <add># you may not use this file except in compliance with the License. <add># You may obtain a copy of the License at <add># <add># http://www.apache.org/licenses/LICENSE-2.0 <add># <add># Unless required by applicable law or agreed to in writing, software <add># distributed under the License is distributed on an "AS IS" BASIS, <add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add># See the License for the specific language governing permissions and <add># limitations under the License. <add># ============================================================================== <add>"""Utils for customed ops for video ssl.""" <add> <add>import functools <add>from typing import Optional <add>import tensorflow as tf <add> <add> <add>def random_apply(func, p, x): <add> """Randomly apply function func to x with probability p.""" <add> return tf.cond( <add> tf.less(tf.random.uniform([], minval=0, maxval=1, dtype=tf.float32), <add> tf.cast(p, tf.float32)), <add> lambda: func(x), <add> lambda: x) <add> <add> <add>def random_brightness(image, max_delta): <add> """Distort brightness of image (SimCLRv2 style).""" <add> factor = tf.random.uniform( <add> [], tf.maximum(1.0 - max_delta, 0), 1.0 + max_delta) <add> image = image * factor <add> return image <add> <add> <add>def to_grayscale(image, keep_channels=True): <add> """Turn the input image to gray scale. <add> <add> Args: <add> image: The input image tensor. <add> keep_channels: Whether maintaining the channel number for the image. <add> If true, the transformed image will repeat three times in channel. <add> If false, the transformed image will only have one channel. <add> <add> Returns: <add> The distorted image tensor. <add> """ <add> image = tf.image.rgb_to_grayscale(image) <add> if keep_channels: <add> image = tf.tile(image, [1, 1, 3]) <add> return image <add> <add> <add>def color_jitter(image, strength, random_order=True): <add> """Distorts the color of the image (SimCLRv2 style). <add> <add> Args: <add> image: The input image tensor. <add> strength: The floating number for the strength of the color augmentation. <add> random_order: A bool, specifying whether to randomize the jittering order. <add> <add> Returns: <add> The distorted image tensor. <add> """ <add> brightness = 0.8 * strength <add> contrast = 0.8 * strength <add> saturation = 0.8 * strength <add> hue = 0.2 * strength <add> if random_order: <add> return color_jitter_rand( <add> image, brightness, contrast, saturation, hue) <add> else: <add> return color_jitter_nonrand( <add> image, brightness, contrast, saturation, hue) <add> <add> <add>def color_jitter_nonrand(image, <add> brightness=0, <add> contrast=0, <add> saturation=0, <add> hue=0): <add> """Distorts the color of the image (jittering order is fixed, SimCLRv2 style). <add> <add> Args: <add> image: The input image tensor. <add> brightness: A float, specifying the brightness for color jitter. <add> contrast: A float, specifying the contrast for color jitter. <add> saturation: A float, specifying the saturation for color jitter. <add> hue: A float, specifying the hue for color jitter. <add> <add> Returns: <add> The distorted image tensor. 
<add> """ <add> with tf.name_scope('distort_color'): <add> def apply_transform(i, x, brightness, contrast, saturation, hue): <add> """Apply the i-th transformation.""" <add> if brightness != 0 and i == 0: <add> x = random_brightness(x, max_delta=brightness) <add> elif contrast != 0 and i == 1: <add> x = tf.image.random_contrast( <add> x, lower=1-contrast, upper=1+contrast) <add> elif saturation != 0 and i == 2: <add> x = tf.image.random_saturation( <add> x, lower=1-saturation, upper=1+saturation) <add> elif hue != 0: <add> x = tf.image.random_hue(x, max_delta=hue) <add> return x <add> <add> for i in range(4): <add> image = apply_transform(i, image, brightness, contrast, saturation, hue) <add> image = tf.clip_by_value(image, 0., 1.) <add> return image <add> <add> <add>def color_jitter_rand(image, <add> brightness=0, <add> contrast=0, <add> saturation=0, <add> hue=0): <add> """Distorts the color of the image (jittering order is random, SimCLRv2 style). <add> <add> Args: <add> image: The input image tensor. <add> brightness: A float, specifying the brightness for color jitter. <add> contrast: A float, specifying the contrast for color jitter. <add> saturation: A float, specifying the saturation for color jitter. <add> hue: A float, specifying the hue for color jitter. <add> <add> Returns: <add> The distorted image tensor. <add> """ <add> with tf.name_scope('distort_color'): <add> def apply_transform(i, x): <add> """Apply the i-th transformation.""" <add> def brightness_transform(): <add> if brightness == 0: <add> return x <add> else: <add> return random_brightness(x, max_delta=brightness) <add> def contrast_transform(): <add> if contrast == 0: <add> return x <add> else: <add> return tf.image.random_contrast(x, lower=1-contrast, upper=1+contrast) <add> def saturation_transform(): <add> if saturation == 0: <add> return x <add> else: <add> return tf.image.random_saturation( <add> x, lower=1-saturation, upper=1+saturation) <add> def hue_transform(): <add> if hue == 0: <add> return x <add> else: <add> return tf.image.random_hue(x, max_delta=hue) <add> # pylint:disable=g-long-lambda <add> x = tf.cond( <add> tf.less(i, 2), lambda: tf.cond( <add> tf.less(i, 1), brightness_transform, contrast_transform), <add> lambda: tf.cond(tf.less(i, 3), saturation_transform, hue_transform)) <add> # pylint:disable=g-long-lambda <add> return x <add> <add> perm = tf.random.shuffle(tf.range(4)) <add> for i in range(4): <add> image = apply_transform(perm[i], image) <add> image = tf.clip_by_value(image, 0., 1.) <add> return image <add> <add> <add>def random_color_jitter_3d(frames): <add> """Applies temporally consistent color jittering to one video clip. <add> <add> Args: <add> frames: `Tensor` of shape [num_frames, height, width, channels]. <add> <add> Returns: <add> A Tensor of shape [num_frames, height, width, channels] being color jittered <add> with the same operation. 
<add> """ <add> def random_color_jitter(image, p=1.0): <add> def _transform(image): <add> color_jitter_t = functools.partial( <add> color_jitter, strength=1.0) <add> image = random_apply(color_jitter_t, p=0.8, x=image) <add> return random_apply(to_grayscale, p=0.2, x=image) <add> return random_apply(_transform, p=p, x=image) <add> <add> num_frames, width, height, channels = tf.shape(frames) <add> big_image = tf.reshape(frames, [num_frames*width, height, channels]) <add> big_image = random_color_jitter(big_image) <add> return tf.reshape(big_image, [num_frames, width, height, channels]) <add> <add> <add>def gaussian_blur(image, kernel_size, sigma, padding='SAME'): <add> """Blurs the given image with separable convolution. <add> <add> Args: <add> image: Tensor of shape [height, width, channels] and dtype float to blur. <add> kernel_size: Integer Tensor for the size of the blur kernel. This is should <add> be an odd number. If it is an even number, the actual kernel size will be <add> size + 1. <add> sigma: Sigma value for gaussian operator. <add> padding: Padding to use for the convolution. Typically 'SAME' or 'VALID'. <add> <add> Returns: <add> A Tensor representing the blurred image. <add> """ <add> radius = tf.cast(kernel_size / 2, dtype=tf.int32) <add> kernel_size = radius * 2 + 1 <add> x = tf.cast(tf.range(-radius, radius + 1), dtype=tf.float32) <add> blur_filter = tf.exp( <add> -tf.pow(x, 2.0) / (2.0 * tf.pow(tf.cast(sigma, dtype=tf.float32), 2.0))) <add> blur_filter /= tf.reduce_sum(blur_filter) <add> # One vertical and one horizontal filter. <add> blur_v = tf.reshape(blur_filter, [kernel_size, 1, 1, 1]) <add> blur_h = tf.reshape(blur_filter, [1, kernel_size, 1, 1]) <add> num_channels = tf.shape(image)[-1] <add> blur_h = tf.tile(blur_h, [1, 1, num_channels, 1]) <add> blur_v = tf.tile(blur_v, [1, 1, num_channels, 1]) <add> expand_batch_dim = image.shape.ndims == 3 <add> if expand_batch_dim: <add> # Tensorflow requires batched input to convolutions, which we can fake with <add> # an extra dimension. <add> image = tf.expand_dims(image, axis=0) <add> blurred = tf.nn.depthwise_conv2d( <add> image, blur_h, strides=[1, 1, 1, 1], padding=padding) <add> blurred = tf.nn.depthwise_conv2d( <add> blurred, blur_v, strides=[1, 1, 1, 1], padding=padding) <add> if expand_batch_dim: <add> blurred = tf.squeeze(blurred, axis=0) <add> return blurred <add> <add> <add>def random_blur(image, height, width, p=1.0): <add> """Randomly blur an image. <add> <add> Args: <add> image: `Tensor` representing an image of arbitrary size. <add> height: Height of output image. <add> width: Width of output image. <add> p: probability of applying this transformation. <add> <add> Returns: <add> A preprocessed image `Tensor`. <add> """ <add> del width <add> def _transform(image): <add> sigma = tf.random.uniform([], 0.1, 2.0, dtype=tf.float32) <add> return gaussian_blur( <add> image, kernel_size=height//10, sigma=sigma, padding='SAME') <add> return random_apply(_transform, p=p, x=image) <add> <add> <add>def random_blur_3d(frames, height, width, blur_probability=0.5): <add> """Apply efficient batch data transformations. <add> <add> Args: <add> frames: `Tensor` of shape [timesteps, height, width, 3]. <add> height: the height of image. <add> width: the width of image. <add> blur_probability: the probaility to apply the blur operator. <add> <add> Returns: <add> Preprocessed feature list. 
<add> """ <add> def generate_selector(p, bsz): <add> shape = [bsz, 1, 1, 1] <add> selector = tf.cast( <add> tf.less(tf.random.uniform(shape, 0, 1, dtype=tf.float32), p), <add> tf.float32) <add> return selector <add> <add> frames_new = random_blur(frames, height, width, p=1.) <add> selector = generate_selector(blur_probability, 1) <add> frames = frames_new * selector + frames * (1 - selector) <add> frames = tf.clip_by_value(frames, 0., 1.) <add> <add> return frames <add> <add> <add>def _sample_or_pad_sequence_indices(sequence: tf.Tensor, <add> num_steps: int, <add> stride: int, <add> offset: tf.Tensor) -> tf.Tensor: <add> """Returns indices to take for sampling or padding sequences to fixed size.""" <add> sequence_length = tf.shape(sequence)[0] <add> sel_idx = tf.range(sequence_length) <add> <add> # Repeats sequence until num_steps are available in total. <add> max_length = num_steps * stride + offset <add> num_repeats = tf.math.floordiv( <add> max_length + sequence_length - 1, sequence_length) <add> sel_idx = tf.tile(sel_idx, [num_repeats]) <add> <add> steps = tf.range(offset, offset + num_steps * stride, stride) <add> return tf.gather(sel_idx, steps) <add> <add> <add>def sample_ssl_sequence(sequence: tf.Tensor, <add> num_steps: int, <add> random: bool, <add> stride: int = 1, <add> num_windows: Optional[int] = 2) -> tf.Tensor: <add> """Samples two segments of size num_steps randomly from a given sequence. <add> <add> Currently it only supports images, and specically designed for video self- <add> supervised learning. <add> <add> Args: <add> sequence: Any tensor where the first dimension is timesteps. <add> num_steps: Number of steps (e.g. frames) to take. <add> random: A boolean indicating whether to random sample the single window. If <add> True, the offset is randomized. Only True is supported. <add> stride: Distance to sample between timesteps. <add> num_windows: Number of sequence sampled. <add> <add> Returns: <add> A single Tensor with first dimension num_steps with the sampled segment. 
<add> """ <add> sequence_length = tf.shape(sequence)[0] <add> sequence_length = tf.cast(sequence_length, tf.float32) <add> if random: <add> max_offset = tf.cond( <add> tf.greater(sequence_length, (num_steps - 1) * stride), <add> lambda: sequence_length - (num_steps - 1) * stride, <add> lambda: sequence_length) <add> <add> max_offset = tf.cast(max_offset, dtype=tf.float32) <add> def cdf(k, power=1.0): <add> """Cumulative distribution function for x^power.""" <add> p = -tf.math.pow(k, power + 1) / ( <add> power * tf.math.pow(max_offset, power + 1)) + k * (power + 1) / ( <add> power * max_offset) <add> return p <add> <add> u = tf.random.uniform(()) <add> k_low = tf.constant(0, dtype=tf.float32) <add> k_up = max_offset <add> k = tf.math.floordiv(max_offset, 2.0) <add> <add> c = lambda k_low, k_up, k: tf.greater(tf.math.abs(k_up - k_low), 1.0) <add> # pylint:disable=g-long-lambda <add> b = lambda k_low, k_up, k: tf.cond( <add> tf.greater(cdf(k), u), <add> lambda: [k_low, k, tf.math.floordiv(k + k_low, 2.0)], <add> lambda: [k, k_up, tf.math.floordiv(k_up + k, 2.0)]) <add> <add> _, _, k = tf.while_loop(c, b, [k_low, k_up, k]) <add> delta = tf.cast(k, tf.int32) <add> max_offset = tf.cast(max_offset, tf.int32) <add> sequence_length = tf.cast(sequence_length, tf.int32) <add> <add> choice_1 = tf.cond( <add> tf.equal(max_offset, sequence_length), <add> lambda: tf.random.uniform((), <add> maxval=tf.cast(max_offset, dtype=tf.int32), <add> dtype=tf.int32), <add> lambda: tf.random.uniform((), <add> maxval=tf.cast(max_offset - delta, <add> dtype=tf.int32), <add> dtype=tf.int32)) <add> choice_2 = tf.cond( <add> tf.equal(max_offset, sequence_length), <add> lambda: tf.random.uniform((), <add> maxval=tf.cast(max_offset, dtype=tf.int32), <add> dtype=tf.int32), <add> lambda: choice_1 + delta) <add> # pylint:disable=g-long-lambda <add> shuffle_choice = tf.random.shuffle((choice_1, choice_2)) <add> offset_1 = shuffle_choice[0] <add> offset_2 = shuffle_choice[1] <add> <add> else: <add> raise NotImplementedError <add> <add> indices_1 = _sample_or_pad_sequence_indices( <add> sequence=sequence, <add> num_steps=num_steps, <add> stride=stride, <add> offset=offset_1) <add> <add> indices_2 = _sample_or_pad_sequence_indices( <add> sequence=sequence, <add> num_steps=num_steps, <add> stride=stride, <add> offset=offset_2) <add> <add> indices = tf.concat([indices_1, indices_2], axis=0) <add> indices.set_shape((num_windows * num_steps,)) <add> output = tf.gather(sequence, indices) <add> <add> return output <ide><path>official/vision/beta/projects/video_ssl/ops/video_ssl_preprocess_ops_test.py <add># Copyright 2021 The TensorFlow Authors. All Rights Reserved. <add># <add># Licensed under the Apache License, Version 2.0 (the "License"); <add># you may not use this file except in compliance with the License. <add># You may obtain a copy of the License at <add># <add># http://www.apache.org/licenses/LICENSE-2.0 <add># <add># Unless required by applicable law or agreed to in writing, software <add># distributed under the License is distributed on an "AS IS" BASIS, <add># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add># See the License for the specific language governing permissions and <add># limitations under the License. 
<add># ============================================================================== <add> <add>import tensorflow as tf <add>from official.vision.beta.ops import preprocess_ops_3d <add>from official.vision.beta.projects.video_ssl.ops import video_ssl_preprocess_ops <add> <add> <add>class VideoSslPreprocessOpsTest(tf.test.TestCase): <add> <add> def setUp(self): <add> super().setUp() <add> self._raw_frames = tf.random.uniform((250, 256, 256, 3), minval=0, <add> maxval=255, dtype=tf.dtypes.int32) <add> self._sampled_frames = self._raw_frames[:16] <add> self._frames = preprocess_ops_3d.normalize_image( <add> self._sampled_frames, False, tf.float32) <add> <add> def test_sample_ssl_sequence(self): <add> sampled_seq = video_ssl_preprocess_ops.sample_ssl_sequence( <add> self._raw_frames, 16, True, 2) <add> self.assertAllEqual(sampled_seq.shape, (32, 256, 256, 3)) <add> <add> def test_random_color_jitter_3d(self): <add> jittered_clip = video_ssl_preprocess_ops.random_color_jitter_3d( <add> self._frames) <add> self.assertAllEqual(jittered_clip.shape, (16, 256, 256, 3)) <add> <add> def test_random_blur_3d(self): <add> blurred_clip = video_ssl_preprocess_ops.random_blur_3d( <add> self._frames, 256, 256) <add> self.assertAllEqual(blurred_clip.shape, (16, 256, 256, 3)) <add> <add>if __name__ == '__main__': <add> tf.test.main()
2
Go
Go
remove unused var 'errtlsconfigunavailable'
1ea0be9dbca79efc239a49a802189f2b6d5ae7c0
<ide><path>client/transport.go <ide> package client <ide> <ide> import ( <ide> "crypto/tls" <del> "errors" <ide> "net/http" <ide> ) <ide> <del>var errTLSConfigUnavailable = errors.New("TLSConfig unavailable") <del> <ide> // transportFunc allows us to inject a mock transport for testing. We define it <ide> // here so we can detect the tlsconfig and return nil for only this type. <ide> type transportFunc func(*http.Request) (*http.Response, error)
1
Ruby
Ruby
generalize method for setting cpu flags
55d4f4874660c768d28dce18155da627a899a176
<ide><path>Library/Homebrew/extend/ENV.rb <ide> def set_cflags f <ide> end <ide> end <ide> <del> def set_cpu_cflags default, map = {} <add> # Sets architecture-specific flags for every environment variable <add> # given in the list `flags`. <add> def set_cpu_flags flags, default, map = {} <ide> cflags =~ %r{(-Xarch_i386 )-march=} <ide> xarch = $1.to_s <del> remove_from_cflags %r{(-Xarch_i386 )?-march=\S*} <del> remove_from_cflags %r{( -Xclang \S+)+} <del> remove_from_cflags %r{-mssse3} <del> remove_from_cflags %r{-msse4(\.\d)?} <del> append_to_cflags xarch unless xarch.empty? <add> remove flags, %r{(-Xarch_i386 )?-march=\S*} <add> remove flags, %r{( -Xclang \S+)+} <add> remove flags, %r{-mssse3} <add> remove flags, %r{-msse4(\.\d)?} <add> append flags, xarch unless xarch.empty? <ide> <ide> if ARGV.build_bottle? <del> append_to_cflags '-mtune=' + map.fetch(:bottle) if map.has_key? :bottle <add> append flags, '-mtune=' + map.fetch(:bottle) if map.has_key? :bottle <ide> else <ide> # Don't set -msse3 and older flags because -march does that for us <del> append_to_cflags '-march=' + map.fetch(Hardware.intel_family, default) <add> append flags, '-march=' + map.fetch(Hardware.intel_family, default) <ide> end <ide> <ide> # not really a 'CPU' cflag, but is only used with clang <del> remove_from_cflags '-Qunused-arguments' <add> remove flags, '-Qunused-arguments' <add> end <add> <add> def set_cpu_cflags default, map = {} <add> set_cpu_flags cc_flag_vars, default, map <ide> end <ide> <ide> # actually c-compiler, so cc would be a better name
1
Text
Text
remove rnpm references in favour of react-native
1f9b765f8159216ca7696a0d818acfef7d1e4aff
<ide><path>docs/LinkingLibraries.md <ide> error as soon as you try to use the library._ <ide> <ide> ### Automatic linking <ide> <del>"[rnpm](http://github.com/rnpm/rnpm)" is a community project that allows linking of native dependencies automatically: <del> <ide> #### Step 1 <ide> <del>Install `rnpm`: <del>```bash <del>$ npm install rnpm -g <del>``` <del> <del>**Note:** _`rnpm` requires `node` version 4.1 or higher_ <del> <del>#### Step 2 <del> <ide> Install a library with native dependencies: <ide> ```bash <ide> $ npm install <library-with-native-dependencies> --save <ide> ``` <ide> <del>**Note:** _`--save` or `--save-dev` flag is very important for this step. `rnpm` will link <add>**Note:** _`--save` or `--save-dev` flag is very important for this step. React Native will link <ide> your libs based on `dependencies` and `devDependencies` in your `package.json` file._ <ide> <del>#### Step 3 <add>#### Step 2 <ide> <ide> Link your native dependencies: <ide> ```bash <del>$ rnpm link <add>$ react-native link <ide> ``` <ide> <ide> Done! All libraries with a native dependencies should be successfully linked to your iOS/Android project.
1
Javascript
Javascript
protect the case where tail is empty
daf5d6034e6234e2917132ba3948fdbf8f998f2d
<ide><path>dist/Immutable.js <ide> function setVectorBounds(vector, begin, end) { <ide> node.array[(oldTailOffset >>> SHIFT) & MASK] = oldTail; <ide> } <ide> if (newSize < oldSize) { <del> newTail = newTail.removeAfter(owner, 0, newSize); <add> newTail = newTail && newTail.removeAfter(owner, 0, newSize); <ide> } <ide> if (newOrigin >= newTailOffset) { <ide> newOrigin -= newTailOffset; <ide> newSize -= newTailOffset; <ide> newLevel = SHIFT; <ide> newRoot = EMPTY_VNODE; <del> newTail = newTail.removeBefore(owner, 0, newOrigin); <add> newTail = newTail && newTail.removeBefore(owner, 0, newOrigin); <ide> } else if (newOrigin > oldOrigin || newTailOffset < oldTailOffset) { <ide> var beginIndex, <ide> endIndex; <ide><path>dist/Immutable.min.js <ide> */ <ide> function t(){function t(t,e,n,r){var i;if(r){var u=r.prototype;i=se.create(u)}else i=t.prototype;return se.keys(e).forEach(function(t){i[t]=e[t]}),se.keys(n).forEach(function(e){t[e]=n[e]}),i.constructor=t,t.prototype=i,t}function e(t,e,n,r){return se.getPrototypeOf(e)[n].apply(t,r)}function n(t,n,r){e(t,n,"constructor",r)}function r(){return Object.create(ce)}function i(t){var e=Object.create(_e);return e.__reversedIndices=t?t.__reversedIndices:!1,e}function u(t,e,n,r){var i=t.get?t.get(e[r],ye):ye;return i===ye?n:++r===e.length?i:u(i,e,n,r)}function s(t,e,n){return(0===t||null!=n&&-n>=t)&&(null==e||null!=n&&e>=n)}function a(t,e){return 0>t?Math.max(0,e+t):e?Math.min(e,t):t}function h(t,e){return null==t?e:0>t?Math.max(0,e+t):e?Math.min(e,t):t}function o(t){return t}function c(t,e){return[e,t]}function f(){return!0}function l(){return this}function _(t){return(t||0)+1}function g(t,e,n,r,i){var u=t.__makeSequence();return u.__iterateUncached=function(u,s,a){var h=0,o=t.__iterate(function(t,i,s){if(e.call(n,t,i,s)){if(u(t,r?i:h,s)===!1)return!1;h++}},s,a);return i?o:h},u}function v(t){return function(){return!t.apply(this,arguments)}}function p(t){return"string"==typeof t?JSON.stringify(t):t}function m(t,e){for(var n="";e;)1&e&&(n+=t),(e>>=1)&&(t+=t);return n}function d(t,e){return t>e?1:e>t?-1:0}function y(t){O(1/0!==t,"Cannot perform this action with an infinite sequence.")}function w(t){return t.value=!1,t}function I(t){t&&(t.value=!0)}function D(){}function b(t){for(var e=t.length,n=Array(e),r=0;e>r;r++)n[r]=t[r];return n}function k(t,e,n){var r=t._rootData.updateIn(t._keyPath,n?ke.empty():void 0,e),i=t._keyPath||[];return t._onChange&&t._onChange.call(void 0,r,t._rootData,n?i.concat(n):i),new De(r,t._keyPath,t._onChange)}function M(t,e){return t instanceof De&&(t=t.deref()),e instanceof De&&(e=e.deref()),t===e?0!==t||0!==e||1/t===1/e:t!==t?e!==e:t instanceof he?t.equals(e):!1}function O(t,e){if(!t)throw Error(e)}function S(t,e,n){var r=Object.create(Oe);return r.length=t,r._root=e,r.__ownerID=n,r.__altered=!1,r <ide> }function x(t,e,n){var r=w(we),i=w(Ie),u=A(t._root,t.__ownerID,0,V(e),e,n,r,i);if(!i.value)return t;var s=t.length+(r.value?n===ye?-1:1:0);return t.__ownerID?(t.length=s,t._root=u,t.__altered=!0,t):u?S(s,u):ke.empty()}function A(t,e,n,r,i,u,s,a){return t?t.update(e,n,r,i,u,s,a):u===ye?t:(I(a),I(s),new je(e,r,[i,u]))}function C(t){return t.constructor===je||t.constructor===Ee}function E(t,e,n,r,i){if(t.hash===r)return new Ee(e,r,[t.entry,i]);var u,s=t.hash>>>n&de,a=r>>>n&de,h=s===a?[E(t,e,n+pe,r,i)]:(u=new je(e,r,i),a>s?[t,u]:[u,t]);return new Se(e,1<<s|1<<a,h)}function q(t,e,n,r){for(var i=0,u=0,s=Array(n),a=0,h=1,o=e.length;o>a;a++,h<<=1){var c=e[a];null!=c&&a!==r&&(i|=h,s[u++]=c)}return new Se(t,i,s)}function 
j(t,e,n,r,i){for(var u=0,s=Array(me),a=0;0!==n;a++,n>>>=1)s[a]=1&n?e[u++]:null;return s[r]=i,new Ae(t,u+1,s)}function P(t,e,n){for(var r=[],i=0;n.length>i;i++){var u=n[i];u&&r.push(Array.isArray(u)?he(u).fromEntries():he(u))}return W(t,e,r)}function U(t){return function(e,n){return e&&e.mergeDeepWith?e.mergeDeepWith(t,n):t?t(e,n):n}}function W(t,e,n){return 0===n.length?t:t.withMutations(function(t){for(var r=e?function(n,r){var i=t.get(r,ye);t.set(r,i===ye?n:e(i,n))}:function(e,n){t.set(n,e)},i=0;n.length>i;i++)n[i].forEach(r)})}function R(t,e,n,r,i){var u=e[i],s=++i===e.length,a=s?n:ke.empty(),h=t.get?t.get(u,a):a,o=s?r(h):R(h,e,n,r,i);return O(!h||t.set,"updateIn with invalid keyPath"),o===h?t:t.set(u,o)}function J(t){return t-=t>>1&1431655765,t=(858993459&t)+(t>>2&858993459),t=t+(t>>4)&252645135,t+=t>>8,t+=t>>16,127&t}function z(t,e,n,r){var i=r?t:b(t);return i[e]=n,i}function B(t,e,n,r){var i=t.length+1;if(r&&e+1===i)return t[e]=n,t;for(var u=Array(i),s=0,a=0;i>a;a++)a===e?(u[a]=n,s=-1):u[a]=t[a+s];return u}function L(t,e,n){var r=t.length-1;if(n&&e===r)return t.pop(),t;for(var i=Array(r),u=0,s=0;r>s;s++)s===e&&(u=1),i[s]=t[s+u];return i}function V(t){if(!t)return 0;if(t===!0)return 1;var e=typeof t;if("number"===e){if((0|t)===t)return t&We;t=""+t,e="string"}if("string"===e)return t.length>Re?K(t):N(t); <del>if(t.hashCode&&"function"==typeof t.hashCode)return t.hashCode();throw Error("Unable to hash: "+t)}function K(t){var e=Be[t];return null==e&&(e=N(t),ze===Je&&(ze=0,Be={}),ze++,Be[t]=e),e}function N(t){for(var e=0,n=0;t.length>n;n++)e=31*e+t.charCodeAt(n)&We;return e}function F(t,e,n,r,i){return{array:t,level:e,offset:n,max:r,__prev:i}}function G(t,e,n,r,i,u){var s=Object.create(Fe);return s.length=e-t,s._origin=t,s._size=e,s._level=n,s._root=r,s._tail=i,s.__ownerID=u,s.__altered=!1,s}function H(t,e,n){if(e>=t.length)return n===ye?t:t.withMutations(function(t){X(t,0,e+1).set(e,n)});e=Z(e,t._origin);var r=t._tail,i=t._root,u=w(Ie);return e>=$(t._size)?r=Q(r,t.__ownerID,0,e,n,u):i=Q(i,t.__ownerID,t._level,e,n,u),u.value?t.__ownerID?(t._root=i,t._tail=r,t.__altered=!0,t):G(t._origin,t._size,t._level,i,r):t}function Q(t,e,n,r,i,u){var s=i===ye,a=r>>>n&de,h=t&&t.array.length>a&&t.array.hasOwnProperty(a);if(s&&!h)return t;if(n>0){var o=t&&t.array[a],c=Q(o,e,n-pe,r,i,u);return c===o?t:(f=t?t.ensureOwner(e):new Ge([],e),f.array[a]=c,f)}if(!s&&h&&t.array[a]===i)return t;I(u);var f=t?t.ensureOwner(e):new Ge([],e);return s?delete f.array[a]:f.array[a]=i,f}function T(t,e){if(e>=$(t._size))return t._tail;if(1<<t._level+pe>e){for(var n=t._root,r=t._level;n&&r>0;)n=n.array[e>>>r&de],r-=pe;return n}}function X(t,e,n){var r=t.__ownerID||new D,i=t._origin,u=t._size,s=i+e,a=null==n?u:0>n?u+n:i+n;if(s===i&&a===u)return t;if(s>=a)return t.clear();for(var h=t._level,o=t._root,c=0;0>s+c;)o=new Ge(o.array.length?[null,o]:[],r),h+=pe,c+=1<<h;c&&(s+=c,i+=c,a+=c,u+=c);for(var f=$(u),l=$(a);l>=1<<h+pe;)o=new Ge(o.array.length?[o]:[],r),h+=pe;var _=t._tail,g=f>l?T(t,a-1):l>f?new Ge([],r):_;if(l>f&&u>s&&_.array.length){o=o.ensureOwner(r);for(var v=o,p=h;p>pe;p-=pe){var m=f>>>p&de;v=v.array[m]=v.array[m]?v.array[m].ensureOwner(r):new Ge([],r)}v.array[f>>>pe&de]=_}if(u>a&&(g=g.removeAfter(r,0,a)),s>=l)s-=l,a-=l,h=pe,o=Xe,g=g.removeBefore(r,0,s);else if(s>i||f>l){var d,y;c=0;do d=s>>>h&de,y=l-1>>>h&de,d===y&&(d&&(c+=(1<<h)*d),h-=pe,o=o&&o.array[d]); <add>if(t.hashCode&&"function"==typeof t.hashCode)return t.hashCode();throw Error("Unable to hash: "+t)}function K(t){var e=Be[t];return 
null==e&&(e=N(t),ze===Je&&(ze=0,Be={}),ze++,Be[t]=e),e}function N(t){for(var e=0,n=0;t.length>n;n++)e=31*e+t.charCodeAt(n)&We;return e}function F(t,e,n,r,i){return{array:t,level:e,offset:n,max:r,__prev:i}}function G(t,e,n,r,i,u){var s=Object.create(Fe);return s.length=e-t,s._origin=t,s._size=e,s._level=n,s._root=r,s._tail=i,s.__ownerID=u,s.__altered=!1,s}function H(t,e,n){if(e>=t.length)return n===ye?t:t.withMutations(function(t){X(t,0,e+1).set(e,n)});e=Z(e,t._origin);var r=t._tail,i=t._root,u=w(Ie);return e>=$(t._size)?r=Q(r,t.__ownerID,0,e,n,u):i=Q(i,t.__ownerID,t._level,e,n,u),u.value?t.__ownerID?(t._root=i,t._tail=r,t.__altered=!0,t):G(t._origin,t._size,t._level,i,r):t}function Q(t,e,n,r,i,u){var s=i===ye,a=r>>>n&de,h=t&&t.array.length>a&&t.array.hasOwnProperty(a);if(s&&!h)return t;if(n>0){var o=t&&t.array[a],c=Q(o,e,n-pe,r,i,u);return c===o?t:(f=t?t.ensureOwner(e):new Ge([],e),f.array[a]=c,f)}if(!s&&h&&t.array[a]===i)return t;I(u);var f=t?t.ensureOwner(e):new Ge([],e);return s?delete f.array[a]:f.array[a]=i,f}function T(t,e){if(e>=$(t._size))return t._tail;if(1<<t._level+pe>e){for(var n=t._root,r=t._level;n&&r>0;)n=n.array[e>>>r&de],r-=pe;return n}}function X(t,e,n){var r=t.__ownerID||new D,i=t._origin,u=t._size,s=i+e,a=null==n?u:0>n?u+n:i+n;if(s===i&&a===u)return t;if(s>=a)return t.clear();for(var h=t._level,o=t._root,c=0;0>s+c;)o=new Ge(o.array.length?[null,o]:[],r),h+=pe,c+=1<<h;c&&(s+=c,i+=c,a+=c,u+=c);for(var f=$(u),l=$(a);l>=1<<h+pe;)o=new Ge(o.array.length?[o]:[],r),h+=pe;var _=t._tail,g=f>l?T(t,a-1):l>f?new Ge([],r):_;if(l>f&&u>s&&_.array.length){o=o.ensureOwner(r);for(var v=o,p=h;p>pe;p-=pe){var m=f>>>p&de;v=v.array[m]=v.array[m]?v.array[m].ensureOwner(r):new Ge([],r)}v.array[f>>>pe&de]=_}if(u>a&&(g=g&&g.removeAfter(r,0,a)),s>=l)s-=l,a-=l,h=pe,o=Xe,g=g&&g.removeBefore(r,0,s);else if(s>i||f>l){var d,y;c=0;do d=s>>>h&de,y=l-1>>>h&de,d===y&&(d&&(c+=(1<<h)*d),h-=pe,o=o&&o.array[d]); <ide> while(o&&d===y);o&&s>i&&(o=o.removeBefore(r,h,s-c)),o&&f>l&&(o=o.removeAfter(r,h,l-c)),c&&(s-=c,a-=c),o=o||Xe}return t.__ownerID?(t.length=a-s,t._origin=s,t._size=a,t._level=h,t._root=o,t._tail=g,t.__altered=!0,t):G(s,a,h,o,g)}function Y(t,e,n){for(var r=[],i=0;n.length>i;i++){var u=n[i];u&&r.push(he(u))}var s=Math.max.apply(null,r.map(function(t){return t.length||0}));return s>t.length&&(t=t.setLength(s)),W(t,e,r)}function Z(t,e){return O(t>=0,"Index out of bounds"),t+e}function $(t){return me>t?0:t-1>>>pe<<pe}function te(t,e){var n=Object.create($e);return n.length=t?t.length:0,n._map=t,n.__ownerID=e,n}function ee(t,e,n){var r=Object.create(en.prototype);return r.length=t?t.length:0,r._map=t,r._vector=e,r.__ownerID=n,r}function ne(t,e,n){var r=Object.create(Object.getPrototypeOf(t));return r._map=e,r.__ownerID=n,r}function re(t,e){return e?ie(e,t,"",{"":t}):ue(t)}function ie(t,e,n,r){return e&&(Array.isArray(e)||e.constructor===Object)?t.call(r,n,he(e).map(function(n,r){return ie(t,n,r,e)})):e}function ue(t){if(t){if(Array.isArray(t))return he(t).map(ue).toVector();if(t.constructor===Object)return he(t).map(ue).toMap()}return t}var se=Object,ae={};ae.createClass=t,ae.superCall=e,ae.defaultSuperCall=n;var he=function(t){return oe.from(1===arguments.length?t:Array.prototype.slice.call(arguments))},oe=he;ae.createClass(he,{toString:function(){return this.__toString("Seq {","}")},__toString:function(t,e){return 0===this.length?t+e:t+" "+this.map(this.__toStringMapper).join(", ")+" "+e},__toStringMapper:function(t,e){return e+": "+p(t)},toJS:function(){return this.map(function(t){return t 
instanceof oe?t.toJS():t}).__toJS()},toArray:function(){y(this.length);var t=Array(this.length||0);return this.values().forEach(function(e,n){t[n]=e}),t},toObject:function(){y(this.length);var t={};return this.forEach(function(e,n){t[n]=e}),t},toVector:function(){return y(this.length),Ke.from(this)},toMap:function(){return y(this.length),ke.from(this)},toOrderedMap:function(){return y(this.length),en.from(this)},toSet:function(){return y(this.length),Ye.from(this) <ide> },equals:function(t){if(this===t)return!0;if(!(t instanceof oe))return!1;if(null!=this.length&&null!=t.length){if(this.length!==t.length)return!1;if(0===this.length&&0===t.length)return!0}return this.__deepEquals(t)},__deepEquals:function(t){var e=this.cacheResult().entries().toArray(),n=0;return t.every(function(t,r){var i=e[n++];return M(r,i[0])&&M(t,i[1])})},join:function(t){t=t||",";var e="",n=!0;return this.forEach(function(r){n?(n=!1,e+=r):e+=t+r}),e},count:function(t,e){return t?this.filter(t,e).count():(null==this.length&&(this.length=this.forEach(f)),this.length)},countBy:function(t){var e=this;return en.empty().withMutations(function(n){e.forEach(function(e,r,i){n.update(t(e,r,i),_)})})},concat:function(){for(var t=[],e=0;arguments.length>e;e++)t[e]=arguments[e];var n=[this].concat(t.map(function(t){return oe(t)})),r=this.__makeSequence();return r.length=n.reduce(function(t,e){return null!=t&&null!=e.length?t+e.length:void 0},0),r.__iterateUncached=function(t,e){for(var r,i=0,u=n.length-1,s=0;u>=s&&!r;s++){var a=n[e?u-s:s];i+=a.__iterate(function(e,n,i){return t(e,n,i)===!1?(r=!0,!1):void 0},e)}return i},r},reverse:function(){var t=this,e=t.__makeSequence();return e.length=t.length,e.__iterateUncached=function(e,n){return t.__iterate(e,!n)},e.reverse=function(){return t},e},keys:function(){return this.flip().values()},values:function(){var t=this,e=i(t);return e.length=t.length,e.values=l,e.__iterateUncached=function(e,n,r){if(r&&null==this.length)return this.cacheResult().__iterate(e,n,r);var i,u=0;return r?(u=this.length-1,i=function(t,n,r){return e(t,u--,r)!==!1}):i=function(t,n,r){return e(t,u++,r)!==!1},t.__iterate(i,n),r?this.length:u},e},entries:function(){var t=this;if(t._cache)return oe(t._cache);var e=t.map(c).values();return e.fromEntries=function(){return t},e},forEach:function(t,e){return this.__iterate(e?t.bind(e):t)},reduce:function(t,e,n){var r=e;return this.forEach(function(e,i,u){r=t.call(n,r,e,i,u)}),r},reduceRight:function(t,e,n){return this.reverse(!0).reduce(t,e,n)},every:function(t,e){var n=!0; <ide> return this.forEach(function(r,i,u){return t.call(e,r,i,u)?void 0:(n=!1,!1)}),n},some:function(t,e){return!this.every(v(t),e)},first:function(){return this.find(f)},last:function(){return this.findLast(f)},rest:function(){return this.slice(1)},butLast:function(){return this.slice(0,-1)},has:function(t){return this.get(t,ye)!==ye},get:function(t,e){return this.find(function(e,n){return M(n,t)},null,e)},getIn:function(t,e){return t&&0!==t.length?u(this,t,e,0):this},contains:function(t){return this.find(function(e){return M(e,t)},null,ye)!==ye},find:function(t,e,n){var r=n;return this.forEach(function(n,i,u){return t.call(e,n,i,u)?(r=n,!1):void 0}),r},findKey:function(t,e){var n;return this.forEach(function(r,i,u){return t.call(e,r,i,u)?(n=i,!1):void 0}),n},findLast:function(t,e,n){return this.reverse(!0).find(t,e,n)},findLastKey:function(t,e){return this.reverse(!0).findKey(t,e)},flip:function(){var t=this,e=r();return e.length=t.length,e.flip=function(){return 
t},e.__iterateUncached=function(e,n){return t.__iterate(function(t,n,r){return e(n,t,r)!==!1},n)},e},map:function(t,e){var n=this,r=n.__makeSequence();return r.length=n.length,r.__iterateUncached=function(r,i){return n.__iterate(function(n,i,u){return r(t.call(e,n,i,u),i,u)!==!1},i)},r},mapKeys:function(t,e){var n=this,r=n.__makeSequence();return r.length=n.length,r.__iterateUncached=function(r,i){return n.__iterate(function(n,i,u){return r(n,t.call(e,i,n,u),u)!==!1},i)},r},filter:function(t,e){return g(this,t,e,!0,!1)},slice:function(t,e){if(s(t,e,this.length))return this;var n=a(t,this.length),r=h(e,this.length);if(n!==n||r!==r)return this.entries().slice(t,e).fromEntries();var i=0===n?this:this.skip(n);return null==r||r===this.length?i:i.take(r-n)},take:function(t){var e=0,n=this.takeWhile(function(){return e++<t});return n.length=this.length&&Math.min(this.length,t),n},takeLast:function(t,e){return this.reverse(e).take(t).reverse(e)},takeWhile:function(t,e){var n=this,r=n.__makeSequence();return r.__iterateUncached=function(r,i,u){if(i)return this.cacheResult().__iterate(r,i,u); <ide><path>src/Vector.js <ide> function setVectorBounds(vector, begin, end) { <ide> <ide> // If the size has been reduced, there's a chance the tail needs to be trimmed. <ide> if (newSize < oldSize) { <del> newTail = newTail.removeAfter(owner, 0, newSize); <add> newTail = newTail && newTail.removeAfter(owner, 0, newSize); <ide> } <ide> <ide> // If the new origin is within the tail, then we do not need a root. <ide> function setVectorBounds(vector, begin, end) { <ide> newSize -= newTailOffset; <ide> newLevel = SHIFT; <ide> newRoot = EMPTY_VNODE; <del> newTail = newTail.removeBefore(owner, 0, newOrigin); <add> newTail = newTail && newTail.removeBefore(owner, 0, newOrigin); <ide> <ide> // Otherwise, if the root has been trimmed, garbage collect. <ide> } else if (newOrigin > oldOrigin || newTailOffset < oldTailOffset) {
3
Javascript
Javascript
add benches for fs.stat & fs.statsync
450ee635846dc583c893f2e5ee80896ca02461e1
<ide><path>benchmark/fs/bench-stat.js <add>'use strict'; <add> <add>const common = require('../common'); <add>const fs = require('fs'); <add> <add>const bench = common.createBenchmark(main, { <add> n: [1e4], <add> kind: ['lstat', 'stat'] <add>}); <add> <add> <add>function main(conf) { <add> const n = conf.n >>> 0; <add> <add> bench.start(); <add> (function r(cntr, fn) { <add> if (cntr-- <= 0) <add> return bench.end(n); <add> fn(__filename, function() { <add> r(cntr, fn); <add> }); <add> }(n, fs[conf.kind])); <add>} <ide><path>benchmark/fs/bench-statSync.js <add>'use strict'; <add> <add>const common = require('../common'); <add>const fs = require('fs'); <add> <add>const bench = common.createBenchmark(main, { <add> n: [1e4], <add> kind: ['lstatSync', 'statSync'] <add>}); <add> <add> <add>function main(conf) { <add> const n = conf.n >>> 0; <add> const fn = fs[conf.kind]; <add> <add> bench.start(); <add> for (var i = 0; i < n; i++) { <add> fn(__filename); <add> } <add> bench.end(n); <add>}
2
Text
Text
fix misspelt keyword in stringstore example
322c5a3ac4da412582e684e8ec2355b68efe7b27
<ide><path>website/docs/api/stringstore.md <ide> Load state from a binary string. <ide> > #### Example <ide> > <ide> > ```python <del>> fron spacy.strings import StringStore <add>> from spacy.strings import StringStore <ide> > store_bytes = stringstore.to_bytes() <ide> > new_store = StringStore().from_bytes(store_bytes) <ide> > ```
1
Javascript
Javascript
remove domain specific code
fc96743454db30fbf9b4c4dc9fda0f260f8d35a6
<ide><path>lib/timers.js <ide> Timeout.prototype.unref = function() { <ide> this._handle.owner = this; <ide> this._handle[kOnTimeout] = unrefdHandle; <ide> this._handle.start(delay); <del> this._handle.domain = this.domain; <ide> this._handle.unref(); <ide> } <ide> return this; <ide><path>test/parallel/test-domain-timers.js <ide> timeoutd.on('error', common.mustCall(function(e) { <ide> assert.strictEqual(e.message, 'Timeout UNREFd', <ide> 'Domain should catch timer error'); <ide> clearTimeout(timeout); <del>})); <add>}, 2)); <ide> <add>let t; <ide> timeoutd.run(function() { <ide> setTimeout(function() { <ide> throw new Error('Timeout UNREFd'); <ide> }, 0).unref(); <add> <add> t = setTimeout(function() { <add> throw new Error('Timeout UNREFd'); <add> }, 0); <ide> }); <add>t.unref(); <ide> <ide> const immediated = domain.create(); <ide>
2
Java
Java
apply allowedoriginpatterns in sockjsservice
8130bf505fa69a00e0d566db36e543a9cf030683
<ide><path>spring-websocket/src/main/java/org/springframework/web/socket/sockjs/support/AbstractSockJsService.java <ide> import java.util.LinkedHashSet; <ide> import java.util.List; <ide> import java.util.Random; <del>import java.util.Set; <ide> import java.util.concurrent.TimeUnit; <ide> <ide> import javax.servlet.http.HttpServletRequest; <ide> public abstract class AbstractSockJsService implements SockJsService, CorsConfig <ide> <ide> private boolean suppressCors = false; <ide> <del> protected final Set<String> allowedOrigins = new LinkedHashSet<>(); <del> <del> protected final Set<String> allowedOriginPatterns = new LinkedHashSet<>(); <add> protected final CorsConfiguration corsConfiguration; <ide> <ide> private final SockJsRequestHandler infoHandler = new InfoHandler(); <ide> <ide> public abstract class AbstractSockJsService implements SockJsService, CorsConfig <ide> public AbstractSockJsService(TaskScheduler scheduler) { <ide> Assert.notNull(scheduler, "TaskScheduler must not be null"); <ide> this.taskScheduler = scheduler; <add> this.corsConfiguration = initCorsConfiguration(); <add> } <add> <add> private static CorsConfiguration initCorsConfiguration() { <add> CorsConfiguration config = new CorsConfiguration(); <add> config.addAllowedMethod("*"); <add> config.setAllowedOrigins(Collections.emptyList()); <add> config.setAllowedOriginPatterns(Collections.emptyList()); <add> config.setAllowCredentials(true); <add> config.setMaxAge(ONE_YEAR); <add> config.addAllowedHeader("*"); <add> return config; <ide> } <ide> <ide> <ide> public boolean shouldSuppressCors() { <ide> */ <ide> public void setAllowedOrigins(Collection<String> allowedOrigins) { <ide> Assert.notNull(allowedOrigins, "Allowed origins Collection must not be null"); <del> this.allowedOrigins.clear(); <del> this.allowedOrigins.addAll(allowedOrigins); <add> this.corsConfiguration.setAllowedOrigins(new ArrayList<>(allowedOrigins)); <ide> } <ide> <add> /** <add> * Return configure allowed {@code Origin} header values. <add> * @since 4.1.2 <add> * @see #setAllowedOrigins <add> */ <add> @SuppressWarnings("ConstantConditions") <add> public Collection<String> getAllowedOrigins() { <add> return this.corsConfiguration.getAllowedOrigins(); <add> } <ide> /** <ide> * A variant of {@link #setAllowedOrigins(Collection)} that accepts flexible <ide> * domain patterns, e.g. {@code "https://*.domain1.com"}. Furthermore it <ide> public void setAllowedOrigins(Collection<String> allowedOrigins) { <ide> */ <ide> public void setAllowedOriginPatterns(Collection<String> allowedOriginPatterns) { <ide> Assert.notNull(allowedOriginPatterns, "Allowed origin patterns Collection must not be null"); <del> this.allowedOriginPatterns.clear(); <del> this.allowedOriginPatterns.addAll(allowedOriginPatterns); <del> } <del> <del> /** <del> * Return configure allowed {@code Origin} header values. <del> * @since 4.1.2 <del> * @see #setAllowedOrigins <del> */ <del> public Collection<String> getAllowedOrigins() { <del> return Collections.unmodifiableSet(this.allowedOrigins); <add> this.corsConfiguration.setAllowedOriginPatterns(new ArrayList<>(allowedOriginPatterns)); <ide> } <ide> <ide> /** <del> * Return configure allowed {@code Origin} pattern header values. <add> * Return {@link #setAllowedOriginPatterns(Collection) configured} origin patterns. 
<ide> * @since 5.3.2 <ide> * @see #setAllowedOriginPatterns <ide> */ <add> @SuppressWarnings("ConstantConditions") <ide> public Collection<String> getAllowedOriginPatterns() { <del> return Collections.unmodifiableSet(this.allowedOriginPatterns); <add> return this.corsConfiguration.getAllowedOriginPatterns(); <ide> } <ide> <ide> <ide> else if (sockJsPath.equals("/info")) { <ide> } <ide> <ide> else if (sockJsPath.matches("/iframe[0-9-.a-z_]*.html")) { <del> if (!this.allowedOrigins.isEmpty() && !this.allowedOrigins.contains("*")) { <add> if (!getAllowedOrigins().isEmpty() && !getAllowedOrigins().contains("*") || <add> !getAllowedOriginPatterns().isEmpty()) { <ide> if (requestInfo != null) { <ide> logger.debug("Iframe support is disabled when an origin check is required. " + <ide> "Ignoring transport request: " + requestInfo); <ide> } <ide> response.setStatusCode(HttpStatus.NOT_FOUND); <ide> return; <ide> } <del> if (this.allowedOrigins.isEmpty()) { <add> if (getAllowedOrigins().isEmpty()) { <ide> response.getHeaders().add(XFRAME_OPTIONS_HEADER, "SAMEORIGIN"); <ide> } <ide> if (requestInfo != null) { <ide> protected boolean checkOrigin(ServerHttpRequest request, ServerHttpResponse resp <ide> return true; <ide> } <ide> <del> if (!WebUtils.isValidOrigin(request, this.allowedOrigins)) { <add> if (this.corsConfiguration.checkOrigin(request.getHeaders().getOrigin()) == null) { <ide> if (logger.isWarnEnabled()) { <ide> logger.warn("Origin header value '" + request.getHeaders().getOrigin() + "' not allowed."); <ide> } <ide> protected boolean checkOrigin(ServerHttpRequest request, ServerHttpResponse resp <ide> @Nullable <ide> public CorsConfiguration getCorsConfiguration(HttpServletRequest request) { <ide> if (!this.suppressCors && (request.getHeader(HttpHeaders.ORIGIN) != null)) { <del> CorsConfiguration config = new CorsConfiguration(); <del> config.setAllowedOrigins(new ArrayList<>(this.allowedOrigins)); <del> config.setAllowedOriginPatterns(new ArrayList<>(this.allowedOriginPatterns)); <del> config.addAllowedMethod("*"); <del> config.setAllowCredentials(true); <del> config.setMaxAge(ONE_YEAR); <del> config.addAllowedHeader("*"); <del> return config; <add> return this.corsConfiguration; <ide> } <ide> return null; <ide> } <ide><path>spring-websocket/src/main/java/org/springframework/web/socket/sockjs/transport/TransportHandlingSockJsService.java <ide> protected boolean validateRequest(String serverId, String sessionId, String tran <ide> return false; <ide> } <ide> <del> if (!this.allowedOrigins.contains("*")) { <add> if (!getAllowedOrigins().isEmpty() && !getAllowedOrigins().contains("*") || <add> !getAllowedOriginPatterns().isEmpty()) { <ide> TransportType transportType = TransportType.fromValue(transport); <ide> if (transportType == null || !transportType.supportsOrigin()) { <ide> if (logger.isWarnEnabled()) { <ide><path>spring-websocket/src/test/java/org/springframework/web/socket/sockjs/support/SockJsServiceTests.java <ide> public void handleInfoOptionsWithForbiddenOrigin() { <ide> @Test // SPR-12283 <ide> public void handleInfoOptionsWithOriginAndCorsHeadersDisabled() { <ide> this.servletRequest.addHeader(HttpHeaders.ORIGIN, "https://mydomain2.example"); <del> this.service.setAllowedOrigins(Collections.singletonList("*")); <add> this.service.setAllowedOriginPatterns(Collections.singletonList("*")); <ide> this.service.setSuppressCors(true); <ide> <ide> this.servletRequest.addHeader(HttpHeaders.ACCESS_CONTROL_REQUEST_HEADERS, "Last-Modified"); <ide> resetResponseAndHandleRequest("OPTIONS", 
"/echo/info", HttpStatus.NO_CONTENT); <ide> assertThat(this.service.getCorsConfiguration(this.servletRequest)).isNull(); <ide> <ide> this.service.setAllowedOrigins(Collections.singletonList("https://mydomain1.example")); <add> this.service.setAllowedOriginPatterns(Collections.emptyList()); <ide> resetResponseAndHandleRequest("OPTIONS", "/echo/info", HttpStatus.FORBIDDEN); <ide> assertThat(this.service.getCorsConfiguration(this.servletRequest)).isNull(); <ide> <ide> this.service.setAllowedOrigins(Arrays.asList("https://mydomain1.example", "https://mydomain2.example", "http://mydomain3.example")); <add> this.service.setAllowedOriginPatterns(Collections.emptyList()); <ide> resetResponseAndHandleRequest("OPTIONS", "/echo/info", HttpStatus.NO_CONTENT); <ide> assertThat(this.service.getCorsConfiguration(this.servletRequest)).isNull(); <ide> }
3
Python
Python
fix exception msg matching in tests
59c20848a53402bfcf70625390391054aabac760
<ide><path>numpy/lib/tests/test_loadtxt.py <ide> def test_manual_universal_newlines(self, newline): <ide> <ide> <ide> def test_delimiter_comment_collision_raises(): <del> with pytest.raises(TypeError, match="control characters.*are identical"): <add> with pytest.raises(TypeError, match=".*control characters.*incompatible"): <ide> np.loadtxt(StringIO("1, 2, 3"), delimiter=",", comments=",") <ide> <ide> <ide> def test_delimiter_quotechar_collision_raises(): <del> with pytest.raises(TypeError, match="control characters.*are identical"): <add> with pytest.raises(TypeError, match=".*control characters.*incompatible"): <ide> np.loadtxt(StringIO("1, 2, 3"), delimiter=",", quotechar=",") <ide> <ide> <ide> def test_comment_quotechar_collision_raises(): <del> with pytest.raises(TypeError, match="control characters.*are identical"): <add> with pytest.raises(TypeError, match=".*control characters.*incompatible"): <ide> np.loadtxt(StringIO("1 2 3"), comments="#", quotechar="#") <ide> <ide> <ide> def test_delimiter_and_multiple_comments_collision_raises(): <ide> ) <ide> ) <ide> def test_collision_with_default_delimiter_raises(ws): <del> with pytest.raises(TypeError, match="control characters.*are identical"): <add> with pytest.raises(TypeError, match=".*control characters.*incompatible"): <ide> np.loadtxt(StringIO(f"1{ws}2{ws}3\n4{ws}5{ws}6\n"), comments=ws) <del> with pytest.raises(TypeError, match="control characters.*are identical"): <add> with pytest.raises(TypeError, match=".*control characters.*incompatible"): <ide> np.loadtxt(StringIO(f"1{ws}2{ws}3\n4{ws}5{ws}6\n"), quotechar=ws) <ide> <ide>
1
PHP
PHP
add afterrender hook for widgets
403af312e049b763963affe2309d1f1c8473962f
<ide><path>src/View/Helper/FormHelper.php <ide> public function widget($name, array $data = []) { <ide> } <ide> unset($data['secure']); <ide> <del> return $widget->render($data); <add> $out = $widget->render($data); <add> if (method_exists($widget, 'afterRender')) { <add> $widget->afterRender($this->_View, $data); <add> } <add> return $out; <ide> } <ide> <ide> /**
1
Java
Java
adjust webflux behavior for @requestpart list<t>
ff9daa93775961ea3c6561c760d40f43103fdbba
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/result/method/annotation/RequestPartMethodArgumentResolver.java <ide> /* <del> * Copyright 2002-2019 the original author or authors. <add> * Copyright 2002-2020 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> <ide> package org.springframework.web.reactive.result.method.annotation; <ide> <add>import java.util.Collection; <ide> import java.util.Collections; <ide> import java.util.List; <ide> <ide> import org.springframework.http.server.reactive.ServerHttpRequestDecorator; <ide> import org.springframework.lang.Nullable; <ide> import org.springframework.util.CollectionUtils; <add>import org.springframework.util.StringUtils; <ide> import org.springframework.web.bind.annotation.RequestPart; <ide> import org.springframework.web.reactive.BindingContext; <ide> import org.springframework.web.server.ServerWebExchange; <ide> public Mono<Object> resolveArgument( <ide> <ide> RequestPart requestPart = parameter.getParameterAnnotation(RequestPart.class); <ide> boolean isRequired = (requestPart == null || requestPart.required()); <del> String name = getPartName(parameter, requestPart); <del> <del> Flux<Part> parts = exchange.getMultipartData() <del> .flatMapIterable(map -> { <del> List<Part> list = map.get(name); <del> if (CollectionUtils.isEmpty(list)) { <del> if (isRequired) { <del> throw getMissingPartException(name, parameter); <del> } <del> return Collections.emptyList(); <del> } <del> return list; <del> }); <add> Class<?> paramType = parameter.getParameterType(); <add> Flux<Part> partValues = getPartValues(parameter, requestPart, isRequired, exchange); <ide> <del> if (Part.class.isAssignableFrom(parameter.getParameterType())) { <del> return parts.next().cast(Object.class); <add> if (Part.class.isAssignableFrom(paramType)) { <add> return partValues.next().cast(Object.class); <ide> } <ide> <del> if (List.class.isAssignableFrom(parameter.getParameterType())) { <add> if (Collection.class.isAssignableFrom(paramType) || List.class.isAssignableFrom(paramType)) { <ide> MethodParameter elementType = parameter.nested(); <ide> if (Part.class.isAssignableFrom(elementType.getNestedParameterType())) { <del> return parts.collectList().cast(Object.class); <add> return partValues.collectList().cast(Object.class); <ide> } <ide> else { <del> return decodePartValues(parts, elementType, bindingContext, exchange, isRequired) <del> .collectList().cast(Object.class); <add> return partValues.next() <add> .flatMap(part -> decode(part, parameter, bindingContext, exchange, isRequired)) <add> .defaultIfEmpty(Collections.emptyList()); <ide> } <ide> } <ide> <del> ReactiveAdapter adapter = getAdapterRegistry().getAdapter(parameter.getParameterType()); <del> if (adapter != null) { <del> MethodParameter elementType = parameter.nested(); <del> return Mono.just(adapter.fromPublisher( <del> Part.class.isAssignableFrom(elementType.getNestedParameterType()) ? 
<del> parts : decodePartValues(parts, elementType, bindingContext, exchange, isRequired))); <add> ReactiveAdapter adapter = getAdapterRegistry().getAdapter(paramType); <add> if (adapter == null) { <add> return partValues.next().flatMap(part -> <add> decode(part, parameter, bindingContext, exchange, isRequired)); <ide> } <ide> <del> return decodePartValues(parts, parameter, bindingContext, exchange, isRequired) <del> .next().cast(Object.class); <del> } <del> <del> private String getPartName(MethodParameter methodParam, @Nullable RequestPart requestPart) { <del> String partName = (requestPart != null ? requestPart.name() : ""); <del> if (partName.isEmpty()) { <del> partName = methodParam.getParameterName(); <del> if (partName == null) { <del> throw new IllegalArgumentException("Request part name for argument type [" + <del> methodParam.getNestedParameterType().getName() + <del> "] not specified, and parameter name information not found in class file either."); <del> } <add> MethodParameter elementType = parameter.nested(); <add> if (Part.class.isAssignableFrom(elementType.getNestedParameterType())) { <add> return Mono.just(adapter.fromPublisher(partValues)); <ide> } <del> return partName; <add> <add> Flux<?> flux = partValues.flatMap(part -> decode(part, elementType, bindingContext, exchange, isRequired)); <add> return Mono.just(adapter.fromPublisher(flux)); <ide> } <ide> <del> private ServerWebInputException getMissingPartException(String name, MethodParameter param) { <del> String reason = "Required request part '" + name + "' is not present"; <del> return new ServerWebInputException(reason, param); <add> public Flux<Part> getPartValues( <add> MethodParameter parameter, @Nullable RequestPart requestPart, boolean isRequired, <add> ServerWebExchange exchange) { <add> <add> String name = getPartName(parameter, requestPart); <add> return exchange.getMultipartData() <add> .flatMapIterable(map -> { <add> List<Part> list = map.get(name); <add> if (CollectionUtils.isEmpty(list)) { <add> if (isRequired) { <add> String reason = "Required request part '" + name + "' is not present"; <add> throw new ServerWebInputException(reason, parameter); <add> } <add> return Collections.emptyList(); <add> } <add> return list; <add> }); <ide> } <ide> <add> private String getPartName(MethodParameter methodParam, @Nullable RequestPart requestPart) { <add> String name = null; <add> if (requestPart != null) { <add> name = requestPart.name(); <add> } <add> if (StringUtils.isEmpty(name)) { <add> name = methodParam.getParameterName(); <add> } <add> if (StringUtils.isEmpty(name)) { <add> throw new IllegalArgumentException("Request part name for argument type [" + <add> methodParam.getNestedParameterType().getName() + <add> "] not specified, and parameter name information not found in class file either."); <add> } <add> return name; <add> } <ide> <del> private Flux<?> decodePartValues(Flux<Part> parts, MethodParameter elementType, BindingContext bindingContext, <add> @SuppressWarnings("unchecked") <add> private <T> Mono<T> decode( <add> Part part, MethodParameter elementType, BindingContext bindingContext, <ide> ServerWebExchange exchange, boolean isRequired) { <ide> <del> return parts.flatMap(part -> { <del> ServerHttpRequest partRequest = new PartServerHttpRequest(exchange.getRequest(), part); <del> ServerWebExchange partExchange = exchange.mutate().request(partRequest).build(); <del> if (logger.isDebugEnabled()) { <del> logger.debug(exchange.getLogPrefix() + "Decoding part '" + part.name() + "'"); <del> } <del> return 
readBody(elementType, isRequired, bindingContext, partExchange); <del> }); <add> ServerHttpRequest partRequest = new PartServerHttpRequest(exchange.getRequest(), part); <add> ServerWebExchange partExchange = exchange.mutate().request(partRequest).build(); <add> if (logger.isDebugEnabled()) { <add> logger.debug(exchange.getLogPrefix() + "Decoding part '" + part.name() + "'"); <add> } <add> return (Mono<T>) readBody(elementType, isRequired, bindingContext, partExchange); <ide> } <ide> <ide> <ide><path>spring-webflux/src/test/java/org/springframework/web/reactive/result/method/annotation/RequestPartMethodArgumentResolverTests.java <ide> package org.springframework.web.reactive.result.method.annotation; <ide> <ide> import java.time.Duration; <add>import java.util.Arrays; <ide> import java.util.Collections; <ide> import java.util.List; <ide> <ide> * @author Rossen Stoyanchev <ide> * @author Ilya Lukyanovich <ide> */ <del>public class RequestPartMethodArgumentResolverTests { <add>class RequestPartMethodArgumentResolverTests { <ide> <ide> private RequestPartMethodArgumentResolver resolver; <ide> <del> private ResolvableMethod testMethod = ResolvableMethod.on(getClass()).named("handle").build(); <add> private final ResolvableMethod testMethod = ResolvableMethod.on(getClass()).named("handle").build(); <ide> <ide> private MultipartHttpMessageWriter writer; <ide> <ide> <ide> @BeforeEach <del> public void setup() throws Exception { <add> void setup() { <ide> List<HttpMessageReader<?>> readers = ServerCodecConfigurer.create().getReaders(); <ide> ReactiveAdapterRegistry registry = ReactiveAdapterRegistry.getSharedInstance(); <ide> this.resolver = new RequestPartMethodArgumentResolver(readers, registry); <ide> public void setup() throws Exception { <ide> <ide> <ide> @Test <del> public void supportsParameter() { <add> void supportsParameter() { <ide> <ide> MethodParameter param; <ide> <ide> public void supportsParameter() { <ide> <ide> <ide> @Test <del> public void person() { <add> void person() { <ide> MethodParameter param = this.testMethod.annot(requestPart()).arg(Person.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <ide> bodyBuilder.part("name", new Person("Jones")); <ide> public void person() { <ide> } <ide> <ide> @Test <del> public void listPerson() { <add> void listPerson() { <ide> MethodParameter param = this.testMethod.annot(requestPart()).arg(List.class, Person.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <del> bodyBuilder.part("name", new Person("Jones")); <del> bodyBuilder.part("name", new Person("James")); <add> bodyBuilder.part("name", Arrays.asList(new Person("Jones"), new Person("James"))); <ide> List<Person> actual = resolveArgument(param, bodyBuilder); <ide> <ide> assertThat(actual.get(0).getName()).isEqualTo("Jones"); <ide> assertThat(actual.get(1).getName()).isEqualTo("James"); <ide> } <ide> <ide> @Test // gh-23060 <del> public void listPersonNotRequired() { <add> void listPersonNotRequired() { <ide> MethodParameter param = this.testMethod.annot(requestPart().notRequired()).arg(List.class, Person.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <ide> List<Person> actual = resolveArgument(param, bodyBuilder); <ide> public void listPersonNotRequired() { <ide> } <ide> <ide> @Test <del> public void monoPerson() { <add> void monoPerson() { <ide> MethodParameter param = this.testMethod.annot(requestPart()).arg(Mono.class, Person.class); <ide> MultipartBodyBuilder bodyBuilder = new 
MultipartBodyBuilder(); <ide> bodyBuilder.part("name", new Person("Jones")); <ide> public void monoPerson() { <ide> } <ide> <ide> @Test // gh-23060 <del> public void monoPersonNotRequired() { <add> void monoPersonNotRequired() { <ide> MethodParameter param = this.testMethod.annot(requestPart().notRequired()).arg(Mono.class, Person.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <ide> Mono<Person> actual = resolveArgument(param, bodyBuilder); <ide> public void monoPersonNotRequired() { <ide> } <ide> <ide> @Test <del> public void fluxPerson() { <add> void fluxPerson() { <ide> MethodParameter param = this.testMethod.annot(requestPart()).arg(Flux.class, Person.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <ide> bodyBuilder.part("name", new Person("Jones")); <ide> public void fluxPerson() { <ide> } <ide> <ide> @Test // gh-23060 <del> public void fluxPersonNotRequired() { <add> void fluxPersonNotRequired() { <ide> MethodParameter param = this.testMethod.annot(requestPart().notRequired()).arg(Flux.class, Person.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <ide> Flux<Person> actual = resolveArgument(param, bodyBuilder); <ide> public void fluxPersonNotRequired() { <ide> } <ide> <ide> @Test <del> public void part() { <add> void part() { <ide> MethodParameter param = this.testMethod.annot(requestPart()).arg(Part.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <ide> bodyBuilder.part("name", new Person("Jones")); <ide> public void part() { <ide> } <ide> <ide> @Test <del> public void listPart() { <add> void listPart() { <ide> MethodParameter param = this.testMethod.annot(requestPart()).arg(List.class, Part.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <ide> bodyBuilder.part("name", new Person("Jones")); <ide> public void listPart() { <ide> } <ide> <ide> @Test // gh-23060 <del> public void listPartNotRequired() { <add> void listPartNotRequired() { <ide> MethodParameter param = this.testMethod.annot(requestPart().notRequired()).arg(List.class, Part.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <ide> List<Part> actual = resolveArgument(param, bodyBuilder); <ide> public void listPartNotRequired() { <ide> } <ide> <ide> @Test <del> public void monoPart() { <add> void monoPart() { <ide> MethodParameter param = this.testMethod.annot(requestPart()).arg(Mono.class, Part.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <ide> bodyBuilder.part("name", new Person("Jones")); <ide> public void monoPart() { <ide> } <ide> <ide> @Test // gh-23060 <del> public void monoPartNotRequired() { <add> void monoPartNotRequired() { <ide> MethodParameter param = this.testMethod.annot(requestPart().notRequired()).arg(Mono.class, Part.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <ide> Mono<Part> actual = resolveArgument(param, bodyBuilder); <ide> public void monoPartNotRequired() { <ide> } <ide> <ide> @Test <del> public void fluxPart() { <add> void fluxPart() { <ide> MethodParameter param = this.testMethod.annot(requestPart()).arg(Flux.class, Part.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <ide> bodyBuilder.part("name", new Person("Jones")); <ide> public void fluxPart() { <ide> } <ide> <ide> @Test // gh-23060 <del> public void fluxPartNotRequired() { <add> void fluxPartNotRequired() { <ide> MethodParameter param = 
this.testMethod.annot(requestPart().notRequired()).arg(Flux.class, Part.class); <ide> MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder(); <ide> Flux<Part> actual = resolveArgument(param, bodyBuilder); <ide> public void fluxPartNotRequired() { <ide> } <ide> <ide> @Test <del> public void personRequired() { <add> void personRequired() { <ide> MethodParameter param = this.testMethod.annot(requestPart()).arg(Person.class); <ide> ServerWebExchange exchange = createExchange(new MultipartBodyBuilder()); <ide> Mono<Object> result = this.resolver.resolveArgument(param, new BindingContext(), exchange); <ide> public void personRequired() { <ide> } <ide> <ide> @Test <del> public void personNotRequired() { <add> void personNotRequired() { <ide> MethodParameter param = this.testMethod.annot(requestPart().notRequired()).arg(Person.class); <ide> ServerWebExchange exchange = createExchange(new MultipartBodyBuilder()); <ide> Mono<Object> result = this.resolver.resolveArgument(param, new BindingContext(), exchange); <ide> public void personNotRequired() { <ide> } <ide> <ide> @Test <del> public void partRequired() { <add> void partRequired() { <ide> MethodParameter param = this.testMethod.annot(requestPart()).arg(Part.class); <ide> ServerWebExchange exchange = createExchange(new MultipartBodyBuilder()); <ide> Mono<Object> result = this.resolver.resolveArgument(param, new BindingContext(), exchange); <ide> public void partRequired() { <ide> } <ide> <ide> @Test <del> public void partNotRequired() { <add> void partNotRequired() { <ide> MethodParameter param = this.testMethod.annot(requestPart().notRequired()).arg(Part.class); <ide> ServerWebExchange exchange = createExchange(new MultipartBodyBuilder()); <ide> Mono<Object> result = this.resolver.resolveArgument(param, new BindingContext(), exchange); <ide> private ServerWebExchange createExchange(MultipartBodyBuilder builder) { <ide> this.writer.write(Mono.just(builder.build()), forClass(MultiValueMap.class), <ide> MediaType.MULTIPART_FORM_DATA, clientRequest, Collections.emptyMap()).block(); <ide> <del> MockServerHttpRequest serverRequest = MockServerHttpRequest.post("/") <del> .contentType(clientRequest.getHeaders().getContentType()) <del> .body(clientRequest.getBody()); <add> MediaType contentType = clientRequest.getHeaders().getContentType(); <add> Flux<DataBuffer> body = clientRequest.getBody(); <add> MockServerHttpRequest serverRequest = MockServerHttpRequest.post("/").contentType(contentType).body(body); <ide> <ide> return MockServerWebExchange.from(serverRequest); <ide> } <ide> <ide> private String partToUtf8String(Part part) { <del> DataBuffer buffer = DataBufferUtils.join(part.content()).block(); <del> return buffer.toString(UTF_8); <add> return DataBufferUtils.join(part.content()).block().toString(UTF_8); <ide> } <ide> <ide> <ide> void handle( <ide> <ide> private static class Person { <ide> <del> private String name; <add> private final String name; <ide> <ide> @JsonCreator <ide> public Person(@JsonProperty("name") String name) {
2
Java
Java
add missing space in aopconfigexception message
e7d489667c06b76a8fdb248259eba6f1b7f31062
<ide><path>spring-aop/src/main/java/org/springframework/aop/framework/ProxyFactoryBean.java <ide> private Advisor namedBeanToAdvisor(Object next) { <ide> // We expected this to be an Advisor or Advice, <ide> // but it wasn't. This is a configuration error. <ide> throw new AopConfigException("Unknown advisor type " + next.getClass() + <del> "; Can only include Advisor or Advice type beans in interceptorNames chain except for last entry," + <add> "; Can only include Advisor or Advice type beans in interceptorNames chain except for last entry, " + <ide> "which may also be target or TargetSource", ex); <ide> } <ide> }
1
Go
Go
keep old network ids
e017717d96540dd263d95f90fdb2457928909924
<ide><path>daemon/daemon_windows.go <ide> func (daemon *Daemon) initNetworkController(config *config.Config, activeSandbox <ide> controller.WalkNetworks(s) <ide> <ide> drvOptions := make(map[string]string) <del> <add> nid := "" <ide> if n != nil { <add> nid = n.ID() <add> <ide> // global networks should not be deleted by local HNS <ide> if n.Info().Scope() == datastore.GlobalScope { <ide> continue <ide> func (daemon *Daemon) initNetworkController(config *config.Config, activeSandbox <ide> } <ide> <ide> v6Conf := []*libnetwork.IpamConf{} <del> _, err := controller.NewNetwork(strings.ToLower(v.Type), name, "", <add> _, err := controller.NewNetwork(strings.ToLower(v.Type), name, nid, <ide> libnetwork.NetworkOptionGeneric(options.Generic{ <ide> netlabel.GenericData: netOption, <ide> }),
1
Javascript
Javascript
fix import order
d9d2983225f388cba4cb88b462dd81ee45fce66e
<ide><path>lib/WebpackOptionsDefaulter.js <ide> "use strict"; <ide> <ide> const findCacheDir = require("find-cache-dir"); <del>const path = require("path"); <ide> const os = require("os"); <add>const path = require("path"); <ide> const OptionsDefaulter = require("./OptionsDefaulter"); <ide> const Template = require("./Template"); <ide>
1
Text
Text
add the docstring style guide
2e88fa83600ac690dc38fb1d9b1ebf329e3da373
<ide><path>CONTRIBUTING.md <ide> need to follow the output of the command to resolve them manually. <ide> If you do not want to auto format the code but only show the lint errors, you <ide> can run `sh shell/lint.sh` **at the root directory of the repo**. <ide> <add>### Docstrings <add> <add>We do not have an automated way to check docstring style, so if you write <add>or edit any docstrings, please make sure to check them manually. <add>Keras docstrings follow the conventions below: <add> <add>A **class docstring** may contain the following items: <add> <add>* One-line description of the class. <add>* Paragraph(s) of more detailed information. <add>* Optional `Examples` section. <add>* `Args` section for arguments in `__init__()`. <add>* If it's a layer: <add> * `Call arguments` section for arguments in `Layer.call()`. <add> * `Returns` section for the return values of `Layer.call()`. <add> * Optional `Raises` section for possible errors. <add> <add>You can check out `MultiHeadAttention` as an example <add>[(link)](https://github.com/keras-team/keras/blob/v2.10.0/keras/layers/attention/multi_head_attention.py#L130). <add> <add>A **function docstring** may contain the following items: <add> <add>* One-line description of the function. <add>* Paragraph(s) of more detailed information. <add>* Optional `Examples` section. <add>* `Args` section for the function arguments. <add>* `Returns` section for the return values. <add>* Optional `Raises` section for possible errors. <add> <add>You can check out `text_dataset_from_directory` as an example <add>[(link)](https://github.com/keras-team/keras/blob/v2.10.0/keras/utils/text_dataset.py#L26). <add> <add> <ide> ## Run tests <ide> <ide> We use [Bazel](https://bazel.build/) to build and run the tests.
1
Text
Text
improve sentence structure. closes
1a7f622edf1dbf2ba40ebe748756e303913d3044
<ide><path>README.md <ide> This is how we use the donations: <ide> <ide> <h2 align="center">Other Backers and Sponsors</h2> <ide> <del>We had other sources of donations before starting to use OpenCollective (or people that can't donate via OpenCollective). We want to acknowledge these sponsors and backers, but donations were not public and we are not sure of donors want to stay anonymous. So if you want to be in this list, just send a PR. <add>Before we started using OpenCollective, donations were made anonymously. Now that we have made the switch, we would like to acknowledge these sponsors (and the ones who continue to donate using OpenCollective). If we've missed someone, please send us a PR, and we'll add you to this list. <ide> <ide> [Google Angular Team](https://angular.io/), [Architects.io](http://architects.io/), <ide> <a href="https://moonmail.io" target="_blank" title="Email Marketing Software"><img
1
Javascript
Javascript
fix typo in lib/buffer.js
4489a48dff3f44ee570a9dde6bb2428bd810b03d
<ide><path>lib/buffer.js <ide> Buffer.prototype.compare = function compare(target, <ide> // - buffer - a Buffer to search <ide> // - val - a string, Buffer, or number <ide> // - byteOffset - an index into `buffer`; will be clamped to an int32 <del>// - encoding - an optional encoding, relevant is val is a string <add>// - encoding - an optional encoding, relevant if val is a string <ide> // - dir - true for indexOf, false for lastIndexOf <ide> function bidirectionalIndexOf(buffer, val, byteOffset, encoding, dir) { <ide> if (typeof byteOffset === 'string') {
1
Python
Python
remove mutable state from axisconcatenator
37d756c46424b2da04a87e7df45a6c64f9b50117
<ide><path>numpy/lib/index_tricks.py <ide> from numpy.core.numerictypes import find_common_type, issubdtype <ide> <ide> from . import function_base <del>import numpy.matrixlib as matrix <add>import numpy.matrixlib as matrixlib <ide> from .function_base import diff <ide> from numpy.core.multiarray import ravel_multi_index, unravel_index <ide> from numpy.lib.stride_tricks import as_strided <ide> <del>makemat = matrix.matrix <add>makemat = matrixlib.matrix <ide> <ide> <ide> __all__ = [ <ide> class AxisConcatenator(object): <ide> Translates slice objects to concatenation along an axis. <ide> <ide> For detailed documentation on usage, see `r_`. <del> <ide> """ <ide> # allow ma.mr_ to override this <ide> concatenate = staticmethod(_nx.concatenate) <del> <del> def _retval(self, res): <del> if self.matrix: <del> oldndim = res.ndim <del> res = makemat(res) <del> if oldndim == 1 and self.col: <del> res = res.T <del> self.axis = self._axis <del> self.matrix = self._matrix <del> self.col = 0 <del> return res <del> <ide> def __init__(self, axis=0, matrix=False, ndmin=1, trans1d=-1): <del> self._axis = axis <del> self._matrix = matrix <ide> self.axis = axis <ide> self.matrix = matrix <del> self.col = 0 <ide> self.trans1d = trans1d <ide> self.ndmin = ndmin <ide> <ide> def __getitem__(self, key): <del> trans1d = self.trans1d <del> ndmin = self.ndmin <add> # handle matrix builder syntax <ide> if isinstance(key, str): <ide> frame = sys._getframe().f_back <del> mymat = matrix.bmat(key, frame.f_globals, frame.f_locals) <add> mymat = matrixlib.bmat(key, frame.f_globals, frame.f_locals) <ide> return mymat <add> <ide> if not isinstance(key, tuple): <ide> key = (key,) <add> <add> # copy attributes, since they can be overriden in the first argument <add> trans1d = self.trans1d <add> ndmin = self.ndmin <add> matrix = self.matrix <add> axis = self.axis <add> <ide> objs = [] <ide> scalars = [] <ide> arraytypes = [] <ide> scalartypes = [] <add> <ide> for k in range(len(key)): <ide> scalar = False <ide> if isinstance(key[k], slice): <ide> def __getitem__(self, key): <ide> "first entry.") <ide> key0 = key[0] <ide> if key0 in 'rc': <del> self.matrix = True <del> self.col = (key0 == 'c') <add> matrix = True <add> col = (key0 == 'c') <ide> continue <ide> if ',' in key0: <ide> vec = key0.split(',') <ide> try: <del> self.axis, ndmin = \ <del> [int(x) for x in vec[:2]] <add> axis, ndmin = [int(x) for x in vec[:2]] <ide> if len(vec) == 3: <ide> trans1d = int(vec[2]) <ide> continue <ide> except: <ide> raise ValueError("unknown special directive") <ide> try: <del> self.axis = int(key[k]) <add> axis = int(key[k]) <ide> continue <ide> except (ValueError, TypeError): <ide> raise ValueError("unknown special directive") <ide> def __getitem__(self, key): <ide> if not scalar and isinstance(newobj, _nx.ndarray): <ide> arraytypes.append(newobj.dtype) <ide> <del> # Esure that scalars won't up-cast unless warranted <add> # Ensure that scalars won't up-cast unless warranted <ide> final_dtype = find_common_type(arraytypes, scalartypes) <ide> if final_dtype is not None: <ide> for k in scalars: <ide> objs[k] = objs[k].astype(final_dtype) <ide> <del> res = self.concatenate(tuple(objs), axis=self.axis) <del> return self._retval(res) <add> res = self.concatenate(tuple(objs), axis=axis) <add> <add> if matrix: <add> oldndim = res.ndim <add> res = makemat(res) <add> if oldndim == 1 and col: <add> res = res.T <add> return res <ide> <ide> def __len__(self): <ide> return 0 <ide><path>numpy/lib/tests/test_index_tricks.py <ide> def test_2d(self): <ide> 
assert_array_equal(d[:5, :], b) <ide> assert_array_equal(d[5:, :], c) <ide> <add> def test_matrix_builder(self): <add> a = np.array([1]) <add> b = np.array([2]) <add> c = np.array([3]) <add> d = np.array([4]) <add> actual = np.r_['a, b; c, d'] <add> expected = np.bmat([[a, b], [c, d]]) <add> <add> assert_equal(actual, expected) <add> assert_equal(type(actual), type(expected)) <add> <ide> <ide> class TestNdenumerate(TestCase): <ide> def test_basic(self):
2
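The numpy patch above reworks AxisConcatenator so that per-call state (axis, matrix flag, column flag) is read from local variables instead of being mutated on the shared np.r_/np.c_ instance. A minimal sketch of the directive syntax that class interprets, using only documented numpy behaviour (illustrative, not part of the recorded patch):

import numpy as np

# '0,2' directive: concatenate along axis 0 and promote each piece to 2-D.
stacked = np.r_['0,2', [1, 2, 3], [4, 5, 6]]                 # shape (2, 3)

# np.c_ stacks 1-D inputs as columns.
columns = np.c_[np.array([1, 2, 3]), np.array([4, 5, 6])]    # shape (3, 2)

# 'r' and 'c' directives return np.matrix objects rather than plain ndarrays.
row_mat = np.r_['r', [1, 2, 3]]                              # matrix([[1, 2, 3]])

print(stacked.shape, columns.shape, type(row_mat))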
Ruby
Ruby
add test for casks
cf6ff4d84ce30d79b2247d95daff96f01b917793
<ide><path>Library/Homebrew/test/cmd/--cache_spec.rb <ide> .and not_to_output.to_stderr <ide> .and be_a_success <ide> end <add> <add> it "prints the cache files for a given Cask" do <add> expect { brew "--cache", cask_path("local-caffeine") } <add> .to output(%r{cask: #{HOMEBREW_CACHE}/downloads/[\da-f]{64}--caffeine\.zip}).to_stdout <add> .and not_to_output.to_stderr <add> .and be_a_success <add> end <add> <add> it "prints the cache files for a given Formula and Cask" do <add> expect { brew "--cache", testball, cask_path("local-caffeine") } <add> .to output(%r{#{HOMEBREW_CACHE}/downloads/[\da-f]{64}--testball-.*\ncask: #{HOMEBREW_CACHE}/downloads/[\da-f]{64}--caffeine\.zip}).to_stdout <add> .and not_to_output.to_stderr <add> .and be_a_success <add> end <ide> end
1
Javascript
Javascript
drain messages from internal message port
c61327d37673ab2ea454d14c36329b113aaca32a
<ide><path>lib/internal/worker.js <ide> class Worker extends EventEmitter { <ide> [kOnExit](code) { <ide> debug(`[${threadId}] hears end event for Worker ${this.threadId}`); <ide> MessagePortPrototype.drain.call(this[kPublicPort]); <add> MessagePortPrototype.drain.call(this[kPort]); <ide> this[kDispose](); <ide> this.emit('exit', code); <ide> this.removeAllListeners(); <ide><path>test/parallel/test-worker-message-port-drain.js <add>// Flags: --experimental-worker <add>'use strict'; <add>require('../common'); <add> <add>// This test ensures that the messages from the internal <add>// message port are drained before the call to 'kDispose', <add>// and so all the stdio messages from the worker are processed <add>// in the parent and are pushed to their target streams. <add> <add>const assert = require('assert'); <add>const { <add> Worker, <add> isMainThread, <add> parentPort, <add> threadId, <add>} = require('worker_threads'); <add> <add>if (isMainThread) { <add> const workerIdsToOutput = new Map(); <add> <add> for (let i = 0; i < 2; i++) { <add> const worker = new Worker(__filename, { stdout: true }); <add> const workerOutput = []; <add> workerIdsToOutput.set(worker.threadId, workerOutput); <add> worker.on('message', console.log); <add> worker.stdout.on('data', (chunk) => { <add> workerOutput.push(chunk.toString().trim()); <add> }); <add> } <add> <add> process.on('exit', () => { <add> for (const [threadId, workerOutput] of workerIdsToOutput) { <add> assert.ok(workerOutput.includes(`1 threadId: ${threadId}`)); <add> assert.ok(workerOutput.includes(`2 threadId: ${threadId}`)); <add> } <add> }); <add>} else { <add> console.log(`1 threadId: ${threadId}`); <add> console.log(`2 threadId: ${threadId}`); <add> parentPort.postMessage(Array(100).fill(1)); <add>}
2
Text
Text
add v3.11.1 to changelog
d97d74ddff1a2407c5012777cfe185f85db0495d
<ide><path>CHANGELOG.md <ide> - [#18150](https://github.com/emberjs/ember.js/pull/18150) [BUGFIX] Fix a memory retention issue with string-based event listeners <ide> - [#18124](https://github.com/emberjs/ember.js/pull/18124) [CLEANUP] Remove deprecated `NAME_KEY` <ide> <add>### v3.11.1 (June 27, 2019) <add> <add>- [#18159](https://github.com/emberjs/ember.js/pull/18159) Ensure `RouteInfo` object's do not eagerly cache routes in lazy Engines <add>- [#18150](https://github.com/emberjs/ember.js/pull/18150) Ensure string based event listeners that are removed are not retained <add> <ide> ### v3.11.0 (June 24, 2019) <ide> <ide> - [#17842](https://github.com/emberjs/ember.js/pull/17842) / [#17901](https://github.com/emberjs/ember.js/pull/17901) [FEATURE] Implement the [Forwarding Element Modifiers with "Splattributes" RFC](https://github.com/emberjs/rfcs/blob/master/text/0435-modifier-splattributes.md).
1
Javascript
Javascript
add config to enable sync scheduling by default
78aef38bfa6865b409b30ce143c50635f93b3cc3
<ide><path>src/renderers/shared/fiber/ReactFiberReconciler.js <ide> export type HostConfig<T, P, I, TI, C> = { <ide> removeChild(parentInstance : I, child : I | TI) : void, <ide> <ide> scheduleAnimationCallback(callback : () => void) : void, <del> scheduleDeferredCallback(callback : (deadline : Deadline) => void) : void <add> scheduleDeferredCallback(callback : (deadline : Deadline) => void) : void, <ide> <add> useSyncScheduling ?: boolean, <ide> }; <ide> <ide> type OpaqueNode = Fiber; <ide><path>src/renderers/shared/fiber/ReactFiberScheduler.js <ide> module.exports = function<T, P, I, TI, C>(config : HostConfig<T, P, I, TI, C>) { <ide> <ide> const scheduleAnimationCallback = config.scheduleAnimationCallback; <ide> const scheduleDeferredCallback = config.scheduleDeferredCallback; <add> const useSyncScheduling = config.useSyncScheduling; <ide> <ide> // The priority level to use when scheduling an update. <ide> let priorityContext : (PriorityLevel | null) = null; <ide> // The priority level to use if there is no priority context. <del> let defaultPriorityContext : PriorityLevel = LowPriority; <add> let defaultPriorityContext : PriorityLevel = useSyncScheduling ? <add> SynchronousPriority : <add> LowPriority; <ide> <ide> // The next work in progress fiber that we're currently working on. <ide> let nextUnitOfWork : ?Fiber = null;
2
Javascript
Javascript
fix csp errors in with-strict-csp-app example
42c309ad3771a4c77acb4a2dca539d8935eff51f
<ide><path>examples/with-strict-csp/pages/_document.js <ide> const cspHashOf = (text) => { <ide> hash.update(text) <ide> return `'sha256-${hash.digest('base64')}'` <ide> } <del> <ide> export default class MyDocument extends Document { <ide> render() { <del> const csp = `default-src 'self'; script-src 'self' ${cspHashOf( <add> let csp = `default-src 'self'; script-src 'self' ${cspHashOf( <ide> NextScript.getInlineScriptSource(this.props) <ide> )}` <add> if (process.env.NODE_ENV !== 'production') { <add> csp = `style-src 'self' 'unsafe-inline'; font-src 'self' data:; default-src 'self'; script-src 'unsafe-eval' 'self' ${cspHashOf( <add> NextScript.getInlineScriptSource(this.props) <add> )}` <add> } <ide> <ide> return ( <ide> <html>
1
Python
Python
remove unused variables
d79703f39f1273e2c5ec2bc6732226634fcac49a
<ide><path>glances/exports/glances_csv.py <ide> def update(self, stats): <ide> for item in all_stats[i]: <ide> # First line: header <ide> if self.first_line: <del> fieldnames = item.keys() <ide> csv_header += map(lambda x: plugin + '_' + item[item['key']] + '_' + x, item) <ide> # Others lines: stats <ide> fieldvalues = item.values() <ide><path>glances/exports/glances_influxdb.py <ide> def load_conf(self, section="influxdb"): <ide> # Prefix is optional <ide> try: <ide> self.prefix = self.config.get_value(section, 'prefix') <del> except NoOptionError as e: <add> except NoOptionError: <ide> pass <ide> return True <ide> <ide><path>glances/exports/glances_statsd.py <ide> def load_conf(self, section="statsd"): <ide> # Prefix is optional <ide> try: <ide> self.prefix = self.config.get_value(section, 'prefix') <del> except NoOptionError as e: <add> except NoOptionError: <ide> pass <ide> return True <ide> <ide><path>glances/outputs/glances_curses.py <ide> def display(self, servers_list): <ide> try: <ide> if c[0] == 'alias' and v[c[0]] is not None: <ide> server_stat['name'] = v[c[0]] <del> except KeyError as e: <add> except KeyError: <ide> pass <ide> <ide> # Display line for server stats <ide><path>glances/plugins/glances_plugin.py <ide> def load_limits(self, config): <ide> """Load limits from the configuration file, if it exists.""" <ide> if (hasattr(config, 'has_section') and <ide> config.has_section(self.plugin_name)): <del> for level, v in config.items(self.plugin_name): <add> for level, _ in config.items(self.plugin_name): <ide> # Read limits <ide> limit = '_'.join([self.plugin_name, level]) <ide> try:
5
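The glances patch above removes bindings that are never read: exception handlers drop the "as e" clause and an unused loop target becomes "_". A small stand-alone sketch of the same two patterns, unrelated to the glances codebase:

from configparser import ConfigParser, NoOptionError

parser = ConfigParser()
parser.read_string("[influxdb]\nhost = localhost\n")

try:
    prefix = parser.get("influxdb", "prefix")
except NoOptionError:        # no "as e" -- the exception object is not used
    prefix = None

# "_" marks a loop target whose value is intentionally ignored.
for key, _ in parser.items("influxdb"):
    print(key)

print(prefix)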
Ruby
Ruby
remove more codes
e66bf6f5e3085014976ff8fa3981813b77510b69
<ide><path>activerecord/lib/active_record/association_preload.rb <ide> def preload_belongs_to_association(records, reflection, preload_options={}) <ide> options = reflection.options <ide> primary_key_name = reflection.primary_key_name <ide> <add> klasses_and_ids = {} <add> <ide> if options[:polymorphic] <ide> polymorph_type = options[:foreign_type] <del> klasses_and_ids = {} <ide> <ide> # Construct a mapping from klass to a list of ids to load and a mapping of those ids back <ide> # to their parent_records <ide> def preload_belongs_to_association(records, reflection, preload_options={}) <ide> klass_id = record.send(primary_key_name) <ide> if klass_id <ide> id_map = klasses_and_ids[klass] ||= {} <del> id_list_for_klass_id = (id_map[klass_id.to_s] ||= []) <del> id_list_for_klass_id << record <add> (id_map[klass_id.to_s] ||= []) << record <ide> end <ide> end <ide> end <del> klasses_and_ids = klasses_and_ids.to_a <ide> else <ide> id_map = {} <ide> records.each do |record| <ide> key = record.send(primary_key_name) <ide> (id_map[key.to_s] ||= []) << record if key <ide> end <del> klasses_and_ids = [[reflection.klass.name, id_map]] <add> klasses_and_ids[reflection.klass.name] = id_map unless id_map.empty? <ide> end <ide> <ide> klasses_and_ids.each do |klass_name, id_map| <del> next if id_map.empty? <ide> klass = klass_name.constantize <ide> <ide> table_name = klass.quoted_table_name
1
PHP
PHP
remove some customization
e271b486d86f4b7c428371a800e64050692fd606
<ide><path>src/Illuminate/Auth/AuthManager.php <ide> public function createTokenDriver($name, $config) <ide> $guard = new TokenGuard( <ide> $this->createUserProvider($config['source']), <ide> $this->app['request'], <del> Arr::get($config, 'input', 'api_token'), <del> Arr::get($config, 'token', 'api_token') <ide> ); <ide> <ide> $this->app->refresh('request', $guard, 'setRequest'); <ide><path>src/Illuminate/Auth/TokenGuard.php <ide> class TokenGuard implements Guard <ide> * <ide> * @param \Illuminate\Contracts\Auth\UserProvider $provider <ide> * @param \Symfony\Component\HttpFoundation\Request $request <del> * @param string $inputKey <del> * @param string $storageKey <ide> * @return void <ide> */ <ide> public function __construct(UserProvider $provider, <del> Request $request, <del> $inputKey = 'api_token', <del> $storageKey = 'api_token') <add> Request $request) <ide> { <ide> $this->request = $request; <ide> $this->provider = $provider; <del> $this->inputKey = $inputKey; <del> $this->storageKey = $storageKey; <add> $this->inputKey = 'api_token'; <add> $this->storageKey = 'api_token'; <ide> } <ide> <ide> /**
2
Text
Text
modify unscoped usage guide to include chaining
b20ef7e92f718cf00da9c0e3a4914fc949e2fe72
<ide><path>guides/source/active_record_querying.md <ide> Client.unscoped.load <ide> <ide> This method removes all scoping and will do a normal query on the table. <ide> <del>Note that chaining `unscoped` with a `scope` does not work. In these cases, it is <del>recommended that you use the block form of `unscoped`: <add>```ruby <add>Client.unscoped.all <add># SELECT "clients".* FROM "clients" <add> <add>Client.where(published: false).unscoped.all <add># SELECT "clients".* FROM "clients" <add>``` <add> <add>`unscoped` can also accept a block. <ide> <ide> ```ruby <ide> Client.unscoped {
1
Text
Text
add pretrained checkpoints and tfhub links
61aee86e4cc2a7c5aac82c01bef0173dda81a0aa
<ide><path>official/vision/image_classification/README.md <ide> uses a ResNet50 model implemented in [`resnet_model.py`](./resnet_model.py). <ide> To use <ide> either dataset, make sure that you have the latest version of TensorFlow <ide> installed and <del>[add the models folder to your Python path](/official/#running-the-models), <del>otherwise you may encounter an error like `ImportError: No module named <del>official.resnet`. <add>[add the models folder to your Python path](/official/#running-the-models). <add> <add>### Pretrained Models <add> <add>* [ResNet50 Checkpoints](https://storage.googleapis.com/cloud-tpu-checkpoints/resnet/resnet50.tar.gz) <add> <add>* ResNet50 TFHub: [feature vector](https://tfhub.dev/tensorflow/resnet_50/feature_vector/1) <add>and [classification](https://tfhub.dev/tensorflow/resnet_50/classification/1) <ide> <ide> ### CIFAR-10 <ide> <ide> location with the `--data_dir` flag, like: <ide> python resnet_cifar_main.py --data_dir=/path/to/cifar <ide> ``` <ide> <del>### ImageNet <add>### ImageNet Training <ide> <ide> Download the ImageNet dataset and convert it to TFRecord format. <ide> The following [script](https://github.com/tensorflow/tpu/blob/master/tools/datasets/imagenet_to_gcs.py)
1
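The README change above links ResNet-50 TF Hub modules. A minimal sketch of consuming the feature-vector module as a Keras layer; it assumes tensorflow and tensorflow_hub are installed, and the 224x224 RGB input size and 10-class head are illustrative assumptions, not taken from the patch:

import tensorflow as tf
import tensorflow_hub as hub

# URL from the README above; input shape is an assumed, typical ImageNet size.
features = hub.KerasLayer(
    "https://tfhub.dev/tensorflow/resnet_50/feature_vector/1",
    trainable=False,
)

model = tf.keras.Sequential([
    tf.keras.layers.InputLayer(input_shape=(224, 224, 3)),
    features,                                   # pooled feature vector
    tf.keras.layers.Dense(10, activation="softmax"),
])

model.compile(optimizer="adam", loss="sparse_categorical_crossentropy")
model.summary()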
Java
Java
use list.of() and set.of() where feasible
720261db26ea917cd979bbac2cd7db1ff70928ee
<ide><path>spring-beans/src/main/java/org/springframework/beans/factory/config/YamlProcessor.java <ide> /* <del> * Copyright 2002-2021 the original author or authors. <add> * Copyright 2002-2022 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> public abstract class YamlProcessor { <ide> * </pre> <ide> */ <ide> public void setDocumentMatchers(DocumentMatcher... matchers) { <del> this.documentMatchers = Arrays.asList(matchers); <add> this.documentMatchers = List.of(matchers); <ide> } <ide> <ide> /** <ide><path>spring-beans/src/main/java/org/springframework/beans/factory/groovy/GroovyBeanDefinitionReader.java <ide> else if (constructorArgs[i] instanceof Map<?, ?> map){ <ide> constructorArgs[i] = manageMapIfNecessary(map); <ide> } <ide> } <del> return Arrays.asList(constructorArgs); <add> return List.of(constructorArgs); <ide> } <ide> <ide> /** <ide><path>spring-context/src/main/java/org/springframework/cache/annotation/EnableCaching.java <ide> /* <del> * Copyright 2002-2021 the original author or authors. <add> * Copyright 2002-2022 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> * public CacheManager cacheManager() { <ide> * // configure and return an implementation of Spring's CacheManager SPI <ide> * SimpleCacheManager cacheManager = new SimpleCacheManager(); <del> * cacheManager.setCaches(Arrays.asList(new ConcurrentMapCache("default"))); <add> * cacheManager.setCaches(Set.of(new ConcurrentMapCache("default"))); <ide> * return cacheManager; <ide> * } <ide> * }</pre> <ide> * public CacheManager cacheManager() { <ide> * // configure and return an implementation of Spring's CacheManager SPI <ide> * SimpleCacheManager cacheManager = new SimpleCacheManager(); <del> * cacheManager.setCaches(Arrays.asList(new ConcurrentMapCache("default"))); <add> * cacheManager.setCaches(Set.of(new ConcurrentMapCache("default"))); <ide> * return cacheManager; <ide> * } <ide> * <ide><path>spring-context/src/main/java/org/springframework/cache/interceptor/NamedCacheResolver.java <ide> /* <del> * Copyright 2002-2017 the original author or authors. <add> * Copyright 2002-2022 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> <ide> package org.springframework.cache.interceptor; <ide> <del>import java.util.ArrayList; <del>import java.util.Arrays; <ide> import java.util.Collection; <add>import java.util.List; <ide> <ide> import org.springframework.cache.CacheManager; <ide> import org.springframework.lang.Nullable; <ide> public NamedCacheResolver() { <ide> <ide> public NamedCacheResolver(CacheManager cacheManager, String... cacheNames) { <ide> super(cacheManager); <del> this.cacheNames = new ArrayList<>(Arrays.asList(cacheNames)); <add> this.cacheNames = List.of(cacheNames); <ide> } <ide> <ide> <ide><path>spring-context/src/main/java/org/springframework/scheduling/annotation/ScheduledAnnotationBeanPostProcessor.java <ide> /* <del> * Copyright 2002-2021 the original author or authors. <add> * Copyright 2002-2022 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. 
<ide> import java.lang.reflect.Method; <ide> import java.time.Duration; <ide> import java.util.ArrayList; <del>import java.util.Arrays; <ide> import java.util.Collection; <ide> import java.util.Collections; <ide> import java.util.IdentityHashMap; <ide> public Object postProcessAfterInitialization(Object bean, String beanName) { <ide> <ide> Class<?> targetClass = AopProxyUtils.ultimateTargetClass(bean); <ide> if (!this.nonAnnotatedClasses.contains(targetClass) && <del> AnnotationUtils.isCandidateClass(targetClass, Arrays.asList(Scheduled.class, Schedules.class))) { <add> AnnotationUtils.isCandidateClass(targetClass, List.of(Scheduled.class, Schedules.class))) { <ide> Map<Method, Set<Scheduled>> annotatedMethods = MethodIntrospector.selectMethods(targetClass, <ide> (MethodIntrospector.MetadataLookup<Set<Scheduled>>) method -> { <ide> Set<Scheduled> scheduledAnnotations = AnnotatedElementUtils.getMergedRepeatableAnnotations( <ide><path>spring-core/src/main/java/org/springframework/core/codec/StringDecoder.java <ide> /* <del> * Copyright 2002-2021 the original author or authors. <add> * Copyright 2002-2022 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> import java.nio.charset.Charset; <ide> import java.nio.charset.StandardCharsets; <ide> import java.util.ArrayList; <del>import java.util.Arrays; <ide> import java.util.Collection; <ide> import java.util.Collections; <ide> import java.util.List; <ide> public final class StringDecoder extends AbstractDataBufferDecoder<String> { <ide> public static final Charset DEFAULT_CHARSET = StandardCharsets.UTF_8; <ide> <ide> /** The default delimiter strings to use, i.e. {@code \r\n} and {@code \n}. */ <del> public static final List<String> DEFAULT_DELIMITERS = Arrays.asList("\r\n", "\n"); <add> public static final List<String> DEFAULT_DELIMITERS = List.of("\r\n", "\n"); <ide> <ide> <ide> private final List<String> delimiters; <ide><path>spring-test/src/main/java/org/springframework/mock/web/MockFilterChain.java <ide> package org.springframework.mock.web; <ide> <ide> import java.io.IOException; <del>import java.util.Arrays; <ide> import java.util.Collections; <ide> import java.util.Iterator; <ide> import java.util.List; <ide> public MockFilterChain(Servlet servlet, Filter... filters) { <ide> <ide> private static List<Filter> initFilterList(Servlet servlet, Filter... filters) { <ide> Filter[] allFilters = ObjectUtils.addObjectToArray(filters, new ServletFilterProxy(servlet)); <del> return Arrays.asList(allFilters); <add> return List.of(allFilters); <ide> } <ide> <ide> <ide><path>spring-test/src/main/java/org/springframework/test/context/junit/jupiter/SpringExtension.java <ide> /* <del> * Copyright 2002-2021 the original author or authors. <add> * Copyright 2002-2022 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> public class SpringExtension implements BeforeAllCallback, AfterAllCallback, Tes <ide> // Note that @Test, @TestFactory, @TestTemplate, @RepeatedTest, and @ParameterizedTest <ide> // are all meta-annotated with @Testable. <ide> private static final List<Class<? 
extends Annotation>> JUPITER_ANNOTATION_TYPES = <del> Arrays.asList(BeforeAll.class, AfterAll.class, BeforeEach.class, AfterEach.class, Testable.class); <add> List.of(BeforeAll.class, AfterAll.class, BeforeEach.class, AfterEach.class, Testable.class); <ide> <ide> private static final MethodFilter autowiredTestOrLifecycleMethodFilter = <ide> ReflectionUtils.USER_DECLARED_METHODS <ide><path>spring-test/src/main/java/org/springframework/test/web/reactive/server/ExchangeResult.java <ide> import java.nio.charset.Charset; <ide> import java.nio.charset.StandardCharsets; <ide> import java.time.Duration; <del>import java.util.Arrays; <ide> import java.util.List; <ide> import java.util.stream.Collectors; <ide> <ide> public class ExchangeResult { <ide> <ide> private static final Log logger = LogFactory.getLog(ExchangeResult.class); <ide> <del> private static final List<MediaType> PRINTABLE_MEDIA_TYPES = Arrays.asList( <add> private static final List<MediaType> PRINTABLE_MEDIA_TYPES = List.of( <ide> MediaType.parseMediaType("application/*+json"), MediaType.APPLICATION_XML, <ide> MediaType.parseMediaType("text/*"), MediaType.APPLICATION_FORM_URLENCODED); <ide> <ide><path>spring-tx/src/main/java/org/springframework/transaction/annotation/SpringTransactionAnnotationParser.java <ide> import java.io.Serializable; <ide> import java.lang.reflect.AnnotatedElement; <ide> import java.util.ArrayList; <del>import java.util.Arrays; <ide> import java.util.List; <add>import java.util.Set; <ide> <ide> import org.springframework.core.annotation.AnnotatedElementUtils; <ide> import org.springframework.core.annotation.AnnotationAttributes; <ide> protected TransactionAttribute parseTransactionAnnotation(AnnotationAttributes a <ide> <ide> rbta.setReadOnly(attributes.getBoolean("readOnly")); <ide> rbta.setQualifier(attributes.getString("value")); <del> rbta.setLabels(Arrays.asList(attributes.getStringArray("label"))); <add> rbta.setLabels(Set.of(attributes.getStringArray("label"))); <ide> <ide> List<RollbackRuleAttribute> rollbackRules = new ArrayList<>(); <ide> for (Class<?> rbRule : attributes.getClassArray("rollbackFor")) { <ide><path>spring-web/src/testFixtures/java/org/springframework/web/testfixture/servlet/MockFilterChain.java <ide> package org.springframework.web.testfixture.servlet; <ide> <ide> import java.io.IOException; <del>import java.util.Arrays; <ide> import java.util.Collections; <ide> import java.util.Iterator; <ide> import java.util.List; <ide> public MockFilterChain(Servlet servlet, Filter... filters) { <ide> <ide> private static List<Filter> initFilterList(Servlet servlet, Filter... filters) { <ide> Filter[] allFilters = ObjectUtils.addObjectToArray(filters, new ServletFilterProxy(servlet)); <del> return Arrays.asList(allFilters); <add> return List.of(allFilters); <ide> } <ide> <ide>
11
Python
Python
add tests for security views
17fbd7a9d8930a5fd252b3db8f6574d4375b8a52
<ide><path>tests/www/test_views.py <ide> def test_home(self): <ide> resp = self.client.get('home', follow_redirects=True) <ide> self.check_content_in_response('DAGs', resp) <ide> <add> def test_users_list(self): <add> resp = self.client.get('users/list', follow_redirects=True) <add> self.check_content_in_response('List Users', resp) <add> <add> def test_roles_list(self): <add> resp = self.client.get('roles/list', follow_redirects=True) <add> self.check_content_in_response('List Roles', resp) <add> <add> def test_userstatschart_view(self): <add> resp = self.client.get('userstatschartview/chart/', follow_redirects=True) <add> self.check_content_in_response('User Statistics', resp) <add> <add> def test_permissions_list(self): <add> resp = self.client.get('permissions/list/', follow_redirects=True) <add> self.check_content_in_response('List Base Permissions', resp) <add> <add> def test_viewmenus_list(self): <add> resp = self.client.get('viewmenus/list/', follow_redirects=True) <add> self.check_content_in_response('List View Menus', resp) <add> <add> def test_permissionsviews_list(self): <add> resp = self.client.get('permissionviews/list/', follow_redirects=True) <add> self.check_content_in_response('List Permissions on Views/Menus', resp) <add> <ide> def test_home_filter_tags(self): <ide> from airflow.www.views import FILTER_TAGS_COOKIE <ide> with self.client:
1
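The airflow tests above all follow one pattern: GET a view through the test client and assert that a marker string appears in the rendered body. A self-contained sketch of that pattern against a toy Flask app (route and strings are made up for illustration, not Airflow's):

from flask import Flask

app = Flask(__name__)

@app.route("/users/list")
def users_list():
    return "<h1>List Users</h1>"

def test_users_list():
    client = app.test_client()
    resp = client.get("/users/list", follow_redirects=True)
    assert resp.status_code == 200
    # Equivalent in spirit to check_content_in_response in the patch above.
    assert b"List Users" in resp.data

test_users_list()
print("ok")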
Ruby
Ruby
allow resetting of request variants
e1fb3483d6402bd66c41a12d158fd1c987fac983
<ide><path>actionpack/lib/action_dispatch/http/mime_negotiation.rb <ide> def formats <ide> end <ide> end <ide> end <add> <ide> # Sets the \variant for template. <ide> def variant=(variant) <ide> if variant.is_a?(Symbol) <ide> @variant = [variant] <del> elsif variant.is_a?(Array) && variant.any? && variant.all?{ |v| v.is_a?(Symbol) } <add> elsif variant.nil? || variant.is_a?(Array) && variant.any? && variant.all?{ |v| v.is_a?(Symbol) } <ide> @variant = variant <ide> else <ide> raise ArgumentError, "request.variant must be set to a Symbol or an Array of Symbols, not a #{variant.class}. " \ <ide><path>actionpack/test/dispatch/request_test.rb <ide> class RequestVariant < BaseRequestTest <ide> end <ide> end <ide> <add> test "reset variant" do <add> request = stub_request <add> <add> request.variant = nil <add> assert_equal nil, request.variant <add> end <add> <ide> test "setting variant with non symbol value" do <ide> request = stub_request <ide> assert_raise ArgumentError do
2
Ruby
Ruby
remove extraneous files
732c6efa19663f69aad52677cbf41b181061dfd9
<ide><path>Library/Homebrew/dev-cmd/bump.rb <ide> def bump_args <ide> Homebrew::CLI::Parser.new do <ide> usage_banner <<~EOS <ide> `bump` <del> <ide> Display out-of-date brew formulae, the latest version available, and whether a pull request has been opened. <ide> EOS <ide> end <ide> end <ide> <ide> def bump <ide> bump_args.parse <del> # puts "command run" <del> outdated_repology_pacakges = RepologyParser.parse_api_response() <del> puts RepologyParser.validate__packages(outdated_repology_pacakges) <add> <add> outdated_repology_packages = RepologyParser.parse_api_response <add> ohai RepologyParser.validate__packages(outdated_repology_packages) <ide> end <ide> end <ide><path>scripts/bumpFormulae.rb <del>require_relative "helpers/parsed_file" <del>require_relative "helpers/brew_commands.rb" <del> <del>brew_commands = BrewCommands.new <del> <del>parsed_file = ParsedFile.new <del>outdated_pckgs_to_update = parsed_file.get_latest_file("data/outdated_pckgs_to_update") <del> <del>File.foreach(outdated_pckgs_to_update) do |line| <del> line_hash = eval(line) <del> puts "\n bumping package: #{line_hash['name']} formula" <del> <del> begin <del> bump_pr_response, bump_pr_status = brew_commands.bump_formula_pr(line_hash["name"], line_hash["download_url"], line_hash["checksum"]) <del> puts "#{bump_pr_response}" <del> rescue <del> puts "- An error occured whilst bumping package #{line_hash["name"]} \n" <del> return <del> end <del>end <ide><path>scripts/helpers/brew_commands.rb <del>require "open3" <del> <del>class BrewCommands <del> <del> def livecheck_check_formula(formula_name) <del> puts "- livecheck formula : #{formula_name}" <del> command_args = [ <del> "brew", <del> "livecheck", <del> formula_name, <del> "--quiet", <del> ] <del> <del> response = Open3.capture2e(*command_args) <del> self.parse_livecheck_response(response) <del> end <del> <del> def parse_livecheck_response(livecheck_output) <del> livecheck_output = livecheck_output.first.gsub(" ", "").split(/:|==>|\n/) <del> <del> # eg: ["burp", "2.2.18", "2.2.18"] <del> package_name, brew_version, latest_version = livecheck_output <del> <del> {"name" => package_name, "current_brew_version" => brew_version, "livecheck_latest_version" => latest_version} <del> end <del> <del> def bump_formula_pr(formula_name, url) <del> command_args = [ <del> "brew", <del> "bump-formula-pr", <del> "--no-browse", <del> "--dry-run", <del> formula_name, <del> "--url=#{url}", <del> ] <del> <del> response = Open3.capture2e(*command_args) <del> self.parse_formula_bump_response(response) <del> end <del> <del> def parse_formula_bump_response(formula_bump_response) <del> response, status = formula_bump_response <del> response <del> end <del> <del> def check_for_open_pr(formula_name, download_url) <del> puts "- Checking for open PRs for formula : #{formula_name}" <del> <del> response = bump_formula_pr(formula_name, download_url) <del> <del> !response.include? "Error: These open pull requests may be duplicates" <del> end <del> <del>end <ide><path>scripts/helpers/homebrew_formula.rb <del>require "net/http" <del>require "open-uri" <del> <del>class HomebrewFormula <del> <del> def generate_new_download_url(outdated_url, old_version, latest_version) <del> if [outdated_url, old_version, latest_version].include? 
nil <del> puts "\n- Could not generate download url" <del> nil <del> else <del> puts "\n- Generating download url" <del> outdated_url.gsub(old_version, latest_version) <del> end <del> end <del> <del> def generate_checksum(new_url) <del> begin <del> puts "- Generating checksum for url: #{new_url}" <del> tempfile = URI.parse(new_url).open <del> tempfile.close <del> return Digest::SHA256.file(tempfile.path).hexdigest <del> rescue <del> puts "- Failed to generate Checksum \n" <del> return nil <del> end <del> end <del>end <ide><path>scripts/printPackageUpdates.rb <del>require_relative "helpers/api_parser" <del> <del>api_parser = ApiParser.new <del> <del>outdated_repology_packages = api_parser.parse_repology_api() <del>brew_formulas = api_parser.parse_homebrew_formulas() <del> <del>formatted_outdated_packages = api_parser.validate_packages(outdated_repology_packages, brew_formulas) <del> <del>api_parser.display_version_data(formatted_outdated_packages)
5
Text
Text
add log.d hint
36656e35c928e69b9648b24f6c9822c5d0b8c14c
<ide><path>guide/english/android-development/core-components/index.md <ide> An _activity_ is a component that has a user interface and represents a single s <ide> This call is often used when the user hits the back button, or closes the instance of the app. <ide> <ide> #### Sample code to understand Activity Lifecycle <add> <add>You can print in log console using Log.d() method. <add> <ide> ``` java <ide> import android.app.Activity; <ide> import android.os.Bundle;
1
Javascript
Javascript
remove typo in settimeout comment
3f6450b638a1ddb6ce0aceebaae2aae86c8499f5
<ide><path>lib/net.js <ide> Socket.prototype.setTimeout = function(msecs, callback) { <ide> // Type checking identical to timers.enroll() <ide> msecs = validateTimerDuration(msecs); <ide> <del> // Attempt to clear an existing timer lear in both cases - <add> // Attempt to clear an existing timer in both cases - <ide> // even if it will be rescheduled we don't want to leak an existing timer. <ide> clearTimeout(this[kTimeout]); <ide>
1
Javascript
Javascript
update broken types in type parser
9037ad3e78324be6952ea114bf1fd701a2803516
<ide><path>tools/doc/type-parser.js <ide> const jsPrimitives = { <ide> <ide> const jsGlobalObjectsUrl = `${jsDocPrefix}Reference/Global_Objects/`; <ide> const jsGlobalTypes = [ <del> 'Array', 'ArrayBuffer', 'ArrayBufferView', 'DataView', 'Date', 'Error', <add> 'Array', 'ArrayBuffer', 'DataView', 'Date', 'Error', <ide> 'EvalError', 'Function', 'Map', 'Object', 'Promise', 'RangeError', <ide> 'ReferenceError', 'RegExp', 'Set', 'SharedArrayBuffer', 'SyntaxError', <del> 'TypeError', 'TypedArray', 'URIError', 'Uint8Array', 'WebAssembly.Instance', <add> 'TypeError', 'TypedArray', 'URIError', 'Uint8Array', <ide> ]; <ide> <ide> const customTypesMap = { <ide> 'any': `${jsDataStructuresUrl}#Data_types`, <ide> <ide> 'this': `${jsDocPrefix}Reference/Operators/this`, <ide> <add> 'ArrayBufferView': <add> 'https://developer.mozilla.org/en-US/docs/Web/API/ArrayBufferView', <add> <ide> 'AsyncIterator': 'https://tc39.github.io/ecma262/#sec-asynciterator-interface', <ide> <ide> 'AsyncIterable': 'https://tc39.github.io/ecma262/#sec-asynciterable-interface', <ide> <ide> 'bigint': `${jsDocPrefix}Reference/Global_Objects/BigInt`, <add> 'WebAssembly.Instance': <add> `${jsDocPrefix}Reference/Global_Objects/WebAssembly/Instance`, <ide> <ide> 'Iterable': <ide> `${jsDocPrefix}Reference/Iteration_protocols#The_iterable_protocol`,
1
Javascript
Javascript
remove comments from migration
f656311b3e2c320af7174ceb6c3fc36e2b4d976c
<ide><path>seed/loopbackMigration.js <ide> function createConnection(URI) { <ide> } <ide> <ide> function createQuery(db, collection, options, batchSize) { <del> return Rx.Observable.create(function (observer) { <add> return Rx.Observable.create(function(observer) { <ide> var cursor = db.collection(collection).find({}, options); <ide> cursor.batchSize(batchSize || 20); <ide> // Cursor.each will yield all doc from a batch in the same tick, <ide> // or schedule getting next batch on nextTick <ide> debug('opening cursor for %s', collection); <del> cursor.each(function (err, doc) { <add> cursor.each(function(err, doc) { <ide> if (err) { <ide> return observer.onError(err); <ide> } <ide> function createQuery(db, collection, options, batchSize) { <ide> observer.onNext(doc); <ide> }); <ide> <del> return Rx.Disposable.create(function () { <add> return Rx.Disposable.create(function() { <ide> debug('closing cursor for %s', collection); <ide> cursor.close(); <ide> }); <ide> var storyCount = dbObservable <ide> }) <ide> .count(); <ide> <del>var commentCount = dbObservable <del> .flatMap(function(db) { <del> return createQuery(db, 'comments', {}); <del> }) <del> .bufferWithCount(20) <del> .withLatestFrom(dbObservable, function(comments, db) { <del> return { <del> comments: comments, <del> db: db <del> }; <del> }) <del> .flatMap(function(dats) { <del> return insertMany(dats.db, 'comment', dats.comments, { w: 1 }); <del> }) <del> .count(); <del> <ide> Rx.Observable.combineLatest( <ide> userIdentityCount, <ide> userSavesCount, <ide> storyCount, <del> commentCount, <del> function(userIdentCount, userCount, storyCount, commentCount) { <add> function(userIdentCount, userCount, storyCount) { <ide> return { <ide> userIdentCount: userIdentCount * 20, <ide> userCount: userCount * 20, <del> storyCount: storyCount * 20, <del> commentCount: commentCount * 20 <add> storyCount: storyCount * 20 <ide> }; <ide> }) <ide> .subscribe(
1
PHP
PHP
apply base64 encoding after helper serialization
803e7be363868a808c5a00572f477ae62f477995
<ide><path>lib/Cake/View/Helper/CacheHelper.php <ide> protected function _writeFile($content, $timestamp, $useCallbacks = false) { <ide> $response = new CakeResponse(array("charset" => Configure::read("App.encoding"))); <ide> $controller = new ' . $this->_View->name . 'Controller($request, $response); <ide> $controller->plugin = $this->plugin = \'' . $this->_View->plugin . '\'; <del> $controller->helpers = $this->helpers = unserialize(\'' . str_replace("'", "\'", serialize($this->_View->helpers)) . '\'); <add> $controller->helpers = $this->helpers = unserialize(base64_decode(\'' . base64_encode(serialize($this->_View->helpers)) . '\')); <ide> $controller->layout = $this->layout = \'' . $this->_View->layout. '\'; <ide> $controller->theme = $this->theme = \'' . $this->_View->theme . '\'; <ide> $controller->viewVars = $this->viewVars = unserialize(base64_decode(\'' . base64_encode(serialize($this->_View->viewVars)) . '\'));
1
Python
Python
fix tf input for np.ndarray
4fbcf8ea496bece21b9442d71280257b9953152a
<ide><path>src/transformers/modeling_tf_utils.py <ide> def input_processing(func, config, input_ids, **kwargs): <ide> signature.pop("kwargs", None) <ide> parameter_names = list(signature.keys()) <ide> output = {} <del> allowed_types = (tf.Tensor, bool, int, ModelOutput, tuple, list, dict) <add> allowed_types = (tf.Tensor, bool, int, ModelOutput, tuple, list, dict, np.ndarray) <ide> <ide> if "inputs" in kwargs["kwargs_call"]: <ide> warnings.warn( <ide><path>tests/test_modeling_tf_common.py <ide> def test_inputs_embeds(self): <ide> <ide> model(inputs) <ide> <add> def test_numpy_arrays_inputs(self): <add> config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common() <add> <add> def prepare_numpy_arrays(inputs_dict): <add> inputs_np_dict = {} <add> for k, v in inputs_dict.items(): <add> if tf.is_tensor(v): <add> inputs_np_dict[k] = v.numpy() <add> else: <add> inputs_np_dict[k] = np.array(k) <add> <add> return inputs_np_dict <add> <add> for model_class in self.all_model_classes: <add> model = model_class(config) <add> <add> inputs = self._prepare_for_class(inputs_dict, model_class) <add> inputs_np = prepare_numpy_arrays(inputs) <add> <add> model(inputs_np) <add> <ide> def test_resize_token_embeddings(self): <ide> if not self.test_resize_embeddings: <ide> return
2
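The transformers fix above adds np.ndarray to the tuple of accepted input types so TF models can be called with plain numpy arrays. A toy version of the same type gate, written independently of transformers (function and names are illustrative):

import numpy as np
import tensorflow as tf

ALLOWED_TYPES = (tf.Tensor, np.ndarray, bool, int, tuple, list, dict)

def check_inputs(**kwargs):
    """Raise if any input has a type the model cannot handle (toy check)."""
    for name, value in kwargs.items():
        if value is not None and not isinstance(value, ALLOWED_TYPES):
            raise ValueError(f"{name} has unexpected type {type(value)}")
    return kwargs

# Both eager tensors and plain numpy arrays pass the gate.
check_inputs(input_ids=tf.constant([[1, 2, 3]]))
check_inputs(input_ids=np.array([[1, 2, 3]]))
print("both accepted")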
Javascript
Javascript
upgrade localmodule to es6
9687015e07c0c6b669b4afeae3595a2adf19bfc4
<ide><path>lib/dependencies/LocalModule.js <ide> MIT License http://www.opensource.org/licenses/mit-license.php <ide> Author Tobias Koppers @sokra <ide> */ <del>function LocalModule(module, name, idx) { <del> this.module = module; <del> this.name = name; <del> this.idx = idx; <del> this.used = false; <del>} <del>module.exports = LocalModule; <add>"use strict"; <add> <add>class LocalModule { <add> constructor(module, name, idx) { <add> this.module = module; <add> this.name = name; <add> this.idx = idx; <add> this.used = false; <add> } <ide> <del>LocalModule.prototype.flagUsed = function() { <del> this.used = true; <del>}; <add> flagUsed() { <add> this.used = true; <add> } <ide> <del>LocalModule.prototype.variableName = function() { <del> return "__WEBPACK_LOCAL_MODULE_" + this.idx + "__"; <del>}; <add> variableName() { <add> return "__WEBPACK_LOCAL_MODULE_" + this.idx + "__"; <add> } <add>} <add>module.exports = LocalModule;
1