| text | meta |
|---|---|
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.application.options.codeStyle.cache;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.ReadAction;
import consulo.logging.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.SimpleModificationTracker;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.modifier.CodeStyleSettingsModifier;
import com.intellij.psi.codeStyle.modifier.TransientCodeStyleSettings;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.util.ArrayUtil;
import com.intellij.util.concurrency.AppExecutorUtil;
import consulo.util.lang.ObjectUtil;
import javax.annotation.Nonnull;
import org.jetbrains.concurrency.CancellablePromise;
import javax.annotation.Nullable;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
class CodeStyleCachedValueProvider implements CachedValueProvider<CodeStyleSettings> {
private final static Logger LOG = Logger.getInstance(CodeStyleCachedValueProvider.class);
private final static int MAX_COMPUTATION_THREADS = 10;
private final @Nonnull WeakReference<PsiFile> myFileRef;
private final @Nonnull AsyncComputation myComputation;
private final @Nonnull Lock myComputationLock = new ReentrantLock();
private final static ExecutorService ourExecutorService = AppExecutorUtil.createBoundedApplicationPoolExecutor("CodeStyleCachedValueProvider", MAX_COMPUTATION_THREADS);
CodeStyleCachedValueProvider(@Nonnull PsiFile file) {
myFileRef = new WeakReference<>(file);
myComputation = new AsyncComputation();
}
boolean isExpired() {
return myFileRef.get() == null || myComputation.isExpired();
}
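// Returns the cached settings if they can be obtained without blocking; returns null when
// another thread currently holds the computation lock or the PSI file reference has expired.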
CodeStyleSettings tryGetSettings() {
try {
final PsiFile file = getReferencedPsi();
if (myComputationLock.tryLock()) {
try {
return CachedValuesManager.getCachedValue(file, this);
}
finally {
myComputationLock.unlock();
}
}
else {
return null;
}
}
catch (OutdatedFileReferenceException e) {
LOG.error(e);
return null;
}
}
void scheduleWhenComputed(@Nonnull Runnable runnable) {
myComputation.schedule(runnable);
}
@Nullable
@Override
public Result<CodeStyleSettings> compute() {
CodeStyleSettings settings = myComputation.getCurrResult();
if (settings != null) {
logCached(getReferencedPsi(), settings);
return new Result<>(settings, getDependencies(settings, myComputation));
}
return null;
}
public void cancelComputation() {
myComputation.cancel();
}
@Nonnull
Object[] getDependencies(@Nonnull CodeStyleSettings settings, @Nonnull AsyncComputation computation) {
List<Object> dependencies = new ArrayList<>();
if (settings instanceof TransientCodeStyleSettings) {
dependencies.addAll(((TransientCodeStyleSettings)settings).getDependencies());
}
else {
dependencies.add(settings.getModificationTracker());
}
dependencies.add(computation.getTracker());
return ArrayUtil.toObjectArray(dependencies);
}
private static void logCached(@Nonnull PsiFile file, @Nonnull CodeStyleSettings settings) {
LOG.debug(String.format("File: %s (%s), cached: %s, tracker: %d", file.getName(), Integer.toHexString(file.hashCode()), settings, settings.getModificationTracker().getModificationCount()));
}
/**
* Always contains some result which can be obtained by {@code getCurrResult()} method. Listeners are notified after
* the computation is finished and {@code getCurrResult()} contains a stable computed value.
*/
private final class AsyncComputation {
private final AtomicBoolean myIsActive = new AtomicBoolean();
private volatile CodeStyleSettings myCurrResult;
private final @Nonnull CodeStyleSettingsManager mySettingsManager;
private final SimpleModificationTracker myTracker = new SimpleModificationTracker();
private final Project myProject;
private CancellablePromise<Void> myPromise;
private final List<Runnable> myScheduledRunnables = new ArrayList<>();
private AsyncComputation() {
myProject = getReferencedPsi().getProject();
mySettingsManager = CodeStyleSettingsManager.getInstance(myProject);
//noinspection deprecation
myCurrResult = mySettingsManager.getCurrentSettings();
}
private void start() {
if (isRunOnBackground()) {
myPromise = ReadAction.nonBlocking(() -> computeSettings()).expireWith(myProject).expireWhen(() -> myFileRef.get() == null)
.finishOnUiThread(ModalityState.NON_MODAL, val -> notifyCachedValueComputed()).submit(ourExecutorService);
}
else {
ReadAction.run(() -> computeSettings());
notifyOnEdt();
}
}
public void cancel() {
if (myPromise != null && !myPromise.isDone()) {
myPromise.cancel();
}
myCurrResult = null;
}
public boolean isExpired() {
return myCurrResult == null;
}
private void schedule(@Nonnull Runnable runnable) {
if (myIsActive.get()) {
myScheduledRunnables.add(runnable);
}
else {
runnable.run();
}
}
private boolean isRunOnBackground() {
final Application application = ApplicationManager.getApplication();
return !application.isUnitTestMode() && !application.isHeadlessEnvironment() && application.isDispatchThread();
}
private void notifyOnEdt() {
final Application application = ApplicationManager.getApplication();
if (application.isDispatchThread()) {
notifyCachedValueComputed();
}
else {
application.invokeLater(() -> notifyCachedValueComputed(), ModalityState.any());
}
}
private void computeSettings() {
final PsiFile file = myFileRef.get();
if (file == null) {
LOG.warn("PSI file has expired, cancelling computation");
cancel();
return;
}
try {
myComputationLock.lock();
if (LOG.isDebugEnabled()) {
LOG.debug("Computation started for " + file.getName());
}
@SuppressWarnings("deprecation") CodeStyleSettings currSettings = mySettingsManager.getCurrentSettings();
if (currSettings != mySettingsManager.getTemporarySettings()) {
TransientCodeStyleSettings modifiableSettings = new TransientCodeStyleSettings(file, currSettings);
modifiableSettings.applyIndentOptionsFromProviders(file);
for (CodeStyleSettingsModifier modifier : CodeStyleSettingsModifier.EP_NAME.getExtensionList()) {
if (modifier.modifySettings(modifiableSettings, file)) {
LOG.debug("Modifier: " + modifier.getClass().getName());
modifiableSettings.setModifier(modifier);
currSettings = modifiableSettings;
break;
}
}
}
myCurrResult = currSettings;
myTracker.incModificationCount();
if (LOG.isDebugEnabled()) {
LOG.debug("Computation ended for " + file.getName());
}
}
finally {
myComputationLock.unlock();
}
}
@Nullable
public CodeStyleSettings getCurrResult() {
if (myIsActive.compareAndSet(false, true)) {
start();
}
return myCurrResult;
}
private SimpleModificationTracker getTracker() {
return myTracker;
}
void reset() {
myScheduledRunnables.clear();
myIsActive.set(false);
}
private void notifyCachedValueComputed() {
for (Runnable runnable : myScheduledRunnables) {
runnable.run();
}
if (!myProject.isDisposed()) {
ObjectUtil.consumeIfNotNull(myFileRef.get(), file -> {
final CodeStyleSettingsManager settingsManager = CodeStyleSettingsManager.getInstance(myProject);
settingsManager.fireCodeStyleSettingsChanged(file);
});
}
myComputation.reset();
}
}
@Nonnull
private PsiFile getReferencedPsi() {
PsiFile file = myFileRef.get();
if (file == null) {
throw new OutdatedFileReferenceException();
}
return file;
}
//
// Check provider equivalence by file ref. Other fields make no sense since AsyncComputation is a stateful object
// whose state (active=true->false) changes over time due to long computation.
//
@Override
public boolean equals(Object obj) {
return obj instanceof CodeStyleCachedValueProvider && Objects.equals(this.myFileRef.get(), ((CodeStyleCachedValueProvider)obj).myFileRef.get());
}
static class OutdatedFileReferenceException extends RuntimeException {
OutdatedFileReferenceException() {
super("Outdated file reference used to obtain settings");
}
}
}
|
{
"content_hash": "6051b252095c80d187d15d66609091cd",
"timestamp": "",
"source": "github",
"line_count": 276,
"max_line_length": 193,
"avg_line_length": 34.210144927536234,
"alnum_prop": 0.7037703876297394,
"repo_name": "consulo/consulo",
"id": "82122aaf40cf23630ebc179783d538339d998840",
"size": "9442",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/base/lang-impl/src/main/java/com/intellij/application/options/codeStyle/cache/CodeStyleCachedValueProvider.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "299"
},
{
"name": "C",
"bytes": "52718"
},
{
"name": "C++",
"bytes": "72795"
},
{
"name": "CMake",
"bytes": "854"
},
{
"name": "CSS",
"bytes": "64655"
},
{
"name": "Groovy",
"bytes": "36006"
},
{
"name": "HTML",
"bytes": "173780"
},
{
"name": "Java",
"bytes": "64026758"
},
{
"name": "Lex",
"bytes": "5909"
},
{
"name": "Objective-C",
"bytes": "23787"
},
{
"name": "Python",
"bytes": "3276"
},
{
"name": "SCSS",
"bytes": "9782"
},
{
"name": "Shell",
"bytes": "5689"
},
{
"name": "Thrift",
"bytes": "1216"
},
{
"name": "XSLT",
"bytes": "49230"
}
],
"symlink_target": ""
}
|
'use strict';
const AddressLookup = require('../../../components').addressLookup;
module.exports = config => ({
steps: {
'/address-default-one': {
behaviours: AddressLookup({
addressKey: 'address-one',
apiSettings: {
hostname: `http://localhost:${config.port}/api/postcode-test`
},
validate: {
allowedCountries: ['England']
}
}),
next: '/address-default-two'
},
'/address-default-two': {}
}
});
|
{
"content_hash": "6cd1f5aa03fafde21ff1473221f2d7ca",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 71,
"avg_line_length": 23.38095238095238,
"alnum_prop": 0.5437881873727087,
"repo_name": "UKHomeOfficeForms/hof",
"id": "eedb8f99e3aa9083fd234032a9080c926b463d5e",
"size": "491",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/functional-tests/apps/address-lookup-default.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Gherkin",
"bytes": "11222"
},
{
"name": "HTML",
"bytes": "54332"
},
{
"name": "JavaScript",
"bytes": "720812"
},
{
"name": "SCSS",
"bytes": "38558"
}
],
"symlink_target": ""
}
|
import { Decorators } from "../../Decorators";
import { TemplatedWidget } from "../Widgets/TemplatedWidget";
import { FilterStore } from "./FilterStore";
@Decorators.registerClass('Serenity.FilterWidgetBase')
export class FilterWidgetBase<TOptions> extends TemplatedWidget<TOptions> {
private store: FilterStore;
private onFilterStoreChanged: () => void;
constructor(div: JQuery, opt?: TOptions) {
super(div, opt);
this.store = new FilterStore([]);
this.onFilterStoreChanged = () => this.filterStoreChanged();
this.store.add_changed(this.onFilterStoreChanged);
}
destroy() {
if (this.store) {
this.store.remove_changed(this.onFilterStoreChanged);
this.onFilterStoreChanged = null;
this.store = null;
}
super.destroy();
}
protected filterStoreChanged() {
}
get_store(): FilterStore {
return this.store;
}
set_store(value: FilterStore): void {
if (this.store !== value) {
if (this.store != null)
this.store.remove_changed(this.onFilterStoreChanged);
this.store = value || new FilterStore([]);
this.store.add_changed(this.onFilterStoreChanged);
this.filterStoreChanged();
}
}
}
|
{
"content_hash": "2dcacc121bdeab7be012ebe5af227ef1",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 75,
"avg_line_length": 28.95744680851064,
"alnum_prop": 0.59147685525349,
"repo_name": "volkanceylan/Serenity",
"id": "4cdf8a47ea8f4054836594b1c646e3fc5b32f56e",
"size": "1363",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Serenity.Scripts/CoreLib/UI/Filtering/FilterWidgetBase.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1592"
},
{
"name": "C#",
"bytes": "3328213"
},
{
"name": "CSS",
"bytes": "198506"
},
{
"name": "HTML",
"bytes": "2818"
},
{
"name": "JavaScript",
"bytes": "638940"
},
{
"name": "Roff",
"bytes": "11586"
},
{
"name": "Shell",
"bytes": "287"
},
{
"name": "Smalltalk",
"bytes": "290"
},
{
"name": "TSQL",
"bytes": "1592"
},
{
"name": "TypeScript",
"bytes": "804757"
},
{
"name": "XSLT",
"bytes": "17702"
}
],
"symlink_target": ""
}
|
using System;
using System.Collections.Generic;
using System.Text;
namespace Surging.Core.CPlatform.Support
{
public class ServiceInvokeListenInfo
{
/// <summary>
/// Number of remote service requests
/// </summary>
public int? RemoteServiceRequests { get; set; }
/// <summary>
/// Number of local service requests
/// </summary>
public int LocalServiceRequests { get; set; }
/// <summary>
/// Time of the first invocation
/// </summary>
public DateTime FirstInvokeTime { get; set; }
/// <summary>
/// Time of the most recent remote invocation
/// </summary>
public DateTime FinalRemoteInvokeTime { get; set; }
/// <summary>
/// Number of failed remote service requests
/// </summary>
public int FaultRemoteServiceRequests { get; set; }
/// <summary>
/// Number of requests since the last failed remote invocation
/// </summary>
public int SinceFaultRemoteServiceRequests { get; set; }
/// <summary>
/// Number of concurrent requests
/// </summary>
public int ConcurrentRequests { get; set; }
}
}
|
{
"content_hash": "e9175da2ddbed2976be8f3d4fae90aad",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 64,
"avg_line_length": 23.906976744186046,
"alnum_prop": 0.52431906614786,
"repo_name": "Damon-Liu/surging",
"id": "295e165c991272072571d025440e1a509348501c",
"size": "1128",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Surging.Core/Surging.Core.CPlatform/Support/ServiceInvokeListenInfo.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "833376"
},
{
"name": "CSS",
"bytes": "214943"
},
{
"name": "JavaScript",
"bytes": "82937"
},
{
"name": "PowerShell",
"bytes": "468"
},
{
"name": "Smarty",
"bytes": "8169"
}
],
"symlink_target": ""
}
|
TM1py.Objects package
=====================
Submodules
----------
TM1py.Objects.Annotation module
-------------------------------
.. automodule:: TM1py.Objects.Annotation
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.Application module
--------------------------------
.. automodule:: TM1py.Objects.Application
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.Axis module
-------------------------
.. automodule:: TM1py.Objects.Axis
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.Chore module
--------------------------
.. automodule:: TM1py.Objects.Chore
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.ChoreFrequency module
-----------------------------------
.. automodule:: TM1py.Objects.ChoreFrequency
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.ChoreStartTime module
-----------------------------------
.. automodule:: TM1py.Objects.ChoreStartTime
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.ChoreTask module
------------------------------
.. automodule:: TM1py.Objects.ChoreTask
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.Cube module
-------------------------
.. automodule:: TM1py.Objects.Cube
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.Dimension module
------------------------------
.. automodule:: TM1py.Objects.Dimension
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.Element module
----------------------------
.. automodule:: TM1py.Objects.Element
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.ElementAttribute module
-------------------------------------
.. automodule:: TM1py.Objects.ElementAttribute
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.Hierarchy module
------------------------------
.. automodule:: TM1py.Objects.Hierarchy
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.MDXView module
----------------------------
.. automodule:: TM1py.Objects.MDXView
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.NativeView module
-------------------------------
.. automodule:: TM1py.Objects.NativeView
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.Process module
----------------------------
.. automodule:: TM1py.Objects.Process
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.Rules module
--------------------------
.. automodule:: TM1py.Objects.Rules
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.Server module
---------------------------
.. automodule:: TM1py.Objects.Server
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.Subset module
---------------------------
.. automodule:: TM1py.Objects.Subset
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.TM1Object module
------------------------------
.. automodule:: TM1py.Objects.TM1Object
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.User module
-------------------------
.. automodule:: TM1py.Objects.User
:members:
:undoc-members:
:show-inheritance:
TM1py.Objects.View module
-------------------------
.. automodule:: TM1py.Objects.View
:members:
:undoc-members:
:show-inheritance:
Module contents
---------------
.. automodule:: TM1py.Objects
:members:
:undoc-members:
:show-inheritance:
|
{
"content_hash": "c24170d0e9766786c1c141e961afb3c1",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 46,
"avg_line_length": 19.302197802197803,
"alnum_prop": 0.5673213777398235,
"repo_name": "MariusWirtz/TM1py",
"id": "df82006436e4eddb5c19573e167e467e2dc7d0b2",
"size": "3513",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docs/source/TM1py.Objects.rst",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "476532"
}
],
"symlink_target": ""
}
|
- v1.8.42 - (18-11-2022) Bounding box props, tfParams.boxW and tfParams.boxH added.
- v1.8.41 - (14-11-2022) imagemin post hook added.
- v1.8.40 - (23-10-2022) Photoshop 2023 support added.
- v1.8.39 - (06-10-2022) Alt prop needs to be explicitly set now.
- v1.8.38 - (24-02-2022) Accept reg:x123y123 format
- v1.8.37 - (22-02-2022) Removed alert
- v1.8.36 - (22-02-2022) Matte color support for jpgs and pngs added
- v1.8.35 - (01-12-2021) Photoshop 2022 support added
- v1.8.34 - (14-10-2021) Added getGlobalProp()
- v1.8.33 - (27-09-2021) Photoshop 2021 support added
- v1.8.29 - (06-05-2021) Parsing line break characters for text input
- v1.8.28 - (18-02-2021) Dupe bug debugging
- v1.8.27 - (14-01-2021) Debugging dupe bug
- v1.8.26 - (10-09-2020) Removed debugging output
- v1.8.25 - (10-09-2020) Support for Photoshop 2020 added
- v1.8.24 - (03-07-2020) UI palettes hidden during publishing for performance.
- v1.8.23 - (27-02-2020) Added `postExecutePath` prop and docs.
- v1.8.22 - (26-02-2020) Added `makeDir` property and documentation.
- v1.8.21 - (10-02-2020) Added `prefix` and `suffix` string props.
- v1.8.20 - (15-01-2020) Added pubtime prop, a timestamp of the current time.
- v1.8.19 - (06-12-2019) Command line environment variables added.
- v1.8.18 - (29-11-2019) Textfield visible bounds added.
- v1.8.17 - (28-11-2019) Properly escaping special chars in JSON parsing.
- v1.8.16 - (28-11-2019) Added `rect` type with placeholder output.
- v1.8.15 - (21-11-2019) Fixed obj prop fallback output to be a blank string.
- v1.8.14 - (21-11-2019) Fixed tf output value factor bug.
- v1.8.13 - (19-11-2019) Added the ability for obj props with default sub-props and dot notation access from template.
- v1.8.12 - (18-11-2019) Child (nested) items will be outputted with x,y relative to parent.
- v1.8.11 - (15-11-2019) Testing publish script
- v1.8.10 - (15-11-2019) Updated publish script.
- v1.8.9 - Updated publishing script.
- v1.8.8 - `type` set to `img` for non placeholders. `src` set to '' for placeholders.
- v1.8.7 - Custom parse JSX support for templates added.
- v1.8.6 - `flags` prop added, shorthand prop definition added.
- v1.8.5 - Clone PSD name bugfix, layers to comp bounds bugfix.
- v1.8.4 - Major update. Updated local mod version of `photoshop.invoke(fn, ..)` to accept jsx strings as source. Removed legacy `findandreplace` utility command. Added JSX hook functionality. Overhauled layer comp naming convention, added `nestlevel`, `parent`, `type`, `tfParams` functionality and documentation. Added `layers-to-comps` pre JSX script.
- v1.8.3 - Removed legacy `makecomps` utility command. This functionality is better served by a standalone `jsx`.
- v1.8.2 - Cleaning up dependencies and legacy code.
- v1.8.1 - Removed legacy `flatten` utility command. Removed legacy `sel` utility command. Removed legacy `dry` utility command. Removed legacy `optimize` property. This functionality is better served by a standalone `jsx`.
- v1.8.0 - Added `wipeRelativePath`
- v1.7.9 - Working doc is duplicated so original doc is untouched by processing, `relativePath` now supports dynamic props, such as `%psdBase%`
- v1.7.8 - Added `psdWidth` and `psdHeight` var constants.
- v1.7.7 - Support for `Photoshop 2019` added
- v1.7.5 - Simplified command arg parsing to fix `-` prefixed params.
- v1.7.4 - Hard coded edit to possibly abandoned dependency `photoshop` v0.5.2 to include support for `Photoshop 2018` in `photoshop-0.5.2-edit/photoshop/lib/photoshop-eval.js`
- v1.7.3 - Fixed typo in documentation
- v1.7.2 - Extended AppleScript timeout in hardcoded dependency `photoshop` to 10 mins
- v1.7.1 - Hard coded edit to possibly abandoned dependency `photoshop` v0.5.2 to include support for `Photoshop 2017`
- v1.7.0 - Updated dependencies
- v1.6.8 - Documentation update
- v1.6.8 - Multi template support, |reverseOrder| {choppy} comp var |tlX| and |tlY| output props added. Made '.choppy' config file and |basePath| optional (defaults to "./")
- v1.6.7 - %base% prop incorporates size file handle
- v1.6.6 - Allow text suffix of {reg} layer
- v1.6.4 - Reg str bugfix
- v1.6.3 - Coord support for |reg| property
- v1.6.2 - |placeholder| export
- v1.6.1 - Select mode bugfix
- v1.5.8 - |findandreplace| utility
- v1.5.7 - Added |outputOriginX|, |outputOriginY| and |outputOriginLayer| props
- v1.5.5 - Added |boundsComp| feature. Bounds caching introduced.
- v1.5.4 - |roundOutputValues| added. Supporting new line 't' for inline defined templates
- v1.5.3 - Supporting new line 'n' for inline defined templates
- v1.5.2 - Added width and height params to force output dimensions
- v1.5.1 - Added 'makecomps' utility command, ignore backtick prefixed layers and comps
- v1.5.0 - Added 'flatten' utility command
- v1.4.9 - Active doc bugfix
- v1.4.8 - Relative psd path bugfix
- v1.4.5 - Relative psd paths accepted
- v1.4.4 - 'Verbose' arg added, provide PSD paths in command line args. Optimise bugfix.
- v1.4.3 - Fixed transform warning when reporting active psd
- v1.3.4 - Additional var |psdBase|. Enabled var swap out for |outputFilePath| prop. Template
header and footer have props swapped out with core config data.
- v1.3.3 - Fixed dry run with scale bug. Applied scale to reg pt values. Added
|outputValueFactor| prop.
- v1.3.2 - Scale is applied to reg pts and x,y position
- v1.3.2 - Reg point bugfix
- v1.3.1 - Reg point support through {reg} layer and |reg| layer property
- v1.3.0 - Multiple size output support
- v1.2.2 - Scale outputting
- v1.2.0 - Major update with support for cropping layers nested in sets and masks.
Visible bounds calculation bugfix.
- v1.1.3 - Fix crop bounds to within doc
- v1.1.2 - More descriptive error messages
- v1.1.1 - Width result bugfix
- v1.1.0 - Dependency fix
- v1.0.8 - Sel command line arg to output selected comps only, multiple layer bounds bugfix
- v1.0.7 - Image optimisation added (for Mac)
- v1.0.6 - Export gifs with matte and color options
- v1.0.5 - Dry run mode
- v1.0.4 - Injecting into output file tags
- v1.0.3 - Added template fields |x| and |y|
|
{
"content_hash": "2416a5ca7b1bdb74dec12003e49b2285",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 355,
"avg_line_length": 66.47252747252747,
"alnum_prop": 0.7244172590510828,
"repo_name": "loksland/choppy",
"id": "64a4dd6212b9e7df5d5b149f123dd543c0ac59a8",
"size": "6066",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "CHANGELOG.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "65"
},
{
"name": "JavaScript",
"bytes": "151327"
},
{
"name": "Shell",
"bytes": "3952"
}
],
"symlink_target": ""
}
|
---
status: publish
tags:
- mozilla
- osl
- osx
- pdf
- pdftk
- tech
published: true
title: pdftk 1.41 for Mac OS X 10.6
type: post
meta:
aktt_tweeted: "1"
_edit_last: "2"
aktt_notify_twitter: "yes"
layout: post
---
<strong>Update:</strong> The author of pdftk, Sid Steward, left the following comment:
<blockquote>
A new version of pdftk is available (1.43) that fixes many bugs. This release also features an installer [for] OS X 10.6. Please visit to learn more and download: <a href="http://www.pdflabs.com">www.pdflabs.com</a>.
</blockquote>
This blog post will stick around for the time being, but I (the author of this blog) advise you to always run the latest version so that you can enjoy the latest bug fixes.
<em>OS X Leopard users:</em> Sorry, neither this version nor the installer offered on pdflabs.com works on OS X before 10.6. You might be able to compile from source though. Let us know if you are successful.
<hr />
Due to my being a remote employee, I get to juggle with PDF files quite a bit. A great tool for common PDF manipulations (changing page order, combining files, rotating pages etc) has proven to be <a href="http://www.pdfhacks.com/pdftk/">pdftk</a>. Sadly, a current version for Mac OS X is not available on their homepage. In addition, it is annoying (to say the least) to compile, which is why all three third-party package management systems that I know of (MacPorts, fink, as well as homebrew), last time I checked, did not have it at all, or their versions were broken.
Now I wouldn't be a geek if that kept me from compiling it myself. I took some hints from <a href="http://anoved.net/2007/11/pdftk-141-for-intel-macs/">anoved.net</a> who was nice enough to also provide a compiled binary, but sadly did not include the shared libraries it relies on.
Instead, I made an installer package that'll install pdftk itself as well as the handful of libraries you need into <code>/usr/local</code>. Once you've run this, you can open Terminal.app, and typing <code>pdftk</code> should greet you as follows:
<pre><code>$ pdftk
SYNOPSIS
pdftk <input PDF files | - | PROMPT>
[input_pw <input PDF owner passwords | PROMPT>]
[<operation> <operation arguments>]
[output <output filename | - | PROMPT>]
[encrypt_40bit | encrypt_128bit]
(...)
</code></pre>
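A couple of typical invocations, to give you an idea (this is generic pdftk syntax, nothing specific to my build):
<pre><code>$ pdftk A=report.pdf B=appendix.pdf cat A B output combined.pdf
$ pdftk combined.pdf cat 2-5 output excerpt.pdf
</code></pre>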
You can download the <ins datetime="2010-03-18T16:51:12+00:00">updated</ins> package here: <strong><a href="http://fredericiana.com/downloads/pdftk1.41_OSX10.6.dmg">pdftk1.41_OSX10.6.dmg</a></strong>
(MD5 hash: <code>ea945c606b356305834edc651ddb893d</code>)
I only tested it on OS X 10.6.2; if you use it on older versions, please let me know in the comments whether it worked.
|
{
"content_hash": "568b21bd68fd0cc3c53f49a034b357f8",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 573,
"avg_line_length": 59.148936170212764,
"alnum_prop": 0.7190647482014388,
"repo_name": "fwenzel/fredericiana",
"id": "197ea9d680f203c819e56ba1102442303a6e77e2",
"size": "2785",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "site/_posts/2010-03-01-pdftk-1-41-for-mac-os-x-10-6.md",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "114123"
},
{
"name": "HTML",
"bytes": "22292"
},
{
"name": "JavaScript",
"bytes": "30332"
},
{
"name": "Ruby",
"bytes": "8505"
}
],
"symlink_target": ""
}
|
package com.evolveum.midpoint.repo.sql.data.common.any;
import com.evolveum.midpoint.repo.sql.data.common.id.RAExtDateId;
import com.evolveum.midpoint.repo.sql.data.common.type.RAssignmentExtensionType;
import com.evolveum.midpoint.repo.sql.query2.definition.NotQueryable;
import com.evolveum.midpoint.repo.sql.util.RUtil;
import org.hibernate.annotations.ForeignKey;
import org.hibernate.annotations.Index;
import javax.persistence.*;
import java.sql.Timestamp;
/**
* @author lazyman
*/
@Entity
@IdClass(RAExtDateId.class)
@Table(name = "m_assignment_ext_date")
@org.hibernate.annotations.Table(appliesTo = "m_assignment_ext_date",
indexes = {@Index(name = "iAExtensionDate", columnNames = {"extensionType", "eName", "dateValue"})})
public class RAExtDate implements RAExtValue {
//owner entity
private RAssignmentExtension anyContainer;
private String ownerOid;
private Integer ownerId;
private RAssignmentExtensionType extensionType;
private boolean dynamic;
private String name;
private String type;
private RValueType valueType;
private Timestamp value;
public RAExtDate() {
}
public RAExtDate(Timestamp value) {
this.value = value;
}
@ForeignKey(name = "fk_assignment_ext_date")
@MapsId("owner")
@ManyToOne(fetch = FetchType.LAZY)
@PrimaryKeyJoinColumns({
@PrimaryKeyJoinColumn(name = "anyContainer_owner_owner_oid", referencedColumnName = "ownerOid"),
@PrimaryKeyJoinColumn(name = "anyContainer_owner_id", referencedColumnName = "owner_type")
})
@NotQueryable
public RAssignmentExtension getAnyContainer() {
return anyContainer;
}
@Id
@Column(name = "anyContainer_owner_owner_oid", length = RUtil.COLUMN_LENGTH_OID)
@NotQueryable
public String getOwnerOid() {
if (ownerOid == null && anyContainer != null) {
ownerOid = anyContainer.getOwnerOid();
}
return ownerOid;
}
@Id
@Column(name = "anyContainer_owner_id")
@NotQueryable
public Integer getOwnerId() {
if (ownerId == null && anyContainer != null) {
ownerId = anyContainer.getOwnerId();
}
return ownerId;
}
@Id
@Enumerated(EnumType.ORDINAL)
public RAssignmentExtensionType getExtensionType() {
return extensionType;
}
@Id
@Column(name = "eName", length = RUtil.COLUMN_LENGTH_QNAME)
public String getName() {
return name;
}
@Column(name = "eType", length = RUtil.COLUMN_LENGTH_QNAME)
public String getType() {
return type;
}
@Enumerated(EnumType.ORDINAL)
public RValueType getValueType() {
return valueType;
}
/**
* @return true if this property has dynamic definition
*/
@Column(name = "dynamicDef")
public boolean isDynamic() {
return dynamic;
}
@Column(name = "dateValue")
public Timestamp getValue() {
return value;
}
public void setValue(Timestamp value) {
this.value = value;
}
public void setValueType(RValueType valueType) {
this.valueType = valueType;
}
public void setName(String name) {
this.name = name;
}
public void setType(String type) {
this.type = type;
}
public void setDynamic(boolean dynamic) {
this.dynamic = dynamic;
}
public void setAnyContainer(RAssignmentExtension anyContainer) {
this.anyContainer = anyContainer;
}
public void setOwnerOid(String ownerOid) {
this.ownerOid = ownerOid;
}
public void setOwnerId(Integer ownerId) {
this.ownerId = ownerId;
}
public void setExtensionType(RAssignmentExtensionType extensionType) {
this.extensionType = extensionType;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
RAExtDate that = (RAExtDate) o;
if (dynamic != that.dynamic) return false;
if (name != null ? !name.equals(that.name) : that.name != null) return false;
if (type != null ? !type.equals(that.type) : that.type != null) return false;
if (valueType != that.valueType) return false;
if (value != null ? !value.equals(that.value) : that.value != null) return false;
return true;
}
@Override
public int hashCode() {
int result = (dynamic ? 1 : 0);
result = 31 * result + (name != null ? name.hashCode() : 0);
result = 31 * result + (type != null ? type.hashCode() : 0);
result = 31 * result + (valueType != null ? valueType.hashCode() : 0);
result = 31 * result + (value != null ? value.hashCode() : 0);
return result;
}
}
|
{
"content_hash": "2d5e6faf3f0ad0553df3486a361cb58b",
"timestamp": "",
"source": "github",
"line_count": 174,
"max_line_length": 108,
"avg_line_length": 27.64367816091954,
"alnum_prop": 0.6384615384615384,
"repo_name": "PetrGasparik/midpoint",
"id": "7624a803022829bc15ef6684ea2a08eb63cf1d67",
"size": "5410",
"binary": false,
"copies": "3",
"ref": "refs/heads/CAS-auth",
"path": "repo/repo-sql-impl/src/main/java/com/evolveum/midpoint/repo/sql/data/common/any/RAExtDate.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "321145"
},
{
"name": "CSS",
"bytes": "234702"
},
{
"name": "HTML",
"bytes": "651627"
},
{
"name": "Java",
"bytes": "24107826"
},
{
"name": "JavaScript",
"bytes": "17224"
},
{
"name": "PLSQL",
"bytes": "2171"
},
{
"name": "PLpgSQL",
"bytes": "8169"
},
{
"name": "Shell",
"bytes": "390442"
}
],
"symlink_target": ""
}
|
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
{
"content_hash": "b074615af995e891af6b790d16304a27",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "6d139dc6e41cff94a94f1076acfb2a2147a76546",
"size": "180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Asterales/Asteraceae/Lourteigia morenoi/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
ACCEPTED
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null
|
{
"content_hash": "01d398051c92aa059de93e37f615174e",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.307692307692308,
"alnum_prop": 0.6940298507462687,
"repo_name": "mdoering/backbone",
"id": "fb4a642f22c9b7def12b9b46e974603ceb58b2b8",
"size": "211",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Bryophyta/Bryopsida/Hypnales/Fabroniaceae/Helicodontium/Helicodontium jacobi-felicis/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_151) on Wed Jul 17 09:40:07 MST 2019 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Interface org.wildfly.swarm.config.mail.mail_session.CustomSupplier (BOM: * : All 2.4.1.Final-SNAPSHOT API)</title>
<meta name="date" content="2019-07-17">
<link rel="stylesheet" type="text/css" href="../../../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Interface org.wildfly.swarm.config.mail.mail_session.CustomSupplier (BOM: * : All 2.4.1.Final-SNAPSHOT API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../org/wildfly/swarm/config/mail/mail_session/CustomSupplier.html" title="interface in org.wildfly.swarm.config.mail.mail_session">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">Thorntail API, 2.4.1.Final-SNAPSHOT</div>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?org/wildfly/swarm/config/mail/mail_session/class-use/CustomSupplier.html" target="_top">Frames</a></li>
<li><a href="CustomSupplier.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Interface org.wildfly.swarm.config.mail.mail_session.CustomSupplier" class="title">Uses of Interface<br>org.wildfly.swarm.config.mail.mail_session.CustomSupplier</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../../org/wildfly/swarm/config/mail/mail_session/CustomSupplier.html" title="interface in org.wildfly.swarm.config.mail.mail_session">CustomSupplier</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.wildfly.swarm.config.mail">org.wildfly.swarm.config.mail</a></td>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.wildfly.swarm.config.mail">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../../org/wildfly/swarm/config/mail/mail_session/CustomSupplier.html" title="interface in org.wildfly.swarm.config.mail.mail_session">CustomSupplier</a> in <a href="../../../../../../../org/wildfly/swarm/config/mail/package-summary.html">org.wildfly.swarm.config.mail</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../../org/wildfly/swarm/config/mail/package-summary.html">org.wildfly.swarm.config.mail</a> with parameters of type <a href="../../../../../../../org/wildfly/swarm/config/mail/mail_session/CustomSupplier.html" title="interface in org.wildfly.swarm.config.mail.mail_session">CustomSupplier</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code><a href="../../../../../../../org/wildfly/swarm/config/mail/MailSession.html" title="type parameter in MailSession">T</a></code></td>
<td class="colLast"><span class="typeNameLabel">MailSession.</span><code><span class="memberNameLink"><a href="../../../../../../../org/wildfly/swarm/config/mail/MailSession.html#custom-org.wildfly.swarm.config.mail.mail_session.CustomSupplier-">custom</a></span>(<a href="../../../../../../../org/wildfly/swarm/config/mail/mail_session/CustomSupplier.html" title="interface in org.wildfly.swarm.config.mail.mail_session">CustomSupplier</a> supplier)</code>
<div class="block">Install a supplied Custom object to the list of subresources</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../../org/wildfly/swarm/config/mail/mail_session/CustomSupplier.html" title="interface in org.wildfly.swarm.config.mail.mail_session">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../../../../../../../overview-tree.html">Tree</a></li>
<li><a href="../../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../../help-doc.html">Help</a></li>
</ul>
<div class="aboutLanguage">Thorntail API, 2.4.1.Final-SNAPSHOT</div>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../../index.html?org/wildfly/swarm/config/mail/mail_session/class-use/CustomSupplier.html" target="_top">Frames</a></li>
<li><a href="CustomSupplier.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2019 <a href="http://www.jboss.org">JBoss by Red Hat</a>. All rights reserved.</small></p>
</body>
</html>
|
{
"content_hash": "5a5df07b829fecc57d6bf088f6d243ce",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 462,
"avg_line_length": 45.38823529411765,
"alnum_prop": 0.6377656817003629,
"repo_name": "wildfly-swarm/wildfly-swarm-javadocs",
"id": "cb0f80c3b2ae4d269eef0a77f006f7f2fc006e21",
"size": "7716",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "2.4.1.Final-SNAPSHOT/apidocs/org/wildfly/swarm/config/mail/mail_session/class-use/CustomSupplier.html",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
#pragma once
#include <spdlog/common.h>
#include <spdlog/spdlog.h>
#include <spdlog/async_logger.h>
#include <spdlog/async.h>
#include <spdlog/fmt/fmt.h>
// custom sinks
#include "spdlog/sinks/date_and_hour_file_sink.h"
//#include "spdlog/sinks/rotating_file_with_date_sink.h"
#include "base/AFMacros.hpp"
#include "base/container/AFMap.hpp"
#include "base/AFXml.hpp"
#ifndef ARK_PLATFORM_WIN
#include <execinfo.h>
#endif
namespace ark {
// Get the basename of __FILE__ (at compile time if possible)
#if FMT_HAS_FEATURE(__builtin_strrchr)
#define SPDLOG_STRRCHR(str, sep) __builtin_strrchr(str, sep)
#else
#define SPDLOG_STRRCHR(str, sep) strrchr(str, sep)
#endif //__builtin_strrchr not defined
#ifndef SPDLOG_FILE_BASENAME
#ifdef PLATFORM_WIN
#define SPDLOG_FILE_BASENAME(file) SPDLOG_STRRCHR("\\" file, '\\') + 1
#else
#define SPDLOG_FILE_BASENAME(file) SPDLOG_STRRCHR("/" file, '/') + 1
#endif
#endif // SPDLOG_FILE_BASENAME
//SPDLOG_TRACE();
#define ARK_LOG_TRACE(my_fmt, ...) \
AFLogger::instance()->LogT( \
spdlog::source_loc{SPDLOG_FILE_BASENAME(__FILE__), __LINE__, ""}, spdlog::level::trace, my_fmt, ##__VA_ARGS__)
#define ARK_LOG_DEBUG(my_fmt, ...) \
AFLogger::instance()->LogT( \
spdlog::source_loc{SPDLOG_FILE_BASENAME(__FILE__), __LINE__, ""}, spdlog::level::debug, my_fmt, ##__VA_ARGS__)
#define ARK_LOG_INFO(my_fmt, ...) \
AFLogger::instance()->LogT( \
spdlog::source_loc{SPDLOG_FILE_BASENAME(__FILE__), __LINE__, ""}, spdlog::level::info, my_fmt, ##__VA_ARGS__)
#define ARK_LOG_WARN(my_fmt, ...) \
AFLogger::instance()->LogT( \
spdlog::source_loc{SPDLOG_FILE_BASENAME(__FILE__), __LINE__, ""}, spdlog::level::warn, my_fmt, ##__VA_ARGS__)
#define ARK_LOG_ERROR(my_fmt, ...) \
AFLogger::instance()->LogT( \
spdlog::source_loc{SPDLOG_FILE_BASENAME(__FILE__), __LINE__, ""}, spdlog::level::err, my_fmt, ##__VA_ARGS__)
#define ARK_LOG_CRITICAL(my_fmt, ...) \
AFLogger::instance()->LogT(spdlog::source_loc{SPDLOG_FILE_BASENAME(__FILE__), __LINE__, ""}, \
spdlog::level::critical, my_fmt, ##__VA_ARGS__);
#define ARK_LOG_OSS(name, content) AFLogger::instance()->LogOSS(spdlog::level::info, name, content)
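// Usage sketch (message text and arguments below are made up for illustration):
//   ARK_LOG_INFO("session {} connected from {}", session_id, ip);
//   ARK_LOG_ERROR("failed to load config {}: {}", path, err_msg);
// Arguments are forwarded to AFLogger::LogT() and formatted fmt-style via ARK_FORMAT.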
enum class kLogSinkType : uint8_t
{
Date_and_hour_sink = 0, // default, rotate by hour with specific date path
Size_step_sink, // rotate by file size, unused for now
// others
};
class AFLogConfData final
{
public:
std::string output_path_{"./binlog"};
kLogSinkType sink_type_{kLogSinkType::Date_and_hour_sink};
uint16_t sink_queue_count_{1024};
bool consol_{true};
spdlog::level::level_enum level_{spdlog::level::trace};
std::string pattern_{};
};
class AFLogConf final
{
AFSmartPtrHashmap<std::string, AFLogConfData> conf_datas_;
std::string default_log_name_{"default"};
public:
void LoadLogConf(std::string const& filename)
{
AFXml xml(filename);
auto root_node = xml.GetRootNode();
for (auto logger_node = root_node.FindNode("logger"); logger_node.IsValid(); logger_node.NextNode())
{
// create AFLogConfData
auto conf_data = std::make_shared<AFLogConfData>();
auto name = logger_node.GetString("name");
conf_data->sink_queue_count_ = logger_node.GetUint32("queue_count");
conf_data->output_path_ = logger_node.GetString("path");
#ifdef ARK_RUN_MODE_DEBUG
auto mode_node = logger_node.FindNode("debug");
#else
auto mode_node = logger_node.FindNode("release");
#endif //ARK_RUN_MODE_DEBUG
conf_data->consol_ = mode_node.GetBool("console");
conf_data->level_ = static_cast<spdlog::level::level_enum>(mode_node.GetUint32("level"));
conf_data->pattern_ = mode_node.GetString("pattern");
conf_datas_.insert(name, conf_data);
}
}
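// Shape of the XML this loader expects, reconstructed from the attribute reads above
// (element and attribute names are the ones queried here; the concrete values are illustrative):
//   <root>
//     <logger name="default" queue_count="1024" path="./binlog">
//       <debug console="true" level="0" pattern="[%Y-%m-%d %H:%M:%S.%e][%l] %v"/>
//       <release console="false" level="2" pattern="[%l] %v"/>
//     </logger>
//   </root>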
std::shared_ptr<AFLogConfData> GetConfData(std::string const& name)
{
return conf_datas_.find_value(name);
}
std::string const& GetDefault() const
{
return default_log_name_;
}
};
class AFSpdlogger final
{
std::shared_ptr<AFLogConfData> log_conf_{nullptr};
std::string log_name_{};
std::shared_ptr<spdlog::logger> logger_{nullptr};
std::shared_ptr<spdlog::details::thread_pool> tp_{nullptr};
public:
explicit AFSpdlogger(std::shared_ptr<AFLogConfData> conf)
: log_conf_(conf)
{
}
~AFSpdlogger()
{
spdlog::drop(log_name_);
}
void Init(std::string const& filename)
{
log_name_ = ARK_FORMAT("{}{}{}", log_conf_->output_path_, spdlog::details::os::folder_sep, filename);
std::vector<spdlog::sink_ptr> sinks_vec;
#ifdef OPEN_FILE_LOG
switch (log_conf_->sink_type_)
{
case kLogSinkType::Date_and_hour_sink:
{
auto date_and_hour_sink = std::make_shared<spdlog::sinks::date_and_hour_file_sink_mt>(log_name_);
sinks_vec.emplace_back(date_and_hour_sink);
}
break;
default:
ARK_ASSERT_BREAK(0);
break;
}
#endif
if (log_conf_->consol_)
{
#ifdef ARK_PLATFORM_WIN
auto color_sink = std::make_shared<spdlog::sinks::wincolor_stdout_sink_mt>();
#else
auto color_sink = std::make_shared<spdlog::sinks::ansicolor_stdout_sink_mt>();
#endif // ARK_PLATFORM_WIN
sinks_vec.push_back(color_sink);
}
if (log_conf_->sink_queue_count_ > 0)
{
tp_ = std::make_shared<spdlog::details::thread_pool>(log_conf_->sink_queue_count_, 1);
logger_ =
std::make_shared<spdlog::async_logger>(log_name_, std::begin(sinks_vec), std::end(sinks_vec), tp_);
}
else
{
logger_ = std::make_shared<spdlog::logger>(log_name_, std::begin(sinks_vec), std::end(sinks_vec));
}
logger_->set_pattern(log_conf_->pattern_);
logger_->set_level(log_conf_->level_);
logger_->flush_on(log_conf_->level_);
spdlog::register_logger(logger_);
}
void Log(spdlog::level::level_enum level, std::string const& content)
{
logger_->log(level, content);
}
};
using LOG_FUNCTOR = std::function<void(spdlog::level::level_enum, std::string const&)>;
class AFLogger final : public AFSingleton<AFLogger>
{
using logger_t = std::shared_ptr<AFSpdlogger>;
// singleton
//inline static AFLogger* singleton_logger_{nullptr};
// default logger
logger_t default_logger_{nullptr};
// oss logger
logger_t oss_logger_{nullptr};
// created logger list
std::unordered_map<bus_id_t, logger_t> loggers_;
// log function
LOG_FUNCTOR log_func_;
// log configuration
std::unique_ptr<AFLogConf> log_conf_{nullptr};
public:
~AFLogger()
{
loggers_.clear();
//ARK_DELETE(AFLogger::singleton_logger_);
}
static void Init(AFLogger* logger)
{
if (logger == nullptr)
{
logger = ARK_NEW AFLogger();
}
// init singleton
AFSingleton<AFLogger>::SetInstance(logger);
}
bool InitLogger(std::string const& conf_file, std::string const& log_filename)
{
if (log_conf_ == nullptr)
{
log_conf_ = std::make_unique<AFLogConf>();
log_conf_->LoadLogConf(conf_file);
}
// init local logger
auto conf_data = log_conf_->GetConfData(log_conf_->GetDefault());
ARK_ASSERT_RET_VAL(conf_data != nullptr, false);
default_logger_ = std::make_shared<AFSpdlogger>(conf_data);
default_logger_->Init(log_filename);
// set log function for local logger
auto func = std::bind(&AFLogger::Log, this, std::placeholders::_1, std::placeholders::_2);
SetLogFunc(std::move(func));
// init oss logger
auto oss_conf_data = log_conf_->GetConfData("oss");
ARK_ASSERT_RET_VAL(oss_conf_data != nullptr, false);
oss_logger_ = std::make_shared<AFSpdlogger>(oss_conf_data);
oss_logger_->Init(log_filename);
return true;
}
logger_t CreateLogger(bus_id_t id, std::string const& name, std::string const& filename)
{
auto iter = loggers_.find(id);
if (iter != loggers_.end())
{
return iter->second;
}
// find log configuration
auto log_conf_data = log_conf_->GetConfData(name);
if (log_conf_data == nullptr)
{
return nullptr;
}
// create logger
auto new_logger = std::make_shared<AFSpdlogger>(log_conf_data);
new_logger->Init(filename);
loggers_.insert(std::make_pair(id, new_logger));
return new_logger;
}
void RemoveLogger(bus_id_t id)
{
auto iter = loggers_.find(id);
if (iter == loggers_.end())
{
return;
}
loggers_.erase(iter);
}
void SetLogFunc(LOG_FUNCTOR&& func)
{
if (func != nullptr)
{
log_func_ = func;
}
else
{
auto func = std::bind(&AFLogger::Log, this, std::placeholders::_1, std::placeholders::_2);
log_func_ = func;
}
}
template<typename... ARGS>
void LogT(spdlog::source_loc&& loc, spdlog::level::level_enum log_level, const char* my_fmt, ARGS&&... args)
{
static const std::string func_line_fmt = "[{}:{}]";
auto new_fmt = func_line_fmt + my_fmt;
auto content = ARK_FORMAT(new_fmt, loc.filename, loc.line, std::forward<ARGS>(args)...);
log_func_(log_level, content);
}
void LogOSS(spdlog::level::level_enum log_level, const std::string& name, const std::string& content)
{
static const std::string oss_fmt = "{}:{}";
auto final_content = ARK_FORMAT(oss_fmt, name, content);
oss_logger_->Log(log_level, final_content);
}
void LogStackTrace(std::string const& error)
{
#ifndef ARK_PLATFORM_WIN
Log(spdlog::level::err, "StackTrace Begin =============================");
Log(spdlog::level::err, error);
static const uint16_t default_size = 256;
void* default_data[default_size] = {0};
auto stack_num = backtrace(default_data, default_size);
auto stack_trace = backtrace_symbols(default_data, stack_num);
for (auto i = 0; i < stack_num; i++)
{
Log(spdlog::level::err, stack_trace[i]);
}
free(stack_trace);
Log(spdlog::level::err, "StackTrace End ===============================");
#endif
}
protected:
void Log(spdlog::level::level_enum log_level, std::string const content)
{
default_logger_->Log(log_level, content);
}
};
} // namespace ark
|
{
"content_hash": "eec0235e09b4adde0b22613165b3cd73",
"timestamp": "",
"source": "github",
"line_count": 359,
"max_line_length": 120,
"avg_line_length": 32.768802228412255,
"alnum_prop": 0.5414824889493369,
"repo_name": "ArkGame/ArkGameFrame",
"id": "a265f0159517b93dd5af1c06d037989a4c3f9a05",
"size": "12464",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/base/AFLogger.hpp",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ada",
"bytes": "89080"
},
{
"name": "Assembly",
"bytes": "138199"
},
{
"name": "Batchfile",
"bytes": "10057"
},
{
"name": "C",
"bytes": "824552"
},
{
"name": "C#",
"bytes": "1113258"
},
{
"name": "C++",
"bytes": "2994051"
},
{
"name": "CLIPS",
"bytes": "5291"
},
{
"name": "CMake",
"bytes": "18168"
},
{
"name": "DIGITAL Command Language",
"bytes": "27303"
},
{
"name": "Java",
"bytes": "45257"
},
{
"name": "Lua",
"bytes": "11710"
},
{
"name": "M4",
"bytes": "1572"
},
{
"name": "Makefile",
"bytes": "27066"
},
{
"name": "Module Management System",
"bytes": "1545"
},
{
"name": "Objective-C",
"bytes": "21403"
},
{
"name": "Pascal",
"bytes": "70297"
},
{
"name": "Perl",
"bytes": "3895"
},
{
"name": "Python",
"bytes": "2980"
},
{
"name": "Roff",
"bytes": "7559"
},
{
"name": "SAS",
"bytes": "1847"
},
{
"name": "Shell",
"bytes": "4376"
}
],
"symlink_target": ""
}
|
package com.ghisguth.demo;
import android.content.Context;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.SystemClock;
import android.util.Log;
import com.ghisguth.gfx.*;
import com.ghisguth.shared.ResourceHelper;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
public class SunV1 extends RendererBase {
private static String TAG = "Sunlight";
private final float[] triangle_vertices_data = {
// X, Y, Z, U, V
1.0f, 0.0f, -1.0f, 1.0f, 0.0f,
0.0f, 0.0f, -0.5f, 0.0f, 0.0f,
1.0f, 1.0f, -1.0f, 1.0f, 1.0f,
0.0f, 1.0f, -1.0f, 0.0f, 1.0f
};
private final int horizontalResolution = 64;
private final int verticalResolution = 32;
private final int verticesCount = horizontalResolution * verticalResolution;
private final int indicesCount = horizontalResolution * 2 * (verticalResolution - 1);
private Program program;
private Program coronaProgram;
private Texture baseTexture;
private Texture noiseTexture;
private VertexBuffer sphereVertices;
private float[] MVP_matrix = new float[16];
private float[] P_matrix = new float[16];
private float[] M_matrix = new float[16];
private float[] V_matrix = new float[16];
public SunV1(Context context) {
super(context);
setEGLContextClientVersion(2);
setRenderer(this);
setRenderMode(RENDERMODE_CONTINUOUSLY);
float[] vertices = new float[verticesCount * 5];
short[] indices = new short[indicesCount];
int index = 0;
float radius = 1.0f;
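// Build a latitude/longitude grid of vertices (position XYZ + texture UV) over a unit sphere,
// then emit row-by-row indices suitable for GL_TRIANGLE_STRIP rendering.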
for (int j = 0; j < verticalResolution; ++j) {
double v = (double) j / (verticalResolution - 1);
double theta = v * Math.PI;
double sinTheta = Math.sin(theta);
double cosTheta = Math.cos(theta);
for (int i = 0; i < horizontalResolution; ++i) {
double u = (double) i / (horizontalResolution - 1);
double phi = 2.0f * u * Math.PI;
double sinPhi = Math.sin(phi);
double cosPhi = Math.cos(phi);
vertices[index + 0] = (float) (radius * sinTheta * cosPhi);
vertices[index + 1] = (float) (radius * sinTheta * sinPhi);
vertices[index + 2] = (float) (radius * cosTheta);
vertices[index + 3] = (float) (u);
vertices[index + 4] = (float) (v);
index += 5;
}
}
index = 0;
for (int j = 0; j < verticalResolution - 1; ++j) {
boolean dir = (j & 1) != 0;
if (dir || true) {
for (int i = 0; i < horizontalResolution; ++i) {
indices[index + 0] = (short) (j * horizontalResolution + i);
indices[index + 1] = (short) ((j + 1) * horizontalResolution + i);
index += 2;
}
} else {
for (int i = horizontalResolution - 1; i >= 0; --i) {
indices[index + 0] = (short) (j * horizontalResolution + i);
indices[index + 1] = (short) ((j + 1) * horizontalResolution + i);
index += 2;
}
}
}
sphereVertices = new VertexBuffer(vertices, indices, true);
}
@Override
public void onSurfaceCreated(GL10 unused, EGLConfig config) {
Log.e(TAG, "onSurfaceCreated");
ShaderManager.getSingletonObject().unloadAll();
ShaderManager.getSingletonObject().cleanUp();
TextureManager.getSingletonObject().unloadAll();
TextureManager.getSingletonObject().cleanUp();
loadResources();
if (program != null) {
program.load();
}
ShaderManager.getSingletonObject().unloadAllShaders();
Matrix.setLookAtM(V_matrix, 0, 0, 0, 2.0f, 0f, 0f, 0f, 0f, -1.0f, 0.0f);
}
private void loadResources() {
loadShaders();
loadTextures();
}
private void loadShaders() {
if (program != null && coronaProgram != null) {
return;
}
try {
ShaderManager shaderManager = ShaderManager.getSingletonObject();
Shader vertex = shaderManager.createVertexShader(ResourceHelper.loadRawString(openResource(R.raw.sun_vertex)));
Shader fragment = shaderManager.createFragmentShader(ResourceHelper.loadRawString(openResource(R.raw.sun_fragment)));
program = shaderManager.createShaderProgram(vertex, fragment);
vertex = shaderManager.createVertexShader(ResourceHelper.loadRawString(openResource(R.raw.sun_corona_vertex)));
fragment = shaderManager.createFragmentShader(ResourceHelper.loadRawString(openResource(R.raw.sun_corona_fragment)));
coronaProgram = shaderManager.createShaderProgram(vertex, fragment);
} catch (Exception ex) {
Log.e(TAG, "Unable to load shaders from resources " + ex.toString());
}
}
private void loadTextures() {
try {
TextureManager textureManager = TextureManager.getSingletonObject();
if (baseTexture == null) {
baseTexture = textureManager.createTexture(getResources(), R.raw.base_etc1, true, GLES20.GL_NEAREST, GLES20.GL_LINEAR, GLES20.GL_REPEAT, GLES20.GL_REPEAT);
}
if (noiseTexture == null) {
noiseTexture = textureManager.createTexture(getResources(), R.raw.noise_etc1, true, GLES20.GL_NEAREST, GLES20.GL_LINEAR, GLES20.GL_REPEAT, GLES20.GL_REPEAT);
}
} catch (Exception ex) {
Log.e(TAG, "Unable to load textures from resources " + ex.toString());
}
}
@Override
public void onSurfaceChanged(GL10 unused, int width, int height) {
Log.e(TAG, "onSurfaceChanged");
ShaderManager.getSingletonObject().cleanUp();
GLES20.glViewport(0, 0, width, height);
float scale = 0.1f;
float ratio = scale * width / height;
Matrix.frustumM(P_matrix, 0, -ratio, ratio, -scale, scale, 0.1f,
100.0f);
}
@Override
public void onDrawFrame(GL10 unused) {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if (program != null && baseTexture != null) {
if (!program.use() || !baseTexture.load() || !noiseTexture.load()) {
return;
}
float angle = getTimeDeltaByScale(600000L);
//Matrix.setIdentityM(M_matrix, 0);
Matrix.setRotateM(M_matrix, 0, 90, 1, 0, 0);
Matrix.rotateM(M_matrix, 0, 360 * angle, 0, 0, 1);
//Matrix.translateM(M_matrix, 0, 0, angle*10-5, 0);
//Matrix.translateM(M_matrix, 0, 0, 0, 1.0f);
Matrix.multiplyMM(MVP_matrix, 0, V_matrix, 0, M_matrix, 0);
Matrix.multiplyMM(MVP_matrix, 0, P_matrix, 0, MVP_matrix, 0);
baseTexture.bind(GLES20.GL_TEXTURE0, program, "sBaseTexture");
noiseTexture.bind(GLES20.GL_TEXTURE1, program, "sNoiseTexture");
sphereVertices.bind(program, "aPosition", "aTextureCoord");
GLES20.glUniformMatrix4fv(program.getUniformLocation("uMVPMatrix"), 1, false, MVP_matrix, 0);
float animationTime = getTimeDeltaByScale(790000L);
GLES20.glUniform1f(program.getUniformLocation("uTime"), animationTime);
float animationTime2 = getTimeDeltaByScale(669000L);
GLES20.glUniform1f(program.getUniformLocation("uTime2"), animationTime2);
float animationTime3 = getTimeDeltaByScale(637000L);
GLES20.glUniform1f(program.getUniformLocation("uTime3"), animationTime3);
GLES20.glEnable(GLES20.GL_CULL_FACE);
GLES20.glCullFace(GLES20.GL_BACK);
sphereVertices.draw(GLES20.GL_TRIANGLE_STRIP);
sphereVertices.unbind(program, "aPosition", "aTextureCoord");
if (coronaProgram != null && coronaProgram.use()) {
GLES20.glEnable(GLES20.GL_BLEND);
GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE);
Matrix.setRotateM(M_matrix, 0, 90, 1, 0, 0);
Matrix.rotateM(M_matrix, 0, 360 * getTimeDeltaByScale(400000L), 0, 0, 1);
float scale = 1.0f;
Matrix.scaleM(M_matrix, 0, scale, scale, scale);
Matrix.multiplyMM(MVP_matrix, 0, V_matrix, 0, M_matrix, 0);
Matrix.multiplyMM(MVP_matrix, 0, P_matrix, 0, MVP_matrix, 0);
baseTexture.bind(GLES20.GL_TEXTURE0, coronaProgram, "sBaseTexture");
noiseTexture.bind(GLES20.GL_TEXTURE1, coronaProgram, "sNoiseTexture");
sphereVertices.bind(coronaProgram, "aPosition", "aTextureCoord");
GLES20.glUniformMatrix4fv(coronaProgram.getUniformLocation("uMVPMatrix"), 1, false, MVP_matrix, 0);
GLES20.glUniform1f(coronaProgram.getUniformLocation("uTime"), animationTime);
GLES20.glUniform1f(coronaProgram.getUniformLocation("uTime2"), animationTime2);
GLES20.glUniform1f(coronaProgram.getUniformLocation("uTime3"), animationTime3);
float animationTime4 = getTimeDeltaByScale(4370000L);
GLES20.glUniform1f(coronaProgram.getUniformLocation("uTime4"), animationTime4);
GLES20.glUniform1f(coronaProgram.getUniformLocation("uLevel"), 0.5f);
sphereVertices.draw(GLES20.GL_TRIANGLE_STRIP);
sphereVertices.unbind(coronaProgram, "aPosition", "aTextureCoord");
}
GLES20.glDisable(GLES20.GL_CULL_FACE);
}
}
private float getTimeDeltaByScale(long scale) {
if (scale < 1)
return 0.0f;
long time = SystemClock.uptimeMillis() % scale;
return (float) ((int) time) / (float) scale;
}
}
|
{
"content_hash": "13bb3d9766682186ea5d51d90837abd2",
"timestamp": "",
"source": "github",
"line_count": 250,
"max_line_length": 173,
"avg_line_length": 40.044,
"alnum_prop": 0.5965438018180002,
"repo_name": "msoftware/sunlight",
"id": "d35c6d776d5356b07a3e17cecae55a02f9e8e3cf",
"size": "10146",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demo/src/com/ghisguth/demo/SunV1.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "GLSL",
"bytes": "9496"
},
{
"name": "Java",
"bytes": "235136"
}
],
"symlink_target": ""
}
|
var GuiPage_TvGuide = {
ItemData : null,
ChannelData : null,
selectedItem : 0,
topLeftItem : 0,
startParams : []
}
GuiPage_TvGuide.start = function(title,url,selectedItem,topLeftItem) {
//Save Start Params
this.startParams = [title,url];
//Reset Values
this.selectedItem = selectedItem;
this.topLeftItem = topLeftItem;
//Load Data
this.ChannelData = Server.getContent(url);
if (this.ChannelData.Items.length > 0) {
//Get Programs
var channelIDs = "";
for (var index = 0; index < this.ChannelData.Items.length; index++) {
if (index == this.ChannelData.Items.length-1) {
channelIDs += this.ChannelData.Items[index].Id;
} else {
channelIDs += this.ChannelData.Items[index].Id + ',';
}
}
//Sort Date - %3A is Colon
var d = new Date();
var maxStartTime = d.getUTCFullYear() + "-" + (d.getUTCMonth()+1) + "-" + d.getUTCDate() + "T"+ d.getUTCHours() +"%3A30%3A00.000Z";
var minEndTime = d.getUTCFullYear() + "-" + (d.getUTCMonth()+1) + "-" + d.getUTCDate() + "T"+ d.getUTCHours() +"%3A00%3A01.000Z";
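//Example (illustrative): for 9 June 2015 at 14:xx UTC this builds "2015-6-9T14%3A30%3A00.000Z" - the %3A keeps the colons URL-encoded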
var programURLs = Server.getServerAddr() + "/LiveTv/Programs?format=json&UserId=" + Server.getUserID() + "&MaxStartDate="+maxStartTime+"&MinEndDate="+minEndTime+"&channelIds=" + channelIDs;
alert (programURLs);
this.ItemData = Server.getContent(programURLs);
this.updateDisplayedItems();
//Update Selected Collection CSS
//this.updateSelectedItems();
//Set Focus for Key Events
document.getElementById("GuiPage_TvGuide").focus();
} else {
//Set message to user
document.getElementById("Counter").innerHTML = "";
document.getElementById("title").innerHTML = "Sorry";
document.getElementById("pageContent").className = "padding60";
document.getElementById("Content").innerHTML = "Huh.. Looks like I have no content to show you in this view I'm afraid";
//As no content focus on menu bar and null null means user can't return off the menu bar
GuiMainMenu.requested(null,null);
}
}
GuiPage_TvGuide.updateDisplayedItems = function() {
//Create Table
var d = new Date();
var programCount = 0;
var htmlToAdd = "<table id=guideTable><th>Programs</th><th colspan=3>" + d.getHours() + ":00</th><th colspan=3>" + d.getHours() + ":30</th>";
htmlToAdd += "<tr><td></td><td></td><td></td><td></td><td></td><td></td><td></td>";
for (var index = 0; index < this.ChannelData.Items.length; index++) {
htmlToAdd += "<tr><td colspan=2>"+this.ChannelData.Items[index].Name+"</td>";
while (programCount < this.ItemData.Items.length && this.ChannelData.Items[index].Id == this.ItemData.Items[programCount].ChannelId) {
htmlToAdd += "<td>"+this.ItemData.Items[programCount].Name+"</td>";
programCount++;
}
htmlToAdd += "</tr>";
}
htmlToAdd += "</table>";
document.getElementById("pageContent").innerHTML = htmlToAdd;
}
//Function sets CSS Properties so show which user is selected
GuiPage_TvGuide.updateSelectedItems = function () {
Support.updateSelectedNEW(this.ItemData.Items,this.selectedItem,this.topLeftItem,
Math.min(this.topLeftItem + this.getMaxDisplay(),this.ItemData.Items.length),"Series Selected","Series","");
}
GuiPage_TvGuide.keyDown = function() {
var keyCode = event.keyCode;
alert("Key pressed: " + keyCode);
if (document.getElementById("Notifications").style.visibility == "") {
document.getElementById("Notifications").style.visibility = "hidden";
document.getElementById("NotificationText").innerHTML = "";
widgetAPI.blockNavigation(event);
//Change keycode so it does nothing!
keyCode = "VOID";
}
switch(keyCode) {
//Need Logout Key
case tvKey.KEY_LEFT:
alert("LEFT");
this.processLeftKey();
break;
case tvKey.KEY_RIGHT:
alert("RIGHT");
this.processRightKey();
break;
case tvKey.KEY_UP:
alert("UP");
this.processUpKey();
break;
case tvKey.KEY_DOWN:
alert("DOWN");
this.processDownKey();
break;
case tvKey.KEY_PANEL_CH_UP:
case tvKey.KEY_CH_UP:
this.processChannelUpKey();
break;
case tvKey.KEY_PANEL_CH_DOWN:
case tvKey.KEY_CH_DOWN:
this.processChannelDownKey();
break;
case tvKey.KEY_RETURN:
alert("RETURN");
widgetAPI.blockNavigation(event);
Support.processReturnURLHistory();
break;
case tvKey.KEY_ENTER:
case tvKey.KEY_PANEL_ENTER:
alert("ENTER");
this.processSelectedItem();
break;
case tvKey.KEY_PLAY:
this.playSelectedItem();
break;
case tvKey.KEY_BLUE:
Support.logout();
break;
case tvKey.KEY_TOOLS:
widgetAPI.blockNavigation(event);
Support.updateURLHistory("GuiPage_TvGuide",this.startParams[0],this.startParams[1],null,null,this.selectedItem,this.topLeftItem,null);
GuiMainMenu.requested("GuiPage_TvGuide",this.ItemData.Items[this.selectedItem].Id);
break;
case tvKey.KEY_EXIT:
alert ("EXIT KEY");
widgetAPI.sendExitEvent();
break;
}
}
GuiPage_TvGuide.processSelectedItem = function(page,ItemData,startParams,selectedItem,topLeftItem) {
Support.processSelectedItem("GuiPage_TvGuide",this.ItemData,this.startParams,this.selectedItem,this.topLeftItem,null,null);
}
GuiPage_TvGuide.returnFromMusicPlayer = function() {
this.selectedItem = 0;
this.updateDisplayedItems();
this.updateSelectedItems();
}
|
{
"content_hash": "545d7d5757c72abf0576040412a17563",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 191,
"avg_line_length": 32.147239263803684,
"alnum_prop": 0.6904580152671755,
"repo_name": "ChessDragon136/MediaBrowser.SamsungUnofficial",
"id": "6d1f95c30e14de935e464a65bc4dce44329bf03d",
"size": "5240",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "MediaBrowser 3/app/javascript/GuiShelved/GuiPage_TvGuide.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "30567"
},
{
"name": "HTML",
"bytes": "18636"
},
{
"name": "JavaScript",
"bytes": "742136"
}
],
"symlink_target": ""
}
|
using System.Collections.ObjectModel;
using System.Linq.Expressions;
using System.Reflection;
using NHibernate.Hql.Ast;
using NHibernate.Linq;
using NHibernate.Linq.Functions;
using NHibernate.Linq.Visitors;
namespace NHibernate.HierarchyId.Linq
{
public class GetReparentedValueGenerator : BaseHqlGeneratorForMethod
{
public GetReparentedValueGenerator()
{
SupportedMethods = new[]
{
ReflectionHelper.GetMethodDefinition(()=> default(string).GetReparentedValue(default(string), default(string)))
};
}
public override HqlTreeNode BuildHql(MethodInfo method, Expression targetObject, ReadOnlyCollection<Expression> arguments, HqlTreeBuilder treeBuilder, IHqlExpressionVisitor visitor)
{
var arg = visitor.Visit(arguments[0]).AsExpression();
var c1 = visitor.Visit(arguments[1]).AsExpression();
var c2 = visitor.Visit(arguments[2]).AsExpression();
var mt = treeBuilder.MethodCall("hid_GetReparentedValue", arg, c1, c2);
return mt;
}
}
}
|
{
"content_hash": "6d8eb8cdb49902e40cdc877439839469",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 189,
"avg_line_length": 34.75,
"alnum_prop": 0.6807553956834532,
"repo_name": "RaveNoX/NHibernate.HierarchyId",
"id": "01660f24edf7ef0b6697e5fff1a60568180097c7",
"size": "1114",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NHibernate.HierarchyId/Linq/GetReparentedValueGenerator.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "55294"
}
],
"symlink_target": ""
}
|
package org.lemsml.jlems.viz.plot;
public interface XYLocation {
double getX();
double getY();
}
|
{
"content_hash": "cd00ca0cbf19c6562a40e4a75d26ca59",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 34,
"avg_line_length": 10.181818181818182,
"alnum_prop": 0.6517857142857143,
"repo_name": "LEMS/jLEMS",
"id": "0cafc280d5b13fc2b3c58b70dca94e11167b2d69",
"size": "112",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/org/lemsml/jlems/viz/plot/XYLocation.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "236"
},
{
"name": "CSS",
"bytes": "3302"
},
{
"name": "HTML",
"bytes": "469333"
},
{
"name": "Java",
"bytes": "1321342"
},
{
"name": "Shell",
"bytes": "249"
},
{
"name": "XSLT",
"bytes": "88542"
}
],
"symlink_target": ""
}
|
@extends('layouts.master')
@section('content')
<section id="breadcrumb">
<div class="row">
<div class="large-12 columns">
<nav aria-label="You are here:" role="navigation">
<ul class="breadcrumbs">
<li><i class="fa fa-home"></i><a href="#">Home</a></li>
<li>
<span class="show-for-sr">Current: </span> Reset Password
</li>
</ul>
</nav>
</div>
</div>
</section>
<section class="registration">
<div class="row secBg">
<div class="large-12 columns">
<div class="login-register-content">
<div class="row collapse borderBottom">
<div class="medium-6 large-centered medium-centered">
<div class="page-heading text-center">
<h3>Reset Password</h3>
</div>
</div>
</div>
<div class="row" data-equalizer="osmss8-equalizer" data-equalize-on="medium" id="test-eq" data-resize="hli7x7-eq" data-events="resize">
<div class="large-4 medium-6 large-centered medium-centered columns">
<div class="register-form">
@if(session()->has('status'))
<div data-abide-error class="success callout">
<p><i class="fa fa-info"></i> {{ session()->get('status') }} </p>
</div>
@endif
<form method="post" data-abide="pyi7za-abide" novalidate="" action="{{ url('/password/reset') }}">
{!! csrf_field() !!}
<input type="hidden" name="token" value="{{ $token }}">
<div class="input-group">
<span class="input-group-label"><i class="fa fa-envelope"></i></span>
<input type="email" class="{{ $errors->has('email') ? 'is-invalid-input' : '' }}"
value="{{ old('email') }}" name="email" id="email"
placeholder="Enter your email" required="">
<span class="form-error">Email is required</span>
@if ($errors->has('email'))
<span class="form-error is-visible">{{ $errors->first('email') }}</span>
@endif
</div>
<div class="input-group ">
<span class="input-group-label"><i class="fa fa-lock"></i></span>
<input class="" type="password" class="{{ $errors->has('password') ? 'is-invalid-input' : '' }}" name="password" placeholder="Enter your password" id="password" required>
<span class="form-error">Password is required</span>
@if ($errors->has('password'))
<span class="form-error is-visible">{{ $errors->first('password') }}</span>
@endif
</div>
<div class="input-group ">
<span class="input-group-label"><i class="fa fa-lock"></i></span>
<input class="" type="password" class="{{ $errors->has('password_confirmation') ? 'is-invalid-input' : '' }}" name="password_confirmation" placeholder="Re-type your password" id="password_confirmation">
<span class="form-error">Re-cofirm Password</span>
@if ($errors->has('password_confirmation'))
<span class="form-error is-visible">{{ $errors->first('password_confirmation') }}</span>
@endif
</div>
<button class="button expanded" type="submit" name="submit">reset Now</button>
</form>
</div>
</div>
</div>
</div>
</div>
</div>
</section>
@endsection
|
{
"content_hash": "cdd491a7d90e63f054c54be348c2d9b8",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 242,
"avg_line_length": 56.80952380952381,
"alnum_prop": 0.3876781223805532,
"repo_name": "iamraphson/DEV-TV",
"id": "475f6978ea12b41811c6cd003f54f6cc125a0f0b",
"size": "4772",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "resources/views/auth/passwords/reset.blade.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "582"
},
{
"name": "HTML",
"bytes": "221787"
},
{
"name": "JavaScript",
"bytes": "1511888"
},
{
"name": "PHP",
"bytes": "147657"
}
],
"symlink_target": ""
}
|
[install](#install) -
[documentation](#documentation) -
[license](#license)
[which][m-which] the heck directions for:
- Was the script run from a bin?
- Is there some config file associated with it?
- What are the main and/or global `node_modules` directories?
- Get me some fields of the local and global packages
## usage
```javascript
var whech = require('whech');
whech.ext = '.js';
whech('which', function(err, spec){
if(err) throw err;
console.log(spec);
// =>
// { name: 'which',
// which: '/usr/bin/which',
// runFromBin: false,
// configFile: [Error: not found],
// localDir: '/home/jcm/code/whech/node_modules',
// globalDir: '/home/jcm/npm/lib/node_modules',
// globalPackage: { [Error: Cannot find module] code: 'MODULE_NOT_FOUND' },
// localPackage: { version: '1.0.5' } }
});
```
### documentation
The `module.exports` is a function
```js
var whech = require('whech')
```
which is asynchronous. For the synchronous version, use `whech.sync`.
### whech
```js
function whech(string|object spec, function callback)
```
_arguments_
- `spec` type string or object with a name property that is a string
- `callback` type function to be called with the last `error` and `spec`
Errors are attached to `spec` as properties instead of being thrown.
_spec properties_
- `ext` type string, the extension of the configFile
- `name` type string, the name given as a string or object property
- [`which`][m-which] type string, first instance of an executable in the PATH
- `runFromBin` type boolean, whether or not `process.argv` contains `which`
- `configFile` type string, `configFile` if given
- `localDir` type string, the local dir where node_modules are installed
- `globalDir` type string, the global dir where node_modules are installed
- `localPackage`: `require(path.join(name, 'package'))`
- `globalPackage`: `require(path.join(localDir, name, 'package'))`
_defaults_
- `configFile` will default to `name + 'file' + (spec.ext || '.js')`
### whech.sync
```js
function whechSync(string|object spec)
```
_arguments_
- `spec`, type string|object, the same as the async version
_returns_
- `spec` with same properties listed above
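A minimal sketch of the sync version, assuming it returns the same `spec` shape as the async example above; the `gulp` name and `.js` extension here are only illustrative:
```js
var whech = require('whech');
// With ext '.js' the configFile defaults to name + 'file' + '.js', i.e. "gulpfile.js".
var spec = whech.sync({ name: 'gulp', ext: '.js' });
// Errors are attached to spec as properties instead of being thrown,
// so the result can be inspected directly.
console.log(spec.name, spec.which, spec.configFile);
```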
## install
With [npm][x-npm]
```sh
npm install whech
```
### test
```sh
npm test
```
### todo
- [ ] More tests
- [ ] Review and see if there is something missing
### license
[](
http://opensource.org/licenses/MIT
)
[m-which]: http://www.npmjs.com/which
[m-whech]: http://www.npmjs.com/whech
[x-npm]: http://www.npmjs.com
[x-travis]: https://travis-ci.org/stringparser/node-whech/builds
[x-license]: http://opensource.org/licenses/MIT
[b-build]: http://img.shields.io/travis/stringparser/node-whech/master.svg?style=flat-square
[b-gitter]: https://badges.gitter.im/Join%20Chat.svg
[b-version]: http://img.shields.io/npm/v/whech.svg?style=flat-square
|
{
"content_hash": "c26291bc6a681e41cfe4f00c6109c9b9",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 92,
"avg_line_length": 25.62280701754386,
"alnum_prop": 0.6864087641218761,
"repo_name": "stringparser/node-whech",
"id": "cb32ded4ec1f1d5ab8ea13f06d82ee6cafaa187a",
"size": "2996",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "readme.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "4298"
}
],
"symlink_target": ""
}
|
package dns
import "fmt"
// Version is current version of this library.
var Version = V{1, 1, 17}
// V holds the version of this library.
type V struct {
Major, Minor, Patch int
}
func (v V) String() string {
return fmt.Sprintf("%d.%d.%d", v.Major, v.Minor, v.Patch)
}
|
{
"content_hash": "690df76703003ed89520bbd5e286e275",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 58,
"avg_line_length": 18.333333333333332,
"alnum_prop": 0.6654545454545454,
"repo_name": "prateekpandey14/maya",
"id": "104af91b3522e3b5c42b0cb9b20508db14a527e2",
"size": "275",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vendor/github.com/miekg/dns/version.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "11063"
},
{
"name": "Go",
"bytes": "5532239"
},
{
"name": "HCL",
"bytes": "409"
},
{
"name": "Makefile",
"bytes": "30847"
},
{
"name": "Shell",
"bytes": "46376"
}
],
"symlink_target": ""
}
|
<?php
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
namespace Google\Protobuf\Internal;
class GPBWire
{
const TAG_TYPE_BITS = 3;
const WIRETYPE_VARINT = 0;
const WIRETYPE_FIXED64 = 1;
const WIRETYPE_LENGTH_DELIMITED = 2;
const WIRETYPE_START_GROUP = 3;
const WIRETYPE_END_GROUP = 4;
const WIRETYPE_FIXED32 = 5;
const UNKNOWN = 0;
const NORMAL_FORMAT = 1;
const PACKED_FORMAT = 2;
public static function getTagFieldNumber($tag)
{
// We have to mask because PHP has no arithmetic shift.
return ($tag >> self::TAG_TYPE_BITS) & 0x1fffffff;
}
public static function getTagWireType($tag)
{
return $tag & 0x7;
}
public static function getWireType($type)
{
switch ($type) {
case GPBType::FLOAT:
case GPBType::FIXED32:
case GPBType::SFIXED32:
return self::WIRETYPE_FIXED32;
case GPBType::DOUBLE:
case GPBType::FIXED64:
case GPBType::SFIXED64:
return self::WIRETYPE_FIXED64;
case GPBType::UINT32:
case GPBType::UINT64:
case GPBType::INT32:
case GPBType::INT64:
case GPBType::SINT32:
case GPBType::SINT64:
case GPBType::ENUM:
case GPBType::BOOL:
return self::WIRETYPE_VARINT;
case GPBType::STRING:
case GPBType::BYTES:
case GPBType::MESSAGE:
return self::WIRETYPE_LENGTH_DELIMITED;
case GPBType::GROUP:
user_error("Unsupported type.");
return 0;
default:
user_error("Unsupported type.");
return 0;
}
}
// ZigZag Transform: Encodes signed integers so that they can be effectively
// used with varint encoding.
//
// varint operates on unsigned integers, encoding smaller numbers into fewer
// bytes. If you try to use it on a signed integer, it will treat this
// number as a very large unsigned integer, which means that even small
// signed numbers like -1 will take the maximum number of bytes (10) to
// encode. zigZagEncode() maps signed integers to unsigned in such a way
// that those with a small absolute value will have smaller encoded values,
// making them appropriate for encoding using varint.
//
// int32 -> uint32
// -------------------------
// 0 -> 0
// -1 -> 1
// 1 -> 2
// -2 -> 3
// ... -> ...
// 2147483647 -> 4294967294
// -2147483648 -> 4294967295
//
// >> encode >>
// << decode <<
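// Worked example (illustrative): zigZagEncode32(-3) returns 5 and
// zigZagEncode32(3) returns 6; zigZagDecode32() maps them back to -3 and 3.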
public static function zigZagEncode32($int32)
{
if (PHP_INT_SIZE == 8) {
$trim_int32 = $int32 & 0xFFFFFFFF;
return (($trim_int32 << 1) ^ ($int32 << 32 >> 63)) & 0xFFFFFFFF;
} else {
return ($int32 << 1) ^ ($int32 >> 31);
}
}
public static function zigZagDecode32($uint32)
{
// Fill high 32 bits.
if (PHP_INT_SIZE === 8) {
$uint32 |= ($uint32 & 0xFFFFFFFF);
}
$int32 = (($uint32 >> 1) & 0x7FFFFFFF) ^ (-($uint32 & 1));
return $int32;
}
public static function zigZagEncode64($int64)
{
if (PHP_INT_SIZE == 4) {
if (bccomp($int64, 0) >= 0) {
return bcmul($int64, 2);
} else {
return bcsub(bcmul(bcsub(0, $int64), 2), 1);
}
} else {
return ((int)$int64 << 1) ^ ((int)$int64 >> 63);
}
}
public static function zigZagDecode64($uint64)
{
if (PHP_INT_SIZE == 4) {
if (bcmod($uint64, 2) == 0) {
return bcdiv($uint64, 2, 0);
} else {
return bcsub(0, bcdiv(bcadd($uint64, 1), 2, 0));
}
} else {
return (($uint64 >> 1) & 0x7FFFFFFFFFFFFFFF) ^ (-($uint64 & 1));
}
}
public static function readInt32(&$input, &$value)
{
return $input->readVarint32($value);
}
public static function readInt64(&$input, &$value)
{
$success = $input->readVarint64($value);
if (PHP_INT_SIZE == 4 && bccomp($value, "9223372036854775807") > 0) {
$value = bcsub($value, "18446744073709551616");
}
return $success;
}
public static function readUint32(&$input, &$value)
{
return self::readInt32($input, $value);
}
public static function readUint64(&$input, &$value)
{
return self::readInt64($input, $value);
}
public static function readSint32(&$input, &$value)
{
if (!$input->readVarint32($value)) {
return false;
}
$value = GPBWire::zigZagDecode32($value);
return true;
}
public static function readSint64(&$input, &$value)
{
if (!$input->readVarint64($value)) {
return false;
}
$value = GPBWire::zigZagDecode64($value);
return true;
}
public static function readFixed32(&$input, &$value)
{
return $input->readLittleEndian32($value);
}
public static function readFixed64(&$input, &$value)
{
return $input->readLittleEndian64($value);
}
public static function readSfixed32(&$input, &$value)
{
if (!self::readFixed32($input, $value)) {
return false;
}
if (PHP_INT_SIZE === 8) {
$value |= (-($value >> 31) << 32);
}
return true;
}
public static function readSfixed64(&$input, &$value)
{
$success = $input->readLittleEndian64($value);
if (PHP_INT_SIZE == 4 && bccomp($value, "9223372036854775807") > 0) {
$value = bcsub($value, "18446744073709551616");
}
return $success;
}
public static function readFloat(&$input, &$value)
{
$data = null;
if (!$input->readRaw(4, $data)) {
return false;
}
$value = unpack('f', $data)[1];
return true;
}
public static function readDouble(&$input, &$value)
{
$data = null;
if (!$input->readRaw(8, $data)) {
return false;
}
$value = unpack('d', $data)[1];
return true;
}
public static function readBool(&$input, &$value)
{
if (!$input->readVarint64($value)) {
return false;
}
if ($value == 0) {
$value = false;
} else {
$value = true;
}
return true;
}
public static function readString(&$input, &$value)
{
$length = 0;
return $input->readVarintSizeAsInt($length) && $input->readRaw($length, $value);
}
public static function readMessage(&$input, &$message)
{
$length = 0;
if (!$input->readVarintSizeAsInt($length)) {
return false;
}
$old_limit = 0;
$recursion_limit = 0;
$input->incrementRecursionDepthAndPushLimit(
$length,
$old_limit,
$recursion_limit);
if ($recursion_limit < 0 || !$message->parseFromStream($input)) {
return false;
}
return $input->decrementRecursionDepthAndPopLimit($old_limit);
}
public static function writeTag(&$output, $tag)
{
return $output->writeTag($tag);
}
public static function writeInt32(&$output, $value)
{
return $output->writeVarint32($value, false);
}
public static function writeInt64(&$output, $value)
{
return $output->writeVarint64($value);
}
public static function writeUint32(&$output, $value)
{
return $output->writeVarint32($value, true);
}
public static function writeUint64(&$output, $value)
{
return $output->writeVarint64($value);
}
public static function writeSint32(&$output, $value)
{
$value = GPBWire::zigZagEncode32($value);
return $output->writeVarint32($value, true);
}
public static function writeSint64(&$output, $value)
{
$value = GPBWire::zigZagEncode64($value);
return $output->writeVarint64($value);
}
public static function writeFixed32(&$output, $value)
{
return $output->writeLittleEndian32($value);
}
public static function writeFixed64(&$output, $value)
{
return $output->writeLittleEndian64($value);
}
public static function writeSfixed32(&$output, $value)
{
return $output->writeLittleEndian32($value);
}
public static function writeSfixed64(&$output, $value)
{
return $output->writeLittleEndian64($value);
}
public static function writeBool(&$output, $value)
{
if ($value) {
return $output->writeVarint32(1, true);
} else {
return $output->writeVarint32(0, true);
}
}
public static function writeFloat(&$output, $value)
{
$data = pack("f", $value);
return $output->writeRaw($data, 4);
}
public static function writeDouble(&$output, $value)
{
$data = pack("d", $value);
return $output->writeRaw($data, 8);
}
public static function writeString(&$output, $value)
{
return self::writeBytes($output, $value);
}
public static function writeBytes(&$output, $value)
{
$size = strlen($value);
if (!$output->writeVarint32($size, true)) {
return false;
}
return $output->writeRaw($value, $size);
}
public static function writeMessage(&$output, $value)
{
$size = $value->byteSize();
if (!$output->writeVarint32($size, true)) {
return false;
}
return $value->serializeToStream($output);
}
public static function makeTag($number, $type)
{
return ($number << 3) | self::getWireType($type);
}
public static function tagSize($field)
{
$tag = self::makeTag($field->getNumber(), $field->getType());
return self::varint32Size($tag);
}
public static function varint32Size($value, $sign_extended = false)
{
if ($value < 0) {
if ($sign_extended) {
return 10;
} else {
return 5;
}
}
if ($value < (1 << 7)) {
return 1;
}
if ($value < (1 << 14)) {
return 2;
}
if ($value < (1 << 21)) {
return 3;
}
if ($value < (1 << 28)) {
return 4;
}
return 5;
}
public static function sint32Size($value)
{
$value = self::zigZagEncode32($value);
return self::varint32Size($value);
}
public static function sint64Size($value)
{
$value = self::zigZagEncode64($value);
return self::varint64Size($value);
}
public static function varint64Size($value)
{
if (PHP_INT_SIZE == 4) {
if (bccomp($value, 0) < 0 ||
bccomp($value, "9223372036854775807") > 0) {
return 10;
}
if (bccomp($value, 1 << 7) < 0) {
return 1;
}
if (bccomp($value, 1 << 14) < 0) {
return 2;
}
if (bccomp($value, 1 << 21) < 0) {
return 3;
}
if (bccomp($value, 1 << 28) < 0) {
return 4;
}
if (bccomp($value, '34359738368') < 0) {
return 5;
}
if (bccomp($value, '4398046511104') < 0) {
return 6;
}
if (bccomp($value, '562949953421312') < 0) {
return 7;
}
if (bccomp($value, '72057594037927936') < 0) {
return 8;
}
return 9;
} else {
if ($value < 0) {
return 10;
}
if ($value < (1 << 7)) {
return 1;
}
if ($value < (1 << 14)) {
return 2;
}
if ($value < (1 << 21)) {
return 3;
}
if ($value < (1 << 28)) {
return 4;
}
if ($value < (1 << 35)) {
return 5;
}
if ($value < (1 << 42)) {
return 6;
}
if ($value < (1 << 49)) {
return 7;
}
if ($value < (1 << 56)) {
return 8;
}
return 9;
}
}
public static function serializeFieldToStream(
$value,
$field,
$need_tag,
&$output)
{
if ($need_tag) {
if (!GPBWire::writeTag(
$output,
self::makeTag(
$field->getNumber(),
$field->getType()))) {
return false;
}
}
switch ($field->getType()) {
case GPBType::DOUBLE:
if (!GPBWire::writeDouble($output, $value)) {
return false;
}
break;
case GPBType::FLOAT:
if (!GPBWire::writeFloat($output, $value)) {
return false;
}
break;
case GPBType::INT64:
if (!GPBWire::writeInt64($output, $value)) {
return false;
}
break;
case GPBType::UINT64:
if (!GPBWire::writeUint64($output, $value)) {
return false;
}
break;
case GPBType::INT32:
if (!GPBWire::writeInt32($output, $value)) {
return false;
}
break;
case GPBType::FIXED32:
if (!GPBWire::writeFixed32($output, $value)) {
return false;
}
break;
case GPBType::FIXED64:
if (!GPBWire::writeFixed64($output, $value)) {
return false;
}
break;
case GPBType::BOOL:
if (!GPBWire::writeBool($output, $value)) {
return false;
}
break;
case GPBType::STRING:
if (!GPBWire::writeString($output, $value)) {
return false;
}
break;
// case GPBType::GROUP:
// echo "GROUP\xA";
// trigger_error("Not implemented.", E_ERROR);
// break;
case GPBType::MESSAGE:
if (!GPBWire::writeMessage($output, $value)) {
return false;
}
break;
case GPBType::BYTES:
if (!GPBWire::writeBytes($output, $value)) {
return false;
}
break;
case GPBType::UINT32:
if (PHP_INT_SIZE === 8 && $value < 0) {
$value += 4294967296;
}
if (!GPBWire::writeUint32($output, $value)) {
return false;
}
break;
case GPBType::ENUM:
if (!GPBWire::writeInt32($output, $value)) {
return false;
}
break;
case GPBType::SFIXED32:
if (!GPBWire::writeSfixed32($output, $value)) {
return false;
}
break;
case GPBType::SFIXED64:
if (!GPBWire::writeSfixed64($output, $value)) {
return false;
}
break;
case GPBType::SINT32:
if (!GPBWire::writeSint32($output, $value)) {
return false;
}
break;
case GPBType::SINT64:
if (!GPBWire::writeSint64($output, $value)) {
return false;
}
break;
default:
user_error("Unsupported type.");
return false;
}
return true;
}
}
|
{
"content_hash": "d087fe5230de6997d03df5830c695768",
"timestamp": "",
"source": "github",
"line_count": 622,
"max_line_length": 88,
"avg_line_length": 29.170418006430868,
"alnum_prop": 0.49559082892416223,
"repo_name": "grpc/grpc-ios",
"id": "034f5df92edcf40acaf5520486c06165f639de13",
"size": "18144",
"binary": false,
"copies": "13",
"ref": "refs/heads/main",
"path": "native/third_party/protobuf/php/src/Google/Protobuf/Internal/GPBWire.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "5444"
},
{
"name": "Batchfile",
"bytes": "38831"
},
{
"name": "C",
"bytes": "1342403"
},
{
"name": "C#",
"bytes": "111357"
},
{
"name": "C++",
"bytes": "11936431"
},
{
"name": "CMake",
"bytes": "34261"
},
{
"name": "CSS",
"bytes": "1579"
},
{
"name": "Cython",
"bytes": "258768"
},
{
"name": "Dockerfile",
"bytes": "185143"
},
{
"name": "Go",
"bytes": "34794"
},
{
"name": "HTML",
"bytes": "14"
},
{
"name": "Java",
"bytes": "22550"
},
{
"name": "JavaScript",
"bytes": "89695"
},
{
"name": "Objective-C",
"bytes": "770017"
},
{
"name": "Objective-C++",
"bytes": "83300"
},
{
"name": "PHP",
"bytes": "517157"
},
{
"name": "PowerShell",
"bytes": "5008"
},
{
"name": "Python",
"bytes": "4064457"
},
{
"name": "Ruby",
"bytes": "715896"
},
{
"name": "Shell",
"bytes": "781923"
},
{
"name": "Starlark",
"bytes": "849400"
},
{
"name": "Swift",
"bytes": "13168"
},
{
"name": "XSLT",
"bytes": "9846"
}
],
"symlink_target": ""
}
|
"""Module browser.
XXX TO DO:
- reparse when source changed (maybe just a button would be OK?)
(or recheck on window popup)
- add popup menu with more options (e.g. doc strings, base classes, imports)
- add base classes to class browser tree
- finish removing limitation to x.py files (ModuleBrowserTreeItem)
"""
import os
import pyclbr
import sys
from idlelib.config import idleConf
from idlelib import pyshell
from idlelib.tree import TreeNode, TreeItem, ScrolledCanvas
from idlelib.window import ListedToplevel
file_open = None # Method...Item and Class...Item use this.
# Normally pyshell.flist.open, but there is no pyshell.flist for htest.
def transform_children(child_dict, modname=None):
"""Transform a child dictionary to an ordered sequence of objects.
The dictionary maps names to pyclbr information objects.
Filter out imported objects.
Augment class names with bases.
The insertion order of the dictionary is assumed to have been in line
number order, so sorting is not necessary.
The current tree only calls this once per child_dict as it saves
TreeItems once created. A future tree and tests might violate this,
so a check prevents multiple in-place augmentations.
"""
obs = [] # Use list since values should already be sorted.
for key, obj in child_dict.items():
if modname is None or obj.module == modname:
if hasattr(obj, 'super') and obj.super and obj.name == key:
# If obj.name != key, it has already been suffixed.
supers = []
for sup in obj.super:
if type(sup) is type(''):
sname = sup
else:
sname = sup.name
if sup.module != obj.module:
sname = f'{sup.module}.{sname}'
supers.append(sname)
obj.name += '({})'.format(', '.join(supers))
obs.append(obj)
return obs
class ModuleBrowser:
"""Browse module classes and functions in IDLE.
"""
# This class is also the base class for pathbrowser.PathBrowser.
# Init and close are inherited, other methods are overridden.
# PathBrowser.__init__ does not call __init__ below.
def __init__(self, master, path, *, _htest=False, _utest=False):
"""Create a window for browsing a module's structure.
Args:
master: parent for widgets.
path: full path of file to browse.
_htest - bool; change box location when running htest.
_utest - bool; suppress contents when running unittest.
Global variables:
file_open: Function used for opening a file.
Instance variables:
name: Module name.
file: Full path and module with .py extension. Used in
creating ModuleBrowserTreeItem as the rootnode for
the tree and subsequently in the children.
"""
self.master = master
self.path = path
self._htest = _htest
self._utest = _utest
self.init()
def close(self, event=None):
"Dismiss the window and the tree nodes."
self.top.destroy()
self.node.destroy()
def init(self):
"Create browser tkinter widgets, including the tree."
global file_open
root = self.master
flist = (pyshell.flist if not (self._htest or self._utest)
else pyshell.PyShellFileList(root))
file_open = flist.open
pyclbr._modules.clear()
# create top
self.top = top = ListedToplevel(root)
top.protocol("WM_DELETE_WINDOW", self.close)
top.bind("<Escape>", self.close)
if self._htest: # place dialog below parent if running htest
top.geometry("+%d+%d" %
(root.winfo_rootx(), root.winfo_rooty() + 200))
self.settitle()
top.focus_set()
# create scrolled canvas
theme = idleConf.CurrentTheme()
background = idleConf.GetHighlight(theme, 'normal')['background']
sc = ScrolledCanvas(top, bg=background, highlightthickness=0,
takefocus=1)
sc.frame.pack(expand=1, fill="both")
item = self.rootnode()
self.node = node = TreeNode(sc.canvas, None, item)
if not self._utest:
node.update()
node.expand()
def settitle(self):
"Set the window title."
self.top.wm_title("Module Browser - " + os.path.basename(self.path))
self.top.wm_iconname("Module Browser")
def rootnode(self):
"Return a ModuleBrowserTreeItem as the root of the tree."
return ModuleBrowserTreeItem(self.path)
class ModuleBrowserTreeItem(TreeItem):
"""Browser tree for Python module.
Uses TreeItem as the basis for the structure of the tree.
Used by both browsers.
"""
def __init__(self, file):
"""Create a TreeItem for the file.
Args:
file: Full path and module name.
"""
self.file = file
def GetText(self):
"Return the module name as the text string to display."
return os.path.basename(self.file)
def GetIconName(self):
"Return the name of the icon to display."
return "python"
def GetSubList(self):
"Return ChildBrowserTreeItems for children."
return [ChildBrowserTreeItem(obj) for obj in self.listchildren()]
def OnDoubleClick(self):
"Open a module in an editor window when double clicked."
if os.path.normcase(self.file[-3:]) != ".py":
return
if not os.path.exists(self.file):
return
file_open(self.file)
def IsExpandable(self):
"Return True if Python (.py) file."
return os.path.normcase(self.file[-3:]) == ".py"
def listchildren(self):
"Return sequenced classes and functions in the module."
dir, base = os.path.split(self.file)
name, ext = os.path.splitext(base)
if os.path.normcase(ext) != ".py":
return []
try:
tree = pyclbr.readmodule_ex(name, [dir] + sys.path)
except ImportError:
return []
return transform_children(tree, name)
class ChildBrowserTreeItem(TreeItem):
"""Browser tree for child nodes within the module.
Uses TreeItem as the basis for the structure of the tree.
"""
def __init__(self, obj):
"Create a TreeItem for a pyclbr class/function object."
self.obj = obj
self.name = obj.name
self.isfunction = isinstance(obj, pyclbr.Function)
def GetText(self):
"Return the name of the function/class to display."
name = self.name
if self.isfunction:
return "def " + name + "(...)"
else:
return "class " + name
def GetIconName(self):
"Return the name of the icon to display."
if self.isfunction:
return "python"
else:
return "folder"
def IsExpandable(self):
"Return True if self.obj has nested objects."
return self.obj.children != {}
def GetSubList(self):
"Return ChildBrowserTreeItems for children."
return [ChildBrowserTreeItem(obj)
for obj in transform_children(self.obj.children)]
def OnDoubleClick(self):
"Open module with file_open and position to lineno."
try:
edit = file_open(self.obj.file)
edit.gotoline(self.obj.lineno)
except (OSError, AttributeError):
pass
def _module_browser(parent): # htest #
if len(sys.argv) > 1: # If pass file on command line.
file = sys.argv[1]
else:
file = __file__
# Add nested objects for htest.
class Nested_in_func(TreeNode):
def nested_in_class(): pass
def closure():
class Nested_in_closure: pass
ModuleBrowser(parent, file, _htest=True)
if __name__ == "__main__":
if len(sys.argv) == 1: # If pass file on command line, unittest fails.
from unittest import main
main('idlelib.idle_test.test_browser', verbosity=2, exit=False)
from idlelib.idle_test.htest import run
run(_module_browser)
|
{
"content_hash": "75bfafd196ee8de0c4dbde1d7d0a69a5",
"timestamp": "",
"source": "github",
"line_count": 249,
"max_line_length": 76,
"avg_line_length": 33.389558232931726,
"alnum_prop": 0.6025980274236228,
"repo_name": "batermj/algorithm-challenger",
"id": "3c3a53a6599a79282c0141e157712ae71cbc8c44",
"size": "8314",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "code-analysis/programming_anguage/python/source_codes/Python3.8.0/Python-3.8.0/Lib/idlelib/browser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "655185"
},
{
"name": "Batchfile",
"bytes": "127416"
},
{
"name": "C",
"bytes": "33127630"
},
{
"name": "C++",
"bytes": "1364796"
},
{
"name": "CSS",
"bytes": "3163"
},
{
"name": "Common Lisp",
"bytes": "48962"
},
{
"name": "DIGITAL Command Language",
"bytes": "26402"
},
{
"name": "DTrace",
"bytes": "2196"
},
{
"name": "Go",
"bytes": "26248"
},
{
"name": "HTML",
"bytes": "385719"
},
{
"name": "Haskell",
"bytes": "33612"
},
{
"name": "Java",
"bytes": "1084"
},
{
"name": "JavaScript",
"bytes": "20754"
},
{
"name": "M4",
"bytes": "403992"
},
{
"name": "Makefile",
"bytes": "238185"
},
{
"name": "Objective-C",
"bytes": "4934684"
},
{
"name": "PHP",
"bytes": "3513"
},
{
"name": "PLSQL",
"bytes": "45772"
},
{
"name": "Perl",
"bytes": "649"
},
{
"name": "PostScript",
"bytes": "27606"
},
{
"name": "PowerShell",
"bytes": "21737"
},
{
"name": "Python",
"bytes": "55270625"
},
{
"name": "R",
"bytes": "29951"
},
{
"name": "Rich Text Format",
"bytes": "14551"
},
{
"name": "Roff",
"bytes": "292490"
},
{
"name": "Ruby",
"bytes": "519"
},
{
"name": "Scala",
"bytes": "846446"
},
{
"name": "Shell",
"bytes": "491113"
},
{
"name": "Swift",
"bytes": "881"
},
{
"name": "TeX",
"bytes": "337654"
},
{
"name": "VBScript",
"bytes": "140"
},
{
"name": "XSLT",
"bytes": "153"
}
],
"symlink_target": ""
}
|
package be.yurimoens.runemate.ccooker.task;
import com.runemate.game.api.hybrid.local.hud.interfaces.Interfaces;
import com.runemate.game.api.hybrid.local.hud.interfaces.Inventory;
import com.runemate.game.api.script.framework.task.Task;
public class Cook extends Task {
private final int[] RAW_FOOD = { 377, 383 };
public Cook() {
add(new ClickPortable(), new HandleCookingInterface());
}
@Override
public boolean validate() {
return (!Inventory.getItems(RAW_FOOD).isEmpty()
&& Interfaces.getAt(1251, 0) != null);
}
@Override
public void execute() {
getChildren().stream().forEach(task -> {
if (task.validate()) task.execute();
});
}
}
|
{
"content_hash": "06fa276553d81e98a14ce53462326001",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 68,
"avg_line_length": 27.37037037037037,
"alnum_prop": 0.6427604871447903,
"repo_name": "yuri-moens/runemate-scripts",
"id": "d1983856887dab0f16ddfb9e1336705166af2bd0",
"size": "739",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "be/yurimoens/runemate/ccooker/task/Cook.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "226205"
}
],
"symlink_target": ""
}
|
/**
* @license Highcharts JS v9.2.2 (2021-08-24)
* @module highcharts/modules/bullet
* @requires highcharts
*
* Bullet graph series type for Highcharts
*
* (c) 2010-2021 Kacper Madej
*
* License: www.highcharts.com/license
*/
'use strict';
import '../../Series/Bullet/BulletSeries.js';
|
{
"content_hash": "6edba9b349151b64cab15f3b42697168",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 45,
"avg_line_length": 22.76923076923077,
"alnum_prop": 0.6858108108108109,
"repo_name": "cdnjs/cdnjs",
"id": "2850f86b71c2c369024c3542303123aa5f53ff6f",
"size": "296",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ajax/libs/highcharts/9.2.2/es-modules/masters/modules/bullet.src.js",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
import os
import argparse
from filelock import FileLock
from ray.air import session
import torch
import torch.nn.functional as F
from torch_geometric.datasets import Reddit, FakeDataset
from torch_geometric.loader import NeighborSampler
from torch_geometric.nn import SAGEConv
from ray import train
from ray.train.torch import TorchTrainer
from ray.air.config import ScalingConfig
from torch_geometric.transforms import RandomNodeSplit
class SAGE(torch.nn.Module):
def __init__(self, in_channels, hidden_channels, out_channels, num_layers=2):
super().__init__()
self.num_layers = num_layers
self.convs = torch.nn.ModuleList()
self.convs.append(SAGEConv(in_channels, hidden_channels))
for _ in range(self.num_layers - 2):
self.convs.append(SAGEConv(hidden_channels, hidden_channels))
self.convs.append(SAGEConv(hidden_channels, out_channels))
def forward(self, x, adjs):
for i, (edge_index, _, size) in enumerate(adjs):
x_target = x[: size[1]] # Target nodes are always placed first.
x = self.convs[i]((x, x_target), edge_index)
if i != self.num_layers - 1:
x = F.relu(x)
x = F.dropout(x, p=0.5, training=self.training)
return x.log_softmax(dim=-1)
@torch.no_grad()
def test(self, x_all, subgraph_loader):
for i in range(self.num_layers):
xs = []
for batch_size, n_id, adj in subgraph_loader:
edge_index, _, size = adj
x = x_all[n_id].to(train.torch.get_device())
x_target = x[: size[1]]
x = self.convs[i]((x, x_target), edge_index)
if i != self.num_layers - 1:
x = F.relu(x)
xs.append(x.cpu())
x_all = torch.cat(xs, dim=0)
return x_all
def train_loop_per_worker(train_loop_config):
dataset = train_loop_config["dataset_fn"]()
batch_size = train_loop_config["batch_size"]
num_epochs = train_loop_config["num_epochs"]
data = dataset[0]
train_idx = data.train_mask.nonzero(as_tuple=False).view(-1)
train_idx = train_idx.split(train_idx.size(0) // session.get_world_size())[
session.get_world_rank()
]
train_loader = NeighborSampler(
data.edge_index,
node_idx=train_idx,
sizes=[25, 10],
batch_size=batch_size,
shuffle=True,
)
# Disable distributed sampler since the train_loader has already been split above.
train_loader = train.torch.prepare_data_loader(train_loader, add_dist_sampler=False)
# Do validation on rank 0 worker only.
if session.get_world_rank() == 0:
subgraph_loader = NeighborSampler(
data.edge_index, node_idx=None, sizes=[-1], batch_size=2048, shuffle=False
)
subgraph_loader = train.torch.prepare_data_loader(
subgraph_loader, add_dist_sampler=False
)
model = SAGE(dataset.num_features, 256, dataset.num_classes)
model = train.torch.prepare_model(model)
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
x, y = data.x.to(train.torch.get_device()), data.y.to(train.torch.get_device())
for epoch in range(num_epochs):
model.train()
# ``batch_size`` is the number of samples in the current batch.
# ``n_id`` are the ids of all the nodes used in the computation. This is
# needed to pull in the necessary features just for the current batch that is
# being trained on.
# ``adjs`` is a list of 3 element tuple consisting of ``(edge_index, e_id,
# size)`` for each sample in the batch, where ``edge_index`` represents the
# edges of the sampled subgraph, ``e_id`` are the ids of the edges in the
# sample, and ``size`` holds the shape of the subgraph.
# See ``torch_geometric.loader.neighbor_sampler.NeighborSampler`` for more info.
for batch_size, n_id, adjs in train_loader:
optimizer.zero_grad()
out = model(x[n_id], adjs)
loss = F.nll_loss(out, y[n_id[:batch_size]])
loss.backward()
optimizer.step()
if session.get_world_rank() == 0:
print(f"Epoch: {epoch:03d}, Loss: {loss:.4f}")
train_accuracy = validation_accuracy = test_accuracy = None
# Do validation on rank 0 worker only.
if session.get_world_rank() == 0:
model.eval()
with torch.no_grad():
out = model.module.test(x, subgraph_loader)
res = out.argmax(dim=-1) == data.y
train_accuracy = int(res[data.train_mask].sum()) / int(
data.train_mask.sum()
)
validation_accuracy = int(res[data.val_mask].sum()) / int(
data.val_mask.sum()
)
test_accuracy = int(res[data.test_mask].sum()) / int(data.test_mask.sum())
session.report(
dict(
train_accuracy=train_accuracy,
validation_accuracy=validation_accuracy,
test_accuracy=test_accuracy,
)
)
def gen_fake_dataset():
"""Returns a function to be called on each worker that returns a Fake Dataset."""
# For fake dataset, since the dataset is randomized, we create it once on the
# driver, and then send the same dataset to all the training workers.
# Use 10% of nodes for validation and 10% for testing.
fake_dataset = FakeDataset(transform=RandomNodeSplit(num_val=0.1, num_test=0.1))
def gen_dataset():
return fake_dataset
return gen_dataset
def gen_reddit_dataset():
"""Returns a function to be called on each worker that returns Reddit Dataset."""
# For Reddit dataset, we have to download the data on each node, so we create the
# dataset on each training worker.
with FileLock(os.path.expanduser("~/.reddit_dataset_lock")):
dataset = Reddit("./data/Reddit")
return dataset
def train_gnn(
num_workers=2, use_gpu=False, epochs=3, global_batch_size=32, dataset="reddit"
):
per_worker_batch_size = global_batch_size // num_workers
trainer = TorchTrainer(
train_loop_per_worker=train_loop_per_worker,
train_loop_config={
"num_epochs": epochs,
"batch_size": per_worker_batch_size,
"dataset_fn": gen_reddit_dataset
if dataset == "reddit"
else gen_fake_dataset(),
},
scaling_config=ScalingConfig(num_workers=num_workers, use_gpu=use_gpu),
)
result = trainer.fit()
print(result.metrics)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--address", required=False, type=str, help="the address to use for Ray"
)
parser.add_argument(
"--num-workers",
"-n",
type=int,
default=2,
help="Sets number of workers for training.",
)
parser.add_argument(
"--use-gpu", action="store_true", help="Whether to use GPU for training."
)
parser.add_argument(
"--epochs", type=int, default=3, help="Number of epochs to train for."
)
parser.add_argument(
"--global-batch-size",
"-b",
type=int,
default=32,
help="Global batch size to use for training.",
)
parser.add_argument(
"--dataset",
"-d",
type=str,
choices=["reddit", "fake"],
default="reddit",
help="The dataset to use. Either 'reddit' or 'fake' Defaults to 'reddit'.",
)
args, _ = parser.parse_known_args()
train_gnn(
num_workers=args.num_workers,
use_gpu=args.use_gpu,
epochs=args.epochs,
global_batch_size=args.global_batch_size,
dataset=args.dataset,
)
|
{
"content_hash": "efacdf2182ea40ebf3ff02eabcbd4d84",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 88,
"avg_line_length": 34.262008733624455,
"alnum_prop": 0.5991588070354321,
"repo_name": "ray-project/ray",
"id": "4b880607de1cdcb21433eb293748ddb438488884",
"size": "7967",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/ray/train/examples/pytorch_geometric/distributed_sage_example.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "37490"
},
{
"name": "C++",
"bytes": "5972422"
},
{
"name": "CSS",
"bytes": "10912"
},
{
"name": "Cython",
"bytes": "227477"
},
{
"name": "Dockerfile",
"bytes": "20210"
},
{
"name": "HTML",
"bytes": "30382"
},
{
"name": "Java",
"bytes": "1160849"
},
{
"name": "JavaScript",
"bytes": "1128"
},
{
"name": "Jinja",
"bytes": "6371"
},
{
"name": "Jupyter Notebook",
"bytes": "1615"
},
{
"name": "Makefile",
"bytes": "234"
},
{
"name": "PowerShell",
"bytes": "1114"
},
{
"name": "Python",
"bytes": "19539109"
},
{
"name": "Shell",
"bytes": "134583"
},
{
"name": "Starlark",
"bytes": "334862"
},
{
"name": "TypeScript",
"bytes": "190599"
}
],
"symlink_target": ""
}
|
var Jupyter = Jupyter || {};
var jprop = function(name, module_path){
Object.defineProperty(Jupyter, name, {
get: function() {
console.warn('accessing `'+name+'` is deprecated. Use `require("'+module_path+'")`');
return require(module_path);
},
enumerable: true,
configurable: false
});
}
var jglobal = function(name, module_path){
Object.defineProperty(Jupyter, name, {
get: function() {
console.warn('accessing `'+name+'` is deprecated. Use `require("'+module_path+'").'+name+'`');
return require(module_path)[name];
},
enumerable: true,
configurable: false
});
}
define(function(){
"use strict";
// expose modules
jprop('utils','base/js/utils')
//Jupyter.load_extensions = Jupyter.utils.load_extensions;
//
jprop('security','base/js/security');
jprop('keyboard','base/js/keyboard');
jprop('dialog','base/js/dialog');
jprop('mathjaxutils','notebook/js/mathjaxutils');
//// exposed constructors
jglobal('CommManager','services/kernels/comm')
jglobal('Comm','services/kernels/comm')
jglobal('NotificationWidget','base/js/notificationwidget');
jglobal('Kernel','services/kernels/kernel');
jglobal('Session','services/sessions/session');
jglobal('LoginWidget','auth/js/loginwidget');
jglobal('Page','base/js/page');
// notebook
jglobal('TextCell','notebook/js/textcell');
jglobal('OutputArea','notebook/js/outputarea');
jglobal('KeyboardManager','notebook/js/keyboardmanager');
jglobal('Completer','notebook/js/completer');
jglobal('Notebook','notebook/js/notebook');
jglobal('Tooltip','notebook/js/tooltip');
jglobal('Toolbar','notebook/js/toolbar');
jglobal('SaveWidget','notebook/js/savewidget');
jglobal('Pager','notebook/js/pager');
jglobal('QuickHelp','notebook/js/quickhelp');
jglobal('MarkdownCell','notebook/js/textcell');
jglobal('RawCell','notebook/js/textcell');
jglobal('Cell','notebook/js/cell');
jglobal('MainToolBar','notebook/js/maintoolbar');
jglobal('NotebookNotificationArea','notebook/js/notificationarea');
jglobal('NotebookTour', 'notebook/js/tour');
jglobal('MenuBar', 'notebook/js/menubar');
// tree
jglobal('SessionList','tree/js/sessionlist');
Jupyter.version = "4.3.1";
Jupyter._target = '_blank';
return Jupyter;
});
// deprecated since 4.0, remove in 5+
var IPython = Jupyter
|
{
"content_hash": "e029f4874104ae7b1e4999c07a9e1bf3",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 104,
"avg_line_length": 31.974358974358974,
"alnum_prop": 0.6459502806736167,
"repo_name": "lancezlin/ml_template_py",
"id": "44002e97078e5a6d0ada19a7835c3c78897bf42d",
"size": "2599",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/python2.7/site-packages/notebook/static/base/js/namespace.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "326933"
},
{
"name": "C++",
"bytes": "14430"
},
{
"name": "CSS",
"bytes": "7806"
},
{
"name": "FORTRAN",
"bytes": "3200"
},
{
"name": "HTML",
"bytes": "596861"
},
{
"name": "JavaScript",
"bytes": "4020233"
},
{
"name": "Jupyter Notebook",
"bytes": "517957"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "Python",
"bytes": "41191064"
},
{
"name": "Shell",
"bytes": "3373"
},
{
"name": "Smarty",
"bytes": "26298"
}
],
"symlink_target": ""
}
|
import simplejson as json
import copy
import os
import random
import six
from random import randint
try:
from urllib import parse
except ImportError:
import urlparse as parse
random.seed(os.urandom(10))
def parse_querystring(querystring):
"""
Return parsed querystring in dict
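Illustrative example: parse_querystring("a=1&b=&c=x&c=y") returns
{'a': '1', 'b': True, 'c': ['x', 'y']} -- blank values become True
and repeated keys stay as lists.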
"""
if querystring is None or len(querystring) == 0:
return {}
qs_dict = parse.parse_qs(querystring, keep_blank_values=True)
for key in qs_dict:
if len(qs_dict[key]) != 1:
continue
qs_dict[key] = qs_dict[key][0]
if qs_dict[key] == '':
qs_dict[key] = True
return dict((key, qs_dict[key]) for key in qs_dict if len(key) != 0)
def trim_resource(resource):
"""
Strip surrounding whitespace and slashes from a resource path.
"""
return resource.strip(" \t\n\r/")
class MessageType(object):
"""
Message Type Enum
Check is response message or not
- [v] id
- [v] code
- [v] method
- [v] resource
- [v] sign
- [x] tunnel
Check is request message or not
- [v] id
- [x] code
- [v] method
- [v] resource
- [x] sign
- [x] tunnel
Check is direct message or not
- [v] id
- [x] code
- [v] method
- [v] resource
- [x] sign
- [v] tunnel
Check is event message or not
- [x] id
- [v] code
- [v] method
- [v] resource
- [x] sign
- [x] tunnel
    A message is a hook message when:
- [v] id
- [x] code
- [v] method
- [v] resource
- [v] sign
- [x] tunnel
"""
UNKNOWN = 0
RESPONSE = 1
REQUEST = 2
DIRECT = 3
EVENT = 4
HOOK = 5
FIELDS = {
RESPONSE: {
"must": ["id", "code", "method", "resource", "sign"],
"prohibit": ["tunnel"]
},
REQUEST: {
"must": ["id", "method", "resource"],
"prohibit": ["code", "sign", "tunnel"]
},
DIRECT: {
"must": ["id", "method", "resource", "tunnel"],
"prohibit": ["code", "sign"]
},
EVENT: {
"must": ["code", "method", "resource"],
"prohibit": ["id", "sign", "tunnel"]
},
HOOK: {
"must": ["id", "method", "resource", "sign"],
"prohibit": ["code", "tunnel"]
}
}
class Message(object):
"""
Message
"""
def __init__(self, message, generate_id=False):
if isinstance(message, six.string_types):
try:
message = json.loads(message)
except Exception:
                raise ValueError("Invalid Message. " +
                                 "Must be a valid JSON String")
if not isinstance(message, dict):
raise TypeError("Message must be JSON string or Dict")
# put all prop into object
        for (prop, value) in six.iteritems(message):
setattr(self, prop, value)
if generate_id is True:
self.generate_id()
# put message type
self._type = Message.get_message_type(self.__dict__)
def generate_id(self):
setattr(self, "id", randint(0, 655350))
return self.id
def type(self):
return self._type
def to_json(self, pretty=True):
"""
to_json will call to_dict then dumps into json format
"""
data_dict = self.to_dict()
if pretty:
return json.dumps(
data_dict, sort_keys=True, indent=2)
return json.dumps(data_dict, sort_keys=True)
def to_dict(self):
"""
to_dict will clean all protected and private properties
"""
return dict(
(k, self.__dict__[k]) for k in self.__dict__ if k.find("_") != 0)
def match(self, route):
"""
Match input route and return new Message instance
with parsed content
"""
_resource = trim_resource(self.resource)
self.method = self.method.lower()
resource_match = route.resource_regex.search(_resource)
if resource_match is None:
return None
# build params and querystring
params = resource_match.groupdict()
querystring = params.pop("querystring", "")
setattr(self, "param", params)
setattr(self, "query", parse_querystring(querystring))
return copy.deepcopy(self)
def to_response(self, sign, code=200, data=None):
"""
transform message to response message
Notice: this method will return a deepcopy
"""
msg = copy.deepcopy(self)
msg.data = data
setattr(msg, 'code', code)
for _ in ["query", "param", "tunnel"]:
if not hasattr(msg, _):
continue
delattr(msg, _)
if hasattr(msg, 'sign') and isinstance(msg.sign, list):
msg.sign.append(sign)
else:
msg.sign = [sign]
msg._type = Message.get_message_type(msg.__dict__)
return msg
def to_event(self):
"""
get rid of id, sign, tunnel and update message type
Notice: this method will return a deepcopy
"""
msg = copy.deepcopy(self)
for _ in ["id", "sign", "tunnel", "query", "param"]:
if not hasattr(msg, _):
continue
delattr(msg, _)
msg._type = Message.get_message_type(msg.__dict__)
return msg
@staticmethod
def get_message_type(message):
"""
Return message's type
"""
for msg_type in MessageType.FIELDS:
if Message.is_type(msg_type, message):
return msg_type
return MessageType.UNKNOWN
@staticmethod
def is_type(msg_type, msg):
"""
        Return True if the message matches the given msg_type, otherwise False
"""
for prop in MessageType.FIELDS[msg_type]["must"]:
if msg.get(prop, False) is False:
return False
for prop in MessageType.FIELDS[msg_type]["prohibit"]:
if msg.get(prop, False) is not False:
return False
return True
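# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). It shows how
# the pieces above fit together: a raw request-style payload is wrapped in
# Message, classified via the MessageType.FIELDS rules, and converted into a
# response with to_response(). The resource path, sign value and data payload
# below are made-up examples, not values taken from the Sanji framework.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    raw = {
        "id": 1234,
        "method": "get",
        "resource": "/network/ethernet/1?debug="
    }
    msg = Message(raw)
    # Only "id", "method" and "resource" are present, so this is a REQUEST.
    assert msg.type() == MessageType.REQUEST
    # keep_blank_values=True turns "debug=" into {"debug": True}.
    print(parse_querystring("debug=&retry=3"))
    # to_response() appends the sign, adds a code and recomputes the type.
    resp = msg.to_response(sign="example-sign", code=200, data={"ok": True})
    assert resp.type() == MessageType.RESPONSE
    print(resp.to_json())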
|
{
"content_hash": "d7dee3617f0d7d199802707ec6a2705f",
"timestamp": "",
"source": "github",
"line_count": 247,
"max_line_length": 77,
"avg_line_length": 25.178137651821864,
"alnum_prop": 0.509085061907059,
"repo_name": "imZack/sanji",
"id": "97715823d46580d7b95cb42583c8d7c9749dbcd8",
"size": "6219",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "sanji/message.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "149"
},
{
"name": "Python",
"bytes": "119228"
}
],
"symlink_target": ""
}
|
import {ActionTrust} from '../../../src/action-trust';
import {FormEvents} from './form-events';
import {installFormProxy} from './form-proxy';
import {triggerAnalyticsEvent} from '../../../src/analytics';
import {createCustomEvent} from '../../../src/event-helper';
import {installStylesForShadowRoot} from '../../../src/shadow-embed';
import {iterateCursor} from '../../../src/dom';
import {formOrNullForElement, setFormForElement} from '../../../src/form';
import {
assertAbsoluteHttpOrHttpsUrl,
assertHttpsUrl,
addParamsToUrl,
SOURCE_ORIGIN_PARAM,
isProxyOrigin,
} from '../../../src/url';
import {dev, user} from '../../../src/log';
import {getMode} from '../../../src/mode';
import {Services} from '../../../src/services';
import {toArray} from '../../../src/types';
import {
removeElement,
childElementByAttr,
ancestorElementsByTag,
} from '../../../src/dom';
import {installStyles} from '../../../src/style-installer';
import {CSS} from '../../../build/amp-form-0.1.css';
import {
getFormValidator,
isCheckValiditySupported,
} from './form-validators';
import {
FORM_VERIFY_PARAM,
getFormVerifier,
} from './form-verifiers';
import {deepMerge} from '../../../src/utils/object';
import {AmpEvents} from '../../../src/amp-events';
/** @type {string} */
const TAG = 'amp-form';
/**
* A list of external dependencies that can be included in forms.
* @type {!Array<string>}
*/
const EXTERNAL_DEPS = [
'amp-selector',
];
/** @const @enum {string} */
const FormState_ = {
INITIAL: 'initial',
VERIFYING: 'verifying',
SUBMITTING: 'submitting',
SUBMIT_ERROR: 'submit-error',
SUBMIT_SUCCESS: 'submit-success',
};
/** @const @enum {string} */
const UserValidityState = {
NONE: 'none',
USER_VALID: 'valid',
USER_INVALID: 'invalid',
};
/** @private @const {string} */
const REDIRECT_TO_HEADER = 'AMP-Redirect-To';
export class AmpForm {
/**
* Adds functionality to the passed form element and listens to submit event.
* @param {!HTMLFormElement} element
* @param {string} id
*/
constructor(element, id) {
//TODO(dvoytenko, #7063): Remove the try catch.
try {
installFormProxy(element);
} catch (e) {
dev().error(TAG, 'form proxy failed to install', e);
}
setFormForElement(element, this);
/** @private @const {string} */
this.id_ = id;
/** @const @private {!Window} */
this.win_ = element.ownerDocument.defaultView;
/** @const @private {!../../../src/service/timer-impl.Timer} */
this.timer_ = Services.timerFor(this.win_);
/** @const @private {!../../../src/service/url-replacements-impl.UrlReplacements} */
this.urlReplacement_ = Services.urlReplacementsForDoc(element);
/** @private {?Promise} */
this.dependenciesPromise_ = null;
/** @const @private {!HTMLFormElement} */
this.form_ = element;
/** @const @private {!../../../src/service/vsync-impl.Vsync} */
this.vsync_ = Services.vsyncFor(this.win_);
/** @const @private {!../../../src/service/template-impl.Templates} */
this.templates_ = Services.templatesFor(this.win_);
/** @const @private {!../../../src/service/xhr-impl.Xhr} */
this.xhr_ = Services.xhrFor(this.win_);
/** @const @private {!../../../src/service/action-impl.ActionService} */
this.actions_ = Services.actionServiceForDoc(this.form_);
/** @const @private {!../../../src/service/resources-impl.Resources} */
this.resources_ = Services.resourcesForDoc(this.form_);
/** @const @private {string} */
this.method_ = (this.form_.getAttribute('method') || 'GET').toUpperCase();
/** @const @private {string} */
this.target_ = this.form_.getAttribute('target');
/** @const @private {?string} */
this.xhrAction_ = this.getXhrUrl_('action-xhr');
/** @const @private {?string} */
this.xhrVerify_ = this.getXhrUrl_('verify-xhr');
/**
     * Indicates whether the action will submit to the canonical origin or not.
* @private {boolean|undefined}
*/
this.isCanonicalAction_ = undefined;
/** @const @private {boolean} */
this.shouldValidate_ = !this.form_.hasAttribute('novalidate');
// Need to disable browser validation in order to allow us to take full
// control of this. This allows us to trigger validation APIs and reporting
// when we need to.
this.form_.setAttribute('novalidate', '');
if (!this.shouldValidate_) {
this.form_.setAttribute('amp-novalidate', '');
}
this.form_.classList.add('i-amphtml-form');
const submitButtons = this.form_.querySelectorAll('[type="submit"]');
/** @const @private {!Array<!Element>} */
this.submitButtons_ = toArray(submitButtons);
/** @private {!FormState_} */
this.state_ = FormState_.INITIAL;
const inputs = this.form_.elements;
for (let i = 0; i < inputs.length; i++) {
const name = inputs[i].name;
user().assert(name != SOURCE_ORIGIN_PARAM && name != FORM_VERIFY_PARAM,
'Illegal input name, %s found: %s', name, inputs[i]);
}
/** @const @private {!./form-validators.FormValidator} */
this.validator_ = getFormValidator(this.form_);
/** @const @private {!./form-verifiers.FormVerifier} */
this.verifier_ = getFormVerifier(
this.form_, () => this.handleXhrVerify_());
this.actions_.installActionHandler(
this.form_, this.actionHandler_.bind(this), ActionTrust.HIGH);
this.installEventHandlers_();
/** @private {?Promise} */
this.xhrSubmitPromise_ = null;
/** @private {?Promise} */
this.renderTemplatePromise_ = null;
}
/**
* Gets and validates an attribute for form request URLs.
* @param {string} attribute
* @return {?string}
* @private
*/
getXhrUrl_(attribute) {
const url = this.form_.getAttribute(attribute);
if (url) {
assertHttpsUrl(url, this.form_, attribute);
user().assert(!isProxyOrigin(url),
`form ${attribute} should not be on AMP CDN: %s`,
this.form_);
}
return url;
}
/**
* @param {!../../../src/service/action-impl.ActionInvocation} invocation
* @private
*/
actionHandler_(invocation) {
if (invocation.method == 'submit') {
this.whenDependenciesReady_().then(() => {
this.handleSubmitAction_(invocation);
});
}
}
/**
   * Returns a promise that will be resolved when all dependencies used inside the form
   * tag (e.g. amp-selector) are loaded and built, or after a 2 second timeout, whichever comes first.
* @return {!Promise}
* @private
*/
whenDependenciesReady_() {
if (this.dependenciesPromise_) {
return this.dependenciesPromise_;
}
const depElements = this.form_./*OK*/querySelectorAll(
EXTERNAL_DEPS.join(','));
// Wait for an element to be built to make sure it is ready.
const promises = toArray(depElements).map(el => el.whenBuilt());
return this.dependenciesPromise_ = this.waitOnPromisesOrTimeout_(promises,
2000);
}
/** @private */
installEventHandlers_() {
this.form_.addEventListener(
'submit', this.handleSubmitEvent_.bind(this), true);
this.form_.addEventListener('blur', e => {
checkUserValidityAfterInteraction_(dev().assertElement(e.target));
this.validator_.onBlur(e);
}, true);
const afterVerifierCommit = () => {
// Move from the VERIFYING state back to INITIAL
if (this.state_ === FormState_.VERIFYING) {
this.setState_(FormState_.INITIAL);
}
};
this.form_.addEventListener('change', e => {
this.verifier_.onCommit()
.then(updatedElements => {
updatedElements.forEach(checkUserValidityAfterInteraction_);
this.validator_.onBlur(e);
}, () => {
checkUserValidityAfterInteraction_(dev().assertElement(e.target));
})
.then(afterVerifierCommit, afterVerifierCommit);
});
this.form_.addEventListener('input', e => {
checkUserValidityAfterInteraction_(dev().assertElement(e.target));
this.validator_.onInput(e);
});
}
/**
* Triggers 'amp-form-submit' event in 'amp-analytics' and
* generates variables for form fields to be accessible in analytics
*
* @private
*/
triggerFormSubmitInAnalytics_() {
const formDataForAnalytics = {};
const formObject = this.getFormAsObject_();
for (const k in formObject) {
if (Object.prototype.hasOwnProperty.call(formObject, k)) {
formDataForAnalytics['formFields[' + k + ']'] = formObject[k].join(',');
}
}
formDataForAnalytics['formId'] = this.form_.id;
this.analyticsEvent_('amp-form-submit', formDataForAnalytics);
}
/**
* Handles submissions through action service invocations.
* e.g. <img on=tap:form.submit>
* @param {!../../../src/service/action-impl.ActionInvocation} invocation
* @private
*/
handleSubmitAction_(invocation) {
if (this.state_ == FormState_.SUBMITTING || !this.checkValidity_()) {
return;
}
// `submit` has the same trust level as the AMP Action that caused it.
this.submit_(invocation.trust);
if (this.method_ == 'GET' && !this.xhrAction_) {
// Trigger the actual submit of GET non-XHR.
this.form_.submit();
}
}
/**
   * Note on stopImmediatePropagation usage here: it is important to emulate native
* browser submit event blocking. Otherwise any other submit listeners would get the
* event.
*
* For example, action service shouldn't trigger 'submit' event if form is actually
* invalid. stopImmediatePropagation allows us to make sure we don't trigger it.
*
* This prevents the default submission event in any of following cases:
* - The form is still finishing a previous submission.
* - The form is invalid.
* - Handling an XHR submission.
* - It's a non-XHR POST submission (unsupported).
*
* @param {!Event} event
* @private
*/
handleSubmitEvent_(event) {
if (this.state_ == FormState_.SUBMITTING || !this.checkValidity_()) {
event.stopImmediatePropagation();
event.preventDefault();
return;
}
if (this.xhrAction_ || this.method_ == 'POST') {
event.preventDefault();
}
// Submits caused by user input have high trust.
this.submit_(ActionTrust.HIGH);
}
/**
   * Helper method that actually handles the different cases (post, get, xhr...).
* @param {ActionTrust} trust
* @private
*/
submit_(trust) {
const varSubsFields = this.getVarSubsFields_();
if (this.xhrAction_) {
this.handleXhrSubmit_(varSubsFields, trust);
} else if (this.method_ == 'POST') {
this.handleNonXhrPost_();
} else if (this.method_ == 'GET') {
this.handleNonXhrGet_(varSubsFields);
}
}
/**
* Get form fields that require variable substitutions
* @return {!IArrayLike<!HTMLInputElement>}
* @private
*/
getVarSubsFields_() {
// Fields that support var substitutions.
return this.form_.querySelectorAll('[type="hidden"][data-amp-replace]');
}
/**
* Send the verify request and control the VERIFYING state.
* @return {!Promise}
* @private
*/
handleXhrVerify_() {
if (this.state_ === FormState_.SUBMITTING) {
return Promise.resolve();
}
this.setState_(FormState_.VERIFYING);
return this.doVarSubs_(this.getVarSubsFields_())
.then(() => this.doVerifyXhr_());
}
/**
* @param {!IArrayLike<!HTMLInputElement>} varSubsFields
* @param {ActionTrust} trust
* @private
*/
handleXhrSubmit_(varSubsFields, trust) {
this.setState_(FormState_.SUBMITTING);
const p = this.doVarSubs_(varSubsFields)
.then(() => {
this.triggerFormSubmitInAnalytics_();
this.actions_.trigger(
this.form_, 'submit', /* event */ null, trust);
// After variable substitution
const values = this.getFormAsObject_();
this.renderTemplate_(values);
})
.then(() => this.doActionXhr_())
.then(response => this.handleXhrSubmitSuccess_(response),
error => {
return this.handleXhrSubmitFailure_(/** @type {!Error} */(error));
});
if (getMode().test) {
this.xhrSubmitPromise_ = p;
}
}
/**
* Perform asynchronous variable substitution on the fields that require it.
* @param {!IArrayLike<!HTMLInputElement>} varSubsFields
* @return {!Promise}
* @private
*/
doVarSubs_(varSubsFields) {
const varSubPromises = [];
for (let i = 0; i < varSubsFields.length; i++) {
varSubPromises.push(
this.urlReplacement_.expandInputValueAsync(varSubsFields[i]));
}
return this.waitOnPromisesOrTimeout_(varSubPromises, 100);
}
/**
* Send a request to the form's action endpoint.
* @return {!Promise<!../../../src/service/xhr-impl.FetchResponse>}
* @private
*/
doActionXhr_() {
return this.doXhr_(dev().assertString(this.xhrAction_), this.method_);
}
/**
* Send a request to the form's verify endpoint.
* @return {!Promise<!../../../src/service/xhr-impl.FetchResponse>}
* @private
*/
doVerifyXhr_() {
return this.doXhr_(dev().assertString(this.xhrVerify_), this.method_,
{[FORM_VERIFY_PARAM]: true});
}
/**
* Send a request to a form endpoint.
* @param {string} url
* @param {string} method
* @param {!Object<string, string>=} opt_extraFields
* @return {!Promise<!../../../src/service/xhr-impl.FetchResponse>}
* @private
*/
doXhr_(url, method, opt_extraFields) {
let xhrUrl, body;
const isHeadOrGet = method == 'GET' || method == 'HEAD';
if (isHeadOrGet) {
const values = this.getFormAsObject_();
if (opt_extraFields) {
deepMerge(values, opt_extraFields);
}
xhrUrl = addParamsToUrl(url, values);
} else {
xhrUrl = url;
body = new FormData(this.form_);
for (const key in opt_extraFields) {
body.append(key, opt_extraFields[key]);
}
}
return this.xhr_.fetch(xhrUrl, {
body,
method,
credentials: 'include',
headers: {
Accept: 'application/json',
},
});
}
/**
* Transition the form to the submit success state.
* @param {!../../../src/service/xhr-impl.FetchResponse} response
* @return {!Promise}
* @private visible for testing
*/
handleXhrSubmitSuccess_(response) {
return response.json().then(json => {
this.triggerAction_(/* success */ true, json);
this.analyticsEvent_('amp-form-submit-success');
this.setState_(FormState_.SUBMIT_SUCCESS);
this.renderTemplate_(json || {});
this.maybeHandleRedirect_(response);
}, error => {
user().error(TAG, `Failed to parse response JSON: ${error}`);
});
}
/**
   * Transition the form to the submit error state.
* @param {!Error} error
* @private
*/
handleXhrSubmitFailure_(error) {
let promise;
if (error && error.response) {
promise = error.response.json().catch(() => null);
} else {
promise = Promise.resolve(null);
}
return promise.then(responseJson => {
this.triggerAction_(/* success */ false, responseJson);
this.analyticsEvent_('amp-form-submit-error');
this.setState_(FormState_.SUBMIT_ERROR);
this.renderTemplate_(responseJson || {});
this.maybeHandleRedirect_(error.response);
user().error(TAG, `Form submission failed: ${error}`);
});
}
/** @private */
handleNonXhrPost_() {
// non-XHR POST requests are not supported.
user().assert(false,
        'Only XHR based (via action-xhr attribute) submissions are supported ' +
'for POST requests. %s',
this.form_);
}
/**
* Executes variable substitutions on the passed fields.
* @param {IArrayLike<!HTMLInputElement>} varSubsFields
* @private
*/
handleNonXhrGet_(varSubsFields) {
// Non-xhr GET requests replacement should happen synchronously.
for (let i = 0; i < varSubsFields.length; i++) {
this.urlReplacement_.expandInputValueSync(varSubsFields[i]);
}
this.triggerFormSubmitInAnalytics_();
}
/**
* @private
* @return {boolean} False if the form is invalid.
*/
checkValidity_() {
if (isCheckValiditySupported(this.win_.document)) {
      // Validity checking should always occur; novalidate only circumvents
      // reporting and blocking submission of invalid forms.
const isValid = checkUserValidityOnSubmission(this.form_);
if (this.shouldValidate_ && !isValid) {
this.vsync_.run({
measure: undefined,
mutate: reportValidity,
}, {
validator: this.validator_,
});
return false;
}
}
return true;
}
/**
   * Handles a response redirect through the AMP-Redirect-To response header.
* @param {../../../src/service/xhr-impl.FetchResponse} response
* @private
*/
maybeHandleRedirect_(response) {
if (!response || !response.headers) {
return;
}
const redirectTo = response.headers.get(REDIRECT_TO_HEADER);
if (redirectTo) {
user().assert(this.target_ != '_blank',
'Redirecting to target=_blank using AMP-Redirect-To is currently ' +
'not supported, use target=_top instead. %s', this.form_);
try {
assertAbsoluteHttpOrHttpsUrl(redirectTo);
assertHttpsUrl(redirectTo, 'AMP-Redirect-To', 'Url');
} catch (e) {
user().assert(false, 'The `AMP-Redirect-To` header value must be an ' +
'absolute URL starting with https://. Found %s', redirectTo);
}
this.win_.top.location.href = redirectTo;
}
}
/**
* Triggers either a submit-success or submit-error action with response data.
* @param {boolean} success
* @param {?JsonObject} json
* @private
*/
triggerAction_(success, json) {
const name = success ? FormState_.SUBMIT_SUCCESS : FormState_.SUBMIT_ERROR;
const event =
createCustomEvent(this.win_, `${TAG}.${name}`, {response: json});
this.actions_.trigger(this.form_, name, event, ActionTrust.HIGH);
}
/**
* Returns a race promise between resolving all promises or timing out.
* @param {!Array<!Promise>} promises
* @param {number} timeout
* @return {!Promise}
* @private
*/
waitOnPromisesOrTimeout_(promises, timeout) {
return Promise.race(
[Promise.all(promises), this.timer_.promise(timeout)]);
}
/**
* @param {string} eventType
* @param {!Object<string, string>=} opt_vars A map of vars and their values.
* @private
*/
analyticsEvent_(eventType, opt_vars) {
triggerAnalyticsEvent(this.form_, eventType, opt_vars);
}
/**
* Returns form data as an object.
* @return {!JsonObject}
* @private
*/
getFormAsObject_() {
const data = /** @type {!JsonObject} */ ({});
const inputs = this.form_.elements;
const submittableTagsRegex = /^(?:input|select|textarea)$/i;
const unsubmittableTypesRegex = /^(?:button|image|file|reset)$/i;
const checkableType = /^(?:checkbox|radio)$/i;
for (let i = 0; i < inputs.length; i++) {
const input = inputs[i];
if (!input.name || isDisabled_(input) ||
!submittableTagsRegex.test(input.tagName) ||
unsubmittableTypesRegex.test(input.type) ||
(checkableType.test(input.type) && !input.checked)) {
continue;
}
if (data[input.name] === undefined) {
data[input.name] = [];
}
data[input.name].push(input.value);
}
return data;
}
/**
* Adds proper classes for the state passed.
* @param {!FormState_} newState
* @private
*/
setState_(newState) {
const previousState = this.state_;
this.form_.classList.remove(`amp-form-${previousState}`);
this.form_.classList.add(`amp-form-${newState}`);
this.cleanupRenderedTemplate_(previousState);
this.state_ = newState;
this.submitButtons_.forEach(button => {
if (newState == FormState_.SUBMITTING) {
button.setAttribute('disabled', '');
} else {
button.removeAttribute('disabled');
}
});
}
/**
* @param {!JsonObject} data
* @private
*/
renderTemplate_(data) {
const container = this.form_./*OK*/querySelector(`[${this.state_}]`);
let p = null;
if (container) {
const messageId = `rendered-message-${this.id_}`;
container.setAttribute('role', 'alert');
      container.setAttribute('aria-labelledby', messageId);
container.setAttribute('aria-live', 'assertive');
if (this.templates_.hasTemplate(container)) {
p = this.templates_.findAndRenderTemplate(container, data)
.then(rendered => {
rendered.id = messageId;
rendered.setAttribute('i-amphtml-rendered', '');
container.appendChild(rendered);
const renderedEvent = createCustomEvent(
this.win_,
AmpEvents.DOM_UPDATE,
/* detail */ null,
{bubbles: true});
container.dispatchEvent(renderedEvent);
});
} else {
// TODO(vializ): This is to let AMP know that the AMP elements inside
// this container are now visible so they get scheduled for layout.
// This will be unnecessary when the AMP Layers implementation is
// complete. We call mutateElement here and not where the template is
// made visible so that we don't do redundant layout work when a
// template is rendered too.
this.resources_.mutateElement(container, () => {});
p = Promise.resolve();
}
}
if (getMode().test) {
this.renderTemplatePromise_ = p;
}
}
/**
* Removes the template for the passed state.
* @param {!FormState_} state
* @private
*/
cleanupRenderedTemplate_(state) {
const container = this.form_./*OK*/querySelector(`[${state}]`);
if (!container) {
return;
}
const previousRender = childElementByAttr(container, 'i-amphtml-rendered');
if (previousRender) {
removeElement(previousRender);
}
}
/**
   * Returns a promise that resolves when the XHR submit finishes, or null if
   * the XHR submit has not started.
* @visibleForTesting
*/
xhrSubmitPromiseForTesting() {
return this.xhrSubmitPromise_;
}
/**
   * Returns a promise that resolves when the template render finishes, or null
   * if the template render has not started.
* @visibleForTesting
*/
renderTemplatePromiseForTesting() {
return this.renderTemplatePromise_;
}
}
/**
* Reports validity of the form passed through state object.
* @param {!Object} state
*/
function reportValidity(state) {
state.validator.report();
}
/**
* Checks user validity for all inputs, fieldsets and the form.
* @param {!HTMLFormElement} form
* @return {boolean} Whether the form is currently valid or not.
*/
function checkUserValidityOnSubmission(form) {
const elements = form.querySelectorAll('input,select,textarea,fieldset');
iterateCursor(elements, element => checkUserValidity(element));
return checkUserValidity(form);
}
/**
* Returns the user validity state of the element.
* @param {!Element} element
* @return {string}
*/
function getUserValidityStateFor(element) {
if (element.classList.contains('user-valid')) {
return UserValidityState.USER_VALID;
} else if (element.classList.contains('user-invalid')) {
return UserValidityState.USER_INVALID;
}
return UserValidityState.NONE;
}
/**
* Updates class names on the element to reflect the active invalid types on it.
*
* @param {!Element} element
*/
function updateInvalidTypesClasses(element) {
if (!element.validity) {
return;
}
for (const validationType in element.validity) {
element.classList.toggle(validationType, element.validity[validationType]);
}
}
/**
* Checks user validity which applies .user-valid and .user-invalid AFTER the user
* interacts with the input by moving away from the input (blur) or by changing its
* value (input).
*
* See :user-invalid spec for more details:
* https://drafts.csswg.org/selectors-4/#user-pseudos
*
 * The spec is still not fully settled. The current solution tries to follow a common
 * sense approach for when to apply these classes. As the spec gets clearer, we should
 * strive to match it as closely as possible.
*
* @param {!Element} element
* @param {boolean=} propagate Whether to propagate the user validity to ancestors.
* @return {boolean} Whether the element is valid or not.
*/
function checkUserValidity(element, propagate = false) {
// TODO(mkhatib, #6930): Implement basic validation for custom inputs like
// amp-selector.
// If this is not a field type with checkValidity don't do anything.
if (!element.checkValidity) {
return true;
}
let shouldPropagate = false;
const previousValidityState = getUserValidityStateFor(element);
const isCurrentlyValid = element.checkValidity();
if (previousValidityState != UserValidityState.USER_VALID &&
isCurrentlyValid) {
element.classList.add('user-valid');
element.classList.remove('user-invalid');
// Don't propagate user-valid unless it was marked invalid before.
shouldPropagate = previousValidityState == UserValidityState.USER_INVALID;
} else if (previousValidityState != UserValidityState.USER_INVALID &&
!isCurrentlyValid) {
element.classList.add('user-invalid');
element.classList.remove('user-valid');
// Always propagate an invalid state change. One invalid input field is
// guaranteed to make the fieldset and form invalid as well.
shouldPropagate = true;
}
updateInvalidTypesClasses(element);
if (propagate && shouldPropagate) {
// Propagate user validity to ancestor fieldsets.
const ancestors = ancestorElementsByTag(element, 'fieldset');
for (let i = 0; i < ancestors.length; i++) {
checkUserValidity(ancestors[i]);
}
// Also update the form user validity.
if (element.form) {
checkUserValidity(element.form);
}
}
return isCurrentlyValid;
}
/**
* Responds to user interaction with an input by checking user validity of the input
 * and possibly its input-related ancestors (e.g. fieldset, form).
* @param {!Element} input
* @private visible for testing.
*/
export function checkUserValidityAfterInteraction_(input) {
checkUserValidity(input, /* propagate */ true);
}
/**
* Checks if a field is disabled.
* @param {!Element} element
* @private
*/
function isDisabled_(element) {
if (element.disabled) {
return true;
}
const ancestors = ancestorElementsByTag(element, 'fieldset');
for (let i = 0; i < ancestors.length; i++) {
if (ancestors[i].disabled) {
return true;
}
}
return false;
}
/**
* Bootstraps the amp-form elements
*/
export class AmpFormService {
/**
* @param {!../../../src/service/ampdoc-impl.AmpDoc} ampdoc
*/
constructor(ampdoc) {
/** @const @private {!Promise} */
this.whenInitialized_ = this.installStyles_(ampdoc)
.then(() => this.installHandlers_(ampdoc));
// Dispatch a test-only event for integration tests.
if (getMode().test) {
this.whenInitialized_.then(() => {
const win = ampdoc.win;
const event = createCustomEvent(
win, FormEvents.SERVICE_INIT, null, {bubbles: true});
win.dispatchEvent(event);
});
}
}
/**
* Returns a promise that resolves when all form implementations (if any)
* have been upgraded.
* @return {!Promise}
*/
whenInitialized() {
return this.whenInitialized_;
}
/**
* Install the amp-form CSS
* @param {!../../../src/service/ampdoc-impl.AmpDoc} ampdoc
* @return {!Promise}
* @private
*/
installStyles_(ampdoc) {
return new Promise(resolve => {
if (ampdoc.isSingleDoc()) {
const root = /** @type {!Document} */ (ampdoc.getRootNode());
installStyles(root, CSS, resolve);
} else {
const root = /** @type {!ShadowRoot} */ (ampdoc.getRootNode());
installStylesForShadowRoot(root, CSS);
resolve();
}
});
}
/**
* Install the event handlers
* @param {!../../../src/service/ampdoc-impl.AmpDoc} ampdoc
* @return {!Promise}
* @private
*/
installHandlers_(ampdoc) {
return ampdoc.whenReady().then(() => {
this.installSubmissionHandlers_(
ampdoc.getRootNode().querySelectorAll('form'));
this.installGlobalEventListener_(ampdoc.getRootNode());
});
}
/**
* Install submission handler on all forms in the document.
* @param {?IArrayLike<T>} forms
* @template T
* @private
*/
installSubmissionHandlers_(forms) {
if (!forms) {
return;
}
iterateCursor(forms, (form, index) => {
const existingAmpForm = formOrNullForElement(form);
if (!existingAmpForm) {
new AmpForm(form, `amp-form-${index}`);
}
});
}
/**
* Listen for DOM updated messages sent to the document.
* @param {!Document|!ShadowRoot} doc
* @private
*/
installGlobalEventListener_(doc) {
doc.addEventListener(AmpEvents.DOM_UPDATE, () => {
this.installSubmissionHandlers_(doc.querySelectorAll('form'));
});
}
}
AMP.registerServiceForDoc(TAG, AmpFormService);
|
{
"content_hash": "490986ade2ddb3e67aac7e681e13d2a9",
"timestamp": "",
"source": "github",
"line_count": 984,
"max_line_length": 92,
"avg_line_length": 29.885162601626018,
"alnum_prop": 0.6314482946237291,
"repo_name": "wjfang/amphtml",
"id": "adfcbbb042fc602ff7ee79ff655125d22443673a",
"size": "30034",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "extensions/amp-form/0.1/amp-form.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "74104"
},
{
"name": "Go",
"bytes": "7459"
},
{
"name": "HTML",
"bytes": "913737"
},
{
"name": "Java",
"bytes": "36596"
},
{
"name": "JavaScript",
"bytes": "8021104"
},
{
"name": "Protocol Buffer",
"bytes": "29956"
},
{
"name": "Python",
"bytes": "74497"
},
{
"name": "Ruby",
"bytes": "7342"
},
{
"name": "Shell",
"bytes": "6942"
},
{
"name": "Yacc",
"bytes": "20788"
}
],
"symlink_target": ""
}
|
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="OptimLib: a C++ numerical optimization library.">
<meta name="author" content="Keith O'Hara">
<meta name="keywords" content="Optimization, C++, C++11, Differential Evolution, Particle Swarm Optimization, Root Finding, OpenMP, Parallel Optimization, BFGS, L-BFGS, Keith O'Hara, Economics, Econometrics, Research, NYU, New York University" />
<link rel="shortcut icon" type="image/x-icon" href="siteicon.ico">
<title>OptimLib: Settings</title>
<!-- Bootstrap Core CSS -->
<link href="css/bootstrap.min.css" rel="stylesheet">
<!-- Custom CSS -->
<link href="css/modern-business.css" rel="stylesheet">
<!-- Custom Fonts -->
<link href="font-awesome/css/font-awesome.min.css" rel="stylesheet" type="text/css">
<!-- Additional Settings -->
<link href="css/kthohr_settings.css" rel="stylesheet">
<!-- Syntax Highlighter -->
<script type="text/javascript" src="js/syntaxhighlighter.js"></script>
<link type="text/css" rel="stylesheet" href="css/swift_theme.css">
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-93902857-1', 'auto');
ga('send', 'pageview');
</script>
<!-- MathJax -->
<script type="text/x-mathjax-config">
MathJax.Hub.Config({tex2jax: {inlineMath: [['$','$'], ['\\(','\\)']]}});
</script>
<script type="text/javascript" async
src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_CHTML">
</script>
<script async defer src="https://buttons.github.io/buttons.js"></script>
<script src="js/jquery.js"></script>
<script>
$(function(){
$("#mynavbar").load("navbar.html")
$("#optimhead").load("optimlib_header.html")
$("#myfooter").load("footer.html")
});
</script>
</head>
<style>
pre {
display: inline-block;
}
</style>
<body>
<!-- Navigation -->
<div id="mynavbar"></div>
<!-- Page Content -->
<div class="container">
<!-- Page Heading/Breadcrumbs -->
<div id="optimhead"></div>
<!-- -->
<div class="row">
<div class="col-md-2"></div>
<div class="col-md-8">
<h3 style="text-align: left;"><strong style="font-size: 120%;">OptimLib: Algorithm Settings</strong></h3>
<hr>
<p><strong>Struct definition:</strong></p>
<pre class="brush: cpp;">
struct gd_settings_t
{
// step size, or 'learning rate'
double step_size = 0.1;
// decay
bool step_decay = false;
uint_t step_decay_periods = 10;
double step_decay_val = 0.5;
// momentum parameter
double momentum_par = 0.9;
// Ada parameters
double norm_term = 10e-08;
double ada_rho = 0.9;
bool ada_max = false;
// Adam parameters
double adam_beta_1 = 0.9;
double adam_beta_2 = 0.999;
};
struct algo_settings_t
{
// general
int conv_failure_switch = 0;
int iter_max = 2000;
double err_tol = 1E-08;
bool vals_bound = false;
arma::vec lower_bounds;
arma::vec upper_bounds;
// returned by algorithms
double opt_value; // will be returned by the optimization algorithm
arma::vec zero_values; // will be returned by the root-finding method
int opt_iter;
double opt_err;
// SUMT parameter
double sumt_par_eta = 10.0;
// CG
int cg_method = 2;
double cg_restart_threshold = 0.1;
// DE
int de_n_pop = 200;
int de_n_pop_best = 6;
int de_n_gen = 1000;
int de_pmax = 4;
int de_max_fn_eval = 100000;
int de_mutation_method = 1; // 1 = rand; 2 = best
int de_check_freq = -1;
double de_par_F = 0.8;
double de_par_CR = 0.9;
double de_par_F_l = 0.1;
double de_par_F_u = 1.0;
double de_par_tau_F = 0.1;
double de_par_tau_CR = 0.1;
arma::vec de_initial_lb; // this will default to -0.5
arma::vec de_initial_ub; // this will default to 0.5
// GD
int gd_method = 0;
gd_settings_t gd_settings;
// L-BFGS
int lbfgs_par_M = 10;
// Nelder-Mead
bool nm_adaptive = true;
double nm_par_alpha = 1.0; // reflection parameter
double nm_par_beta = 0.5; // contraction parameter
double nm_par_gamma = 2.0; // expansion parameter
double nm_par_delta = 0.5; // shrinkage parameter
// PSO
bool pso_center_particle = true;
int pso_n_pop = 100;
int pso_n_gen = 1000;
int pso_inertia_method = 1; // 1 for linear decreasing between w_min and w_max; 2 for dampening
double pso_par_initial_w = 1.0;
double pso_par_w_damp = 0.99;
double pso_par_w_min = 0.10;
double pso_par_w_max = 0.99;
int pso_velocity_method = 1; // 1 for fixed; 2 for linear
double pso_par_c_cog = 2.0;
double pso_par_c_soc = 2.0;
double pso_par_initial_c_cog = 2.5;
double pso_par_final_c_cog = 0.5;
double pso_par_initial_c_soc = 0.5;
double pso_par_final_c_soc = 2.5;
arma::vec pso_initial_lb; // this will default to -0.5
arma::vec pso_initial_ub; // this will default to 0.5
};
</pre>
<hr>
</div>
</div>
</div>
<div id="myfooter"></div>
<!-- jQuery -->
<!--<script src="js/jquery.js"></script>-->
<!-- Bootstrap Core JavaScript -->
<script src="js/bootstrap.min.js"></script>
</body>
</html>
|
{
"content_hash": "61d17c42062b3ab3e4942d9664965a97",
"timestamp": "",
"source": "github",
"line_count": 232,
"max_line_length": 250,
"avg_line_length": 24.987068965517242,
"alnum_prop": 0.5970329480765914,
"repo_name": "kthohr/kthohr.github.io",
"id": "224ed1b66a8646f47033e6049045e947f2c5b692",
"size": "5797",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "optimlib_docs_settings.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "15380"
},
{
"name": "HTML",
"bytes": "569414"
},
{
"name": "JavaScript",
"bytes": "250638"
},
{
"name": "PHP",
"bytes": "932"
},
{
"name": "R",
"bytes": "102430"
}
],
"symlink_target": ""
}
|
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Friday Homework</title>
<meta name="viewport" content="width=1024">
<link rel="stylesheet" href="dist/coderdeck-core.min.css" type="text/css">
<link rel="stylesheet" id='style-theme-link' href="src/css/coderdeck.css" type="text/css" >
<script src='dist/jquery.min.js'></script>
<script src="dist/modernizr.js"></script>
<link href='http://fonts.googleapis.com/css?family=Oswald:400,700,300' rel='stylesheet' type='text/css'>
</head>
<body class="deck-container">
<script type='text/coderdeck' id='coderdeck-default'>
<html>
<head>
<script src='src/jquery.min.js'>SCRIPTEND
</head>
<body>
CODE
</body>
</html>
</script>
<script type='text/coderdeck' id='coderdeck-style-example'>
<html>
<title>test</title>
<style>
CODE
</style>
<body>
<h1>I'm a H1 heading</h1>
<h2>I'm a H2 heading</h2>
<h3>Paragraph of text <p> here</p>
<div class='stuff'>I'm a div <div> with class "stuff"</div>
<div id='my-div'>I'm a <div> with id "my-div"</div>
</body>
</html>
</script>
<article class="slide slide-list">
<h2>Homework Review</h2>
<script>
(function($){
$.fn.shuffle = function() {
var allElems = this.get(),
getRandom = function(max) {
return Math.floor(Math.random() * max);
},
shuffled = $.map(allElems, function(){
var random = getRandom(allElems.length),
randEl = $(allElems[random]).clone(true)[0];
allElems.splice(random, 1);
return randEl;
});
this.each(function(i){
$(this).replaceWith($(shuffled[i]));
});
return $(shuffled);
};
})(jQuery);
function shuffleHomework(amt) {
if(amt <= 0) return;
$("#homework li").shuffle();
setTimeout(function() { shuffleHomework(amt - 1); },200);
}
</script>
<a href='javascript:shuffleHomework(10);'>Shuffle</a>
<ul id='homework'>
<li><a href='http://zacarroll.github.io/homework/'>Zac Carroll</a></li>
<li><a href='http://kjcatz.github.io/homework/'>Karyll Catubig</a></li>
<li><a href='http://luciac.github.io/homework/'>Lucia Conchello</a></li>
<li><a href='http://samday234.github.io/homework/'>Sam Day</a></li>
<li><a href='http://pmgill.github.io/homework/'>Pauline Gill</a></li>
<li><a href='http://tezzica.github.io/homework/'>Tess Hughes</a></li>
<li><a href='http://rachelclaire.github.io/homework/'>Rachel Ladine</a></li>
<li><a href='http://eblee.github.io/homework/'>Esther Lee</a></li>
<li><a href='http://dfmonteiro2.github.io/homework/'>Douglas Monteiro</a></li>
<li><a href='http://montakan.github.io/homework/'>Montakan Namthong</a></li>
<li><a href='http://yoongramcan.github.io/homework/'>Pink Nye</a></li>
<li><a href='http://krystalperez.github.io/homework/'>Krystal Perez</a></li>
<li><a href='http://josephsoto.github.io/homework/'>Joe Soto</a></li>
<li><a href='http://ystrohm.github.io/homework/'>Yuri Strohm</a></li>
<li><a href='http://amyszatkowski.github.io/homework/'>Amy Szatkowski</a></li>
<li><a href='http://athoen.github.io/homework/'>Andrea Thoen</a></li>
</ul>
</article>
<script src='dist/coderdeck.min.js'></script>
<!-- Prettify -->
<script src="src/prettify.js"></script>
<script>
$(function() {
$.deck('.slide');
});
</script>
</body>
</html>
|
{
"content_hash": "b4dfccfb41248f31a7bdff7b64304ec5",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 105,
"avg_line_length": 29.59504132231405,
"alnum_prop": 0.615470538955599,
"repo_name": "marthar/webspring-2015",
"id": "df76de9b9a0671931ee36cae42384faae756205f",
"size": "3581",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "homework-friday.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "32702"
},
{
"name": "HTML",
"bytes": "231621"
},
{
"name": "JavaScript",
"bytes": "694170"
},
{
"name": "Makefile",
"bytes": "3774"
}
],
"symlink_target": ""
}
|
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_121) on Sat Feb 04 22:35:33 MST 2017 -->
<title>E-Index</title>
<meta name="date" content="2017-02-04">
<link rel="stylesheet" type="text/css" href="../stylesheet.css" title="Style">
<script type="text/javascript" src="../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="E-Index";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li>Use</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-3.html">Prev Letter</a></li>
<li><a href="index-5.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-4.html" target="_top">Frames</a></li>
<li><a href="index-4.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="contentContainer"><a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">E</a> <a href="index-5.html">G</a> <a href="index-6.html">O</a> <a href="index-7.html">P</a> <a name="I:E">
<!-- -->
</a>
<h2 class="title">E</h2>
<dl>
<dt><span class="memberNameLink"><a href="../ca/ljz/winter/utils/EncryptionUtils.html#encrypt-java.lang.String-java.lang.String-">encrypt(String, String)</a></span> - Static method in class ca.ljz.winter.utils.<a href="../ca/ljz/winter/utils/EncryptionUtils.html" title="class in ca.ljz.winter.utils">EncryptionUtils</a></dt>
<dd> </dd>
<dt><a href="../ca/ljz/winter/utils/EncryptionUtils.html" title="class in ca.ljz.winter.utils"><span class="typeNameLink">EncryptionUtils</span></a> - Class in <a href="../ca/ljz/winter/utils/package-summary.html">ca.ljz.winter.utils</a></dt>
<dd> </dd>
<dt><span class="memberNameLink"><a href="../ca/ljz/winter/utils/EncryptionUtils.html#EncryptionUtils--">EncryptionUtils()</a></span> - Constructor for class ca.ljz.winter.utils.<a href="../ca/ljz/winter/utils/EncryptionUtils.html" title="class in ca.ljz.winter.utils">EncryptionUtils</a></dt>
<dd> </dd>
</dl>
<a href="index-1.html">A</a> <a href="index-2.html">B</a> <a href="index-3.html">C</a> <a href="index-4.html">E</a> <a href="index-5.html">G</a> <a href="index-6.html">O</a> <a href="index-7.html">P</a> </div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../overview-summary.html">Overview</a></li>
<li>Package</li>
<li>Class</li>
<li>Use</li>
<li><a href="../overview-tree.html">Tree</a></li>
<li><a href="../deprecated-list.html">Deprecated</a></li>
<li class="navBarCell1Rev">Index</li>
<li><a href="../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li><a href="index-3.html">Prev Letter</a></li>
<li><a href="index-5.html">Next Letter</a></li>
</ul>
<ul class="navList">
<li><a href="../index.html?index-files/index-4.html" target="_top">Frames</a></li>
<li><a href="index-4.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
</body>
</html>
|
{
"content_hash": "db1bd792af0203b84770cc3b7a6f726e",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 325,
"avg_line_length": 38,
"alnum_prop": 0.6327661258409181,
"repo_name": "ljzca/Winter",
"id": "62acd2ed251fef77dfbb7b509896f2ac635e2d58",
"size": "5054",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "winter-core/doc/index-files/index-4.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "25684"
},
{
"name": "HTML",
"bytes": "637615"
},
{
"name": "Java",
"bytes": "26383"
},
{
"name": "JavaScript",
"bytes": "1654"
}
],
"symlink_target": ""
}
|
define([
'./GLConsts',
'./Utilities',
], function (
glc,
util) {
"use strict";
const info = {};
var UIType = {
ENUM: 0, // a specific enum
ARRAY: 1, // array of values (tightly packed)
BOOL: 2,
LONG: 3,
ULONG: 4,
COLORMASK: 5, // 4 bools
OBJECT: 6, // some WebGL object (texture/program/etc)
WH: 7, // width x height (array with 2 values)
RECT: 8, // x, y, w, h (array with 4 values)
STRING: 9, // some arbitrary string
COLOR: 10, // 4 floats
FLOAT: 11,
BITMASK: 12, // 32bit boolean mask
RANGE: 13, // 2 floats
MATRIX: 14 // 2x2, 3x3, or 4x4 matrix
};
var UIInfo = function (type, values) {
this.type = type;
this.values = values;
};
var FunctionType = {
GENERIC: 0,
DRAW: 1
};
var FunctionInfo = function (staticgl, name, returnType, args, type) {
this.name = name;
this.returnType = returnType;
this.args = args;
this.type = type;
};
FunctionInfo.prototype.getArgs = function (call) {
return this.args;
};
var FunctionParam = function (staticgl, name, ui) {
this.name = name;
this.ui = ui;
};
const textureTypes = new UIInfo(UIType.ENUM, [
"BYTE",
"FLOAT",
"FLOAT_32_UNSIGNED_INT_24_8_REV",
"HALF_FLOAT",
"INT",
"SHORT",
"UNSIGNED_BYTE",
"UNSIGNED_INT",
"UNSIGNED_INT_10F_11F_11F_REV",
"UNSIGNED_INT_24_8",
"UNSIGNED_INT_2_10_10_10_REV",
"UNSIGNED_INT_5_9_9_9_REV",
"UNSIGNED_SHORT",
"UNSIGNED_INT",
"UNSIGNED_SHORT_4_4_4_4",
"UNSIGNED_SHORT_5_5_5_1",
"UNSIGNED_SHORT_5_6_5",
]);
const unsizedColorTextureFormats = [
"RGB",
"RGBA",
"LUMINANCE_ALPHA",
"LUMINANCE",
"ALPHA",
];
const sizedColorTextureFormats = [
"RED",
"RED_INTEGER",
"RG",
"RG_INTEGER",
"RGB",
"RGB_INTEGER",
"RGBA",
"RGBA_INTEGER",
];
const sizedDepthTextureFormats = [
"DEPTH_COMPONENT",
"DEPTH_STENCIL",
];
const textureFormats = [
...unsizedColorTextureFormats,
...sizedColorTextureFormats,
...sizedDepthTextureFormats,
];
const drawModes = [
"POINTS",
"LINE_STRIP",
"LINE_LOOP",
"LINES",
"TRIANGLES",
"TRIANGLE_STRIP",
"TRIANGLE_FAN",
];
const elementTypes = [
"UNSIGNED_BYTE",
"UNSIGNED_SHORT",
"UNSIGNED_INT",
];
const texParamNames = [
"TEXTURE_BASE_LEVEL",
"TEXTURE_COMPARE_FUNC",
"TEXTURE_COMPARE_MODE",
"TEXTURE_MIN_FILTER",
"TEXTURE_MAG_FILTER",
"TEXTURE_MIN_LOD",
"TEXTURE_MAX_LOD",
"TEXTURE_MAX_LEVEL",
"TEXTURE_WRAP_S",
"TEXTURE_WRAP_T",
"TEXTURE_WRAP_R",
"TEXTURE_MAX_ANISOTROPY_EXT",
];
const samplerParamNames = [
"TEXTURE_WRAP_S",
"TEXTURE_WRAP_T",
"TEXTURE_WRAP_R",
"TEXTURE_MIN_FILTER",
"TEXTURE_MAG_FILTER",
"TEXTURE_MIN_LOD",
"TEXTURE_MAX_LOD",
"TEXTURE_COMPARE_MODE",
"TEXTURE_COMPARE_FUNC",
];
const bufferTargets = [
"ARRAY_BUFFER",
"COPY_READ_BUFFER",
"COPY_WRITE_BUFFER",
"ELEMENT_ARRAY_BUFFER",
"PIXEL_PACK_BUFFER",
"PIXEL_UNPACK_BUFFER",
"TRANSFORM_FEEDBACK_BUFFER",
"UNIFORM_BUFFER",
];
const framebufferTargets = [
"DRAW_FRAMEBUFFER",
"READ_FRAMEBUFFER",
"FRAMEBUFFER",
];
const texture2DTargets = [
"TEXTURE_2D",
"TEXTURE_CUBE_MAP",
];
const texture3DTargets = [
"TEXTURE_3D",
"TEXTURE_2D_ARRAY",
];
const bindTextureTargets = [
...texture2DTargets,
...texture3DTargets,
];
const faceTextureTargets = [
"TEXTURE_2D",
"TEXTURE_CUBE_MAP_POSITIVE_X",
"TEXTURE_CUBE_MAP_NEGATIVE_X",
"TEXTURE_CUBE_MAP_POSITIVE_Y",
"TEXTURE_CUBE_MAP_NEGATIVE_Y",
"TEXTURE_CUBE_MAP_POSITIVE_Z",
"TEXTURE_CUBE_MAP_NEGATIVE_Z",
];
const colorAttachments = [
"COLOR_ATTACHMENT0",
"COLOR_ATTACHMENT1",
"COLOR_ATTACHMENT2",
"COLOR_ATTACHMENT3",
"COLOR_ATTACHMENT4",
"COLOR_ATTACHMENT5",
"COLOR_ATTACHMENT6",
"COLOR_ATTACHMENT7",
"COLOR_ATTACHMENT8",
"COLOR_ATTACHMENT9",
"COLOR_ATTACHMENT10",
"COLOR_ATTACHMENT11",
"COLOR_ATTACHMENT12",
"COLOR_ATTACHMENT13",
"COLOR_ATTACHMENT14",
"COLOR_ATTACHMENT15",
];
const attachments = [
...colorAttachments,
"DEPTH_ATTACHMENT",
"STENCIL_ATTACHMENT",
"DEPTH_STENCIL_ATTACHMENT",
];
const colorRenderableFormats = [
"R8",
"R8UI",
"R8I",
"R16UI",
"R16I",
"R32UI",
"R32I",
"RG8",
"RG8UI",
"RG8I",
"RG16UI",
"RG16I",
"RG32UI",
"RG32I",
"RGB8",
"RGB565",
"RGBA8",
"SRGB8_ALPHA8",
"RGB5_A1",
"RGBA4",
"RGB10_A2",
"RGBA8UI",
"RGBA8I",
"RGB10_A2UI",
"RGBA16UI",
"RGBA16I",
"RGBA32I",
"RGBA32UI",
"RGBA16F",
"RGBA32F"
];
const depthRenderableFormats = [
"DEPTH_COMPONENT16",
"DEPTH_COMPONENT24",
"DEPTH_COMPONENT32F",
];
const stencilRenderableFormats = [
"DEPTH24_STENCIL8",
"DEPTH32F_STENCIL8",
"STENCIL_INDEX8",
];
const renderableFormats = [
...colorRenderableFormats,
...depthRenderableFormats,
...stencilRenderableFormats,
];
const unsizedColorTextureInternalFormats = [
"RGB",
"RGBA",
"LUMINANCE_ALPHA",
"LUMINANCE",
"ALPHA",
];
const sizedColorTextureInternalFormats = [
"R8",
"R8_SNORM",
"R16F",
"R32F",
"R8UI",
"R8I",
"R16UI",
"R16I",
"R32UI",
"R32I",
"RG8",
"RG8_SNORM",
"RG16F",
"RG32F",
"RG8UI",
"RG8I",
"RG16UI",
"RG16I",
"RG32UI",
"RG32I",
"RGB8",
"SRGB8",
"RGB565",
"RGB8_SNORM",
"R11F_G11F_B10F",
"RGB9_E5",
"RGB16F",
"RGB32F",
"RGB8UI",
"RGB8I",
"RGB16UI",
"RGB16I",
"RGB32UI",
"RGB32I",
"RGBA8",
"SRGB8_ALPHA8",
"RGBA8_SNORM",
"RGB5_A1",
"RGBA4",
"RGB10_A2",
"RGBA16F",
"RGBA32F",
"RGBA8UI",
"RGBA8I",
"RGB10_A2UI",
"RGBA16UI",
"RGBA16I",
"RGBA32I",
"RGBA32UI",
];
const depthTextureInternalFormats = [
"DEPTH_COMPONENT16",
"DEPTH_COMPONENT24",
"DEPTH_COMPONENT32F",
"DEPTH24_STENCIL8",
"DEPTH32F_STENCIL8",
];
const compressedTextureInternalFormats = [
"COMPRESSED_R11_EAC",
"COMPRESSED_SIGNED_R11_EAC",
"COMPRESSED_RG11_EAC",
"COMPRESSED_SIGNED_RG11_EAC",
"COMPRESSED_RGB8_ETC2",
"COMPRESSED_SRGB8_ETC2",
"COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2",
"COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2",
"COMPRESSED_RGBA8_ETC2_EAC",
"COMPRESSED_SRGB8_ALPHA8_ETC2_EAC",
];
const sizedTextureInternalFormats = [
...sizedColorTextureInternalFormats,
...depthTextureInternalFormats,
...compressedTextureInternalFormats,
];
const allUncompressedTextureInternalFormats = [
...unsizedColorTextureInternalFormats,
...sizedColorTextureInternalFormats,
...depthTextureInternalFormats,
];
const allTextureInternalFormats = [
...unsizedColorTextureInternalFormats,
...sizedColorTextureInternalFormats,
...depthTextureInternalFormats,
];
const queryTargets = [
"ANY_SAMPLES_PASSED",
"ANY_SAMPLES_PASSED_CONSERVATIVE",
"TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN",
];
const readBufferEnums = [
"BACK",
"NONE",
...colorAttachments,
];
const textureUnits = [
"TEXTURE0",
"TEXTURE1",
"TEXTURE2",
"TEXTURE3",
"TEXTURE4",
"TEXTURE5",
"TEXTURE6",
"TEXTURE7",
"TEXTURE8",
"TEXTURE9",
"TEXTURE10",
"TEXTURE11",
"TEXTURE12",
"TEXTURE13",
"TEXTURE14",
"TEXTURE15",
"TEXTURE16",
"TEXTURE17",
"TEXTURE18",
"TEXTURE19",
"TEXTURE20",
"TEXTURE21",
"TEXTURE22",
"TEXTURE23",
"TEXTURE24",
"TEXTURE25",
"TEXTURE26",
"TEXTURE27",
"TEXTURE28",
"TEXTURE29",
"TEXTURE30",
"TEXTURE31",
];
const blendEquations = [
"FUNC_ADD",
"FUNC_SUBTRACT",
"FUNC_REVERSE_SUBTRACT",
"MIN",
"MAX",
];
const capabilities = [
"BLEND",
"CULL_FACE",
"DEPTH_TEST",
"DITHER",
"POLYGON_OFFSET_FILL",
"PRIMITIVE_RESTART_FIXED_INDEX",
"RASTERIZER_DISCARD",
"SAMPLE_ALPHA_TO_COVERAGE",
"SAMPLE_COVERAGE",
"SCISSOR_TEST",
"STENCIL_TEST",
];
function setupFunctionInfos(gl) {
if (info.functions) {
return;
}
var functionInfos = [
new FunctionInfo(gl, "activeTexture", null, [
new FunctionParam(gl, "texture", new UIInfo(UIType.ENUM, textureUnits))
]),
new FunctionInfo(gl, "attachShader", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "shader", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "bindAttribLocation", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "index", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "name", new UIInfo(UIType.STRING))
]),
new FunctionInfo(gl, "bindBuffer", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, bufferTargets)),
new FunctionParam(gl, "buffer", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "bindFramebuffer", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, framebufferTargets)),
new FunctionParam(gl, "framebuffer", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "bindRenderbuffer", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, ["RENDERBUFFER"])),
new FunctionParam(gl, "renderbuffer", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "bindTexture", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, bindTextureTargets)),
new FunctionParam(gl, "texture", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "blendColor", null, new UIInfo(UIType.COLOR)),
new FunctionInfo(gl, "blendEquation", null, [
new FunctionParam(gl, "mode", new UIInfo(UIType.ENUM, blendEquations)),
]),
new FunctionInfo(gl, "blendEquationSeparate", null, [
new FunctionParam(gl, "modeRGB", new UIInfo(UIType.ENUM, blendEquations)),
new FunctionParam(gl, "modeAlpha", new UIInfo(UIType.ENUM, blendEquations)),
]),
new FunctionInfo(gl, "blendFunc", null, [
new FunctionParam(gl, "sfactor", new UIInfo(UIType.ENUM, ["ZERO", "ONE", "SRC_COLOR", "ONE_MINUS_SRC_COLOR", "DST_COLOR", "ONE_MINUS_DST_COLOR", "SRC_ALPHA", "ONE_MINUS_SRC_ALPHA", "DST_ALPHA", "ONE_MINUS_DST_ALPHA", "CONSTANT_COLOR", "ONE_MINUS_CONSTANT_COLOR", "CONSTANT_ALPHA", "ONE_MINUS_CONSTANT_ALPHA", "SRC_ALPHA_SATURATE"])),
new FunctionParam(gl, "dfactor", new UIInfo(UIType.ENUM, ["ZERO", "ONE", "SRC_COLOR", "ONE_MINUS_SRC_COLOR", "DST_COLOR", "ONE_MINUS_DST_COLOR", "SRC_ALPHA", "ONE_MINUS_SRC_ALPHA", "DST_ALPHA", "ONE_MINUS_DST_ALPHA. GL_CONSTANT_COLOR", "ONE_MINUS_CONSTANT_COLOR", "CONSTANT_ALPHA", "ONE_MINUS_CONSTANT_ALPHA"]))
]),
new FunctionInfo(gl, "blendFuncSeparate", null, [
new FunctionParam(gl, "srcRGB", new UIInfo(UIType.ENUM, ["ZERO", "ONE", "SRC_COLOR", "ONE_MINUS_SRC_COLOR", "DST_COLOR", "ONE_MINUS_DST_COLOR", "SRC_ALPHA", "ONE_MINUS_SRC_ALPHA", "DST_ALPHA", "ONE_MINUS_DST_ALPHA", "CONSTANT_COLOR", "ONE_MINUS_CONSTANT_COLOR", "CONSTANT_ALPHA", "ONE_MINUS_CONSTANT_ALPHA", "SRC_ALPHA_SATURATE"])),
new FunctionParam(gl, "dstRGB", new UIInfo(UIType.ENUM, ["ZERO", "ONE", "SRC_COLOR", "ONE_MINUS_SRC_COLOR", "DST_COLOR", "ONE_MINUS_DST_COLOR", "SRC_ALPHA", "ONE_MINUS_SRC_ALPHA", "DST_ALPHA", "ONE_MINUS_DST_ALPHA. GL_CONSTANT_COLOR", "ONE_MINUS_CONSTANT_COLOR", "CONSTANT_ALPHA", "ONE_MINUS_CONSTANT_ALPHA"])),
new FunctionParam(gl, "srcAlpha", new UIInfo(UIType.ENUM, ["ZERO", "ONE", "SRC_COLOR", "ONE_MINUS_SRC_COLOR", "DST_COLOR", "ONE_MINUS_DST_COLOR", "SRC_ALPHA", "ONE_MINUS_SRC_ALPHA", "DST_ALPHA", "ONE_MINUS_DST_ALPHA", "CONSTANT_COLOR", "ONE_MINUS_CONSTANT_COLOR", "CONSTANT_ALPHA", "ONE_MINUS_CONSTANT_ALPHA", "SRC_ALPHA_SATURATE"])),
new FunctionParam(gl, "dstAlpha", new UIInfo(UIType.ENUM, ["ZERO", "ONE", "SRC_COLOR", "ONE_MINUS_SRC_COLOR", "DST_COLOR", "ONE_MINUS_DST_COLOR", "SRC_ALPHA", "ONE_MINUS_SRC_ALPHA", "DST_ALPHA", "ONE_MINUS_DST_ALPHA. GL_CONSTANT_COLOR", "ONE_MINUS_CONSTANT_COLOR", "CONSTANT_ALPHA", "ONE_MINUS_CONSTANT_ALPHA"]))
]),
new FunctionInfo(gl, "bufferData", null, null), // handled specially below
new FunctionInfo(gl, "bufferSubData", null, null), // handled specially below
new FunctionInfo(gl, "checkFramebufferStatus", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, framebufferTargets))
]),
new FunctionInfo(gl, "clear", null, [
new FunctionParam(gl, "mask", new UIInfo(UIType.BITMASK, ["COLOR_BUFFER_BIT", "DEPTH_BUFFER_BIT", "STENCIL_BUFFER_BIT"]))
]),
new FunctionInfo(gl, "clearColor", null, new UIInfo(UIType.COLOR)),
new FunctionInfo(gl, "clearDepth", null, [
new FunctionParam(gl, "depth", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "clearStencil", null, [
new FunctionParam(gl, "s", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "colorMask", null, new UIInfo(UIType.COLORMASK)),
new FunctionInfo(gl, "compileShader", null, [
new FunctionParam(gl, "shader", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "copyBufferSubData", null, [
new FunctionParam(gl, "readTarget", new UIInfo(UIType.ENUM, bufferTargets)),
new FunctionParam(gl, "writeTarget", new UIInfo(UIType.ENUM, bufferTargets)),
new FunctionParam(gl, "readOffset", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "writeOffset",new UIInfo(UIType.LONG)),
new FunctionParam(gl, "size", new UIInfo(UIType.LONG)),
]),
new FunctionInfo(gl, "copyTexImage2D", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, faceTextureTargets)),
new FunctionParam(gl, "level", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "internalformat", new UIInfo(UIType.ENUM, ["ALPHA", "LUMINANCE", "LUMINANCE_ALPHA", "RGB", "RGBA", "R8", "RG8", "RGB565", "RGB8", "RGBA4", "RGB5_A1", "RGBA8", "RGB10_A2", "SRGB8", "SRGB8_ALPHA8", "R8I", "R8UI", "R16I", "R16UI", "R32I", "R32UI", "RG8I", "RG8UI", "RG16I", "RG16UI", "RG32I", "RG32UI", "RGBA8I", "RGBA8UI", "RGB10_A2UI", "RGBA16I", "RGBA16UI", "RGBA32I", "RGBA32UI"])),
new FunctionParam(gl, "x", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "y", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "width", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "height", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "border", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "copyTexSubImage2D", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, faceTextureTargets)),
new FunctionParam(gl, "level", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "xoffset", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "yoffset", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "x", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "y", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "width", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "height", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "createBuffer", null, [
]),
new FunctionInfo(gl, "createFramebuffer", null, [
]),
new FunctionInfo(gl, "createProgram", null, [
]),
new FunctionInfo(gl, "createRenderbuffer", null, [
]),
new FunctionInfo(gl, "createShader", null, [
new FunctionParam(gl, "type", new UIInfo(UIType.ENUM, ["VERTEX_SHADER", "FRAGMENT_SHADER"]))
]),
new FunctionInfo(gl, "createTexture", null, [
]),
new FunctionInfo(gl, "cullFace", null, [
new FunctionParam(gl, "mode", new UIInfo(UIType.ENUM, ["FRONT", "BACK", "FRONT_AND_BACK"]))
]),
new FunctionInfo(gl, "deleteBuffer", null, [
new FunctionParam(gl, "buffer", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "deleteFramebuffer", null, [
new FunctionParam(gl, "framebuffer", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "deleteProgram", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "deleteRenderbuffer", null, [
new FunctionParam(gl, "renderbuffer", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "deleteShader", null, [
new FunctionParam(gl, "shader", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "deleteTexture", null, [
new FunctionParam(gl, "texture", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "depthFunc", null, [
new FunctionParam(gl, "func", new UIInfo(UIType.ENUM, ["NEVER", "LESS", "LEQUAL", "GREATER", "GEQUAL", "EQUAL", "NOTEQUAL", "ALWAYS"]))
]),
new FunctionInfo(gl, "depthMask", null, [
new FunctionParam(gl, "flag", new UIInfo(UIType.BOOL))
]),
new FunctionInfo(gl, "depthRange", null, [
new FunctionParam(gl, "zNear", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "zFar", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "detachShader", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "shader", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "disable", null, [
new FunctionParam(gl, "cap", new UIInfo(UIType.ENUM, capabilities)),
]),
new FunctionInfo(gl, "disableVertexAttribArray", null, [
new FunctionParam(gl, "index", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "drawArrays", null, [
new FunctionParam(gl, "mode", new UIInfo(UIType.ENUM, drawModes)),
new FunctionParam(gl, "first", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "count", new UIInfo(UIType.LONG))
], FunctionType.DRAW),
new FunctionInfo(gl, "drawElements", null, [
new FunctionParam(gl, "mode", new UIInfo(UIType.ENUM, drawModes)),
new FunctionParam(gl, "count", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "type", new UIInfo(UIType.ENUM, elementTypes)),
new FunctionParam(gl, "offset", new UIInfo(UIType.LONG))
], FunctionType.DRAW),
new FunctionInfo(gl, "enable", null, [
new FunctionParam(gl, "cap", new UIInfo(UIType.ENUM, capabilities)),
]),
new FunctionInfo(gl, "enableVertexAttribArray", null, [
new FunctionParam(gl, "index", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "finish", null, [
]),
new FunctionInfo(gl, "flush", null, [
]),
new FunctionInfo(gl, "framebufferRenderbuffer", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, framebufferTargets)),
new FunctionParam(gl, "attachment", new UIInfo(UIType.ENUM, attachments)),
new FunctionParam(gl, "renderbuffertarget", new UIInfo(UIType.ENUM, ["RENDERBUFFER"])),
new FunctionParam(gl, "renderbuffer", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "framebufferTexture2D", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, framebufferTargets)),
new FunctionParam(gl, "attachment", new UIInfo(UIType.ENUM, attachments)),
new FunctionParam(gl, "textarget", new UIInfo(UIType.ENUM, faceTextureTargets)),
new FunctionParam(gl, "texture", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "level", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "frontFace", null, [
new FunctionParam(gl, "mode", new UIInfo(UIType.ENUM, ["CW", "CCW"]))
]),
new FunctionInfo(gl, "generateMipmap", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, bindTextureTargets))
]),
new FunctionInfo(gl, "getActiveAttrib", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "index", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "getActiveUniform", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "index", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "getAttachedShaders", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "getAttribLocation", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "name", new UIInfo(UIType.STRING))
]),
new FunctionInfo(gl, "getParameter", null, [
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, [
"ACTIVE_TEXTURE",
"ALIASED_LINE_WIDTH_RANGE",
"ALIASED_POINT_SIZE_RANGE",
"ALPHA_BITS",
"ARRAY_BUFFER_BINDING",
"BLEND",
"BLEND_COLOR",
"BLEND_DST_ALPHA",
"BLEND_DST_RGB",
"BLEND_EQUATION_ALPHA",
"BLEND_EQUATION_RGB",
"BLEND_SRC_ALPHA",
"BLEND_SRC_RGB",
"BLUE_BITS",
"COLOR_CLEAR_VALUE",
"COLOR_WRITEMASK",
"COMPRESSED_TEXTURE_FORMATS",
"COPY_READ_BUFFER_BINDING",
"COPY_WRITE_BUFFER_BINDING",
"CULL_FACE",
"CULL_FACE_MODE",
"CURRENT_PROGRAM",
"DEPTH_BITS",
"DEPTH_CLEAR_VALUE",
"DEPTH_FUNC",
"DEPTH_RANGE",
"DEPTH_TEST",
"DEPTH_WRITEMASK",
"DITHER",
"DRAW_BUFFER0",
"DRAW_BUFFER1",
"DRAW_BUFFER2",
"DRAW_BUFFER3",
"DRAW_BUFFER4",
"DRAW_BUFFER5",
"DRAW_BUFFER6",
"DRAW_BUFFER7",
"DRAW_BUFFER8",
"DRAW_BUFFER9",
"DRAW_BUFFER10",
"DRAW_BUFFER11",
"DRAW_BUFFER12",
"DRAW_BUFFER13",
"DRAW_BUFFER14",
"DRAW_BUFFER15",
"DRAW_FRAMEBUFFER_BINDING",
"ELEMENT_ARRAY_BUFFER_BINDING",
"FRAGMENT_SHADER_DERIVATIVE_HINT",
"FRONT_FACE",
"GENERATE_MIPMAP_HINT",
"GREEN_BITS",
"IMPLEMENTATION_COLOR_READ_FORMAT",
"IMPLEMENTATION_COLOR_READ_TYPE",
"LINE_WIDTH",
"MAJOR_VERSION",
"MAX_3D_TEXTURE_SIZE",
"MAX_ARRAY_TEXTURE_LAYERS",
"MAX_COLOR_ATTACHMENTS",
"MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS",
"MAX_COMBINED_TEXTURE_IMAGE_UNITS",
"MAX_COMBINED_UNIFORM_BLOCKS",
"MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS",
"MAX_CUBE_MAP_TEXTURE_SIZE",
"MAX_DRAW_BUFFERS",
"MAX_ELEMENT_INDEX",
"MAX_ELEMENTS_INDICES",
"MAX_ELEMENTS_VERTICES",
"MAX_FRAGMENT_INPUT_COMPONENTS",
"MAX_FRAGMENT_UNIFORM_BLOCKS",
"MAX_FRAGMENT_UNIFORM_COMPONENTS",
"MAX_FRAGMENT_UNIFORM_VECTORS",
"MAX_PROGRAM_TEXEL_OFFSET",
"MAX_RENDERBUFFER_SIZE",
"MAX_SAMPLES",
"MAX_SERVER_WAIT_TIMEOUT",
"MAX_TEXTURE_IMAGE_UNITS",
"MAX_TEXTURE_LOD_BIAS",
"MAX_TEXTURE_SIZE",
"MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS",
"MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS",
"MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS",
"MAX_UNIFORM_BLOCK_SIZE",
"MAX_UNIFORM_BUFFER_BINDINGS",
"MAX_VARYING_COMPONENTS",
"MAX_VARYING_VECTORS",
"MAX_VERTEX_ATTRIBS",
"MAX_VERTEX_TEXTURE_IMAGE_UNITS",
"MAX_VERTEX_OUTPUT_COMPONENTS",
"MAX_VERTEX_UNIFORM_BLOCKS",
"MAX_VERTEX_UNIFORM_COMPONENTS",
"MAX_VERTEX_UNIFORM_VECTORS",
"MAX_VIEWPORT_DIMS",
"MIN_PROGRAM_TEXEL_OFFSET",
"MINOR_VERSION",
"NUM_COMPRESSED_TEXTURE_FORMATS",
"NUM_EXTENSIONS",
"NUM_PROGRAM_BINARY_FORMATS",
"NUM_SHADER_BINARY_FORMATS",
"PACK_ALIGNMENT",
"PACK_ROW_LENGTH",
"PACK_SKIP_PIXELS",
"PACK_SKIP_ROWS",
"PIXEL_PACK_BUFFER_BINDING",
"PIXEL_UNPACK_BUFFER_BINDING",
"POLYGON_OFFSET_FACTOR",
"POLYGON_OFFSET_FILL",
"POLYGON_OFFSET_UNITS",
"PRIMITIVE_RESTART_FIXED_INDEX",
"PROGRAM_BINARY_FORMATS",
"RASTERIZER_DISCARD",
"READ_BUFFER",
"READ_FRAMEBUFFER_BINDING",
"RED_BITS",
"RENDERBUFFER_BINDING",
"SAMPLE_ALPHA_TO_COVERAGE",
"SAMPLE_BUFFERS",
"SAMPLE_COVERAGE",
"SAMPLE_COVERAGE_INVERT",
"SAMPLE_COVERAGE_VALUE",
"SAMPLER_BINDING",
"SAMPLES",
"SCISSOR_BOX",
"SCISSOR_TEST",
"SHADER_BINARY_FORMATS",
"SHADER_COMPILER",
"STENCIL_BACK_FAIL",
"STENCIL_BACK_FUNC",
"STENCIL_BACK_PASS_DEPTH_FAIL",
"STENCIL_BACK_PASS_DEPTH_PASS",
"STENCIL_BACK_REF",
"STENCIL_BACK_VALUE_MASK",
"STENCIL_BACK_WRITEMASK",
"STENCIL_BITS",
"STENCIL_CLEAR_VALUE",
"STENCIL_FAIL",
"STENCIL_FUNC",
"STENCIL_PASS_DEPTH_FAIL",
"STENCIL_PASS_DEPTH_PASS",
"STENCIL_REF",
"STENCIL_TEST",
"STENCIL_VALUE_MASK",
"STENCIL_WRITEMASK",
"SUBPIXEL_BITS",
"TEXTURE_BINDING_2D",
"TEXTURE_BINDING_2D_ARRAY",
"TEXTURE_BINDING_3D",
"TEXTURE_BINDING_CUBE_MAP",
"TRANSFORM_FEEDBACK_BINDING",
"TRANSFORM_FEEDBACK_ACTIVE",
"TRANSFORM_FEEDBACK_BUFFER_BINDING",
"TRANSFORM_FEEDBACK_PAUSED",
"UNIFORM_BUFFER_BINDING",
"UNIFORM_BUFFER_START",
"UNPACK_ROW_LENGTH",
"UNPACK_SKIP_ROWS",
"VERTEX_ARRAY_BINDING",
])),
]),
new FunctionInfo(gl, "getBufferParameter", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, bufferTargets)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["BUFFER_ACCESS_FLAGS", "BUFFER_MAPPED", "BUFFER_MAP_LENGTH", "BUFFER_MAP_OFFSET", "BUFFER_SIZE", "BUFFER_USAGE"])),
]),
new FunctionInfo(gl, "getBufferSubData", null, null), // handled specially below
new FunctionInfo(gl, "getError", null, [
]),
new FunctionInfo(gl, "getSupportedExtensions", null, [
]),
new FunctionInfo(gl, "getExtension", null, [
new FunctionParam(gl, "name", new UIInfo(UIType.STRING))
]),
new FunctionInfo(gl, "getFramebufferAttachmentParameter", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, framebufferTargets)),
new FunctionParam(gl, "attachment", new UIInfo(UIType.ENUM, attachments)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE", "FRAMEBUFFER_ATTACHMENT_OBJECT_NAME", "FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL", "FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE"]))
]),
new FunctionInfo(gl, "getProgramParameter", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["ACTIVE_ATTRIBUTES", "ACTIVE_ATTRIBUTE_MAX_LENGTH", "ACTIVE_UNIFORMS", "ACTIVE_UNIFORM_BLOCKS", "ACTIVE_UNIFORM_BLOCK_MAX_NAME_LENGTH", "ACTIVE_UNIFORM_MAX_LENGTH", "ATTACHED_SHADERS", "DELETE_STATUS", "INFO_LOG_LENGTH", "LINK_STATUS", "PROGRAM_BINARY_RETRIEVABLE_HINT", "TRANSFORM_FEEDBACK_BUFFER_MODE", "TRANSFORM_FEEDBACK_VARYINGS", "TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH", "VALIDATE_STATUS"]))
]),
new FunctionInfo(gl, "getProgramInfoLog", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "getRenderbufferParameter", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, ["RENDERBUFFER"])),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["RENDERBUFFER_WIDTH", "RENDERBUFFER_HEIGHT", "RENDERBUFFER_INTERNAL_FORMAT", "RENDERBUFFER_RED_SIZE", "RENDERBUFFER_GREEN_SIZE", "RENDERBUFFER_BLUE_SIZE", "RENDERBUFFER_ALPHA_SIZE", "RENDERBUFFER_DEPTH_SIZE", "RENDERBUFFER_STENCIL_SIZE"]))
]),
new FunctionInfo(gl, "getShaderParameter", null, [
new FunctionParam(gl, "shader", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["SHADER_TYPE", "DELETE_STATUS", "COMPILE_STATUS", "INFO_LOG_LENGTH", "SHADER_SOURCE_LENGTH"]))
]),
new FunctionInfo(gl, "getShaderInfoLog", null, [
new FunctionParam(gl, "shader", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "getShaderSource", null, [
new FunctionParam(gl, "shader", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "getTexParameter", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, bindTextureTargets)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, texParamNames))
]),
new FunctionInfo(gl, "getUniform", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)) // TODO: find a way to treat this as an integer? browsers don't like this...
]),
new FunctionInfo(gl, "getUniformLocation", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "name", new UIInfo(UIType.STRING))
]),
new FunctionInfo(gl, "getVertexAttrib", null, [
new FunctionParam(gl, "index", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["VERTEX_ATTRIB_ARRAY_BUFFER_BINDING", "VERTEX_ATTRIB_ARRAY_ENABLED", "VERTEX_ATTRIB_ARRAY_SIZE", "VERTEX_ATTRIB_ARRAY_STRIDE", "VERTEX_ATTRIB_ARRAY_TYPE", "VERTEX_ATTRIB_ARRAY_NORMALIZED", "CURRENT_VERTEX_ATTRIB"]))
]),
new FunctionInfo(gl, "getVertexAttribOffset", null, [
new FunctionParam(gl, "index", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["VERTEX_ATTRIB_ARRAY_POINTER"]))
]),
new FunctionInfo(gl, "hint", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, ["GENERATE_MIPMAP_HINT", "FRAGMENT_SHADER_DERIVATIVE_HINT_OES"])),
new FunctionParam(gl, "mode", new UIInfo(UIType.ENUM, ["FASTEST", "NICEST", "DONT_CARE"]))
]),
new FunctionInfo(gl, "isBuffer", null, [
new FunctionParam(gl, "buffer", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "isEnabled", null, [
new FunctionParam(gl, "cap", new UIInfo(UIType.ENUM, capabilities)),
]),
new FunctionInfo(gl, "isFramebuffer", null, [
new FunctionParam(gl, "framebuffer", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "isProgram", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "isRenderbuffer", null, [
new FunctionParam(gl, "renderbuffer", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "isShader", null, [
new FunctionParam(gl, "shader", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "isTexture", null, [
new FunctionParam(gl, "texture", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "lineWidth", null, [
new FunctionParam(gl, "width", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "linkProgram", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "pixelStorei", null, [
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["PACK_ALIGNMENT", "UNPACK_ALIGNMENT", "UNPACK_COLORSPACE_CONVERSION_WEBGL", "UNPACK_FLIP_Y_WEBGL", "UNPACK_PREMULTIPLY_ALPHA_WEBGL", "PACK_ROW_LENGTH", "PACK_SKIP_PIXELS", "PACK_SKIP_ROWS", "UNPACK_IMAGE_HEIGHT", "UNPACK_ROW_LENGTH", "UNPACK_SKIP_IMAGES", "UNPACK_SKIP_PIXELS", "UNPACK_SKIP_ROWS"])),
new FunctionParam(gl, "param", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "polygonOffset", null, [
new FunctionParam(gl, "factor", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "units", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "readPixels", null, null), // handled specially below[
new FunctionInfo(gl, "renderbufferStorage", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, ["RENDERBUFFER"])),
new FunctionParam(gl, "internalformat", new UIInfo(UIType.ENUM, renderableFormats)),
new FunctionParam(gl, "width", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "height", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "sampleCoverage", null, [
new FunctionParam(gl, "value", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "invert", new UIInfo(UIType.BOOL))
]),
new FunctionInfo(gl, "scissor", null, [
new FunctionParam(gl, "x", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "y", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "width", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "height", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "shaderSource", null, [
new FunctionParam(gl, "shader", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "source", new UIInfo(UIType.STRING))
]),
new FunctionInfo(gl, "stencilFunc", null, [
new FunctionParam(gl, "func", new UIInfo(UIType.ENUM, ["NEVER", "LESS", "LEQUAL", "GREATER", "GEQUAL", "EQUAL", "NOTEQUAL", "ALWAYS"])),
new FunctionParam(gl, "ref", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "mask", new UIInfo(UIType.BITMASK))
]),
new FunctionInfo(gl, "stencilFuncSeparate", null, [
new FunctionParam(gl, "face", new UIInfo(UIType.ENUM, ["FRONT", "BACK", "FRONT_AND_BACK"])),
new FunctionParam(gl, "func", new UIInfo(UIType.ENUM, ["NEVER", "LESS", "LEQUAL", "GREATER", "GEQUAL", "EQUAL", "NOTEQUAL", "ALWAYS"])),
new FunctionParam(gl, "ref", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "mask", new UIInfo(UIType.BITMASK))
]),
new FunctionInfo(gl, "stencilMask", null, [
new FunctionParam(gl, "mask", new UIInfo(UIType.BITMASK))
]),
new FunctionInfo(gl, "stencilMaskSeparate", null, [
new FunctionParam(gl, "face", new UIInfo(UIType.ENUM, ["FRONT", "BACK", "FRONT_AND_BACK"])),
new FunctionParam(gl, "mask", new UIInfo(UIType.BITMASK))
]),
new FunctionInfo(gl, "stencilOp", null, [
new FunctionParam(gl, "fail", new UIInfo(UIType.ENUM, ["KEEP", "ZERO", "REPLACE", "INCR", "INCR_WRAP", "DECR", "DECR_WRAP", "INVERT"])),
new FunctionParam(gl, "zfail", new UIInfo(UIType.ENUM, ["KEEP", "ZERO", "REPLACE", "INCR", "INCR_WRAP", "DECR", "DECR_WRAP", "INVERT"])),
new FunctionParam(gl, "zpass", new UIInfo(UIType.ENUM, ["KEEP", "ZERO", "REPLACE", "INCR", "INCR_WRAP", "DECR", "DECR_WRAP", "INVERT"]))
]),
new FunctionInfo(gl, "stencilOpSeparate", null, [
new FunctionParam(gl, "face", new UIInfo(UIType.ENUM, ["FRONT", "BACK", "FRONT_AND_BACK"])),
new FunctionParam(gl, "fail", new UIInfo(UIType.ENUM, ["KEEP", "ZERO", "REPLACE", "INCR", "INCR_WRAP", "DECR", "DECR_WRAP", "INVERT"])),
new FunctionParam(gl, "zfail", new UIInfo(UIType.ENUM, ["KEEP", "ZERO", "REPLACE", "INCR", "INCR_WRAP", "DECR", "DECR_WRAP", "INVERT"])),
new FunctionParam(gl, "zpass", new UIInfo(UIType.ENUM, ["KEEP", "ZERO", "REPLACE", "INCR", "INCR_WRAP", "DECR", "DECR_WRAP", "INVERT"]))
]),
new FunctionInfo(gl, "texImage2D", null, null), // handled specially below
new FunctionInfo(gl, "texParameterf", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, bindTextureTargets)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, texParamNames)),
new FunctionParam(gl, "param", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "texParameteri", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, bindTextureTargets)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, texParamNames)),
new FunctionParam(gl, "param", new UIInfo(UIType.ENUM, ["NEAREST", "LINEAR", "NEAREST_MIPMAP_NEAREST", "LINEAR_MIPMAP_NEAREST", "NEAREST_MIPMAP_LINEAR", "LINEAR_MIPMAP_LINEAR", "CLAMP_TO_EDGE", "MIRRORED_REPEAT", "REPEAT", "COMPARE_REF_TO_TEXTURE", "LEQUAL", "GEQUAL", "LESS", "GREATER", "EQUAL", "NOTEQUAL", "ALWAYS", "NEVER", "RED", "GREEN", "BLUE", "ALPHA", "ZERO", "ONE"])),
]),
new FunctionInfo(gl, "texSubImage2D", null, null), // handled specially below
new FunctionInfo(gl, "compressedTexImage2D", null, null), // handled specially below
new FunctionInfo(gl, "compressedTexSubImage2D", null, null), // handled specially below
new FunctionInfo(gl, "uniform1f", null, [
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "x", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "uniform1fv", null, null), // handled specially below
new FunctionInfo(gl, "uniform1i", null, [
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "x", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "uniform1iv", null, null), // handled specially below
new FunctionInfo(gl, "uniform2f", null, [
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "x", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "y", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "uniform2fv", null, null), // handled specially below
new FunctionInfo(gl, "uniform2i", null, [
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "x", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "y", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "uniform2iv", null, null), // handled specially below
new FunctionInfo(gl, "uniform3f", null, [
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "x", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "y", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "z", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "uniform3fv", null, null), // handled specially below
new FunctionInfo(gl, "uniform3i", null, [
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "x", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "y", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "z", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "uniform3iv", null, null), // handled specially below
new FunctionInfo(gl, "uniform4f", null, [
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "x", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "y", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "z", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "w", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "uniform4fv", null, null), // handled specially below
new FunctionInfo(gl, "uniform4i", null, [
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "x", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "y", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "z", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "w", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "uniform4iv", null, null), // handled specially below
new FunctionInfo(gl, "uniformMatrix2fv", null, null), // handled specially below
new FunctionInfo(gl, "uniformMatrix3fv", null, null), // handled specially below
new FunctionInfo(gl, "uniformMatrix4fv", null, null), // handled specially below
new FunctionInfo(gl, "useProgram", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "validateProgram", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT))
]),
new FunctionInfo(gl, "vertexAttrib1f", null, [
new FunctionParam(gl, "indx", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "x", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "vertexAttrib1fv", null, [
new FunctionParam(gl, "indx", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "values", new UIInfo(UIType.ARRAY))
]),
new FunctionInfo(gl, "vertexAttrib2f", null, [
new FunctionParam(gl, "indx", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "x", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "y", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "vertexAttrib2fv", null, [
new FunctionParam(gl, "indx", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "values", new UIInfo(UIType.ARRAY))
]),
new FunctionInfo(gl, "vertexAttrib3f", null, [
new FunctionParam(gl, "indx", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "x", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "y", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "z", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "vertexAttrib3fv", null, [
new FunctionParam(gl, "indx", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "values", new UIInfo(UIType.ARRAY))
]),
new FunctionInfo(gl, "vertexAttrib4f", null, [
new FunctionParam(gl, "indx", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "x", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "y", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "z", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "w", new UIInfo(UIType.FLOAT))
]),
new FunctionInfo(gl, "vertexAttrib4fv", null, [
new FunctionParam(gl, "indx", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "values", new UIInfo(UIType.ARRAY))
]),
new FunctionInfo(gl, "vertexAttribPointer", null, [
new FunctionParam(gl, "indx", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "size", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "type", new UIInfo(UIType.ENUM, ["BYTE", "UNSIGNED_BYTE", "SHORT", "UNSIGNED_SHORT", "FIXED", "FLOAT"])),
new FunctionParam(gl, "normalized", new UIInfo(UIType.BOOL)),
new FunctionParam(gl, "stride", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "offset", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "viewport", null, [
new FunctionParam(gl, "x", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "y", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "width", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "height", new UIInfo(UIType.LONG))
]),
new FunctionInfo(gl, "blitFramebuffer", null, [
new FunctionParam(gl, "srcX0", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "srcY0", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "srcX1", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "srcY1", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "dstX0", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "dstY0", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "dstX1", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "dstY1", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "mask", new UIInfo(UIType.BITMASK, ["COLOR_BUFFER_BIT", "DEPTH_BUFFER_BIT", "STENCIL_BUFFER_BIT"])),
new FunctionParam(gl, "filter", new UIInfo(UIType.ENUM, ["NEAREST", "LINEAR"])),
]),
new FunctionInfo(gl, "framebufferTextureLayer", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, framebufferTargets)),
new FunctionParam(gl, "attachment", new UIInfo(UIType.ENUM, attachments)),
new FunctionParam(gl, "texture", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "level", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "layer", new UIInfo(UIType.LONG)),
]),
new FunctionInfo(gl, "invalidateFramebuffer", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, framebufferTargets)),
new FunctionParam(gl, "attachments", new UIInfo(UIType.ARRAY)),
]),
new FunctionInfo(gl, "invalidateSubFramebuffer", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, framebufferTargets)),
new FunctionParam(gl, "attachments", new UIInfo(UIType.ARRAY)),
new FunctionParam(gl, "x", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "y", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "width", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "height", new UIInfo(UIType.LONG)),
]),
new FunctionInfo(gl, "readBuffer", null, [
new FunctionParam(gl, "src", new UIInfo(UIType.ENUM, readBufferEnums)),
]),
/* Renderbuffer objects */
new FunctionInfo(gl, "getInternalformatParameter", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, ["RENDERBUFFER"])),
new FunctionParam(gl, "internalformat", new UIInfo(UIType.ENUM, renderableFormats)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["NUM_SAMPLE_COUNTS", "SAMPLES"])),
]),
new FunctionInfo(gl, "renderbufferStorageMultisample", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, ["RENDERBUFFER"])),
new FunctionParam(gl, "samples", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "internalformat", new UIInfo(UIType.ENUM, renderableFormats)),
new FunctionParam(gl, "width", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "height", new UIInfo(UIType.LONG)),
]),
/* Texture objects */
new FunctionInfo(gl, "texStorage2D", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, texture2DTargets)),
new FunctionParam(gl, "levels", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "internalformat", new UIInfo(UIType.ENUM, sizedTextureInternalFormats)),
new FunctionParam(gl, "width", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "height", new UIInfo(UIType.LONG)),
]),
new FunctionInfo(gl, "texStorage3D", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, texture3DTargets)),
new FunctionParam(gl, "levels", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "internalformat", new UIInfo(UIType.ENUM, sizedTextureInternalFormats)),
new FunctionParam(gl, "width", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "height", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "depth", new UIInfo(UIType.LONG)),
]),
new FunctionInfo(gl, "copyTexSubImage3D", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, texture3DTargets)),
new FunctionParam(gl, "level", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "xoffset", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "yoffset", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "zoffset", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "x", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "y", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "width", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "height", new UIInfo(UIType.LONG)),
]),
/* Programs and shaders */
new FunctionInfo(gl, "GLint getFragDataLocation", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "name", new UIInfo(UIType.STRING)),
]),
new FunctionInfo(gl, "uniform1ui", null, [
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "v0", new UIInfo(UIType.ULONG)),
]),
new FunctionInfo(gl, "uniform2ui", null, [
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "v0", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "v1", new UIInfo(UIType.ULONG)),
]),
new FunctionInfo(gl, "uniform3ui", null, [
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "v0", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "v1", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "v2", new UIInfo(UIType.ULONG)),
]),
new FunctionInfo(gl, "uniform4ui", null, [
new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "v0", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "v1", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "v2", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "v3", new UIInfo(UIType.ULONG)),
]),
/* Vertex attribs */
new FunctionInfo(gl, "vertexAttribI4i", null, [
new FunctionParam(gl, "index", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "x", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "y", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "z", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "w", new UIInfo(UIType.LONG)),
]),
new FunctionInfo(gl, "vertexAttribI4iv", null, [
new FunctionParam(gl, "index", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "values", new UIInfo(UIType.ARRAY)),
]),
new FunctionInfo(gl, "vertexAttribI4ui", null, [
new FunctionParam(gl, "index", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "x", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "y", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "z", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "w", new UIInfo(UIType.ULONG)),
]),
new FunctionInfo(gl, "vertexAttribI4uiv", null, [
new FunctionParam(gl, "index", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "values", new UIInfo(UIType.ARRAY)),
]),
new FunctionInfo(gl, "vertexAttribIPointer", null, [
new FunctionParam(gl, "index", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "size", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "type", new UIInfo(UIType.ENUM, ["BYTE", "UNSIGNED_BYTE", "SHORT", "UNSIGNED_SHORT", "INT", "UNSIGNED_INT"])),
new FunctionParam(gl, "stride", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "offset", new UIInfo(UIType.LONG)),
]),
/* Writing to the drawing buffer */
new FunctionInfo(gl, "vertexAttribDivisor", null, [
new FunctionParam(gl, "index", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "divisor", new UIInfo(UIType.ULONG)),
]),
new FunctionInfo(gl, "drawArraysInstanced", null, [
new FunctionParam(gl, "mode", new UIInfo(UIType.ENUM, drawModes)),
new FunctionParam(gl, "first", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "count", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "instanceCount", new UIInfo(UIType.LONG)),
]),
new FunctionInfo(gl, "drawElementsInstanced", null, [
new FunctionParam(gl, "mode", new UIInfo(UIType.ENUM, drawModes)),
new FunctionParam(gl, "count", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "type", new UIInfo(UIType.ENUM, elementTypes)),
new FunctionParam(gl, "offset", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "instanceCount", new UIInfo(UIType.LONG)),
]),
new FunctionInfo(gl, "drawRangeElements", null, [
new FunctionParam(gl, "mode", new UIInfo(UIType.ENUM, drawModes)),
new FunctionParam(gl, "start", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "end", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "count", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "type", new UIInfo(UIType.ENUM, elementTypes)),
new FunctionParam(gl, "offset", new UIInfo(UIType.LONG)),
]),
/* Multiple Render Targets */
new FunctionInfo(gl, "drawBuffers", null, [
new FunctionParam(gl, "buffers", new UIInfo(UIType.ARRAY)),
]),
new FunctionInfo(gl, "clearBufferfi", null, [
new FunctionParam(gl, "buffer", new UIInfo(UIType.ENUM, ["DEPTH_STENCIL"])),
new FunctionParam(gl, "drawbuffer", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "depth", new UIInfo(UIType.FLOAT)),
new FunctionParam(gl, "stencil", new UIInfo(UIType.LONG)),
]),
new FunctionInfo(gl, "clearBufferfv", null, null), // handled specially below
new FunctionInfo(gl, "clearBufferiv", null, null), // handled specially below
new FunctionInfo(gl, "clearBufferuiv", null, null), // handled specially below
/* Query Objects */
new FunctionInfo(gl, "createQuery", null, [
]),
new FunctionInfo(gl, "deleteQuery", null, [
new FunctionParam(gl, "query", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "GLboolean isQuery", null, [
new FunctionParam(gl, "query", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "beginQuery", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, queryTargets)),
new FunctionParam(gl, "query", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "endQuery", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, queryTargets)),
]),
new FunctionInfo(gl, "getQuery", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, queryTargets)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["CURRENT_QUERY"])),
]),
new FunctionInfo(gl, "getQueryParameter", null, [
new FunctionParam(gl, "query", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["QUERY_RESULT", "QUERY_RESULT_AVAILABLE"])),
]),
/* Sampler Objects */
new FunctionInfo(gl, "createSampler", null, [
]),
new FunctionInfo(gl, "deleteSampler", null, [
new FunctionParam(gl, "sampler", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "GLboolean isSampler", null, [
new FunctionParam(gl, "sampler", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "bindSampler", null, [
new FunctionParam(gl, "unit", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "sampler", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "samplerParameteri", null, [
new FunctionParam(gl, "sampler", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, samplerParamNames)),
new FunctionParam(gl, "param", new UIInfo(UIType.LONG)),
]),
new FunctionInfo(gl, "samplerParameterf", null, [
new FunctionParam(gl, "sampler", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, samplerParamNames)),
new FunctionParam(gl, "param", new UIInfo(UIType.FLOAT)),
]),
new FunctionInfo(gl, "getSamplerParameter", null, [
new FunctionParam(gl, "sampler", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, samplerParamNames)),
]),
/* Sync objects */
new FunctionInfo(gl, "fenceSync", null, [
new FunctionParam(gl, "condition", new UIInfo(UIType.ENUM, ["SYNC_GPU_COMMANDS_COMPLETE"])),
new FunctionParam(gl, "flags", new UIInfo(UIType.BITMASK, [])),
]),
new FunctionInfo(gl, "GLboolean isSync", null, [
new FunctionParam(gl, "sync", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "deleteSync", null, [
new FunctionParam(gl, "sync", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "clientWaitSync", null, [
new FunctionParam(gl, "sync", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "flags", new UIInfo(UIType.BITMASK, ["SYNC_FLUSH_COMMANDS_BIT"])),
new FunctionParam(gl, "timeout", new UIInfo(UIType.ULONG)), // Uint64!
]),
new FunctionInfo(gl, "waitSync", null, [
new FunctionParam(gl, "sync", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "flags", new UIInfo(UIType.BITMASK, [])),
new FunctionParam(gl, "timeout", new UIInfo(UIType.ULONG)), // Uint64!
]),
new FunctionInfo(gl, "getSyncParameter", null, [
new FunctionParam(gl, "sync", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["OBJECT_TYPE", "SYNC_STATUS", "SYNC_CONDITION", "SYNC_FLAGS"])),
]),
/* Transform Feedback */
new FunctionInfo(gl, "createTransformFeedback", null, [
]),
new FunctionInfo(gl, "deleteTransformFeedback", null, [
new FunctionParam(gl, "tf", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "isTransformFeedback", null, [
new FunctionParam(gl, "tf", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "bindTransformFeedback", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, ["TRANSFORM_FEEDBACK"])),
new FunctionParam(gl, "tf", new UIInfo(UIType.Object)),
]),
new FunctionInfo(gl, "beginTransformFeedback", null, [
new FunctionParam(gl, "primitiveMode", new UIInfo(UIType.ENUM, ["POINTS", "LINES", "TRIANGLES"])),
]),
new FunctionInfo(gl, "endTransformFeedback", null, [
]),
new FunctionInfo(gl, "transformFeedbackVaryings", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "varyings", new UIInfo(UIType.ARRAY)),
new FunctionParam(gl, "bufferMode", new UIInfo(UIType.ENUM, ["INTERLEAVED_ATTRIBS", "SEPARATE_ATTRIBS"])),
]),
new FunctionInfo(gl, "getTransformFeedbackVarying", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "index", new UIInfo(UIType.ULONG)),
]),
new FunctionInfo(gl, "pauseTransformFeedback", null, [
]),
new FunctionInfo(gl, "resumeTransformFeedback", null, [
]),
/* Uniform Buffer Objects and Transform Feedback Buffers */
new FunctionInfo(gl, "bindBufferBase", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, ["TRANSFORM_FEEDBACK_BUFFER", "UNIFORM_BUFFER"])),
new FunctionParam(gl, "index", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "buffer", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "bindBufferRange", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, ["TRANSFORM_FEEDBACK_BUFFER", "UNIFORM_BUFFER"])),
new FunctionParam(gl, "index", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "buffer", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "offset", new UIInfo(UIType.LONG)),
new FunctionParam(gl, "size", new UIInfo(UIType.LONG)),
]),
new FunctionInfo(gl, "getIndexedParameter", null, [
new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, ["TRANSFORM_FEEDBACK_BUFFER_BINDING", "TRANSFORM_FEEDBACK_BUFFER_SIZE", "TRANSFORM_FEEDBACK_BUFFER_START", "UNIFORM_BUFFER_BINDING", "UNIFORM_BUFFER_SIZE", "UNIFORM_BUFFER_START"])),
new FunctionParam(gl, "index", new UIInfo(UIType.ULONG)),
]),
new FunctionInfo(gl, "getUniformIndices", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "uniformNames", new UIInfo(UIType.ARRAY)),
]),
new FunctionInfo(gl, "getActiveUniforms", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "uniformIndices", new UIInfo(UIType.ARRAY)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["UNIFORM_TYPE", "UNIFORM_SIZE", "UNIFORM_NAME_LENGTH", "UNIFORM_BLOCK_INDEX", "UNIFORM_ARRAY_STRIDE", "UNIFORM_MATRIX_STRIDE", "UNIFORM_IS_ROW_MAJOR"])),
]),
new FunctionInfo(gl, "getUniformBlockIndex", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "uniformBlockName", new UIInfo(UIType.STRING)),
]),
new FunctionInfo(gl, "getActiveUniformBlockParameter", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "uniformBlockIndex", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "pname", new UIInfo(UIType.ENUM, ["UNIFORM_BLOCK_BINDING", "UNIFORM_BLOCK_DATA_SIZE", "UNIFORM_BLOCK_NAME_LENGTH", "UNIFORM_BLOCK_ACTIVE_UNIFORMS", "UNIFORM_BLOCK_ACTIVE_UNIFORM_INDICES", "UNIFORM_BLOCK_REFERENCED_BY_VERTEX_SHADER", "UNIFORM_BLOCK_REFERENCED_BY_FRAGMENT_SHADER",])),
]),
new FunctionInfo(gl, "getActiveUniformBlockName", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "uniformBlockIndex", new UIInfo(UIType.ULONG)),
]),
new FunctionInfo(gl, "uniformBlockBinding", null, [
new FunctionParam(gl, "program", new UIInfo(UIType.OBJECT)),
new FunctionParam(gl, "uniformBlockIndex", new UIInfo(UIType.ULONG)),
new FunctionParam(gl, "uniformBlockBinding", new UIInfo(UIType.ULONG)),
]),
/* Vertex Array Objects */
new FunctionInfo(gl, "createVertexArray", null, [
]),
new FunctionInfo(gl, "deleteVertexArray", null, [
new FunctionParam(gl, "vertexArray", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "GLboolean isVertexArray", null, [
new FunctionParam(gl, "vertexArray", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "bindVertexArray", null, [
new FunctionParam(gl, "vertexArray", new UIInfo(UIType.OBJECT)),
]),
new FunctionInfo(gl, "texImage3D", null, null), // handled specially below
new FunctionInfo(gl, "texSubImage3D", null, null), // handled specially below
new FunctionInfo(gl, "compressedTexImage3D", null, null), // handled specially below
new FunctionInfo(gl, "compressedTexSubImage3D", null, null), // handled specially below
new FunctionInfo(gl, "uniform1uiv", null, null), // handled specially below
new FunctionInfo(gl, "uniform2uiv", null, null), // handled specially below
new FunctionInfo(gl, "uniform3uiv", null, null), // handled specially below
new FunctionInfo(gl, "uniform4uiv", null, null), // handled specially below
new FunctionInfo(gl, "uniformMatrix3x2fv", null, null), // handled specially below
new FunctionInfo(gl, "uniformMatrix4x2fv", null, null), // handled specially below
new FunctionInfo(gl, "uniformMatrix2x3fv", null, null), // handled specially below
new FunctionInfo(gl, "uniformMatrix4x3fv", null, null), // handled specially below
new FunctionInfo(gl, "uniformMatrix2x4fv", null, null), // handled specially below
new FunctionInfo(gl, "uniformMatrix3x4fv", null, null), // handled specially below
new FunctionInfo(gl, "readPixels", null, null), // handled specially below
new FunctionInfo(gl, "getBufferSubData", null, null), // handled specially below
new FunctionInfo(gl, "texImage2D", null, null), // handled specially below
new FunctionInfo(gl, "texSubImage2D", null, null), // handled specially below
];
// Build lookup
for (var n = 0; n < functionInfos.length; n++) {
functionInfos[functionInfos[n].name] = functionInfos[n];
}
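// Entries declared above with a null args list get a per-call getArgs(call)
// override below, because their displayed parameter layout depends on which
// overload was actually invoked. As a minimal sketch of how a consumer could
// resolve the parameters for a recorded call (the call object with .name and
// .args used here is assumed for illustration only, not defined by this file):
//   var info = functionInfos[call.name];
//   var params = info.args || (info.getArgs ? info.getArgs(call) : []);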
functionInfos["clearBufferfv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "buffer", new UIInfo(UIType.ENUM, [ ])));
args.push(new FunctionParam(gl, "drawbuffer", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "values", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["clearBufferiv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "buffer", new UIInfo(UIType.ENUM, [ ])));
args.push(new FunctionParam(gl, "drawbuffer", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "values", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["clearBufferuiv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "buffer", new UIInfo(UIType.ENUM, [ ])));
args.push(new FunctionParam(gl, "drawbuffer", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "values", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["texImage3D"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, texture3DTargets)));
args.push(new FunctionParam(gl, "level", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "internalformat", new UIInfo(UIType.ENUM, allUncompressedTextureInternalFormats)));
args.push(new FunctionParam(gl, "width", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "height", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "depth", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "border", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "format", new UIInfo(UIType.ENUM, textureFormats)));
args.push(new FunctionParam(gl, "type", textureTypes));
if (typeof(call.args[9]) === "number") {
args.push(new FunctionParam(gl, "pboOffset", new UIInfo(UIType.LONG)));
} else if (util.isTypedArray(call.args[9])) {
args.push(new FunctionParam(gl, "srcData", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 11) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)));
}
} else {
args.push(new FunctionParam(gl, "source", new UIInfo(UIType.ARRAY)));
}
return args;
};
functionInfos["texSubImage3D"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, texture3DTargets)));
args.push(new FunctionParam(gl, "level", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "xoffset", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "yoffset", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "zoffset", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "width", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "height", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "depth", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "format", new UIInfo(UIType.ENUM, textureFormats)));
args.push(new FunctionParam(gl, "type", textureTypes));
if (typeof(call.args[10]) === "number") {
args.push(new FunctionParam(gl, "pboOffset", new UIInfo(UIType.LONG)));
} else if (util.isTypedArray(call.args[10])) {
args.push(new FunctionParam(gl, "srcData", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 12) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
} else {
args.push(new FunctionParam(gl, "source", new UIInfo(UIType.OBJECT)));
}
return args;
};
functionInfos["compressedTexImage2D"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, faceTextureTargets)));
args.push(new FunctionParam(gl, "level", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "internalformat", new UIInfo(UIType.ENUM, compressedTextureInternalFormats)));
args.push(new FunctionParam(gl, "width", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "height", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "border", new UIInfo(UIType.LONG)));
if (typeof(call.args[6]) === "number") {
args.push(new FunctionParam(gl, "offset", new UIInfo(UIType.LONG)));
} else {
args.push(new FunctionParam(gl, "srcData", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 8) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length >= 9) {
args.push(new FunctionParam(gl, "srcLengthOverride", new UIInfo(UIType.ULONG)))
}
}
return args;
};
functionInfos["compressedTexImage3D"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, texture3DTargets)));
args.push(new FunctionParam(gl, "level", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "internalformat", new UIInfo(UIType.ENUM, compressedTextureInternalFormats)));
args.push(new FunctionParam(gl, "width", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "height", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "depth", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "border", new UIInfo(UIType.LONG)));
if (typeof(call.args[7]) === "number") {
args.push(new FunctionParam(gl, "offset", new UIInfo(UIType.LONG)));
} else {
args.push(new FunctionParam(gl, "srcData", new UIInfo(UIType.OBJECT)));
if (call.args.length >= 9) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)));
}
if (call.args.length >= 10) {
args.push(new FunctionParam(gl, "srcLengthOverride", new UIInfo(UIType.ULONG)));
}
}
return args;
};
functionInfos["compressedTexSubImage2D"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, texture2DTargets)));
args.push(new FunctionParam(gl, "level", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "xoffset", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "yoffset", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "width", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "height", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "format", new UIInfo(UIType.ENUM, compressedTextureInternalFormats)));
if (typeof(call.args[7]) === "number") {
args.push(new FunctionParam(gl, "offset", new UIInfo(UIType.LONG)));
} else {
args.push(new FunctionParam(gl, "srcData", new UIInfo(UIType.OBJECT)));
if (call.args.length >= 9) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)));
}
if (call.args.length >= 10) {
args.push(new FunctionParam(gl, "srcLengthOverride", new UIInfo(UIType.ULONG)));
}
}
return args;
};
functionInfos["compressedTexSubImage3D"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, texture3DTargets)));
args.push(new FunctionParam(gl, "level", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "xoffset", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "yoffset", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "zoffset", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "width", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "height", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "depth", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "format", new UIInfo(UIType.ENUM, compressedTextureInternalFormats)));
if (typeof(call.args[9]) === "number") {
args.push(new FunctionParam(gl, "offset", new UIInfo(UIType.LONG)));
} else {
args.push(new FunctionParam(gl, "srcData", new UIInfo(UIType.OBJECT)));
if (call.args.length >= 11) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)));
}
if (call.args.length >= 12) {
args.push(new FunctionParam(gl, "srcLengthOverride", new UIInfo(UIType.ULONG)));
}
}
return args;
};
functionInfos["uniform1fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 3) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 4) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniform2fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 3) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 4) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniform3fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 3) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 4) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniform4fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 3) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 4) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniform1iv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 3) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 4) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniform2iv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 3) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 4) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniform3iv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 3) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 4) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniform4iv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 3) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 4) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniform1uiv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 3) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 4) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniform2uiv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 3) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 4) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniform3uiv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 3) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 4) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniform4uiv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 3) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 4) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniformMatrix2fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "transpose", new UIInfo(UIType.BOOL)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 5) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniformMatrix3x2fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "transpose", new UIInfo(UIType.BOOL)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 5) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniformMatrix4x2fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "transpose", new UIInfo(UIType.BOOL)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 5) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniformMatrix2x3fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "transpose", new UIInfo(UIType.BOOL)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 5) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniformMatrix3fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "transpose", new UIInfo(UIType.BOOL)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 5) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniformMatrix4x3fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "transpose", new UIInfo(UIType.BOOL)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 5) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniformMatrix2x4fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "transpose", new UIInfo(UIType.BOOL)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 5) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniformMatrix3x4fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "transpose", new UIInfo(UIType.BOOL)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 5) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
functionInfos["uniformMatrix4fv"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "location", new UIInfo(UIType.OBJECT)));
args.push(new FunctionParam(gl, "transpose", new UIInfo(UIType.BOOL)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)))
}
if (call.args.length === 5) {
args.push(new FunctionParam(gl, "srcLength", new UIInfo(UIType.ULONG)))
}
return args;
};
/* Reading back pixels */
// WebGL1:
functionInfos["readPixels"].getArgs = function(call) {
var args = [ ];
args.push(new FunctionParam(gl, "x", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "y", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "width", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "height", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "format", new UIInfo(UIType.ENUM, ["RED", "RED_INTEGER", "RG", "RG_INTEGER", "RGB", "RGB_INTEGER", "RGBA", "RGBA_INTEGER", "LUMINANCE_ALPHA", "LUMINANCE", "ALPHA"])));
args.push(new FunctionParam(gl, "type", new UIInfo(UIType.ENUM, ["UNSIGNED_BYTE", "BYTE", "HALF_FLOAT", "FLOAT", "UNSIGNED_SHORT_5_6_5", "UNSIGNED_SHORT_4_4_4_4", "UNSIGNED_SHORT_5_5_5_1", "UNSIGNED_INT_2_10_10_10_REV", "UNSIGNED_INT_10F_11F_11F_REV", "UNSIGNED_INT_5_9_9_9_REV"])));
if (typeof(call.args[6]) === "number") {
args.push(new FunctionParam(gl, "offset", new UIInfo(UIType.LONG)));
} else {
args.push(new FunctionParam(gl, "dstData", new UIInfo(UIType.OBJECT)));
}
if (call.args.length === 8) {
args.push(new FunctionParam(gl, "dstOffset", new UIInfo(UIType.ULONG)));
}
return args;
};
functionInfos["bufferData"].getArgs = function (call) {
var args = [];
args.push(new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, bufferTargets)));
if (typeof(call.args[1]) === "number") {
args.push(new FunctionParam(gl, "size", new UIInfo(UIType.LONG)));
} else {
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.OBJECT)));
}
args.push(new FunctionParam(gl, "usage", new UIInfo(UIType.ENUM, ["STREAM_DRAW", "STREAM_READ", "STREAM_COPY", "STATIC_DRAW", "STATIC_READ", "STATIC_COPY", "DYNAMIC_DRAW", "DYNAMIC_READ", "DYNAMIC_COPY"])));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)));
}
if (call.args.length === 5) {
args.push(new FunctionParam(gl, "length", new UIInfo(UIType.LONG)));
}
return args;
};
functionInfos["bufferSubData"].getArgs = function (call) {
var args = [];
args.push(new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, bufferTargets)));
args.push(new FunctionParam(gl, "offset", new UIInfo(UIType.ULONG)));
args.push(new FunctionParam(gl, "data", new UIInfo(UIType.OBJECT)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)));
}
if (call.args.length === 5) {
args.push(new FunctionParam(gl, "length", new UIInfo(UIType.LONG)));
}
return args;
};
functionInfos["getBufferSubData"].getArgs = function (call) {
var args = [];
args.push(new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, bufferTargets)));
args.push(new FunctionParam(gl, "srcByteOffset", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "dstBuffer", new UIInfo(UIType.OBJECT)));
if (call.args.length >= 4) {
args.push(new FunctionParam(gl, "dstOffset", new UIInfo(UIType.LONG)));
}
if (call.args.length === 5) {
args.push(new FunctionParam(gl, "length", new UIInfo(UIType.LONG)));
}
return args;
};
functionInfos["texImage2D"].getArgs = function (call) {
var args = [];
args.push(new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, faceTextureTargets)));
args.push(new FunctionParam(gl, "level", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "internalformat", new UIInfo(UIType.ENUM, allUncompressedTextureInternalFormats)));
if (call.args.length >= 9) {
args.push(new FunctionParam(gl, "width", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "height", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "border", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "format", new UIInfo(UIType.ENUM, textureFormats)));
args.push(new FunctionParam(gl, "type", textureTypes));
if (util.isTypedArray(call.args[9])) {
args.push(new FunctionParam(gl, "srcData", new UIInfo(UIType.ARRAY)));
} else {
args.push(new FunctionParam(gl, "source", new UIInfo(UIType.OBJECT)));
}
if (call.args.length >= 10) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)));
}
} else {
args.push(new FunctionParam(gl, "format", new UIInfo(UIType.ENUM, textureFormats)));
args.push(new FunctionParam(gl, "type", textureTypes));
args.push(new FunctionParam(gl, "value", new UIInfo(UIType.OBJECT)));
}
return args;
};
functionInfos["texSubImage2D"].getArgs = function (call) {
var args = [];
args.push(new FunctionParam(gl, "target", new UIInfo(UIType.ENUM, faceTextureTargets)));
args.push(new FunctionParam(gl, "level", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "xoffset", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "yoffset", new UIInfo(UIType.LONG)));
if (call.args.length >= 9) {
args.push(new FunctionParam(gl, "width", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "height", new UIInfo(UIType.LONG)));
args.push(new FunctionParam(gl, "format", new UIInfo(UIType.ENUM, textureFormats)));
args.push(new FunctionParam(gl, "type", textureTypes));
if (typeof(call.args[8]) === "number") {
args.push(new FunctionParam(gl, "pboOffset", new UIInfo(UIType.LONG)));
} else if (util.isTypedArray(call.args[8])) {
args.push(new FunctionParam(gl, "srcData", new UIInfo(UIType.ARRAY)));
if (call.args.length >= 10) {
args.push(new FunctionParam(gl, "srcOffset", new UIInfo(UIType.ULONG)));
}
} else {
args.push(new FunctionParam(gl, "source", new UIInfo(UIType.OBJECT)));
}
} else {
args.push(new FunctionParam(gl, "format", new UIInfo(UIType.ENUM, textureFormats)));
args.push(new FunctionParam(gl, "type", textureTypes));
args.push(new FunctionParam(gl, "value", new UIInfo(UIType.OBJECT)));
}
return args;
};
info.functions = functionInfos;
};
var StateParameter = function (staticgl, name, readOnly, ui) {
this.value = staticgl[name];
this.name = name;
this.readOnly = readOnly;
this.ui = ui;
this.getter = function (gl) {
try {
return gl.getParameter(gl[this.name]);
} catch (e) {
console.log("unable to get state parameter " + this.name);
return null;
}
};
};
function setupStateParameters(gl) {
const isWebGL2 = util.isWebGL2(gl)
if (info.stateParameters) {
return;
}
var drawBuffers = [
"NONE",
"BACK",
"DRAW_BUFFER0",
"DRAW_BUFFER1",
"DRAW_BUFFER2",
"DRAW_BUFFER3",
"DRAW_BUFFER4",
"DRAW_BUFFER5",
"DRAW_BUFFER6",
"DRAW_BUFFER7",
"DRAW_BUFFER8",
"DRAW_BUFFER9",
"DRAW_BUFFER10",
"DRAW_BUFFER11",
"DRAW_BUFFER12",
"DRAW_BUFFER13",
"DRAW_BUFFER14",
"DRAW_BUFFER15",
];
var maxTextureUnits = gl.getParameter(gl.MAX_TEXTURE_IMAGE_UNITS);
var hintValues = ["FASTEST", "NICEST", "DONT_CARE"];
var stateParameters = [
new StateParameter(gl, "ACTIVE_TEXTURE", false, new UIInfo(UIType.ENUM, ["TEXTURE0", "TEXTURE1", "TEXTURE2", "TEXTURE3", "TEXTURE4", "TEXTURE5", "TEXTURE6", "TEXTURE7", "TEXTURE8", "TEXTURE9", "TEXTURE10", "TEXTURE11", "TEXTURE12", "TEXTURE13", "TEXTURE14", "TEXTURE15", "TEXTURE16", "TEXTURE17", "TEXTURE18", "TEXTURE19", "TEXTURE20", "TEXTURE21", "TEXTURE22", "TEXTURE23", "TEXTURE24", "TEXTURE25", "TEXTURE26", "TEXTURE27", "TEXTURE28", "TEXTURE29", "TEXTURE30", "TEXTURE31"])),
new StateParameter(gl, "ALIASED_LINE_WIDTH_RANGE", true, new UIInfo(UIType.RANGE)),
new StateParameter(gl, "ALIASED_POINT_SIZE_RANGE", true, new UIInfo(UIType.RANGE)),
new StateParameter(gl, "ALPHA_BITS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "ARRAY_BUFFER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "BLEND", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "BLEND_COLOR", false, new UIInfo(UIType.COLOR)),
new StateParameter(gl, "BLEND_DST_ALPHA", false, new UIInfo(UIType.ENUM, ["ZERO", "ONE", "SRC_COLOR", "ONE_MINUS_SRC_COLOR", "DST_COLOR", "ONE_MINUS_DST_COLOR", "SRC_ALPHA", "ONE_MINUS_SRC_ALPHA", "DST_ALPHA", "ONE_MINUS_DST_ALPHA. GL_CONSTANT_COLOR", "ONE_MINUS_CONSTANT_COLOR", "CONSTANT_ALPHA", "ONE_MINUS_CONSTANT_ALPHA"])),
new StateParameter(gl, "BLEND_DST_RGB", false, new UIInfo(UIType.ENUM, ["ZERO", "ONE", "SRC_COLOR", "ONE_MINUS_SRC_COLOR", "DST_COLOR", "ONE_MINUS_DST_COLOR", "SRC_ALPHA", "ONE_MINUS_SRC_ALPHA", "DST_ALPHA", "ONE_MINUS_DST_ALPHA. GL_CONSTANT_COLOR", "ONE_MINUS_CONSTANT_COLOR", "CONSTANT_ALPHA", "ONE_MINUS_CONSTANT_ALPHA"])),
new StateParameter(gl, "BLEND_EQUATION_ALPHA", false, new UIInfo(UIType.ENUM, ["FUNC_ADD", "FUNC_SUBTRACT", "FUNC_REVERSE_SUBTRACT"])),
new StateParameter(gl, "BLEND_EQUATION_RGB", false, new UIInfo(UIType.ENUM, ["FUNC_ADD", "FUNC_SUBTRACT", "FUNC_REVERSE_SUBTRACT"])),
new StateParameter(gl, "BLEND_SRC_ALPHA", false, new UIInfo(UIType.ENUM, ["ZERO", "ONE", "SRC_COLOR", "ONE_MINUS_SRC_COLOR", "DST_COLOR", "ONE_MINUS_DST_COLOR", "SRC_ALPHA", "ONE_MINUS_SRC_ALPHA", "DST_ALPHA", "ONE_MINUS_DST_ALPHA", "CONSTANT_COLOR", "ONE_MINUS_CONSTANT_COLOR", "CONSTANT_ALPHA", "ONE_MINUS_CONSTANT_ALPHA", "SRC_ALPHA_SATURATE"])),
new StateParameter(gl, "BLEND_SRC_RGB", false, new UIInfo(UIType.ENUM, ["ZERO", "ONE", "SRC_COLOR", "ONE_MINUS_SRC_COLOR", "DST_COLOR", "ONE_MINUS_DST_COLOR", "SRC_ALPHA", "ONE_MINUS_SRC_ALPHA", "DST_ALPHA", "ONE_MINUS_DST_ALPHA", "CONSTANT_COLOR", "ONE_MINUS_CONSTANT_COLOR", "CONSTANT_ALPHA", "ONE_MINUS_CONSTANT_ALPHA", "SRC_ALPHA_SATURATE"])),
new StateParameter(gl, "BLUE_BITS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "COLOR_CLEAR_VALUE", false, new UIInfo(UIType.COLOR)),
new StateParameter(gl, "COLOR_WRITEMASK", false, new UIInfo(UIType.COLORMASK)),
new StateParameter(gl, "CULL_FACE", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "CULL_FACE_MODE", false, new UIInfo(UIType.ENUM, ["FRONT", "BACK", "FRONT_AND_BACK"])),
new StateParameter(gl, "CURRENT_PROGRAM", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "DEPTH_BITS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "DEPTH_CLEAR_VALUE", false, new UIInfo(UIType.FLOAT)),
new StateParameter(gl, "DEPTH_FUNC", false, new UIInfo(UIType.ENUM, ["NEVER", "LESS", "EQUAL", "LEQUAL", "GREATER", "NOTEQUAL", "GEQUAL", "ALWAYS"])),
new StateParameter(gl, "DEPTH_RANGE", false, new UIInfo(UIType.RANGE)),
new StateParameter(gl, "DEPTH_TEST", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "DEPTH_WRITEMASK", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "DITHER", true, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "ELEMENT_ARRAY_BUFFER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "FRAGMENT_SHADER_DERIVATIVE_HINT_OES", false, new UIInfo(UIType.ENUM, hintValues)),
new StateParameter(gl, "FRAMEBUFFER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "FRONT_FACE", false, new UIInfo(UIType.ENUM, ["CW", "CCW"])),
new StateParameter(gl, "GENERATE_MIPMAP_HINT", false, new UIInfo(UIType.ENUM, hintValues)),
new StateParameter(gl, "GREEN_BITS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "LINE_WIDTH", false, new UIInfo(UIType.FLOAT)),
new StateParameter(gl, "MAX_COMBINED_TEXTURE_IMAGE_UNITS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_CUBE_MAP_TEXTURE_SIZE", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_FRAGMENT_UNIFORM_VECTORS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_RENDERBUFFER_SIZE", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_TEXTURE_IMAGE_UNITS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_TEXTURE_MAX_ANISOTROPY_EXT", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_TEXTURE_SIZE", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_VARYING_VECTORS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_VERTEX_ATTRIBS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_VERTEX_TEXTURE_IMAGE_UNITS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_VERTEX_UNIFORM_VECTORS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_VIEWPORT_DIMS", true, new UIInfo(UIType.WH)),
new StateParameter(gl, "PACK_ALIGNMENT", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "POLYGON_OFFSET_FACTOR", false, new UIInfo(UIType.FLOAT)),
new StateParameter(gl, "POLYGON_OFFSET_FILL", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "POLYGON_OFFSET_UNITS", false, new UIInfo(UIType.FLOAT)),
new StateParameter(gl, "RED_BITS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "RENDERBUFFER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "RENDERER", true, new UIInfo(UIType.STRING)),
new StateParameter(gl, "SAMPLE_BUFFERS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "SAMPLE_COVERAGE_INVERT", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "SAMPLE_COVERAGE_VALUE", false, new UIInfo(UIType.FLOAT)),
new StateParameter(gl, "SAMPLES", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "SCISSOR_BOX", false, new UIInfo(UIType.RECT)),
new StateParameter(gl, "SCISSOR_TEST", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "SHADING_LANGUAGE_VERSION", true, new UIInfo(UIType.STRING)),
new StateParameter(gl, "STENCIL_BACK_FAIL", false, new UIInfo(UIType.ENUM, ["KEEP", "ZERO", "REPLACE", "INCR", "INCR_WRAP", "DECR", "DECR_WRAP", "INVERT"])),
new StateParameter(gl, "STENCIL_BACK_FUNC", false, new UIInfo(UIType.ENUM, ["NEVER", "LESS", "LEQUAL", "GREATER", "GEQUAL", "EQUAL", "NOTEQUAL", "ALWAYS"])),
new StateParameter(gl, "STENCIL_BACK_PASS_DEPTH_FAIL", false, new UIInfo(UIType.ENUM, ["KEEP", "ZERO", "REPLACE", "INCR", "INCR_WRAP", "DECR", "DECR_WRAP", "INVERT"])),
new StateParameter(gl, "STENCIL_BACK_PASS_DEPTH_PASS", false, new UIInfo(UIType.ENUM, ["KEEP", "ZERO", "REPLACE", "INCR", "INCR_WRAP", "DECR", "DECR_WRAP", "INVERT"])),
new StateParameter(gl, "STENCIL_BACK_REF", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "STENCIL_BACK_VALUE_MASK", false, new UIInfo(UIType.BITMASK)),
new StateParameter(gl, "STENCIL_BACK_WRITEMASK", false, new UIInfo(UIType.BITMASK)),
new StateParameter(gl, "STENCIL_BITS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "STENCIL_CLEAR_VALUE", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "STENCIL_FAIL", false, new UIInfo(UIType.ENUM, ["KEEP", "ZERO", "REPLACE", "INCR", "INCR_WRAP", "DECR", "DECR_WRAP", "INVERT"])),
new StateParameter(gl, "STENCIL_FUNC", false, new UIInfo(UIType.ENUM, ["NEVER", "LESS", "LEQUAL", "GREATER", "GEQUAL", "EQUAL", "NOTEQUAL", "ALWAYS"])),
new StateParameter(gl, "STENCIL_PASS_DEPTH_FAIL", false, new UIInfo(UIType.ENUM, ["KEEP", "ZERO", "REPLACE", "INCR", "INCR_WRAP", "DECR", "DECR_WRAP", "INVERT"])),
new StateParameter(gl, "STENCIL_PASS_DEPTH_PASS", false, new UIInfo(UIType.ENUM, ["KEEP", "ZERO", "REPLACE", "INCR", "INCR_WRAP", "DECR", "DECR_WRAP", "INVERT"])),
new StateParameter(gl, "STENCIL_REF", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "STENCIL_TEST", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "STENCIL_VALUE_MASK", false, new UIInfo(UIType.BITMASK)),
new StateParameter(gl, "STENCIL_WRITEMASK", false, new UIInfo(UIType.BITMASK)),
new StateParameter(gl, "SUBPIXEL_BITS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "UNPACK_ALIGNMENT", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "UNPACK_COLORSPACE_CONVERSION_WEBGL", false, new UIInfo(UIType.ENUM, ["NONE", "BROWSER_DEFAULT_WEBGL"])),
new StateParameter(gl, "UNPACK_FLIP_Y_WEBGL", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "UNPACK_PREMULTIPLY_ALPHA_WEBGL", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "VENDOR", true, new UIInfo(UIType.STRING)),
new StateParameter(gl, "VERSION", true, new UIInfo(UIType.STRING)),
new StateParameter(gl, "VIEWPORT", false, new UIInfo(UIType.RECT)),
];
if (isWebGL2) {
stateParameters.splice(stateParameters.length, 0, ...[
new StateParameter(gl, "COPY_READ_BUFFER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "COPY_WRITE_BUFFER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "DRAW_FRAMEBUFFER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "FRAGMENT_SHADER_DERIVATIVE_HINT", false, new UIInfo(UIType.ENUM, hintValues)),
new StateParameter(gl, "MAX_3D_TEXTURE_SIZE", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_ARRAY_TEXTURE_LAYERS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_CLIENT_WAIT_TIMEOUT_WEBGL", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_COLOR_ATTACHMENTS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_COMBINED_FRAGMENT_UNIFORM_COMPONENTS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_COMBINED_UNIFORM_BLOCKS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_COMBINED_VERTEX_UNIFORM_COMPONENTS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_DRAW_BUFFERS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_ELEMENT_INDEX", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_ELEMENTS_INDICES", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_ELEMENTS_VERTICES", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_FRAGMENT_INPUT_COMPONENTS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_FRAGMENT_UNIFORM_BLOCKS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_FRAGMENT_UNIFORM_COMPONENTS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_PROGRAM_TEXEL_OFFSET", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_SAMPLES", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_SERVER_WAIT_TIMEOUT", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_TEXTURE_LOD_BIAS", true, new UIInfo(UIType.FLOAT)),
new StateParameter(gl, "MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_UNIFORM_BLOCK_SIZE", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_UNIFORM_BUFFER_BINDINGS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_VARYING_COMPONENTS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_VERTEX_OUTPUT_COMPONENTS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_VERTEX_UNIFORM_BLOCKS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MAX_VERTEX_UNIFORM_COMPONENTS", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "MIN_PROGRAM_TEXEL_OFFSET", true, new UIInfo(UIType.LONG)),
new StateParameter(gl, "PACK_ROW_LENGTH", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "PACK_SKIP_PIXELS", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "PACK_SKIP_ROWS", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "PIXEL_PACK_BUFFER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "PIXEL_UNPACK_BUFFER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "RASTERIZER_DISCARD", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "READ_BUFFER", false, new UIInfo(UIType.ENUM, readBufferEnums)),
new StateParameter(gl, "READ_FRAMEBUFFER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "SAMPLE_ALPHA_TO_COVERAGE", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "SAMPLE_COVERAGE", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "SAMPLER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "TEXTURE_BINDING_2D_ARRAY", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "TEXTURE_BINDING_3D", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "TRANSFORM_FEEDBACK_ACTIVE", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "TRANSFORM_FEEDBACK_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "TRANSFORM_FEEDBACK_BUFFER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "TRANSFORM_FEEDBACK_PAUSED", false, new UIInfo(UIType.BOOL)),
new StateParameter(gl, "UNIFORM_BUFFER_BINDING", false, new UIInfo(UIType.OBJECT)),
new StateParameter(gl, "UNIFORM_BUFFER_OFFSET_ALIGNMENT", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "UNPACK_IMAGE_HEIGHT", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "UNPACK_ROW_LENGTH", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "UNPACK_SKIP_IMAGES", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "UNPACK_SKIP_PIXELS", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "UNPACK_SKIP_ROWS", false, new UIInfo(UIType.LONG)),
new StateParameter(gl, "VERTEX_ARRAY_BINDING", false, new UIInfo(UIType.OBJECT)),
]);
}
function makeTextureStateParametersForBinding(binding) {
for (let n = 0; n < maxTextureUnits; ++n) {
var param = new StateParameter(gl, binding + "_" + n, false, new UIInfo(UIType.OBJECT));
param.getter = (function (n) {
return function (gl) {
var existingBinding = gl.getParameter(gl.ACTIVE_TEXTURE);
gl.activeTexture(gl.TEXTURE0 + n);
var result = gl.getParameter(gl[binding]);
gl.activeTexture(existingBinding);
return result;
};
})(n);
stateParameters.push(param);
}
}
makeTextureStateParametersForBinding("TEXTURE_BINDING_2D");
makeTextureStateParametersForBinding("TEXTURE_BINDING_CUBE_MAP");
if (isWebGL2) {
makeTextureStateParametersForBinding("TEXTURE_BINDING_2D_ARRAY");
makeTextureStateParametersForBinding("TEXTURE_BINDING_3D");
// FIXME: on WebGL1 these DRAW_BUFFERn parameters would also be needed when the WEBGL_draw_buffers extension is enabled
var maxDrawBuffers = gl.getParameter(gl.MAX_DRAW_BUFFERS);
for (let n = 0; n < maxDrawBuffers; ++n) {
stateParameters.push(new StateParameter(gl, "DRAW_BUFFER" + n, false, new UIInfo(UIType.ENUM, drawBuffers)));
}
}
// Build lookup
for (let n = 0; n < stateParameters.length; n++) {
stateParameters[stateParameters[n].name] = stateParameters[n];
}
info.stateParameters = stateParameters;
};
function setupEnumMap(gl) {
var enumMap = {};
for (var n in gl) {
enumMap[gl[n]] = n;
}
info.enumMap = enumMap;
};
setupEnumMap(glc);
info.UIType = UIType;
info.FunctionType = FunctionType;
//info.functions - deferred
//info.stateParameters - deferred
info.enumToString = function (n) {
var string = info.enumMap[n];
if (string !== undefined) {
return string;
}
return "0x" + n.toString(16);
};
info.initialize = function (gl) {
setupFunctionInfos(gl);
setupStateParameters(gl);
};
return info;
});
|
{
"content_hash": "85709e579fefc48c3349b309993b6ab9",
"timestamp": "",
"source": "github",
"line_count": 2241,
"max_line_length": 493,
"avg_line_length": 54.31905399375279,
"alnum_prop": 0.5638754939250302,
"repo_name": "greggman/WebGL-Inspector",
"id": "5874b038ba59a7c383f7a28a041ec52b5b62ce30",
"size": "121729",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "core/shared/Info.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "47889"
},
{
"name": "HTML",
"bytes": "1807"
},
{
"name": "JavaScript",
"bytes": "893022"
},
{
"name": "Makefile",
"bytes": "407"
},
{
"name": "Shell",
"bytes": "905"
}
],
"symlink_target": ""
}
|
package stats
// Summary is a summary of a player's stats organized by queue type
type Summary struct {
PlayerStatSummaries []PlayerStatsSummary `json:"playerStatSummaries"`
SummonerID int `json:"summonerId"`
}
// PlayerStatsSummary is a summary of stats for a specific queue type
type PlayerStatsSummary struct {
AggregatedStats AggregatedStats `json:"aggregatedStats"`
Losses int `json:"losses"`
ModifyDate int `json:"modifyDate"`
PlayerStatSummaryType string `json:"playerStatSummaryType"`
Wins int `json:"wins"`
}
// RankedStats contains stats, for a summoner, across champions
type RankedStats struct {
ChampionStats []ChampionStats `json:"champions"`
ModifyDate int `json:"modifyDate"`
SummonerID int `json:"summonerId"`
}
// ChampionStats contains stats for a specific champion
type ChampionStats struct {
ID int `json:"id"`
Stats AggregatedStats `json:"stats"`
}
// AggregatedStats contains statistical information
type AggregatedStats struct {
AverageAssists int `json:"averageAssists"`
AverageChampionsKilled int `json:"averageChampionsKilled"`
AverageCombatPlayerScore int `json:"averageCombatPlayerScore"`
AverageNodeCapture int `json:"averageNodeCapture"`
AverageNodeCaptureAssist int `json:"averageNodeCaptureAssist"`
AverageNodeNeutralize int `json:"averageNodeNeutralize"`
AverageNodeNeutralizeAssist int `json:"averageNodeNeutralizeAssist"`
AverageNumDeaths int `json:"averageNumDeaths"`
AverageObjectivePlayerScore int `json:"averageObjectivePlayerScore"`
AverageTeamObjective int `json:"averageTeamObjective"`
AverageTotalPlayerScore int `json:"averageTotalPlayerScore"`
BotGamesPlayed int `json:"botGamesPlayed"`
KillingSpree int `json:"killingSpree"`
MaxAssists int `json:"maxAssists"`
MaxChampionsKilled int `json:"maxChampionsKilled"`
MaxCombatPlayerScore int `json:"maxCombatPlayerScore"`
MaxLargestCriticalStrike int `json:"maxLargestCriticalStrike"`
MaxLargestKillingSpree int `json:"maxLargestKillingSpree"`
MaxNodeCapture int `json:"maxNodeCapture"`
MaxNodeCaptureAssist int `json:"maxNodeCaptureAssist"`
MaxNodeNeutralize int `json:"maxNodeNeutralize"`
MaxNodeNeutralizeAssist int `json:"maxNodeNeutralizeAssist"`
MaxNumDeaths int `json:"maxNumDeaths"`
MaxObjectivePlayerScore int `json:"maxObjectivePlayerScore"`
MaxTeamObjective int `json:"maxTeamObjective"`
MaxTimePlayed int `json:"maxTimePlayed"`
MaxTimeSpentLiving int `json:"maxTimeSpentLiving"`
MaxTotalPlayerScore int `json:"maxTotalPlayerScore"`
MostChampionKillsPerSession int `json:"mostChampionKillsPerSession"`
MostSpellsCast int `json:"mostSpellsCast"`
NormalGamesPlayed int `json:"normalGamesPlayed"`
RankedPremadeGamesPlayed int `json:"rankedPremadeGamesPlayed"`
RankedSoloGamesPlayed int `json:"rankedSoloGamesPlayed"`
TotalAssists int `json:"totalAssists"`
TotalChampionKills int `json:"totalChampionKills"`
TotalDamageDealt int `json:"totalDamageDealt"`
TotalDamageTaken int `json:"totalDamageTaken"`
TotalDeathsPerSession int `json:"totalDeathsPerSession"`
TotalDoubleKills int `json:"totalDoubleKills"`
TotalFirstBlood int `json:"totalFirstBlood"`
TotalGoldEarned int `json:"totalGoldEarned"`
TotalHeal int `json:"totalHeal"`
TotalMagicDamageDealt int `json:"totalMagicDamageDealt"`
TotalMinionKills int `json:"totalMinionKills"`
TotalNeutralMinionsKilled int `json:"totalNeutralMinionsKilled"`
TotalNodeCapture int `json:"totalNodeCapture"`
TotalNodeNeutralize int `json:"totalNodeNeutralize"`
TotalPentaKills int `json:"totalPentaKills"`
TotalPhysicalDamageDealt int `json:"totalPhysicalDamageDealt"`
TotalQuadraKills int `json:"totalQuadraKills"`
TotalSessionsLost int `json:"totalSessionsLost"`
TotalSessionsPlayed int `json:"totalSessionsPlayed"`
TotalSessionsWon int `json:"totalSessionsWon"`
TotalTripleKills int `json:"totalTripleKills"`
TotalTurretsKilled int `json:"totalTurretsKilled"`
TotalUnrealKills int `json:"totalUnrealKills"`
}
|
{
"content_hash": "c747ae2fb344b800df3f0e90ff602c40",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 70,
"avg_line_length": 51.359550561797754,
"alnum_prop": 0.7005031721723911,
"repo_name": "Telrikk/lol-go-api",
"id": "b90310d764c239846920b53ef304c8b34a61c05a",
"size": "4571",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "stats/resources.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "188374"
},
{
"name": "Shell",
"bytes": "1371"
}
],
"symlink_target": ""
}
|
package com.ctrip.hermes.portal.console.topic;
import com.ctrip.hermes.portal.console.ConsolePage;
import org.unidal.web.mvc.ActionContext;
import org.unidal.web.mvc.ActionPayload;
import org.unidal.web.mvc.payload.annotation.FieldMeta;
public class Payload implements ActionPayload<ConsolePage, Action> {
private ConsolePage m_page;
@FieldMeta("op")
private Action m_action;
@FieldMeta("topic")
private String m_topic;
public void setAction(String action) {
m_action = Action.getByName(action, Action.VIEW);
}
@Override
public Action getAction() {
return m_action;
}
@Override
public ConsolePage getPage() {
return m_page;
}
@Override
public void setPage(String page) {
m_page = ConsolePage.getByName(page, ConsolePage.TOPIC);
}
@Override
public void validate(ActionContext<?> ctx) {
if (m_action == null) {
m_action = Action.VIEW;
}
}
public String getTopic() {
return m_topic;
}
public void setTopic(String topic) {
m_topic = topic;
}
}
|
{
"content_hash": "87107d2202d46cff083fd467ac4eafc3",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 68,
"avg_line_length": 19.86,
"alnum_prop": 0.7260825780463243,
"repo_name": "lejingw/hermes",
"id": "4541fb4d4dfc787c769c9432c0af2328a50a61d6",
"size": "993",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "hermes-portal/src/main/java/com/ctrip/hermes/portal/console/topic/Payload.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "10616"
},
{
"name": "Cucumber",
"bytes": "200"
},
{
"name": "HTML",
"bytes": "32777"
},
{
"name": "Java",
"bytes": "2016161"
},
{
"name": "JavaScript",
"bytes": "84544"
},
{
"name": "Shell",
"bytes": "21375"
}
],
"symlink_target": ""
}
|
===============
Monitor Pings
===============
A set of scripts to make it easy to see how often your internet fails.
Installation
============
Put the ping* files somewhere on your path::
    mv ping* ~/.local/bin
Usage
=====
#. Run ``pingmonitor``.
#. Maybe add ``killall ping && pinglogcompress && pingmonitor`` to cron.
#. Run ``pinglogreader`` occasionally, to see when and for how long your internet connection has died.
All scripts take ``-h``, if you want detailed usage.
Implementation
==============
I run ``ping`` in the background, checking if Google's public DNS server (because it has an easy-to-remember ip) is up.
Woooooo.
So you need a \*nix OS.
I also use Python for the reader, so you need Python.
I've tried to make this compatible with old versions, but it's a stupid hack and only tested on 2.6.
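For illustration only (this sketch is not one of the shipped scripts, and the log
path and interval below are made up), the core idea of the monitor, pinging a
well-known address on a schedule and recording a timestamped line whenever it
fails, fits in a few lines of Python::

    #!/usr/bin/env python
    # Minimal sketch of the pingmonitor idea: ping 8.8.8.8 once per interval
    # and append a timestamped line to a log whenever the ping fails.
    import datetime
    import subprocess
    import time

    LOG = "ping-failures.log"   # hypothetical log path
    INTERVAL = 5                # seconds between checks
    DEVNULL = open("/dev/null", "w")

    while True:
        rc = subprocess.call(["ping", "-c", "1", "8.8.8.8"], stdout=DEVNULL)
        if rc != 0:
            with open(LOG, "a") as fh:
                fh.write(datetime.datetime.now().isoformat() + " ping failed\n")
        time.sleep(INTERVAL)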
TODO
====
* Consider using `ping.py`_ instead of ping.
This would significantly reduce installation difficulty and allow automatic compression without having to kill ``pingmonitor`` and restart it.
* Consider converting ``pingmonitor`` into something that can be used as an init script, to make it easily droppable into init.d.
License
=======
BSD. Use this as you wish. (c) 2012 Brandon W Maister.
.. _ping.py: https://github.com/jedie/python-ping/blob/master/README.creole
|
{
"content_hash": "e1a9650473d2fa697734fcd0eb6bcd86",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 144,
"avg_line_length": 29.11111111111111,
"alnum_prop": 0.6992366412213741,
"repo_name": "quodlibetor/pingmonitor",
"id": "7af662b92b7c8a80b8aa35c0c1fb9eb070952e38",
"size": "1310",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.rst",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "3642"
},
{
"name": "Shell",
"bytes": "2144"
}
],
"symlink_target": ""
}
|
{% assign page_dir = page.dir | append: '/' | replace: '//' , '/' %}
{% for item in site.data.menu %}
{% assign key = item[0] %}
{% assign val = item[1] %}
{% if page_dir contains key %}
{% assign navtree = site.data.menu[key].subitems %}
{% endif %}
{% endfor %}
<div class="column col-sm-2 col-xs-1 sidebar-offcanvas" id="sidebar">
<ul class="nav">
<li><a href="#" data-toggle="offcanvas" class="visible-xs text-center"><i class="glyphicon glyphicon-chevron-right"></i></a></li>
</ul>
<ul class="nav">
<li><a href="{{ site.baseurl }}/" data-toggle="offcanvas" class="hidden-xs text-center logo"><img src="{{ site.baseurl }}{{site.data.permanent_config.sidebar_img_path}}"/></a></li>
</ul>
<ul class="nav hidden-xs" id="lg-menu">
<a href="{{ site.baseurl }}/" data-toggle="offcanvas" class="hidden-xs text-center logo sidebar-header"><h2>{{site.title}}</h2></a>
<li class="dropdown text-center sidebar-header">
{% include versions.html nav=site.data.versions %}
</li>
{% if page.generate_nav_for_all_versions %}
{% for item in site.data.menu %}
{% assign key = item[0] %}
{% assign navtree = site.data.menu[key].subitems %}
<div class="sidebar-navigation-container nav" id="sidbar-nav-version-{{site.data.versions[key].version}}" style="display: none;">
{% include nav.html nav=navtree lvl=0 %}
</div>
{% endfor %}
{% else %}
<div class="sidebar-navigation-container nav">
{% include nav.html nav=navtree lvl=0 %}
</div>
{% endif %}
</ul>
<!-- tiny only nav-->
<ul class="nav visible-xs" id="xs-menu">
<li><a href="#featured" class="text-center"><i class="glyphicon glyphicon-list-alt"></i></a></li>
<!-- <li><a href="#stories" class="text-center"><i class="glyphicon glyphicon-list"></i></a></li>
<li><a href="#" class="text-center"><i class="glyphicon glyphicon-paperclip"></i></a></li>
<li><a href="#" class="text-center"><i class="glyphicon glyphicon-refresh"></i></a></li> -->
</ul>
<div id="search">
<form role="search" action="{{ site.baseurl }}/search/" method="get">
<input type="text" size="40" placeholder="Search" name="query">
<input type="hidden" name="version" value="{{ version }}">
<input type="hidden" name="baseUrl" value="{{ site.baseurl }}/{{ base_docs_path }}">
</form>
</div>
</div>
|
{
"content_hash": "f5a2ef36db9b2b609c9bff89feef18f1",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 182,
"avg_line_length": 43.074074074074076,
"alnum_prop": 0.6143594153052451,
"repo_name": "aglne/kaa",
"id": "7fcb6e87a1464892c43e21819af9b876d47d84e3",
"size": "2326",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "gh-pages-stub/_includes/sidebar.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "AMPL",
"bytes": "19815"
},
{
"name": "Arduino",
"bytes": "22520"
},
{
"name": "Batchfile",
"bytes": "21336"
},
{
"name": "C",
"bytes": "6219408"
},
{
"name": "C++",
"bytes": "1729698"
},
{
"name": "CMake",
"bytes": "74157"
},
{
"name": "CSS",
"bytes": "23111"
},
{
"name": "HTML",
"bytes": "6338"
},
{
"name": "Java",
"bytes": "10478878"
},
{
"name": "JavaScript",
"bytes": "4669"
},
{
"name": "Makefile",
"bytes": "15221"
},
{
"name": "Objective-C",
"bytes": "305678"
},
{
"name": "Python",
"bytes": "128276"
},
{
"name": "Shell",
"bytes": "185517"
},
{
"name": "Thrift",
"bytes": "21163"
},
{
"name": "XSLT",
"bytes": "4062"
}
],
"symlink_target": ""
}
|
/*
* Info Actions
*
*/
import axios from 'axios';
import {
INFO_SEARCH_SUCCESS,
INFO_SEARCH_START
} from './constants';
/**
* Dispatched when search starts
*
* @return {object} Action with a type of INFO_SEARCH_START with response
*/
const searchStart = () => ({
type: INFO_SEARCH_START
});
/**
* Dispatched when search is successful
*
* @param {object} response Search response
*
* @return {object} Action with a type of INFO_SEARCH_SUCCESS with response
*/
const searchSuccess = (response) => ({
type: INFO_SEARCH_SUCCESS,
response
});
/**
* Performs detailed character search
*
* @param {integer} id Search id
*
* @return {function} Redux Thunk
*/
export const searchPerson = (id) => (dispatch) => {
dispatch(searchStart());
axios.get(`https://swapi.co/api/people/${id}`)
.then((response) => {
axios.get(response.data.homeworld)
.then((homeworld) => {
const promises = [];
response.data.fellows = [];
response.data.homeworldName = homeworld.data.name;
homeworld.data.residents.forEach((resident) => {
promises.push(axios.get(resident));
});
axios.all(promises).then((residentResults) => {
residentResults.forEach((residentResult) => response.data.fellows.push(residentResult.data));
dispatch(searchSuccess(response.data));
});
})
.catch((error) => {
// handle errors
});
})
.catch((error) => {
// handle errors
});
};
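// Usage sketch (assuming a store configured with redux-thunk middleware):
//   store.dispatch(searchPerson(1));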
|
{
"content_hash": "8848bec8ed346551a9d8f498b902d0c1",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 104,
"avg_line_length": 24.328125,
"alnum_prop": 0.590237636480411,
"repo_name": "MHanslo/swapi-ui",
"id": "e4871474f2cdadc18c1cd0826a3a9df37496108f",
"size": "1557",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/containers/info/actions.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1240"
},
{
"name": "JavaScript",
"bytes": "19867"
}
],
"symlink_target": ""
}
|
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null
|
{
"content_hash": "a3091b56750bdcc20440edbd2412b1ef",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "5a0c06eb7b578ed1d97e81f2c888d70dd6455441",
"size": "175",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Lamiaceae/Nepeta/Nepeta badamdarica/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
If you are subscribed to a lot of streams, pinning important streams to the top
of the **Streams** section on your left sidebar can be a good way to organize
streams.
## Pinning a stream using the left sidebar
If you want to quickly pin a single stream, you can do so through the stream sidebar.
{!stream-actions.md!}
2. Click on the {!down-chevron.md!} and select the **Pin stream (stream name) to top**
option from the actions dropdown.
3. Upon selecting the **Pin stream (stream name) to top** option, the selected
stream will be pinned to the top of the **Streams** section, confirming that
the stream was pinned successfully.
## Pinning a stream using the [Subscriptions](/#subscriptions) page
If you want to pin multiple streams at once, you should pin streams through the [Subscriptions](/#subscriptions) page.
{!subscriptions.md!}
{!filter-streams.md!}
2. Select the stream you want to pin in the [Subscriptions](/#subscriptions) page; {!stream-settings.md!}
3. Select the **Pin stream to top of left sidebar** option in **Stream Settings**.
4. Once you close the [Subscriptions](/#subscriptions) page, the selected stream
will be pinned to the top of the **Streams** section, confirming that the
stream was pinned successfully.
|
{
"content_hash": "b811b9385a6d31d32bad8c4b5c89f652",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 118,
"avg_line_length": 39.193548387096776,
"alnum_prop": 0.7497942386831276,
"repo_name": "AZtheAsian/zulip",
"id": "80af368198b62a0b82b09a4c34bf3aa6dc3feea2",
"size": "1235",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "templates/zerver/help/pin-a-stream.md",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "255229"
},
{
"name": "Emacs Lisp",
"bytes": "158"
},
{
"name": "GCC Machine Description",
"bytes": "142"
},
{
"name": "Groovy",
"bytes": "5509"
},
{
"name": "HTML",
"bytes": "499614"
},
{
"name": "JavaScript",
"bytes": "1441148"
},
{
"name": "Nginx",
"bytes": "1280"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "401825"
},
{
"name": "Puppet",
"bytes": "85156"
},
{
"name": "Python",
"bytes": "3170699"
},
{
"name": "Ruby",
"bytes": "249748"
},
{
"name": "Shell",
"bytes": "37885"
}
],
"symlink_target": ""
}
|
"""Test Met weather entity."""
from homeassistant.components.met import DOMAIN
from homeassistant.components.weather import DOMAIN as WEATHER_DOMAIN
from homeassistant.helpers import entity_registry as er
async def test_tracking_home(hass, mock_weather):
"""Test we track home."""
await hass.config_entries.flow.async_init("met", context={"source": "onboarding"})
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids("weather")) == 1
assert len(mock_weather.mock_calls) == 4
# Test the hourly sensor is disabled by default
registry = er.async_get(hass)
state = hass.states.get("weather.test_home_hourly")
assert state is None
entry = registry.async_get("weather.test_home_hourly")
assert entry
assert entry.disabled
assert entry.disabled_by == "integration"
# Test we track config
await hass.config.async_update(latitude=10, longitude=20)
await hass.async_block_till_done()
assert len(mock_weather.mock_calls) == 8
entry = hass.config_entries.async_entries()[0]
await hass.config_entries.async_remove(entry.entry_id)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids("weather")) == 0
async def test_not_tracking_home(hass, mock_weather):
"""Test when we not track home."""
# Pre-create registry entry for disabled by default hourly weather
registry = er.async_get(hass)
registry.async_get_or_create(
WEATHER_DOMAIN,
DOMAIN,
"10-20-hourly",
suggested_object_id="somewhere_hourly",
disabled_by=None,
)
await hass.config_entries.flow.async_init(
"met",
context={"source": "user"},
data={"name": "Somewhere", "latitude": 10, "longitude": 20, "elevation": 0},
)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids("weather")) == 2
assert len(mock_weather.mock_calls) == 4
# Test we do not track config
await hass.config.async_update(latitude=10, longitude=20)
await hass.async_block_till_done()
assert len(mock_weather.mock_calls) == 4
entry = hass.config_entries.async_entries()[0]
await hass.config_entries.async_remove(entry.entry_id)
await hass.async_block_till_done()
assert len(hass.states.async_entity_ids("weather")) == 0
|
{
"content_hash": "72af4a369da11e46d608aac957e8e09b",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 86,
"avg_line_length": 33.78260869565217,
"alnum_prop": 0.6812526812526812,
"repo_name": "adrienbrault/home-assistant",
"id": "89c1dc6261234913a333323b469d8b1a259decb3",
"size": "2331",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "tests/components/met/test_weather.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "32021043"
},
{
"name": "Shell",
"bytes": "4900"
}
],
"symlink_target": ""
}
|
//
// BalanceView.m
// myVultr
//
// Created by zhangyuan on 4/24/15.
// Copyright (c) 2015 nextcloudmedia. All rights reserved.
//
#import "BalanceView.h"
@implementation BalanceView
/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect {
// Drawing code
}
*/
- (IBAction)displayGestureForTapRecognizer:(id)sender {
if (self.delegate && [self.delegate respondsToSelector:@selector(onTap:)]) {
[self.delegate onTap:self];
}
}
@end
|
{
"content_hash": "9ee70558962cb9a6af3c0730942a820d",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 80,
"avg_line_length": 21.185185185185187,
"alnum_prop": 0.6993006993006993,
"repo_name": "zhangyuan/myVultr",
"id": "8c72b0c81aa0150e7bc96d94c12173d7bad81195",
"size": "572",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "myVultr/Views/BalanceView.m",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "193"
},
{
"name": "Objective-C",
"bytes": "43569"
},
{
"name": "Ruby",
"bytes": "121"
}
],
"symlink_target": ""
}
|
package quiz.chess.pieces;
/**
* Created by bollsal on 2016. 12. 14..
*/
public class Rook extends Piece {
public static final String BLACK_NAME = "R";
public static final String WHITE_NAME = "r";
Rook(Color color) {
super(color);
}
@Override
public String getName() {
if (isWhite()) {
return WHITE_NAME;
} else {
return BLACK_NAME;
}
}
}
|
{
"content_hash": "d02b8d48bfb388dcd10e48e9845bcbc4",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 48,
"avg_line_length": 19.454545454545453,
"alnum_prop": 0.5490654205607477,
"repo_name": "bollsal/pikicast-java-study",
"id": "cecfcf53fe7f79621ffd922a8bf85cec9648442b",
"size": "428",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/quiz/chess/pieces/Rook.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "107523"
}
],
"symlink_target": ""
}
|
package org.omg.DynamicAny;
import gnu.CORBA.OrbRestricted;
import org.omg.CORBA.BAD_PARAM;
import org.omg.CORBA.TypeCode;
import org.omg.CORBA.ORB;
import org.omg.CORBA.Any;
import org.omg.CORBA.portable.InputStream;
import org.omg.CORBA.MARSHAL;
import org.omg.CORBA.portable.OutputStream;
/**
* The helper operations for {@link DynArray}. Following the 1.5 JDK
* specifications, DynArray is always a local object, so the two methods of this
* helper ({@link #read} and {@link #write} are not in use, always throwing
* {@link MARSHAL}.
*
* @specnote always throwing MARSHAL in read and write ensures compatibility
* with other popular implementations like Sun's.
*
* @author Audrius Meskauskas, Lithuania (AudriusA@Bioinformatics.org)
*/
public abstract class DynArrayHelper
{
/**
* Cast the passed object into the DynArray. As DynArray is a local object,
* the method just uses java type cast.
*
* @param obj the object to narrow.
* @return narrowed instance.
* @throws BAD_PARAM if the passed object is not a DynArray.
*/
public static DynArray narrow(org.omg.CORBA.Object obj)
{
try
{
return (DynArray) obj;
}
catch (ClassCastException cex)
{
throw new BAD_PARAM(obj.getClass().getName() + " is not a DynArray");
}
}
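// Usage sketch (hypothetical 'obj', e.g. extracted from an Any or returned by an ORB call):
//   DynArray array = DynArrayHelper.narrow(obj);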
/**
* Narrow the given object to the DynArray. For the objects that are
* always local, this operation does not differ from the ordinary
* {@link #narrow} (ClassCastException will be thrown if narrowing something
* different).
*
* @param obj the object to cast.
*
* @return the casted DynArray.
*
* @since 1.5
*
* @see OMG issue 4158.
*/
public static DynArray unchecked_narrow(org.omg.CORBA.Object obj)
{
return narrow(obj);
}
/**
* Get the type code of the {@link DynArray}.
*/
public static TypeCode type()
{
return OrbRestricted.Singleton.create_interface_tc(id(), "DynArray");
}
/**
* Insert the DynArray into the given Any.
*
* @param any the Any to insert into.
*
* @param that the DynArray to insert.
*/
public static void insert(Any any, DynArray that)
{
any.insert_Object(that);
}
/**
* Extract the DynArray from given Any.
*
* @throws BAD_OPERATION if the passed Any does not contain DynArray.
*/
public static DynArray extract(Any any)
{
return narrow(any.extract_Object());
}
/**
* Get the DynArray repository id.
*
* @return "IDL:omg.org/DynamicAny/DynArray:1.0", always.
*/
public static String id()
{
return "IDL:omg.org/DynamicAny/DynArray:1.0";
}
/**
* This should read DynArray from the CDR input stream, but (following the JDK
* 1.5 API) it does not.
*
* @param input a org.omg.CORBA.portable stream to read from.
*
* @specnote Sun throws the same exception.
*
* @throws MARSHAL always.
*/
public static DynArray read(InputStream input)
{
throw new MARSHAL(DynAnyFactoryHelper.not_applicable(id()));
}
/**
* This should write the DynArray into the CDR output stream, but (following the
* JDK 1.5 API) it does not.
*
* @param output a org.omg.CORBA.portable stream to write into.
* @param value the DynArray to write.
*
* @specnote Sun throws the same exception.
*
* @throws MARSHAL always.
*/
public static void write(OutputStream output, DynArray value)
{
throw new MARSHAL(DynAnyFactoryHelper.not_applicable(id()));
}
}
|
{
"content_hash": "aa03073c8560d847bcb47ecc78730a55",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 80,
"avg_line_length": 25.313868613138688,
"alnum_prop": 0.6620530565167243,
"repo_name": "shaotuanchen/sunflower_exp",
"id": "26824d747ae39409bfe07fb7f22408c1a0c04860",
"size": "5202",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "tools/source/gcc-4.2.4/libjava/classpath/org/omg/DynamicAny/DynArrayHelper.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "459993"
},
{
"name": "Awk",
"bytes": "6562"
},
{
"name": "Batchfile",
"bytes": "9028"
},
{
"name": "C",
"bytes": "50326113"
},
{
"name": "C++",
"bytes": "2040239"
},
{
"name": "CSS",
"bytes": "2355"
},
{
"name": "Clarion",
"bytes": "2484"
},
{
"name": "Coq",
"bytes": "61440"
},
{
"name": "DIGITAL Command Language",
"bytes": "69150"
},
{
"name": "Emacs Lisp",
"bytes": "186910"
},
{
"name": "Fortran",
"bytes": "5364"
},
{
"name": "HTML",
"bytes": "2171356"
},
{
"name": "JavaScript",
"bytes": "27164"
},
{
"name": "Logos",
"bytes": "159114"
},
{
"name": "M",
"bytes": "109006"
},
{
"name": "M4",
"bytes": "100614"
},
{
"name": "Makefile",
"bytes": "5409865"
},
{
"name": "Mercury",
"bytes": "702"
},
{
"name": "Module Management System",
"bytes": "56956"
},
{
"name": "OCaml",
"bytes": "253115"
},
{
"name": "Objective-C",
"bytes": "57800"
},
{
"name": "Papyrus",
"bytes": "3298"
},
{
"name": "Perl",
"bytes": "70992"
},
{
"name": "Perl 6",
"bytes": "693"
},
{
"name": "PostScript",
"bytes": "3440120"
},
{
"name": "Python",
"bytes": "40729"
},
{
"name": "Redcode",
"bytes": "1140"
},
{
"name": "Roff",
"bytes": "3794721"
},
{
"name": "SAS",
"bytes": "56770"
},
{
"name": "SRecode Template",
"bytes": "540157"
},
{
"name": "Shell",
"bytes": "1560436"
},
{
"name": "Smalltalk",
"bytes": "10124"
},
{
"name": "Standard ML",
"bytes": "1212"
},
{
"name": "TeX",
"bytes": "385584"
},
{
"name": "WebAssembly",
"bytes": "52904"
},
{
"name": "Yacc",
"bytes": "510934"
}
],
"symlink_target": ""
}
|
class WordScramble::LetterFrequency
attr_reader :frequency_hash, :length
def initialize(str)
@length = str.length
@frequency_hash = {}
str.downcase.each_char do |c|
@frequency_hash[c] ||= 0
@frequency_hash[c] += 1
end
end
def ==(other)
other.is_a?(WordScramble::LetterFrequency) and other.frequency_hash == @frequency_hash
end
end
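# Usage sketch (hypothetical words): two strings are anagrams of each other when
# their letter frequencies are equal, e.g.
#   WordScramble::LetterFrequency.new("listen") == WordScramble::LetterFrequency.new("silent") #=> true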
|
{
"content_hash": "2b583f7894f4bbd295a6322d8324959a",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 90,
"avg_line_length": 21,
"alnum_prop": 0.6507936507936508,
"repo_name": "declan/word_scramble",
"id": "8badffc7768c6a7745cf0c26e6b59536ce89abec",
"size": "378",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/word_scramble/letter_frequency.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "6300"
}
],
"symlink_target": ""
}
|
package de.danoeh.antennapod.test;
import java.io.File;
import java.util.Date;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import de.danoeh.antennapod.asynctask.DownloadStatus;
import de.danoeh.antennapod.feed.Feed;
import de.danoeh.antennapod.service.download.Downloader;
import de.danoeh.antennapod.service.download.DownloaderCallback;
import de.danoeh.antennapod.service.download.HttpDownloader;
import android.test.AndroidTestCase;
import android.util.Log;
public class HttpDownloaderTest extends AndroidTestCase {
private static final String TAG = "HttpDownloaderTest";
private static final String DOWNLOAD_DIR = "testdownloads";
private static boolean successful = true;
private static ExecutorService es;
private static DownloaderCallback downloaderCallback = new DownloaderCallback() {
@Override
public void onDownloadCompleted(Downloader downloader) {
DownloadStatus status = downloader.getStatus();
if (status != null) {
final String downloadUrl = status.getFeedFile().getDownload_url();
final String fileUrl = status.getFeedFile().getFile_url();
new File(fileUrl).delete();
if (status.isSuccessful()) {
Log.i(TAG, "Download successful: " + downloadUrl);
} else {
Log.e(TAG, "Download not successful: " + status.toString());
successful = false;
}
} else {
Log.wtf(TAG, "Status was null");
successful = false;
}
if (successful == false) {
es.shutdownNow();
}
}
};
public void testDownload() throws InterruptedException {
es = Executors.newFixedThreadPool(5);
int i = 0;
for (String url : TestDownloads.urls) {
Feed feed = new Feed(url, new Date());
String fileUrl = new File(getContext().getExternalFilesDir(DOWNLOAD_DIR).getAbsolutePath(), Integer.toString(i)).getAbsolutePath();
File file = new File(fileUrl);
Log.d(TAG, "Deleting file: " + file.delete());
feed.setFile_url(fileUrl);
DownloadStatus status = new DownloadStatus(feed, Integer.toString(i));
Downloader downloader = new HttpDownloader(downloaderCallback, status);
es.submit(downloader);
i++;
}
Log.i(TAG, "Awaiting termination");
es.shutdown();
es.awaitTermination(Long.MAX_VALUE, TimeUnit.DAYS);
assertTrue(successful);
}
}
|
{
"content_hash": "c54330d27507b3f10aee37f1a225ba50",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 134,
"avg_line_length": 33.114285714285714,
"alnum_prop": 0.7329594477998275,
"repo_name": "repat/AntennaPod",
"id": "24d48bce4f0fe4d876a6527190bb1c61aca8a646",
"size": "2318",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/src/de/danoeh/antennapod/test/HttpDownloaderTest.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "1017297"
}
],
"symlink_target": ""
}
|
% 2nd ESO Music
% Alfons Rovira
% October 2016
\newpage
# Preface
This document has been produced to make it easier to study the 2nd-year ESO music course. It is a compilation of information meant to help memorise the syllabus.
<a rel="license" href="http://creativecommons.org/licenses/by/4.0/"><img alt="Creative Commons License" style="border-width:0" src="https://i.creativecommons.org/l/by/4.0/88x31.png" /></a>

This work is licensed under a <a rel="license" href="http://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</a>
\newpage
# Teaching Unit 1
## 0. Introduction
**Notation**: A code that makes it possible to represent music on paper. 10th century.
**Phonography**: The mechanical recording and reproduction of a sound. 20th century.
## 1. Writing down sound
The musical cultures of most countries in the world are **unwritten** (agraphic); that is, they do not put musical sounds down on paper.
**Musical notation** is the result of a process that stretched over some five centuries, from the 10th to the 15th. Notation makes it possible to perform and analyse works composed centuries ago.
### Characteristics of sound
**Sound** is a vibration that travels through the air in the form of a wave.
Musical sound is characterised by four **parameters**, each with its own set of **qualities**.
| Parameters | Qualities |
| --- | --- |
| **P**itch | High or low |
| **D**uration | Long or short |
| **I**ntensity | Loud or soft |
| **T**imbre | The characteristic sound of the instrument |
## 2. Expressive performance
In a performance there is always something new or unexpected.
> **Melodic progressions** are motivic construction techniques that consist of taking a model of more than one element (melodic or melodic-harmonic) and repeating it, rising and falling by certain intervals.
## 3. Hallelujah, by Georg Friedrich Händel
## 4. Music and technology
The relationship between music and technology is not a recent development.
Since the arrival of digital technology, the resources placed at the service of music have become very numerous:
- **Sound generators**: synthesisers.
- **Audio processors**: software.
- **Recorders**: devices for capturing sound.
- **Sound players**: devices for playing sound back.
## 5. Analysing the languages used to represent sounds
**Scores** are graphic representations of music.
They provide information about:
- the **sounds** that make up a given piece,
- the **qualities** of those sounds,
- and they give **guidelines** for performing it.
|
{
"content_hash": "72d10ff82f448cad14f458a3d7072c9f",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 206,
"avg_line_length": 34.53333333333333,
"alnum_prop": 0.7359073359073359,
"repo_name": "inclusa/2eso",
"id": "3139243ea4301c3b75e9e56ccb6561d1a14ed7f2",
"size": "2648",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "05_mu/md/01_ud01.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4817"
},
{
"name": "Shell",
"bytes": "6776"
},
{
"name": "TeX",
"bytes": "139940"
}
],
"symlink_target": ""
}
|
//
// SettingsViewController.h
// Wicked Calc
//
// Created by Evan Hsu on 2/28/13.
// Copyright (c) 2013 Evan Hsu. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "ViewController.h"
@interface SettingsViewController : UIViewController<UIPickerViewDelegate, UIPickerViewDataSource>
/*Shared Variables*/
@property (nonatomic) BOOL *vertOrHoriz;
@property (nonatomic) int orientation_id;
@property (nonatomic) int themeNum;
/*Objects*/
@property (strong, nonatomic) IBOutlet UIPickerView *BGscroll;
@property (strong, nonatomic) IBOutlet UIImageView *PreviewWindow;
@property (strong, nonatomic) IBOutlet UIImageView *mainImage;
@property (strong, nonatomic) IBOutlet UIImageView *rightImage;
@property (strong, nonatomic) IBOutlet UIImageView *rightImagetwo;
@property (strong, nonatomic) IBOutlet UIImageView *leftImage;
@property (strong, nonatomic) IBOutlet UIImageView *leftImageTwo;
@property (strong, nonatomic) IBOutlet UIImageView *frameone;
@property (strong, nonatomic) IBOutlet UIImageView *frametwo;
@property (strong, nonatomic) IBOutlet UIImageView *framethree;
@property (strong, nonatomic) IBOutlet UIImageView *framefour;
@end
|
{
"content_hash": "2a89560fc460db4cc4357403012f9115",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 98,
"avg_line_length": 34.1764705882353,
"alnum_prop": 0.7839931153184165,
"repo_name": "gobluewolverine5/Wicked-Calc",
"id": "80a960e692b8bf8af17b287dcb84ba870fc92ef5",
"size": "1162",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fractionCalc/SettingsViewController.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "130522"
},
{
"name": "Shell",
"bytes": "21099"
}
],
"symlink_target": ""
}
|
class RemoveAdminFromTicket < ActiveRecord::Migration
def change
remove_reference :tickets, :admin, index: true, foreign_key: true
end
end
|
{
"content_hash": "e4fad07a8ea1b8b4ac246be8de7bc008",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 68,
"avg_line_length": 29,
"alnum_prop": 0.7724137931034483,
"repo_name": "psyglyphik/auxilia",
"id": "ef92fbb70a92a736303a25366e26af291fc23631",
"size": "145",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "db/migrate/20150906101638_remove_admin_from_ticket.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13239"
},
{
"name": "CoffeeScript",
"bytes": "813"
},
{
"name": "HTML",
"bytes": "65197"
},
{
"name": "JavaScript",
"bytes": "14351"
},
{
"name": "Ruby",
"bytes": "143269"
}
],
"symlink_target": ""
}
|
import builtins
from twocode import utils
def add_node_types(context):
Object, Class, Attr, Func, Arg = [context.obj[name] for name in "Object, Class, Attr, Func, Arg".split(", ")]
String, List, Bool = [context.basic_types[name] for name in "String, List, Bool".split(", ")]
wraps = context.native_wraps
node_types = context.parser.node_types
context.node_types = utils.Object()
def gen_type(name):
type = Class()
context.node_types[name] = type
return type
def attach(type, name, **kwargs):
def wrap(func):
type.__fields__[name] = Func(native=func, **kwargs)
return wrap
def node_init(this, *args, **kwargs):
type_name = type_to_name[this.__type__]
node_type = node_types[type_name]
for var, arg in zip(node_type.vars, args):
setattr(this, var.name, arg)
for key, value in kwargs.items():
setattr(this, key, value)
@wraps(result=True)
def node_get_children(this):
type_name = type_to_name[this.__type__]
children = []
for var in node_types[type_name].vars:
child = context.unwrap(getattr(this, var.name))
if var.type and child:
if not var.list:
children.append(child)
else:
children.extend(child)
return children
@wraps("children")
def node_set_children(this, children):
children = iter(children)
type_name = type_to_name[this.__type__]
for var in node_types[type_name].vars:
child = context.unwrap(getattr(this, var.name))
if var.type and child:
if not var.list:
setattr(this, var.name, next(children))
else:
list_var = []
for i in range(len(child)):
list_var.append(next(children))
setattr(this, var.name, context.wrap(list_var))
@wraps(result=True)
def node_source(node):
return str(context.unwrap_code(node))
@wraps(result=True)
def node_tree(node):
return tree_str(context.unwrap_code(node))
def tree_str(node):
node_type = builtins.type(node)
type_name = node_type.__name__
if type_name not in node_types:
return str(node)
return node.str_func(delim=".\t".replace("\t", " " * (4 - 1)), str=tree_str)
type_to_name = {}
for type_name, node_type in node_types.items():
type = gen_type(type_name)
type.__fields__["__init__"] = Func(native=node_init)
type.__fields__["get_children"] = Func(native=node_get_children, args=[Arg("this", type)], return_type=List)
type.__fields__["set_children"] = Func(native=node_set_children, args=[Arg("this", type), Arg("children", List)], return_type=List)
type.__fields__["source"] = Func(native=node_source, args=[Arg("node", type)], return_type=String)
type.__fields__["tree"] = Func(native=node_tree, args=[Arg("node", type)], return_type=String)
# NOTE:
# chooses to require String
# better than including node types in temp scope, they aren't used anywhere else
type_to_name[type] = type_name
Code = context.node_types["code"]
classes = "Term Expr Tuple Stmt Type".split()
for class_name in classes:
gen_type(class_name)
# all extend Node? a data one
code_from_map = {
"Stmt": lambda node: node_types["code"]([node]),
"Tuple": lambda node: node_types["code"]([node_types["stmt_tuple"](node)]),
"Expr": lambda node: node_types["code"]([node_types["stmt_tuple"](node_types["tuple_expr"](node))]),
"Term": lambda node: node_types["code"]([node_types["stmt_tuple"](node_types["tuple_expr"](node_types["expr_term"](node)))]),
}
class_from_map = {
"Stmt": lambda node: node.lines[0],
"Tuple": lambda node: node.lines[0].tuple,
"Expr": lambda node: node.lines[0].tuple.expr,
"Term": lambda node: node.lines[0].tuple.expr.term,
}
@attach(Code, "__from__", sign="(node:Object)->Code")
def code_from(node):
for class_name, map in code_from_map.items():
type = context.node_types[class_name]
if type in context.inherit_chain(node.__type__):
return context.wrap_code(map(context.unwrap_code(node)))
raise context.exc.ConversionError()
def gen_class_from(class_name, map):
type = context.node_types[class_name]
type.__fields__["__from__"] = Func(native=lambda code: context.wrap_code(map(context.unwrap_code(code))), args=[Arg("code", Code)], return_type=type)
for class_name, map in class_from_map.items():
gen_class_from(class_name, map)
var_type_map = {var: String for var in "id op affix value type pack source path".split()}
var_type_map["macro"] = Bool
for type_name, node_type in node_types.items():
type = context.node_types[type_name]
for class_name in classes:
if type_name.startswith(class_name.lower()):
type.__base__ = context.node_types[class_name]
args = type.__fields__["__init__"].args
args.append(Arg("this", type))
for var in node_type.vars:
attr = Attr()
arg = Arg(var.name)
if var.type:
if var.type.capitalize() in classes:
attr.type = context.node_types[var.type.capitalize()]
if var.type in node_types:
attr.type = context.node_types[var.type]
else:
if var.name in var_type_map:
attr.type = var_type_map[var.name]
if var.list:
attr.type = List
attr.default_ = context.parse("[]") # symbol = expr?
arg.default_ = context.parse("[]")
type.__fields__[var.name] = attr
arg.type = attr.type
args.append(arg)
def wrap_code(node):
node_type = builtins.type(node)
type_name = node_type.__name__
if type_name not in node_types:
return context.wrap(node)
type = context.node_types[type_name]
obj = Object(type)
for var in node_type.vars:
if not var.list:
setattr(obj, var.name, context.wrap_code(node.__dict__[var.name]))
else:
setattr(obj, var.name, context.wrap([context.wrap_code(sub_child) for sub_child in node.__dict__[var.name]]))
return obj
context.wrap_code = wrap_code
def unwrap_code(node):
type_name = type_to_name.get(getattr(node, "__type__", None))
if type_name not in node_types:
return context.unwrap(node)
node_type = node_types[type_name]
obj = object.__new__(node_type)
for var in node_type.vars:
if not var.list:
setattr(obj, var.name, context.unwrap_code(node.__dict__[var.name]))
else:
setattr(obj, var.name, [context.unwrap_code(sub_child) for sub_child in node.__dict__[var.name].__this__])
return obj
context.unwrap_code = unwrap_code
StmtValue = gen_type("StmtValue")
@attach(StmtValue, "__term__", sign="(this:StmtValue)->Object")
def stmtvalue_term(this):
return this.__this__
@attach(StmtValue, "__expr__", sign="(this:StmtValue)->Object")
def stmtvalue_expr(this):
return this.__this__
@attach(StmtValue, "__stmt__", sign="(this:StmtValue)->Null")
def stmtvalue_stmt(this):
return context.wrap(None)
context.stmt_value = lambda value: Object(StmtValue, __this__=value)
|
{
"content_hash": "4ef6154acae06574eebad99ba41ea6db",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 157,
"avg_line_length": 43.74585635359116,
"alnum_prop": 0.5574640060621369,
"repo_name": "MrCoft/twocode",
"id": "6347ca237d22a24799a43da288d5b3b9926b38ac",
"size": "7918",
"binary": false,
"copies": "1",
"ref": "refs/heads/development",
"path": "twocode/context/node_types.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "439119"
},
{
"name": "Roff",
"bytes": "9936"
}
],
"symlink_target": ""
}
|
import abc
import asyncio
import itertools
from urllib import parse
import furl
import aiohttp
from waterbutler.core import streams
from waterbutler.core import exceptions
def build_url(base, *segments, **query):
url = furl.furl(base)
# Filters return generators
# Cast to list to force "spin" it
url.path.segments = list(filter(
lambda segment: segment,
map(
# Furl requires everything to be quoted or not, no mixtures allowed
# prequote everything so %signs don't break everything
lambda segment: parse.quote(segment.strip('/')),
# Include any segments of the original url, effectively list+list but returns a generator
itertools.chain(url.path.segments, segments)
)
))
url.args = query
return url.url
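# A minimal usage sketch (illustrative values, not part of the original module):
#   build_url('https://api.example.com/v1', 'folder name', 'file.txt', rev='3')
# strips and percent-quotes each segment, appends it to the base path and attaches
# the query, yielding 'https://api.example.com/v1/folder%20name/file.txt?rev=3'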
class BaseProvider(metaclass=abc.ABCMeta):
"""The base class for all providers.
Every provider must, at the least,
implement all abstract methods in this class
.. note::
When adding a new provider you must add it to setup.py's
`entry_points` under the `waterbutler.providers` key formatted
as: `<provider name> = waterbutler.providers.yourprovider:<FullProviderName>`
Keep in mind that `yourprovider` modules must export the provider class
"""
BASE_URL = None
def __init__(self, auth, credentials, settings):
"""
:param dict auth: Information about the user this provider will act on the behalf of
:param dict credentials: The credentials used to authenticate with the provider,
                         often an OAuth 2 token
:param dict settings: Configuration settings for this provider,
often folder or repo
"""
self.auth = auth
self.credentials = credentials
self.settings = settings
@abc.abstractproperty
def NAME(self):
raise NotImplementedError
def __eq__(self, other):
try:
return (
type(self) == type(other) and
self.credentials == other.credentials
)
except AttributeError:
return False
def serialized(self):
return {
'name': self.NAME,
'auth': self.auth,
'settings': self.settings,
'credentials': self.credentials,
}
def build_url(self, *segments, **query):
"""A nice wrapped around furl, builds urls based on self.BASE_URL
:param (str, ...) segments: A tuple of string joined into /foo/bar/..
:param dict query: A dictionary that will be turned into query parameters ?foo=bar
:rtype: str
"""
return build_url(self.BASE_URL, *segments, **query)
@property
def default_headers(self):
"""Headers to be included with every request
Commonly OAuth headers or Content-Type
"""
return {}
def build_headers(self, **kwargs):
headers = self.default_headers
headers.update(kwargs)
return {
key: value
for key, value in headers.items()
if value is not None
}
@asyncio.coroutine
def make_request(self, *args, **kwargs):
"""A wrapper around :func:`aiohttp.request`. Inserts default headers.
:param str method: The HTTP method
:param str url: The url to send the request to
:keyword range: An optional tuple (start, end) that is transformed into a Range header
        :keyword expects: An optional tuple of acceptable HTTP status codes, as integers; an exception
            is raised if the returned status code is not in it.
:type expects: tuple of ints
:param Exception throws: The exception to be raised from expects
:param tuple \*args: args passed to :func:`aiohttp.request`
:param dict \*kwargs: kwargs passed to :func:`aiohttp.request`
:rtype: :class:`aiohttp.Response`
:raises ProviderError: Raised if expects is defined
"""
kwargs['headers'] = self.build_headers(**kwargs.get('headers', {}))
range = kwargs.pop('range', None)
expects = kwargs.pop('expects', None)
throws = kwargs.pop('throws', exceptions.ProviderError)
if range:
kwargs['headers']['Range'] = self._build_range_header(range)
response = yield from aiohttp.request(*args, **kwargs)
if expects and response.status not in expects:
raise (yield from exceptions.exception_from_response(response, error=throws, **kwargs))
return response
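    # Illustrative call pattern for a provider subclass (assumed names, not from this file):
    #   resp = yield from self.make_request(
    #       'GET', self.build_url('files', path.path),
    #       expects=(200,), throws=exceptions.DownloadError)
    # Any status code outside `expects` raises the `throws` exception built from the response.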
@asyncio.coroutine
def move(self, dest_provider, src_path, dest_path, rename=None, conflict='replace', handle_naming=True):
"""Moves a file or folder from the current provider to the specified one
Performs a copy and then a delete.
Calls :func:`BaseProvider.intra_move` if possible.
:param BaseProvider dest_provider: The provider to move to
        :param WaterButlerPath src_path: The path of the file or folder being moved
        :param WaterButlerPath dest_path: The path to move to or into
        :param str rename: An optional new name for the resulting file or folder
        :param str conflict: The conflict resolution strategy, replace or keep
        :param bool handle_naming: Whether to run :func:`BaseProvider.handle_naming` on the destination
"""
args = (dest_provider, src_path, dest_path)
kwargs = {'rename': rename, 'conflict': conflict}
if handle_naming:
dest_path = yield from dest_provider.handle_naming(
src_path,
dest_path,
rename=rename,
conflict=conflict,
)
args = (dest_provider, src_path, dest_path)
kwargs = {}
if self.can_intra_move(dest_provider, src_path):
return (yield from self.intra_move(*args))
if src_path.is_dir:
metadata, created = yield from self._folder_file_op(self.move, *args, **kwargs)
else:
metadata, created = yield from self.copy(*args, handle_naming=False, **kwargs)
yield from self.delete(src_path)
return metadata, created
@asyncio.coroutine
def copy(self, dest_provider, src_path, dest_path, rename=None, conflict='replace', handle_naming=True):
args = (dest_provider, src_path, dest_path)
kwargs = {'rename': rename, 'conflict': conflict, 'handle_naming': handle_naming}
if handle_naming:
dest_path = yield from dest_provider.handle_naming(
src_path,
dest_path,
rename=rename,
conflict=conflict,
)
args = (dest_provider, src_path, dest_path)
kwargs = {}
if self.can_intra_copy(dest_provider, src_path):
return (yield from self.intra_copy(*args))
if src_path.is_dir:
return (yield from self._folder_file_op(self.copy, *args, **kwargs))
download_stream = yield from self.download(src_path)
if getattr(download_stream, 'name', None):
dest_path.rename(download_stream.name)
return (yield from dest_provider.upload(download_stream, dest_path))
@asyncio.coroutine
def _folder_file_op(self, func, dest_provider, src_path, dest_path, **kwargs):
assert src_path.is_dir, 'src_path must be a directory'
assert asyncio.iscoroutinefunction(func), 'func must be a coroutine'
try:
yield from dest_provider.delete(dest_path)
created = True
except exceptions.ProviderError as e:
if e.code != 404:
raise
created = False
folder = yield from dest_provider.create_folder(dest_path)
dest_path = yield from dest_provider.revalidate_path(dest_path.parent, dest_path.name, folder=dest_path.is_dir)
futures = []
for item in (yield from self.metadata(src_path)):
futures.append(
asyncio.async(
func(
dest_provider,
# TODO figure out a way to cut down on all the requests made here
(yield from self.revalidate_path(src_path, item.name, folder=item.is_folder)),
(yield from dest_provider.revalidate_path(dest_path, item.name, folder=item.is_folder)),
handle_naming=False,
)
)
)
if not futures:
folder.children = []
return folder, created
finished, pending = yield from asyncio.wait(futures, return_when=asyncio.FIRST_EXCEPTION)
if len(pending) != 0:
finished.pop().result()
folder.children = [
future.result()[0] # result is a tuple of (metadata, created)
for future in finished
]
return folder, created
@asyncio.coroutine
def handle_naming(self, src_path, dest_path, rename=None, conflict='replace'):
"""Given a WaterButlerPath and the desired name handle any potential
naming issues
ie:
cp /file.txt /folder/ -> /folder/file.txt
cp /folder/ /folder/ -> /folder/folder/
cp /file.txt /folder/file.txt -> /folder/file.txt
cp /file.txt /folder/file.txt -> /folder/file (1).txt
cp /file.txt /folder/doc.txt -> /folder/doc.txt
:param WaterButlerPath src_path: The object that is being copied
:param WaterButlerPath dest_path: The path that is being copied to or into
:param str rename: The desired name of the resulting path, may be incremented
:param str conflict: The conflict resolution strategy, replace or keep
"""
if src_path.is_dir and dest_path.is_file:
            # Can't copy a directory to a file
raise ValueError('Destination must be a directory if the source is')
if not dest_path.is_file:
# Directories always are going to be copied into
# cp /folder1/ /folder2/ -> /folder1/folder2/
dest_path = yield from self.revalidate_path(
dest_path,
rename or src_path.name,
folder=src_path.is_dir
)
dest_path, _ = yield from self.handle_name_conflict(dest_path, conflict=conflict)
return dest_path
def can_intra_copy(self, other, path=None):
"""Indicates if a quick copy can be performed
between the current and `other`.
.. note::
Defaults to False
:param waterbutler.core.provider.BaseProvider other: The provider to check against
:rtype: bool
"""
return False
def can_intra_move(self, other, path=None):
"""Indicates if a quick move can be performed
between the current and `other`.
.. note::
Defaults to False
:param waterbutler.core.provider.BaseProvider other: The provider to check against
:rtype: bool
"""
return False
def intra_copy(self, dest_provider, source_options, dest_options):
raise NotImplementedError
@asyncio.coroutine
def intra_move(self, dest_provider, src_path, dest_path):
data, created = yield from self.intra_copy(dest_provider, src_path, dest_path)
yield from self.delete(src_path)
return data, created
@asyncio.coroutine
def exists(self, path, **kwargs):
try:
return (yield from self.metadata(path, **kwargs))
except exceptions.NotFoundError:
return False
except exceptions.MetadataError as e:
if e.code != 404:
raise
return False
@asyncio.coroutine
def handle_name_conflict(self, path, conflict='replace', **kwargs):
"""Given a name and a conflict resolution pattern determine
the correct file path to upload to and indicate if that file exists or not
:param WaterbutlerPath path: An object supporting the waterbutler path API
:param str conflict: replace, keep, warn
:rtype: (WaterButlerPath, dict or False)
:raises: NamingConflict
"""
exists = yield from self.exists(path, **kwargs)
if not exists or conflict == 'replace':
return path, exists
if conflict == 'warn':
raise exceptions.NamingConflict(path)
while (yield from self.exists(path.increment_name(), **kwargs)):
pass
# path.increment_name()
# exists = self.exists(str(path))
return path, False
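    # Sketch of the resolution behaviour (summary comment, not original text):
    #   conflict='replace' -> the given path is returned as-is, whether or not it exists
    #   conflict='warn' and the path exists -> exceptions.NamingConflict is raised
    #   otherwise ('keep') -> the name is incremented until an unused path is found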
@asyncio.coroutine
def revalidate_path(self, base, path, folder=False):
return base.child(path, folder=folder)
@asyncio.coroutine
def zip(self, path, **kwargs):
"""Streams a Zip archive of the given folder
:param str path: The folder to compress
"""
if path.is_file:
base_path = path.parent.path
else:
base_path = path.path
names, coros, remaining = [], [], [path]
while remaining:
path = remaining.pop()
metadata = yield from self.metadata(path)
for item in metadata:
current_path = yield from self.revalidate_path(
path,
item.name,
folder=item.is_folder
)
if current_path.is_file:
names.append(current_path.path.replace(base_path, '', 1))
coros.append(self.__zip_defered_download(current_path))
else:
remaining.append(current_path)
return streams.ZipStreamReader(*zip(names, coros))
def __zip_defered_download(self, path):
"""Returns a scoped lambda to defer the execution
of the download coroutine
"""
return lambda: self.download(path)
@abc.abstractmethod
def can_duplicate_names(self):
"""Returns True if a file and a folder in the same directory can have identical names."""
raise NotImplementedError
@abc.abstractmethod
def download(self, **kwargs):
"""Download a file from this provider.
:param dict \*\*kwargs: Arguments to be parsed by child classes
:rtype: :class:`waterbutler.core.streams.ResponseStreamReader`
:raises: :class:`waterbutler.core.exceptions.DownloadError`
"""
raise NotImplementedError
@abc.abstractmethod
def upload(self, stream, **kwargs):
"""
:param dict \*\*kwargs: Arguments to be parsed by child classes
:rtype: (:class:`waterbutler.core.metadata.BaseFileMetadata`, :class:`bool`)
:raises: :class:`waterbutler.core.exceptions.DeleteError`
"""
raise NotImplementedError
@abc.abstractmethod
def delete(self, **kwargs):
"""
:param dict \*\*kwargs: Arguments to be parsed by child classes
:rtype: :class:`None`
:raises: :class:`waterbutler.core.exceptions.DeleteError`
"""
raise NotImplementedError
@abc.abstractmethod
def metadata(self, **kwargs):
"""Get metdata about the specified resource from this provider.
Will be a :class:`list` if the resource is a directory otherwise an instance of :class:`waterbutler.core.metadata.BaseFileMetadata`
:param dict \*\*kwargs: Arguments to be parsed by child classes
:rtype: :class:`waterbutler.core.metadata.BaseMetadata`
:rtype: :class:`list` of :class:`waterbutler.core.metadata.BaseMetadata`
:raises: :class:`waterbutler.core.exceptions.MetadataError`
"""
raise NotImplementedError
@abc.abstractmethod
def validate_v1_path(self, path, **kwargs):
"""API v1 requires that requests against folder endpoints always end with a slash, and
requests against files never end with a slash. This method checks the provider's metadata
for the given id and throws a 404 Not Found if the implicit and explicit types don't
match. This method duplicates the logic in the provider's validate_path method, but
        validate_path must currently accommodate v0 AND v1 semantics. After v0's retirement, this
method can replace validate_path.
:param str path: user-supplied path to validate
:rtype: :class:`waterbutler.core.path`
:raises: :class:`waterbutler.core.exceptions.NotFoundError`
"""
raise NotImplementedError
@abc.abstractmethod
def validate_path(self, path, **kwargs):
raise NotImplementedError
def revisions(self, **kwargs):
return [] # TODO Raise 405 by default h/t @rliebz
def create_folder(self, *args, **kwargs):
"""Create a folder in the current provider
returns True if the folder was created; False if it already existed
:rtype: :class:`waterbutler.core.metadata.BaseFolderMetadata`
:raises: :class:`waterbutler.core.exceptions.FolderCreationError`
"""
raise exceptions.ProviderError({'message': 'Folder creation not supported.'}, code=405)
def _build_range_header(self, slice_tup):
start, end = slice_tup
return 'bytes={}-{}'.format(
'' if start is None else start,
'' if end is None else end
)
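    # Illustrative results (not part of the original source):
    #   self._build_range_header((0, 1023))   -> 'bytes=0-1023'
    #   self._build_range_header((None, 500)) -> 'bytes=-500'  (the last 500 bytes)
    #   self._build_range_header((500, None)) -> 'bytes=500-'  (from byte 500 onward)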
|
{
"content_hash": "1f491c17c9d4f512d18bba11e488263a",
"timestamp": "",
"source": "github",
"line_count": 468,
"max_line_length": 139,
"avg_line_length": 36.98076923076923,
"alnum_prop": 0.6102155197319004,
"repo_name": "kwierman/waterbutler",
"id": "a096dcc7029127d629a0365319acedc2a0b33043",
"size": "17307",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "waterbutler/core/provider.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "625886"
}
],
"symlink_target": ""
}
|
(function( global, factory ) {
if ( typeof module === "object" && typeof module.exports === "object" ) {
// For CommonJS and CommonJS-like environments where a proper window is present,
// execute the factory and get jQuery
		// For environments that do not inherently possess a window with a document
// (such as Node.js), expose a jQuery-making factory as module.exports
// This accentuates the need for the creation of a real window
// e.g. var jQuery = require("jquery")(window);
// See ticket #14549 for more info
module.exports = global.document ?
factory( global, true ) :
function( w ) {
if ( !w.document ) {
throw new Error( "jQuery requires a window with a document" );
}
return factory( w );
};
} else {
factory( global );
}
// Pass this if window is not defined yet
}(typeof window !== "undefined" ? window : this, function( window, noGlobal ) {
// Can't do this because several apps including ASP.NET trace
// the stack via arguments.caller.callee and Firefox dies if
// you try to trace through "use strict" call chains. (#13335)
// Support: Firefox 18+
//
var deletedIds = [];
var slice = deletedIds.slice;
var concat = deletedIds.concat;
var push = deletedIds.push;
var indexOf = deletedIds.indexOf;
var class2type = {};
var toString = class2type.toString;
var hasOwn = class2type.hasOwnProperty;
var trim = "".trim;
var support = {};
var
version = "1.11.0 -ajax,-ajax/jsonp,-ajax/load,-ajax/parseJSON,-ajax/parseXML,-ajax/script,-ajax/var/nonce,-ajax/var/rquery,-ajax/xhr,-manipulation/_evalUrl,-effects,-effects/Tween,-effects/animatedSelector,-effects/support,-event-alias",
// Define a local copy of jQuery
jQuery = function( selector, context ) {
// The jQuery object is actually just the init constructor 'enhanced'
// Need init if jQuery is called (just allow error to be thrown if not included)
return new jQuery.fn.init( selector, context );
},
// Make sure we trim BOM and NBSP (here's looking at you, Safari 5.0 and IE)
rtrim = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,
// Matches dashed string for camelizing
rmsPrefix = /^-ms-/,
rdashAlpha = /-([\da-z])/gi,
// Used by jQuery.camelCase as callback to replace()
fcamelCase = function( all, letter ) {
return letter.toUpperCase();
};
jQuery.fn = jQuery.prototype = {
// The current version of jQuery being used
jquery: version,
constructor: jQuery,
// Start with an empty selector
selector: "",
// The default length of a jQuery object is 0
length: 0,
toArray: function() {
return slice.call( this );
},
// Get the Nth element in the matched element set OR
// Get the whole matched element set as a clean array
get: function( num ) {
return num != null ?
// Return a 'clean' array
( num < 0 ? this[ num + this.length ] : this[ num ] ) :
// Return just the object
slice.call( this );
},
// Take an array of elements and push it onto the stack
// (returning the new matched element set)
pushStack: function( elems ) {
// Build a new jQuery matched element set
var ret = jQuery.merge( this.constructor(), elems );
// Add the old object onto the stack (as a reference)
ret.prevObject = this;
ret.context = this.context;
// Return the newly-formed element set
return ret;
},
// Execute a callback for every element in the matched set.
// (You can seed the arguments with an array of args, but this is
// only used internally.)
each: function( callback, args ) {
return jQuery.each( this, callback, args );
},
map: function( callback ) {
return this.pushStack( jQuery.map(this, function( elem, i ) {
return callback.call( elem, i, elem );
}));
},
slice: function() {
return this.pushStack( slice.apply( this, arguments ) );
},
first: function() {
return this.eq( 0 );
},
last: function() {
return this.eq( -1 );
},
eq: function( i ) {
var len = this.length,
j = +i + ( i < 0 ? len : 0 );
return this.pushStack( j >= 0 && j < len ? [ this[j] ] : [] );
},
end: function() {
return this.prevObject || this.constructor(null);
},
// For internal use only.
// Behaves like an Array's method, not like a jQuery method.
push: push,
sort: deletedIds.sort,
splice: deletedIds.splice
};
jQuery.extend = jQuery.fn.extend = function() {
var src, copyIsArray, copy, name, options, clone,
target = arguments[0] || {},
i = 1,
length = arguments.length,
deep = false;
// Handle a deep copy situation
if ( typeof target === "boolean" ) {
deep = target;
// skip the boolean and the target
target = arguments[ i ] || {};
i++;
}
// Handle case when target is a string or something (possible in deep copy)
if ( typeof target !== "object" && !jQuery.isFunction(target) ) {
target = {};
}
// extend jQuery itself if only one argument is passed
if ( i === length ) {
target = this;
i--;
}
for ( ; i < length; i++ ) {
// Only deal with non-null/undefined values
if ( (options = arguments[ i ]) != null ) {
// Extend the base object
for ( name in options ) {
src = target[ name ];
copy = options[ name ];
// Prevent never-ending loop
if ( target === copy ) {
continue;
}
// Recurse if we're merging plain objects or arrays
if ( deep && copy && ( jQuery.isPlainObject(copy) || (copyIsArray = jQuery.isArray(copy)) ) ) {
if ( copyIsArray ) {
copyIsArray = false;
clone = src && jQuery.isArray(src) ? src : [];
} else {
clone = src && jQuery.isPlainObject(src) ? src : {};
}
// Never move original objects, clone them
target[ name ] = jQuery.extend( deep, clone, copy );
// Don't bring in undefined values
} else if ( copy !== undefined ) {
target[ name ] = copy;
}
}
}
}
// Return the modified object
return target;
};
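// Illustrative usage (not part of the original source): passing `true` as the first
// argument performs a deep merge, cloning nested plain objects and arrays instead of
// copying references, e.g.
//   var merged = jQuery.extend( true, {}, { a: { b: 1 } }, { a: { c: 2 } } );
//   // merged.a is a new object equal to { b: 1, c: 2 }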
jQuery.extend({
// Unique for each copy of jQuery on the page
expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ),
// Assume jQuery is ready without the ready module
isReady: true,
error: function( msg ) {
throw new Error( msg );
},
noop: function() {},
// See test/unit/core.js for details concerning isFunction.
// Since version 1.3, DOM methods and functions like alert
// aren't supported. They return false on IE (#2968).
isFunction: function( obj ) {
return jQuery.type(obj) === "function";
},
isArray: Array.isArray || function( obj ) {
return jQuery.type(obj) === "array";
},
isWindow: function( obj ) {
/* jshint eqeqeq: false */
return obj != null && obj == obj.window;
},
isNumeric: function( obj ) {
// parseFloat NaNs numeric-cast false positives (null|true|false|"")
// ...but misinterprets leading-number strings, particularly hex literals ("0x...")
// subtraction forces infinities to NaN
return obj - parseFloat( obj ) >= 0;
},
isEmptyObject: function( obj ) {
var name;
for ( name in obj ) {
return false;
}
return true;
},
isPlainObject: function( obj ) {
var key;
// Must be an Object.
// Because of IE, we also have to check the presence of the constructor property.
// Make sure that DOM nodes and window objects don't pass through, as well
if ( !obj || jQuery.type(obj) !== "object" || obj.nodeType || jQuery.isWindow( obj ) ) {
return false;
}
try {
// Not own constructor property must be Object
if ( obj.constructor &&
!hasOwn.call(obj, "constructor") &&
!hasOwn.call(obj.constructor.prototype, "isPrototypeOf") ) {
return false;
}
} catch ( e ) {
// IE8,9 Will throw exceptions on certain host objects #9897
return false;
}
// Support: IE<9
// Handle iteration over inherited properties before own properties.
if ( support.ownLast ) {
for ( key in obj ) {
return hasOwn.call( obj, key );
}
}
// Own properties are enumerated firstly, so to speed up,
// if last one is own, then all properties are own.
for ( key in obj ) {}
return key === undefined || hasOwn.call( obj, key );
},
type: function( obj ) {
if ( obj == null ) {
return obj + "";
}
return typeof obj === "object" || typeof obj === "function" ?
class2type[ toString.call(obj) ] || "object" :
typeof obj;
},
// Evaluates a script in a global context
// Workarounds based on findings by Jim Driscoll
// http://weblogs.java.net/blog/driscoll/archive/2009/09/08/eval-javascript-global-context
globalEval: function( data ) {
if ( data && jQuery.trim( data ) ) {
// We use execScript on Internet Explorer
// We use an anonymous function so that context is window
// rather than jQuery in Firefox
( window.execScript || function( data ) {
window[ "eval" ].call( window, data );
} )( data );
}
},
// Convert dashed to camelCase; used by the css and data modules
// Microsoft forgot to hump their vendor prefix (#9572)
camelCase: function( string ) {
return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase );
},
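	// Illustrative results (not from the original source):
	//   jQuery.camelCase( "background-color" ) -> "backgroundColor"
	//   jQuery.camelCase( "-ms-transform" )    -> "msTransform" (vendor prefix special-cased)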
nodeName: function( elem, name ) {
return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase();
},
// args is for internal usage only
each: function( obj, callback, args ) {
var value,
i = 0,
length = obj.length,
isArray = isArraylike( obj );
if ( args ) {
if ( isArray ) {
for ( ; i < length; i++ ) {
value = callback.apply( obj[ i ], args );
if ( value === false ) {
break;
}
}
} else {
for ( i in obj ) {
value = callback.apply( obj[ i ], args );
if ( value === false ) {
break;
}
}
}
// A special, fast, case for the most common use of each
} else {
if ( isArray ) {
for ( ; i < length; i++ ) {
value = callback.call( obj[ i ], i, obj[ i ] );
if ( value === false ) {
break;
}
}
} else {
for ( i in obj ) {
value = callback.call( obj[ i ], i, obj[ i ] );
if ( value === false ) {
break;
}
}
}
}
return obj;
},
// Use native String.trim function wherever possible
trim: trim && !trim.call("\uFEFF\xA0") ?
function( text ) {
return text == null ?
"" :
trim.call( text );
} :
// Otherwise use our own trimming functionality
function( text ) {
return text == null ?
"" :
( text + "" ).replace( rtrim, "" );
},
// results is for internal usage only
makeArray: function( arr, results ) {
var ret = results || [];
if ( arr != null ) {
if ( isArraylike( Object(arr) ) ) {
jQuery.merge( ret,
typeof arr === "string" ?
[ arr ] : arr
);
} else {
push.call( ret, arr );
}
}
return ret;
},
inArray: function( elem, arr, i ) {
var len;
if ( arr ) {
if ( indexOf ) {
return indexOf.call( arr, elem, i );
}
len = arr.length;
i = i ? i < 0 ? Math.max( 0, len + i ) : i : 0;
for ( ; i < len; i++ ) {
// Skip accessing in sparse arrays
if ( i in arr && arr[ i ] === elem ) {
return i;
}
}
}
return -1;
},
merge: function( first, second ) {
var len = +second.length,
j = 0,
i = first.length;
while ( j < len ) {
first[ i++ ] = second[ j++ ];
}
// Support: IE<9
// Workaround casting of .length to NaN on otherwise arraylike objects (e.g., NodeLists)
if ( len !== len ) {
while ( second[j] !== undefined ) {
first[ i++ ] = second[ j++ ];
}
}
first.length = i;
return first;
},
grep: function( elems, callback, invert ) {
var callbackInverse,
matches = [],
i = 0,
length = elems.length,
callbackExpect = !invert;
// Go through the array, only saving the items
// that pass the validator function
for ( ; i < length; i++ ) {
callbackInverse = !callback( elems[ i ], i );
if ( callbackInverse !== callbackExpect ) {
matches.push( elems[ i ] );
}
}
return matches;
},
// arg is for internal usage only
map: function( elems, callback, arg ) {
var value,
i = 0,
length = elems.length,
isArray = isArraylike( elems ),
ret = [];
// Go through the array, translating each of the items to their new values
if ( isArray ) {
for ( ; i < length; i++ ) {
value = callback( elems[ i ], i, arg );
if ( value != null ) {
ret.push( value );
}
}
// Go through every key on the object,
} else {
for ( i in elems ) {
value = callback( elems[ i ], i, arg );
if ( value != null ) {
ret.push( value );
}
}
}
// Flatten any nested arrays
return concat.apply( [], ret );
},
// A global GUID counter for objects
guid: 1,
// Bind a function to a context, optionally partially applying any
// arguments.
proxy: function( fn, context ) {
var args, proxy, tmp;
if ( typeof context === "string" ) {
tmp = fn[ context ];
context = fn;
fn = tmp;
}
// Quick check to determine if target is callable, in the spec
// this throws a TypeError, but we will just return undefined.
if ( !jQuery.isFunction( fn ) ) {
return undefined;
}
// Simulated bind
args = slice.call( arguments, 2 );
proxy = function() {
return fn.apply( context || this, args.concat( slice.call( arguments ) ) );
};
// Set the guid of unique handler to the same of original handler, so it can be removed
proxy.guid = fn.guid = fn.guid || jQuery.guid++;
return proxy;
},
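	// Illustrative usage (not from the original source):
	//   var bound = jQuery.proxy( obj.handler, obj );
	//   elem.onclick = bound;   // inside handler, `this` is `obj`
	// A string form is also supported: jQuery.proxy( obj, "handler" ) binds obj.handler to obj.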
now: function() {
return +( new Date() );
},
// jQuery.support is not used in Core but other projects attach their
// properties to it so it needs to exist.
support: support
});
// Populate the class2type map
jQuery.each("Boolean Number String Function Array Date RegExp Object Error".split(" "), function(i, name) {
class2type[ "[object " + name + "]" ] = name.toLowerCase();
});
function isArraylike( obj ) {
var length = obj.length,
type = jQuery.type( obj );
if ( type === "function" || jQuery.isWindow( obj ) ) {
return false;
}
if ( obj.nodeType === 1 && length ) {
return true;
}
return type === "array" || length === 0 ||
typeof length === "number" && length > 0 && ( length - 1 ) in obj;
}
var Sizzle =
/*!
* Sizzle CSS Selector Engine v1.10.16
* http://sizzlejs.com/
*
* Copyright 2013 jQuery Foundation, Inc. and other contributors
* Released under the MIT license
* http://jquery.org/license
*
* Date: 2014-01-13
*/
(function( window ) {
var i,
support,
Expr,
getText,
isXML,
compile,
outermostContext,
sortInput,
hasDuplicate,
// Local document vars
setDocument,
document,
docElem,
documentIsHTML,
rbuggyQSA,
rbuggyMatches,
matches,
contains,
// Instance-specific data
expando = "sizzle" + -(new Date()),
preferredDoc = window.document,
dirruns = 0,
done = 0,
classCache = createCache(),
tokenCache = createCache(),
compilerCache = createCache(),
sortOrder = function( a, b ) {
if ( a === b ) {
hasDuplicate = true;
}
return 0;
},
// General-purpose constants
strundefined = typeof undefined,
MAX_NEGATIVE = 1 << 31,
// Instance methods
hasOwn = ({}).hasOwnProperty,
arr = [],
pop = arr.pop,
push_native = arr.push,
push = arr.push,
slice = arr.slice,
// Use a stripped-down indexOf if we can't use a native one
indexOf = arr.indexOf || function( elem ) {
var i = 0,
len = this.length;
for ( ; i < len; i++ ) {
if ( this[i] === elem ) {
return i;
}
}
return -1;
},
booleans = "checked|selected|async|autofocus|autoplay|controls|defer|disabled|hidden|ismap|loop|multiple|open|readonly|required|scoped",
// Regular expressions
// Whitespace characters http://www.w3.org/TR/css3-selectors/#whitespace
whitespace = "[\\x20\\t\\r\\n\\f]",
// http://www.w3.org/TR/css3-syntax/#characters
characterEncoding = "(?:\\\\.|[\\w-]|[^\\x00-\\xa0])+",
// Loosely modeled on CSS identifier characters
// An unquoted value should be a CSS identifier http://www.w3.org/TR/css3-selectors/#attribute-selectors
// Proper syntax: http://www.w3.org/TR/CSS21/syndata.html#value-def-identifier
identifier = characterEncoding.replace( "w", "w#" ),
// Acceptable operators http://www.w3.org/TR/selectors/#attribute-selectors
attributes = "\\[" + whitespace + "*(" + characterEncoding + ")" + whitespace +
"*(?:([*^$|!~]?=)" + whitespace + "*(?:(['\"])((?:\\\\.|[^\\\\])*?)\\3|(" + identifier + ")|)|)" + whitespace + "*\\]",
// Prefer arguments quoted,
// then not containing pseudos/brackets,
// then attribute selectors/non-parenthetical expressions,
// then anything else
// These preferences are here to reduce the number of selectors
// needing tokenize in the PSEUDO preFilter
pseudos = ":(" + characterEncoding + ")(?:\\(((['\"])((?:\\\\.|[^\\\\])*?)\\3|((?:\\\\.|[^\\\\()[\\]]|" + attributes.replace( 3, 8 ) + ")*)|.*)\\)|)",
// Leading and non-escaped trailing whitespace, capturing some non-whitespace characters preceding the latter
rtrim = new RegExp( "^" + whitespace + "+|((?:^|[^\\\\])(?:\\\\.)*)" + whitespace + "+$", "g" ),
rcomma = new RegExp( "^" + whitespace + "*," + whitespace + "*" ),
rcombinators = new RegExp( "^" + whitespace + "*([>+~]|" + whitespace + ")" + whitespace + "*" ),
rattributeQuotes = new RegExp( "=" + whitespace + "*([^\\]'\"]*?)" + whitespace + "*\\]", "g" ),
rpseudo = new RegExp( pseudos ),
ridentifier = new RegExp( "^" + identifier + "$" ),
matchExpr = {
"ID": new RegExp( "^#(" + characterEncoding + ")" ),
"CLASS": new RegExp( "^\\.(" + characterEncoding + ")" ),
"TAG": new RegExp( "^(" + characterEncoding.replace( "w", "w*" ) + ")" ),
"ATTR": new RegExp( "^" + attributes ),
"PSEUDO": new RegExp( "^" + pseudos ),
"CHILD": new RegExp( "^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\(" + whitespace +
"*(even|odd|(([+-]|)(\\d*)n|)" + whitespace + "*(?:([+-]|)" + whitespace +
"*(\\d+)|))" + whitespace + "*\\)|)", "i" ),
"bool": new RegExp( "^(?:" + booleans + ")$", "i" ),
// For use in libraries implementing .is()
// We use this for POS matching in `select`
"needsContext": new RegExp( "^" + whitespace + "*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\(" +
whitespace + "*((?:-\\d)?\\d*)" + whitespace + "*\\)|)(?=[^-]|$)", "i" )
},
rinputs = /^(?:input|select|textarea|button)$/i,
rheader = /^h\d$/i,
rnative = /^[^{]+\{\s*\[native \w/,
// Easily-parseable/retrievable ID or TAG or CLASS selectors
rquickExpr = /^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,
rsibling = /[+~]/,
rescape = /'|\\/g,
// CSS escapes http://www.w3.org/TR/CSS21/syndata.html#escaped-characters
runescape = new RegExp( "\\\\([\\da-f]{1,6}" + whitespace + "?|(" + whitespace + ")|.)", "ig" ),
funescape = function( _, escaped, escapedWhitespace ) {
var high = "0x" + escaped - 0x10000;
// NaN means non-codepoint
// Support: Firefox
// Workaround erroneous numeric interpretation of +"0x"
return high !== high || escapedWhitespace ?
escaped :
high < 0 ?
// BMP codepoint
String.fromCharCode( high + 0x10000 ) :
// Supplemental Plane codepoint (surrogate pair)
String.fromCharCode( high >> 10 | 0xD800, high & 0x3FF | 0xDC00 );
};
// Optimize for push.apply( _, NodeList )
try {
push.apply(
(arr = slice.call( preferredDoc.childNodes )),
preferredDoc.childNodes
);
// Support: Android<4.0
// Detect silently failing push.apply
arr[ preferredDoc.childNodes.length ].nodeType;
} catch ( e ) {
push = { apply: arr.length ?
// Leverage slice if possible
function( target, els ) {
push_native.apply( target, slice.call(els) );
} :
// Support: IE<9
// Otherwise append directly
function( target, els ) {
var j = target.length,
i = 0;
// Can't trust NodeList.length
while ( (target[j++] = els[i++]) ) {}
target.length = j - 1;
}
};
}
function Sizzle( selector, context, results, seed ) {
var match, elem, m, nodeType,
// QSA vars
i, groups, old, nid, newContext, newSelector;
if ( ( context ? context.ownerDocument || context : preferredDoc ) !== document ) {
setDocument( context );
}
context = context || document;
results = results || [];
if ( !selector || typeof selector !== "string" ) {
return results;
}
if ( (nodeType = context.nodeType) !== 1 && nodeType !== 9 ) {
return [];
}
if ( documentIsHTML && !seed ) {
// Shortcuts
if ( (match = rquickExpr.exec( selector )) ) {
// Speed-up: Sizzle("#ID")
if ( (m = match[1]) ) {
if ( nodeType === 9 ) {
elem = context.getElementById( m );
// Check parentNode to catch when Blackberry 4.6 returns
// nodes that are no longer in the document (jQuery #6963)
if ( elem && elem.parentNode ) {
// Handle the case where IE, Opera, and Webkit return items
// by name instead of ID
if ( elem.id === m ) {
results.push( elem );
return results;
}
} else {
return results;
}
} else {
// Context is not a document
if ( context.ownerDocument && (elem = context.ownerDocument.getElementById( m )) &&
contains( context, elem ) && elem.id === m ) {
results.push( elem );
return results;
}
}
// Speed-up: Sizzle("TAG")
} else if ( match[2] ) {
push.apply( results, context.getElementsByTagName( selector ) );
return results;
// Speed-up: Sizzle(".CLASS")
} else if ( (m = match[3]) && support.getElementsByClassName && context.getElementsByClassName ) {
push.apply( results, context.getElementsByClassName( m ) );
return results;
}
}
// QSA path
if ( support.qsa && (!rbuggyQSA || !rbuggyQSA.test( selector )) ) {
nid = old = expando;
newContext = context;
newSelector = nodeType === 9 && selector;
// qSA works strangely on Element-rooted queries
// We can work around this by specifying an extra ID on the root
// and working up from there (Thanks to Andrew Dupont for the technique)
// IE 8 doesn't work on object elements
if ( nodeType === 1 && context.nodeName.toLowerCase() !== "object" ) {
groups = tokenize( selector );
if ( (old = context.getAttribute("id")) ) {
nid = old.replace( rescape, "\\$&" );
} else {
context.setAttribute( "id", nid );
}
nid = "[id='" + nid + "'] ";
i = groups.length;
while ( i-- ) {
groups[i] = nid + toSelector( groups[i] );
}
newContext = rsibling.test( selector ) && testContext( context.parentNode ) || context;
newSelector = groups.join(",");
}
if ( newSelector ) {
try {
push.apply( results,
newContext.querySelectorAll( newSelector )
);
return results;
} catch(qsaError) {
} finally {
if ( !old ) {
context.removeAttribute("id");
}
}
}
}
}
// All others
return select( selector.replace( rtrim, "$1" ), context, results, seed );
}
/**
* Create key-value caches of limited size
* @returns {Function(string, Object)} Returns the Object data after storing it on itself with
* property name the (space-suffixed) string and (if the cache is larger than Expr.cacheLength)
* deleting the oldest entry
*/
function createCache() {
var keys = [];
function cache( key, value ) {
// Use (key + " ") to avoid collision with native prototype properties (see Issue #157)
if ( keys.push( key + " " ) > Expr.cacheLength ) {
// Only keep the most recent entries
delete cache[ keys.shift() ];
}
return (cache[ key + " " ] = value);
}
return cache;
}
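// Illustrative behaviour (not part of the original source; `compiledMatcher` is a hypothetical value):
//   var cache = createCache();
//   cache( "div", compiledMatcher );   // stores the value under the key "div " and returns it
// Once more than Expr.cacheLength keys are stored, the oldest entry is deleted.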
/**
* Mark a function for special use by Sizzle
* @param {Function} fn The function to mark
*/
function markFunction( fn ) {
fn[ expando ] = true;
return fn;
}
/**
* Support testing using an element
* @param {Function} fn Passed the created div and expects a boolean result
*/
function assert( fn ) {
var div = document.createElement("div");
try {
return !!fn( div );
} catch (e) {
return false;
} finally {
// Remove from its parent by default
if ( div.parentNode ) {
div.parentNode.removeChild( div );
}
// release memory in IE
div = null;
}
}
/**
* Adds the same handler for all of the specified attrs
* @param {String} attrs Pipe-separated list of attributes
* @param {Function} handler The method that will be applied
*/
function addHandle( attrs, handler ) {
var arr = attrs.split("|"),
i = attrs.length;
while ( i-- ) {
Expr.attrHandle[ arr[i] ] = handler;
}
}
/**
* Checks document order of two siblings
* @param {Element} a
* @param {Element} b
* @returns {Number} Returns less than 0 if a precedes b, greater than 0 if a follows b
*/
function siblingCheck( a, b ) {
var cur = b && a,
diff = cur && a.nodeType === 1 && b.nodeType === 1 &&
( ~b.sourceIndex || MAX_NEGATIVE ) -
( ~a.sourceIndex || MAX_NEGATIVE );
// Use IE sourceIndex if available on both nodes
if ( diff ) {
return diff;
}
// Check if b follows a
if ( cur ) {
while ( (cur = cur.nextSibling) ) {
if ( cur === b ) {
return -1;
}
}
}
return a ? 1 : -1;
}
/**
* Returns a function to use in pseudos for input types
* @param {String} type
*/
function createInputPseudo( type ) {
return function( elem ) {
var name = elem.nodeName.toLowerCase();
return name === "input" && elem.type === type;
};
}
/**
* Returns a function to use in pseudos for buttons
* @param {String} type
*/
function createButtonPseudo( type ) {
return function( elem ) {
var name = elem.nodeName.toLowerCase();
return (name === "input" || name === "button") && elem.type === type;
};
}
/**
* Returns a function to use in pseudos for positionals
* @param {Function} fn
*/
function createPositionalPseudo( fn ) {
return markFunction(function( argument ) {
argument = +argument;
return markFunction(function( seed, matches ) {
var j,
matchIndexes = fn( [], seed.length, argument ),
i = matchIndexes.length;
// Match elements found at the specified indexes
while ( i-- ) {
if ( seed[ (j = matchIndexes[i]) ] ) {
seed[j] = !(matches[j] = seed[j]);
}
}
});
});
}
/**
* Checks a node for validity as a Sizzle context
* @param {Element|Object=} context
* @returns {Element|Object|Boolean} The input node if acceptable, otherwise a falsy value
*/
function testContext( context ) {
return context && typeof context.getElementsByTagName !== strundefined && context;
}
// Expose support vars for convenience
support = Sizzle.support = {};
/**
* Detects XML nodes
* @param {Element|Object} elem An element or a document
* @returns {Boolean} True iff elem is a non-HTML XML node
*/
isXML = Sizzle.isXML = function( elem ) {
// documentElement is verified for cases where it doesn't yet exist
// (such as loading iframes in IE - #4833)
var documentElement = elem && (elem.ownerDocument || elem).documentElement;
return documentElement ? documentElement.nodeName !== "HTML" : false;
};
/**
* Sets document-related variables once based on the current document
* @param {Element|Object} [doc] An element or document object to use to set the document
* @returns {Object} Returns the current document
*/
setDocument = Sizzle.setDocument = function( node ) {
var hasCompare,
doc = node ? node.ownerDocument || node : preferredDoc,
parent = doc.defaultView;
// If no document and documentElement is available, return
if ( doc === document || doc.nodeType !== 9 || !doc.documentElement ) {
return document;
}
// Set our document
document = doc;
docElem = doc.documentElement;
// Support tests
documentIsHTML = !isXML( doc );
// Support: IE>8
// If iframe document is assigned to "document" variable and if iframe has been reloaded,
// IE will throw "permission denied" error when accessing "document" variable, see jQuery #13936
// IE6-8 do not support the defaultView property so parent will be undefined
if ( parent && parent !== parent.top ) {
// IE11 does not have attachEvent, so all must suffer
if ( parent.addEventListener ) {
parent.addEventListener( "unload", function() {
setDocument();
}, false );
} else if ( parent.attachEvent ) {
parent.attachEvent( "onunload", function() {
setDocument();
});
}
}
/* Attributes
---------------------------------------------------------------------- */
// Support: IE<8
// Verify that getAttribute really returns attributes and not properties (excepting IE8 booleans)
support.attributes = assert(function( div ) {
div.className = "i";
return !div.getAttribute("className");
});
/* getElement(s)By*
---------------------------------------------------------------------- */
// Check if getElementsByTagName("*") returns only elements
support.getElementsByTagName = assert(function( div ) {
div.appendChild( doc.createComment("") );
return !div.getElementsByTagName("*").length;
});
// Check if getElementsByClassName can be trusted
support.getElementsByClassName = rnative.test( doc.getElementsByClassName ) && assert(function( div ) {
div.innerHTML = "<div class='a'></div><div class='a i'></div>";
// Support: Safari<4
// Catch class over-caching
div.firstChild.className = "i";
// Support: Opera<10
// Catch gEBCN failure to find non-leading classes
return div.getElementsByClassName("i").length === 2;
});
// Support: IE<10
// Check if getElementById returns elements by name
	// The broken getElementById methods don't pick up programmatically-set names,
// so use a roundabout getElementsByName test
support.getById = assert(function( div ) {
docElem.appendChild( div ).id = expando;
return !doc.getElementsByName || !doc.getElementsByName( expando ).length;
});
// ID find and filter
if ( support.getById ) {
Expr.find["ID"] = function( id, context ) {
if ( typeof context.getElementById !== strundefined && documentIsHTML ) {
var m = context.getElementById( id );
// Check parentNode to catch when Blackberry 4.6 returns
// nodes that are no longer in the document #6963
return m && m.parentNode ? [m] : [];
}
};
Expr.filter["ID"] = function( id ) {
var attrId = id.replace( runescape, funescape );
return function( elem ) {
return elem.getAttribute("id") === attrId;
};
};
} else {
// Support: IE6/7
// getElementById is not reliable as a find shortcut
delete Expr.find["ID"];
Expr.filter["ID"] = function( id ) {
var attrId = id.replace( runescape, funescape );
return function( elem ) {
var node = typeof elem.getAttributeNode !== strundefined && elem.getAttributeNode("id");
return node && node.value === attrId;
};
};
}
// Tag
Expr.find["TAG"] = support.getElementsByTagName ?
function( tag, context ) {
if ( typeof context.getElementsByTagName !== strundefined ) {
return context.getElementsByTagName( tag );
}
} :
function( tag, context ) {
var elem,
tmp = [],
i = 0,
results = context.getElementsByTagName( tag );
// Filter out possible comments
if ( tag === "*" ) {
while ( (elem = results[i++]) ) {
if ( elem.nodeType === 1 ) {
tmp.push( elem );
}
}
return tmp;
}
return results;
};
// Class
Expr.find["CLASS"] = support.getElementsByClassName && function( className, context ) {
if ( typeof context.getElementsByClassName !== strundefined && documentIsHTML ) {
return context.getElementsByClassName( className );
}
};
/* QSA/matchesSelector
---------------------------------------------------------------------- */
// QSA and matchesSelector support
// matchesSelector(:active) reports false when true (IE9/Opera 11.5)
rbuggyMatches = [];
// qSa(:focus) reports false when true (Chrome 21)
// We allow this because of a bug in IE8/9 that throws an error
// whenever `document.activeElement` is accessed on an iframe
// So, we allow :focus to pass through QSA all the time to avoid the IE error
// See http://bugs.jquery.com/ticket/13378
rbuggyQSA = [];
if ( (support.qsa = rnative.test( doc.querySelectorAll )) ) {
// Build QSA regex
// Regex strategy adopted from Diego Perini
assert(function( div ) {
// Select is set to empty string on purpose
// This is to test IE's treatment of not explicitly
// setting a boolean content attribute,
// since its presence should be enough
// http://bugs.jquery.com/ticket/12359
div.innerHTML = "<select t=''><option selected=''></option></select>";
// Support: IE8, Opera 10-12
// Nothing should be selected when empty strings follow ^= or $= or *=
if ( div.querySelectorAll("[t^='']").length ) {
rbuggyQSA.push( "[*^$]=" + whitespace + "*(?:''|\"\")" );
}
// Support: IE8
// Boolean attributes and "value" are not treated correctly
if ( !div.querySelectorAll("[selected]").length ) {
rbuggyQSA.push( "\\[" + whitespace + "*(?:value|" + booleans + ")" );
}
// Webkit/Opera - :checked should return selected option elements
// http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked
// IE8 throws error here and will not see later tests
if ( !div.querySelectorAll(":checked").length ) {
rbuggyQSA.push(":checked");
}
});
assert(function( div ) {
// Support: Windows 8 Native Apps
// The type and name attributes are restricted during .innerHTML assignment
var input = doc.createElement("input");
input.setAttribute( "type", "hidden" );
div.appendChild( input ).setAttribute( "name", "D" );
// Support: IE8
// Enforce case-sensitivity of name attribute
if ( div.querySelectorAll("[name=d]").length ) {
rbuggyQSA.push( "name" + whitespace + "*[*^$|!~]?=" );
}
// FF 3.5 - :enabled/:disabled and hidden elements (hidden elements are still enabled)
// IE8 throws error here and will not see later tests
if ( !div.querySelectorAll(":enabled").length ) {
rbuggyQSA.push( ":enabled", ":disabled" );
}
// Opera 10-11 does not throw on post-comma invalid pseudos
div.querySelectorAll("*,:x");
rbuggyQSA.push(",.*:");
});
}
if ( (support.matchesSelector = rnative.test( (matches = docElem.webkitMatchesSelector ||
docElem.mozMatchesSelector ||
docElem.oMatchesSelector ||
docElem.msMatchesSelector) )) ) {
assert(function( div ) {
// Check to see if it's possible to do matchesSelector
// on a disconnected node (IE 9)
support.disconnectedMatch = matches.call( div, "div" );
// This should fail with an exception
// Gecko does not error, returns false instead
matches.call( div, "[s!='']:x" );
rbuggyMatches.push( "!=", pseudos );
});
}
rbuggyQSA = rbuggyQSA.length && new RegExp( rbuggyQSA.join("|") );
rbuggyMatches = rbuggyMatches.length && new RegExp( rbuggyMatches.join("|") );
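// Explanatory note (not part of the original logic): each assert() above pushes a
// regex fragment describing a selector pattern the native engine mishandles; joining
// them with "|" yields a single "buggy pattern" regex. If no bugs were detected, the
// array is empty and the value stays falsy, so the QSA/matchesSelector fast paths
// later on are never filtered out.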
/* Contains
---------------------------------------------------------------------- */
hasCompare = rnative.test( docElem.compareDocumentPosition );
// Element contains another
// Purposefully does not implement inclusive descendent
// As in, an element does not contain itself
contains = hasCompare || rnative.test( docElem.contains ) ?
function( a, b ) {
var adown = a.nodeType === 9 ? a.documentElement : a,
bup = b && b.parentNode;
return a === bup || !!( bup && bup.nodeType === 1 && (
adown.contains ?
adown.contains( bup ) :
a.compareDocumentPosition && a.compareDocumentPosition( bup ) & 16
));
} :
function( a, b ) {
if ( b ) {
while ( (b = b.parentNode) ) {
if ( b === a ) {
return true;
}
}
}
return false;
};
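// Note: in the compareDocumentPosition branch above, bit 16
// (DOCUMENT_POSITION_CONTAINED_BY) on a.compareDocumentPosition( bup ) means the
// other node lives inside `a`. Illustrative checks, assuming a normal HTML document:
//   contains( document.documentElement, document.body ); // true
//   contains( document.body, document.body );            // false (not inclusive)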
/* Sorting
---------------------------------------------------------------------- */
// Document order sorting
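// Note on the bit tests below: compareDocumentPosition bit 1 (DISCONNECTED) marks
// nodes that share no tree, and bit 4 (FOLLOWING) means `b` follows `a`, so `a`
// sorts first (-1).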
sortOrder = hasCompare ?
function( a, b ) {
// Flag for duplicate removal
if ( a === b ) {
hasDuplicate = true;
return 0;
}
// Sort on method existence if only one input has compareDocumentPosition
var compare = !a.compareDocumentPosition - !b.compareDocumentPosition;
if ( compare ) {
return compare;
}
// Calculate position if both inputs belong to the same document
compare = ( a.ownerDocument || a ) === ( b.ownerDocument || b ) ?
a.compareDocumentPosition( b ) :
// Otherwise we know they are disconnected
1;
// Disconnected nodes
if ( compare & 1 ||
(!support.sortDetached && b.compareDocumentPosition( a ) === compare) ) {
// Choose the first element that is related to our preferred document
if ( a === doc || a.ownerDocument === preferredDoc && contains(preferredDoc, a) ) {
return -1;
}
if ( b === doc || b.ownerDocument === preferredDoc && contains(preferredDoc, b) ) {
return 1;
}
// Maintain original order
return sortInput ?
( indexOf.call( sortInput, a ) - indexOf.call( sortInput, b ) ) :
0;
}
return compare & 4 ? -1 : 1;
} :
function( a, b ) {
// Exit early if the nodes are identical
if ( a === b ) {
hasDuplicate = true;
return 0;
}
var cur,
i = 0,
aup = a.parentNode,
bup = b.parentNode,
ap = [ a ],
bp = [ b ];
// Parentless nodes are either documents or disconnected
if ( !aup || !bup ) {
return a === doc ? -1 :
b === doc ? 1 :
aup ? -1 :
bup ? 1 :
sortInput ?
( indexOf.call( sortInput, a ) - indexOf.call( sortInput, b ) ) :
0;
// If the nodes are siblings, we can do a quick check
} else if ( aup === bup ) {
return siblingCheck( a, b );
}
// Otherwise we need full lists of their ancestors for comparison
cur = a;
while ( (cur = cur.parentNode) ) {
ap.unshift( cur );
}
cur = b;
while ( (cur = cur.parentNode) ) {
bp.unshift( cur );
}
// Walk down the tree looking for a discrepancy
while ( ap[i] === bp[i] ) {
i++;
}
return i ?
// Do a sibling check if the nodes have a common ancestor
siblingCheck( ap[i], bp[i] ) :
// Otherwise nodes in our document sort first
ap[i] === preferredDoc ? -1 :
bp[i] === preferredDoc ? 1 :
0;
};
return doc;
};
Sizzle.matches = function( expr, elements ) {
return Sizzle( expr, null, null, elements );
};
Sizzle.matchesSelector = function( elem, expr ) {
// Set document vars if needed
if ( ( elem.ownerDocument || elem ) !== document ) {
setDocument( elem );
}
// Make sure that attribute selectors are quoted
expr = expr.replace( rattributeQuotes, "='$1']" );
if ( support.matchesSelector && documentIsHTML &&
( !rbuggyMatches || !rbuggyMatches.test( expr ) ) &&
( !rbuggyQSA || !rbuggyQSA.test( expr ) ) ) {
try {
var ret = matches.call( elem, expr );
// IE 9's matchesSelector returns false on disconnected nodes
if ( ret || support.disconnectedMatch ||
// As well, disconnected nodes are said to be in a document
// fragment in IE 9
elem.document && elem.document.nodeType !== 11 ) {
return ret;
}
} catch(e) {}
}
return Sizzle( expr, document, null, [elem] ).length > 0;
};
Sizzle.contains = function( context, elem ) {
// Set document vars if needed
if ( ( context.ownerDocument || context ) !== document ) {
setDocument( context );
}
return contains( context, elem );
};
Sizzle.attr = function( elem, name ) {
// Set document vars if needed
if ( ( elem.ownerDocument || elem ) !== document ) {
setDocument( elem );
}
var fn = Expr.attrHandle[ name.toLowerCase() ],
// Don't get fooled by Object.prototype properties (jQuery #13807)
val = fn && hasOwn.call( Expr.attrHandle, name.toLowerCase() ) ?
fn( elem, name, !documentIsHTML ) :
undefined;
return val !== undefined ?
val :
support.attributes || !documentIsHTML ?
elem.getAttribute( name ) :
(val = elem.getAttributeNode(name)) && val.specified ?
val.value :
null;
};
Sizzle.error = function( msg ) {
throw new Error( "Syntax error, unrecognized expression: " + msg );
};
/**
* Document sorting and removing duplicates
* @param {ArrayLike} results
*/
Sizzle.uniqueSort = function( results ) {
var elem,
duplicates = [],
j = 0,
i = 0;
// Unless we *know* we can detect duplicates, assume their presence
hasDuplicate = !support.detectDuplicates;
sortInput = !support.sortStable && results.slice( 0 );
results.sort( sortOrder );
if ( hasDuplicate ) {
while ( (elem = results[i++]) ) {
if ( elem === results[ i ] ) {
j = duplicates.push( i );
}
}
while ( j-- ) {
results.splice( duplicates[ j ], 1 );
}
}
// Clear input after sorting to release objects
// See https://github.com/jquery/sizzle/pull/225
sortInput = null;
return results;
};
/**
* Utility function for retrieving the text value of an array of DOM nodes
* @param {Array|Element} elem
*/
getText = Sizzle.getText = function( elem ) {
var node,
ret = "",
i = 0,
nodeType = elem.nodeType;
if ( !nodeType ) {
// If no nodeType, this is expected to be an array
while ( (node = elem[i++]) ) {
// Do not traverse comment nodes
ret += getText( node );
}
} else if ( nodeType === 1 || nodeType === 9 || nodeType === 11 ) {
// Use textContent for elements
// innerText usage removed for consistency of new lines (jQuery #11153)
if ( typeof elem.textContent === "string" ) {
return elem.textContent;
} else {
// Traverse its children
for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) {
ret += getText( elem );
}
}
} else if ( nodeType === 3 || nodeType === 4 ) {
return elem.nodeValue;
}
// Do not include comment or processing instruction nodes
return ret;
};
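// Illustrative example (comment only): for an element whose markup is
// "<p>Hello <b>world</b><!-- note --></p>", getText returns "Hello world";
// textContent and the recursive fallback include descendant text but skip comments.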
Expr = Sizzle.selectors = {
// Can be adjusted by the user
cacheLength: 50,
createPseudo: markFunction,
match: matchExpr,
attrHandle: {},
find: {},
relative: {
">": { dir: "parentNode", first: true },
" ": { dir: "parentNode" },
"+": { dir: "previousSibling", first: true },
"~": { dir: "previousSibling" }
},
preFilter: {
"ATTR": function( match ) {
match[1] = match[1].replace( runescape, funescape );
// Move the given value to match[3] whether quoted or unquoted
match[3] = ( match[4] || match[5] || "" ).replace( runescape, funescape );
if ( match[2] === "~=" ) {
match[3] = " " + match[3] + " ";
}
return match.slice( 0, 4 );
},
"CHILD": function( match ) {
/* matches from matchExpr["CHILD"]
1 type (only|nth|...)
2 what (child|of-type)
3 argument (even|odd|\d*|\d*n([+-]\d+)?|...)
4 xn-component of xn+y argument ([+-]?\d*n|)
5 sign of xn-component
6 x of xn-component
7 sign of y-component
8 y of y-component
*/
match[1] = match[1].toLowerCase();
if ( match[1].slice( 0, 3 ) === "nth" ) {
// nth-* requires argument
if ( !match[3] ) {
Sizzle.error( match[0] );
}
// numeric x and y parameters for Expr.filter.CHILD
// remember that false/true cast respectively to 0/1
match[4] = +( match[4] ? match[5] + (match[6] || 1) : 2 * ( match[3] === "even" || match[3] === "odd" ) );
match[5] = +( ( match[7] + match[8] ) || match[3] === "odd" );
// other types prohibit arguments
} else if ( match[3] ) {
Sizzle.error( match[0] );
}
return match;
},
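// Illustrative example (comment only): for ":nth-child(2n+1)" the CHILD prefilter
// above normalizes the argument so that match[4] (the "a" coefficient) becomes 2 and
// match[5] (the "b" offset) becomes 1; ":nth-child(odd)" yields the same pair.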
"PSEUDO": function( match ) {
var excess,
unquoted = !match[5] && match[2];
if ( matchExpr["CHILD"].test( match[0] ) ) {
return null;
}
// Accept quoted arguments as-is
if ( match[3] && match[4] !== undefined ) {
match[2] = match[4];
// Strip excess characters from unquoted arguments
} else if ( unquoted && rpseudo.test( unquoted ) &&
// Get excess from tokenize (recursively)
(excess = tokenize( unquoted, true )) &&
// advance to the next closing parenthesis
(excess = unquoted.indexOf( ")", unquoted.length - excess ) - unquoted.length) ) {
// excess is a negative index
match[0] = match[0].slice( 0, excess );
match[2] = unquoted.slice( 0, excess );
}
// Return only captures needed by the pseudo filter method (type and argument)
return match.slice( 0, 3 );
}
},
filter: {
"TAG": function( nodeNameSelector ) {
var nodeName = nodeNameSelector.replace( runescape, funescape ).toLowerCase();
return nodeNameSelector === "*" ?
function() { return true; } :
function( elem ) {
return elem.nodeName && elem.nodeName.toLowerCase() === nodeName;
};
},
"CLASS": function( className ) {
var pattern = classCache[ className + " " ];
return pattern ||
(pattern = new RegExp( "(^|" + whitespace + ")" + className + "(" + whitespace + "|$)" )) &&
classCache( className, function( elem ) {
return pattern.test( typeof elem.className === "string" && elem.className || typeof elem.getAttribute !== strundefined && elem.getAttribute("class") || "" );
});
},
"ATTR": function( name, operator, check ) {
return function( elem ) {
var result = Sizzle.attr( elem, name );
if ( result == null ) {
return operator === "!=";
}
if ( !operator ) {
return true;
}
result += "";
return operator === "=" ? result === check :
operator === "!=" ? result !== check :
operator === "^=" ? check && result.indexOf( check ) === 0 :
operator === "*=" ? check && result.indexOf( check ) > -1 :
operator === "$=" ? check && result.slice( -check.length ) === check :
operator === "~=" ? ( " " + result + " " ).indexOf( check ) > -1 :
operator === "|=" ? result === check || result.slice( 0, check.length + 1 ) === check + "-" :
false;
};
},
"CHILD": function( type, what, argument, first, last ) {
var simple = type.slice( 0, 3 ) !== "nth",
forward = type.slice( -4 ) !== "last",
ofType = what === "of-type";
return first === 1 && last === 0 ?
// Shortcut for :nth-*(n)
function( elem ) {
return !!elem.parentNode;
} :
function( elem, context, xml ) {
var cache, outerCache, node, diff, nodeIndex, start,
dir = simple !== forward ? "nextSibling" : "previousSibling",
parent = elem.parentNode,
name = ofType && elem.nodeName.toLowerCase(),
useCache = !xml && !ofType;
if ( parent ) {
// :(first|last|only)-(child|of-type)
if ( simple ) {
while ( dir ) {
node = elem;
while ( (node = node[ dir ]) ) {
if ( ofType ? node.nodeName.toLowerCase() === name : node.nodeType === 1 ) {
return false;
}
}
// Reverse direction for :only-* (if we haven't yet done so)
start = dir = type === "only" && !start && "nextSibling";
}
return true;
}
start = [ forward ? parent.firstChild : parent.lastChild ];
// non-xml :nth-child(...) stores cache data on `parent`
if ( forward && useCache ) {
// Seek `elem` from a previously-cached index
outerCache = parent[ expando ] || (parent[ expando ] = {});
cache = outerCache[ type ] || [];
nodeIndex = cache[0] === dirruns && cache[1];
diff = cache[0] === dirruns && cache[2];
node = nodeIndex && parent.childNodes[ nodeIndex ];
while ( (node = ++nodeIndex && node && node[ dir ] ||
// Fallback to seeking `elem` from the start
(diff = nodeIndex = 0) || start.pop()) ) {
// When found, cache indexes on `parent` and break
if ( node.nodeType === 1 && ++diff && node === elem ) {
outerCache[ type ] = [ dirruns, nodeIndex, diff ];
break;
}
}
// Use previously-cached element index if available
} else if ( useCache && (cache = (elem[ expando ] || (elem[ expando ] = {}))[ type ]) && cache[0] === dirruns ) {
diff = cache[1];
// xml :nth-child(...) or :nth-last-child(...) or :nth(-last)?-of-type(...)
} else {
// Use the same loop as above to seek `elem` from the start
while ( (node = ++nodeIndex && node && node[ dir ] ||
(diff = nodeIndex = 0) || start.pop()) ) {
if ( ( ofType ? node.nodeName.toLowerCase() === name : node.nodeType === 1 ) && ++diff ) {
// Cache the index of each encountered element
if ( useCache ) {
(node[ expando ] || (node[ expando ] = {}))[ type ] = [ dirruns, diff ];
}
if ( node === elem ) {
break;
}
}
}
}
// Incorporate the offset, then check against cycle size
diff -= last;
return diff === first || ( diff % first === 0 && diff / first >= 0 );
}
};
},
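// Note on the math above: `diff` ends up as the element's 1-based position among the
// qualifying siblings (all elements for -child, same-named elements for -of-type),
// counted from the start or end depending on the selector. After subtracting the
// offset `last` (the "b" in an+b), the element matches when the result equals `first`
// or is a non-negative multiple of it; for ":nth-child(3)" (first === 0, last === 3)
// only an exact index of 3 matches.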
"PSEUDO": function( pseudo, argument ) {
// pseudo-class names are case-insensitive
// http://www.w3.org/TR/selectors/#pseudo-classes
// Prioritize by case sensitivity in case custom pseudos are added with uppercase letters
// Remember that setFilters inherits from pseudos
var args,
fn = Expr.pseudos[ pseudo ] || Expr.setFilters[ pseudo.toLowerCase() ] ||
Sizzle.error( "unsupported pseudo: " + pseudo );
// The user may use createPseudo to indicate that
// arguments are needed to create the filter function
// just as Sizzle does
if ( fn[ expando ] ) {
return fn( argument );
}
// But maintain support for old signatures
if ( fn.length > 1 ) {
args = [ pseudo, pseudo, "", argument ];
return Expr.setFilters.hasOwnProperty( pseudo.toLowerCase() ) ?
markFunction(function( seed, matches ) {
var idx,
matched = fn( seed, argument ),
i = matched.length;
while ( i-- ) {
idx = indexOf.call( seed, matched[i] );
seed[ idx ] = !( matches[ idx ] = matched[i] );
}
}) :
function( elem ) {
return fn( elem, 0, args );
};
}
return fn;
}
},
pseudos: {
// Potentially complex pseudos
"not": markFunction(function( selector ) {
// Trim the selector passed to compile
// to avoid treating leading and trailing
// spaces as combinators
var input = [],
results = [],
matcher = compile( selector.replace( rtrim, "$1" ) );
return matcher[ expando ] ?
markFunction(function( seed, matches, context, xml ) {
var elem,
unmatched = matcher( seed, null, xml, [] ),
i = seed.length;
// Match elements unmatched by `matcher`
while ( i-- ) {
if ( (elem = unmatched[i]) ) {
seed[i] = !(matches[i] = elem);
}
}
}) :
function( elem, context, xml ) {
input[0] = elem;
matcher( input, null, xml, results );
return !results.pop();
};
}),
"has": markFunction(function( selector ) {
return function( elem ) {
return Sizzle( selector, elem ).length > 0;
};
}),
"contains": markFunction(function( text ) {
return function( elem ) {
return ( elem.textContent || elem.innerText || getText( elem ) ).indexOf( text ) > -1;
};
}),
// "Whether an element is represented by a :lang() selector
// is based solely on the element's language value
// being equal to the identifier C,
// or beginning with the identifier C immediately followed by "-".
// The matching of C against the element's language value is performed case-insensitively.
// The identifier C does not have to be a valid language name."
// http://www.w3.org/TR/selectors/#lang-pseudo
"lang": markFunction( function( lang ) {
// lang value must be a valid identifier
if ( !ridentifier.test(lang || "") ) {
Sizzle.error( "unsupported lang: " + lang );
}
lang = lang.replace( runescape, funescape ).toLowerCase();
return function( elem ) {
var elemLang;
do {
if ( (elemLang = documentIsHTML ?
elem.lang :
elem.getAttribute("xml:lang") || elem.getAttribute("lang")) ) {
elemLang = elemLang.toLowerCase();
return elemLang === lang || elemLang.indexOf( lang + "-" ) === 0;
}
} while ( (elem = elem.parentNode) && elem.nodeType === 1 );
return false;
};
}),
// Miscellaneous
"target": function( elem ) {
var hash = window.location && window.location.hash;
return hash && hash.slice( 1 ) === elem.id;
},
"root": function( elem ) {
return elem === docElem;
},
"focus": function( elem ) {
return elem === document.activeElement && (!document.hasFocus || document.hasFocus()) && !!(elem.type || elem.href || ~elem.tabIndex);
},
// Boolean properties
"enabled": function( elem ) {
return elem.disabled === false;
},
"disabled": function( elem ) {
return elem.disabled === true;
},
"checked": function( elem ) {
// In CSS3, :checked should return both checked and selected elements
// http://www.w3.org/TR/2011/REC-css3-selectors-20110929/#checked
var nodeName = elem.nodeName.toLowerCase();
return (nodeName === "input" && !!elem.checked) || (nodeName === "option" && !!elem.selected);
},
"selected": function( elem ) {
// Accessing this property makes selected-by-default
// options in Safari work properly
if ( elem.parentNode ) {
elem.parentNode.selectedIndex;
}
return elem.selected === true;
},
// Contents
"empty": function( elem ) {
// http://www.w3.org/TR/selectors/#empty-pseudo
// :empty is negated by element (1) or content nodes (text: 3; cdata: 4; entity ref: 5),
// but not by others (comment: 8; processing instruction: 7; etc.)
// nodeType < 6 works because attributes (2) do not appear as children
for ( elem = elem.firstChild; elem; elem = elem.nextSibling ) {
if ( elem.nodeType < 6 ) {
return false;
}
}
return true;
},
"parent": function( elem ) {
return !Expr.pseudos["empty"]( elem );
},
// Element/input types
"header": function( elem ) {
return rheader.test( elem.nodeName );
},
"input": function( elem ) {
return rinputs.test( elem.nodeName );
},
"button": function( elem ) {
var name = elem.nodeName.toLowerCase();
return name === "input" && elem.type === "button" || name === "button";
},
"text": function( elem ) {
var attr;
return elem.nodeName.toLowerCase() === "input" &&
elem.type === "text" &&
// Support: IE<8
// New HTML5 attribute values (e.g., "search") appear with elem.type === "text"
( (attr = elem.getAttribute("type")) == null || attr.toLowerCase() === "text" );
},
// Position-in-collection
"first": createPositionalPseudo(function() {
return [ 0 ];
}),
"last": createPositionalPseudo(function( matchIndexes, length ) {
return [ length - 1 ];
}),
"eq": createPositionalPseudo(function( matchIndexes, length, argument ) {
return [ argument < 0 ? argument + length : argument ];
}),
"even": createPositionalPseudo(function( matchIndexes, length ) {
var i = 0;
for ( ; i < length; i += 2 ) {
matchIndexes.push( i );
}
return matchIndexes;
}),
"odd": createPositionalPseudo(function( matchIndexes, length ) {
var i = 1;
for ( ; i < length; i += 2 ) {
matchIndexes.push( i );
}
return matchIndexes;
}),
"lt": createPositionalPseudo(function( matchIndexes, length, argument ) {
var i = argument < 0 ? argument + length : argument;
for ( ; --i >= 0; ) {
matchIndexes.push( i );
}
return matchIndexes;
}),
"gt": createPositionalPseudo(function( matchIndexes, length, argument ) {
var i = argument < 0 ? argument + length : argument;
for ( ; ++i < length; ) {
matchIndexes.push( i );
}
return matchIndexes;
})
}
};
Expr.pseudos["nth"] = Expr.pseudos["eq"];
// Add button/input type pseudos
for ( i in { radio: true, checkbox: true, file: true, password: true, image: true } ) {
Expr.pseudos[ i ] = createInputPseudo( i );
}
for ( i in { submit: true, reset: true } ) {
Expr.pseudos[ i ] = createButtonPseudo( i );
}
// Easy API for creating new setFilters
function setFilters() {}
setFilters.prototype = Expr.filters = Expr.pseudos;
Expr.setFilters = new setFilters();
function tokenize( selector, parseOnly ) {
var matched, match, tokens, type,
soFar, groups, preFilters,
cached = tokenCache[ selector + " " ];
if ( cached ) {
return parseOnly ? 0 : cached.slice( 0 );
}
soFar = selector;
groups = [];
preFilters = Expr.preFilter;
while ( soFar ) {
// Comma and first run
if ( !matched || (match = rcomma.exec( soFar )) ) {
if ( match ) {
// Don't consume trailing commas as valid
soFar = soFar.slice( match[0].length ) || soFar;
}
groups.push( (tokens = []) );
}
matched = false;
// Combinators
if ( (match = rcombinators.exec( soFar )) ) {
matched = match.shift();
tokens.push({
value: matched,
// Cast descendant combinators to space
type: match[0].replace( rtrim, " " )
});
soFar = soFar.slice( matched.length );
}
// Filters
for ( type in Expr.filter ) {
if ( (match = matchExpr[ type ].exec( soFar )) && (!preFilters[ type ] ||
(match = preFilters[ type ]( match ))) ) {
matched = match.shift();
tokens.push({
value: matched,
type: type,
matches: match
});
soFar = soFar.slice( matched.length );
}
}
if ( !matched ) {
break;
}
}
// Return the length of the invalid excess
// if we're just parsing
// Otherwise, throw an error or return tokens
return parseOnly ?
soFar.length :
soFar ?
Sizzle.error( selector ) :
// Cache the tokens
tokenCache( selector, groups ).slice( 0 );
}
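// Illustrative example (comment only): tokenize( "div > p.intro, span" ) produces two
// groups, one per comma-separated selector. The first group is roughly
//   [ { type: "TAG", value: "div" }, { type: ">", value: " > " },
//     { type: "TAG", value: "p" }, { type: "CLASS", value: ".intro" } ]
// and filter tokens additionally carry a `matches` array produced by their preFilter.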
function toSelector( tokens ) {
var i = 0,
len = tokens.length,
selector = "";
for ( ; i < len; i++ ) {
selector += tokens[i].value;
}
return selector;
}
function addCombinator( matcher, combinator, base ) {
var dir = combinator.dir,
checkNonElements = base && dir === "parentNode",
doneName = done++;
return combinator.first ?
// Check against closest ancestor/preceding element
function( elem, context, xml ) {
while ( (elem = elem[ dir ]) ) {
if ( elem.nodeType === 1 || checkNonElements ) {
return matcher( elem, context, xml );
}
}
} :
// Check against all ancestor/preceding elements
function( elem, context, xml ) {
var oldCache, outerCache,
newCache = [ dirruns, doneName ];
// We can't set arbitrary data on XML nodes, so they don't benefit from dir caching
if ( xml ) {
while ( (elem = elem[ dir ]) ) {
if ( elem.nodeType === 1 || checkNonElements ) {
if ( matcher( elem, context, xml ) ) {
return true;
}
}
}
} else {
while ( (elem = elem[ dir ]) ) {
if ( elem.nodeType === 1 || checkNonElements ) {
outerCache = elem[ expando ] || (elem[ expando ] = {});
if ( (oldCache = outerCache[ dir ]) &&
oldCache[ 0 ] === dirruns && oldCache[ 1 ] === doneName ) {
// Assign to newCache so results back-propagate to previous elements
return (newCache[ 2 ] = oldCache[ 2 ]);
} else {
// Reuse newCache so results back-propagate to previous elements
outerCache[ dir ] = newCache;
// A match means we're done; a fail means we have to keep checking
if ( (newCache[ 2 ] = matcher( elem, context, xml )) ) {
return true;
}
}
}
}
}
};
}
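// Note: the non-"first" combinator above memoizes its answer per element and per
// direction under the expando key as [ dirruns, doneName, result ]; `dirruns` changes
// for every outermost run, so stale entries from a previous query are simply ignored
// rather than cleared.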
function elementMatcher( matchers ) {
return matchers.length > 1 ?
function( elem, context, xml ) {
var i = matchers.length;
while ( i-- ) {
if ( !matchers[i]( elem, context, xml ) ) {
return false;
}
}
return true;
} :
matchers[0];
}
function condense( unmatched, map, filter, context, xml ) {
var elem,
newUnmatched = [],
i = 0,
len = unmatched.length,
mapped = map != null;
for ( ; i < len; i++ ) {
if ( (elem = unmatched[i]) ) {
if ( !filter || filter( elem, context, xml ) ) {
newUnmatched.push( elem );
if ( mapped ) {
map.push( i );
}
}
}
}
return newUnmatched;
}
function setMatcher( preFilter, selector, matcher, postFilter, postFinder, postSelector ) {
if ( postFilter && !postFilter[ expando ] ) {
postFilter = setMatcher( postFilter );
}
if ( postFinder && !postFinder[ expando ] ) {
postFinder = setMatcher( postFinder, postSelector );
}
return markFunction(function( seed, results, context, xml ) {
var temp, i, elem,
preMap = [],
postMap = [],
preexisting = results.length,
// Get initial elements from seed or context
elems = seed || multipleContexts( selector || "*", context.nodeType ? [ context ] : context, [] ),
// Prefilter to get matcher input, preserving a map for seed-results synchronization
matcherIn = preFilter && ( seed || !selector ) ?
condense( elems, preMap, preFilter, context, xml ) :
elems,
matcherOut = matcher ?
// If we have a postFinder, or filtered seed, or non-seed postFilter or preexisting results,
postFinder || ( seed ? preFilter : preexisting || postFilter ) ?
// ...intermediate processing is necessary
[] :
// ...otherwise use results directly
results :
matcherIn;
// Find primary matches
if ( matcher ) {
matcher( matcherIn, matcherOut, context, xml );
}
// Apply postFilter
if ( postFilter ) {
temp = condense( matcherOut, postMap );
postFilter( temp, [], context, xml );
// Un-match failing elements by moving them back to matcherIn
i = temp.length;
while ( i-- ) {
if ( (elem = temp[i]) ) {
matcherOut[ postMap[i] ] = !(matcherIn[ postMap[i] ] = elem);
}
}
}
if ( seed ) {
if ( postFinder || preFilter ) {
if ( postFinder ) {
// Get the final matcherOut by condensing this intermediate into postFinder contexts
temp = [];
i = matcherOut.length;
while ( i-- ) {
if ( (elem = matcherOut[i]) ) {
// Restore matcherIn since elem is not yet a final match
temp.push( (matcherIn[i] = elem) );
}
}
postFinder( null, (matcherOut = []), temp, xml );
}
// Move matched elements from seed to results to keep them synchronized
i = matcherOut.length;
while ( i-- ) {
if ( (elem = matcherOut[i]) &&
(temp = postFinder ? indexOf.call( seed, elem ) : preMap[i]) > -1 ) {
seed[temp] = !(results[temp] = elem);
}
}
}
// Add elements to results, through postFinder if defined
} else {
matcherOut = condense(
matcherOut === results ?
matcherOut.splice( preexisting, matcherOut.length ) :
matcherOut
);
if ( postFinder ) {
postFinder( null, results, matcherOut, xml );
} else {
push.apply( results, matcherOut );
}
}
});
}
function matcherFromTokens( tokens ) {
var checkContext, matcher, j,
len = tokens.length,
leadingRelative = Expr.relative[ tokens[0].type ],
implicitRelative = leadingRelative || Expr.relative[" "],
i = leadingRelative ? 1 : 0,
// The foundational matcher ensures that elements are reachable from top-level context(s)
matchContext = addCombinator( function( elem ) {
return elem === checkContext;
}, implicitRelative, true ),
matchAnyContext = addCombinator( function( elem ) {
return indexOf.call( checkContext, elem ) > -1;
}, implicitRelative, true ),
matchers = [ function( elem, context, xml ) {
return ( !leadingRelative && ( xml || context !== outermostContext ) ) || (
(checkContext = context).nodeType ?
matchContext( elem, context, xml ) :
matchAnyContext( elem, context, xml ) );
} ];
for ( ; i < len; i++ ) {
if ( (matcher = Expr.relative[ tokens[i].type ]) ) {
matchers = [ addCombinator(elementMatcher( matchers ), matcher) ];
} else {
matcher = Expr.filter[ tokens[i].type ].apply( null, tokens[i].matches );
// Return special upon seeing a positional matcher
if ( matcher[ expando ] ) {
// Find the next relative operator (if any) for proper handling
j = ++i;
for ( ; j < len; j++ ) {
if ( Expr.relative[ tokens[j].type ] ) {
break;
}
}
return setMatcher(
i > 1 && elementMatcher( matchers ),
i > 1 && toSelector(
// If the preceding token was a descendant combinator, insert an implicit any-element `*`
tokens.slice( 0, i - 1 ).concat({ value: tokens[ i - 2 ].type === " " ? "*" : "" })
).replace( rtrim, "$1" ),
matcher,
i < j && matcherFromTokens( tokens.slice( i, j ) ),
j < len && matcherFromTokens( (tokens = tokens.slice( j )) ),
j < len && toSelector( tokens )
);
}
matchers.push( matcher );
}
}
return elementMatcher( matchers );
}
function matcherFromGroupMatchers( elementMatchers, setMatchers ) {
var bySet = setMatchers.length > 0,
byElement = elementMatchers.length > 0,
superMatcher = function( seed, context, xml, results, outermost ) {
var elem, j, matcher,
matchedCount = 0,
i = "0",
unmatched = seed && [],
setMatched = [],
contextBackup = outermostContext,
// We must always have either seed elements or outermost context
elems = seed || byElement && Expr.find["TAG"]( "*", outermost ),
// Use integer dirruns iff this is the outermost matcher
dirrunsUnique = (dirruns += contextBackup == null ? 1 : Math.random() || 0.1),
len = elems.length;
if ( outermost ) {
outermostContext = context !== document && context;
}
// Add elements passing elementMatchers directly to results
// Keep `i` a string if there are no elements so `matchedCount` will be "00" below
// Support: IE<9, Safari
// Tolerate NodeList properties (IE: "length"; Safari: <number>) matching elements by id
for ( ; i !== len && (elem = elems[i]) != null; i++ ) {
if ( byElement && elem ) {
j = 0;
while ( (matcher = elementMatchers[j++]) ) {
if ( matcher( elem, context, xml ) ) {
results.push( elem );
break;
}
}
if ( outermost ) {
dirruns = dirrunsUnique;
}
}
// Track unmatched elements for set filters
if ( bySet ) {
// They will have gone through all possible matchers
if ( (elem = !matcher && elem) ) {
matchedCount--;
}
// Lengthen the array for every element, matched or not
if ( seed ) {
unmatched.push( elem );
}
}
}
// Apply set filters to unmatched elements
matchedCount += i;
if ( bySet && i !== matchedCount ) {
j = 0;
while ( (matcher = setMatchers[j++]) ) {
matcher( unmatched, setMatched, context, xml );
}
if ( seed ) {
// Reintegrate element matches to eliminate the need for sorting
if ( matchedCount > 0 ) {
while ( i-- ) {
if ( !(unmatched[i] || setMatched[i]) ) {
setMatched[i] = pop.call( results );
}
}
}
// Discard index placeholder values to get only actual matches
setMatched = condense( setMatched );
}
// Add matches to results
push.apply( results, setMatched );
// Without a seed, set matches added after other successful matchers may be out of document order, so sort
if ( outermost && !seed && setMatched.length > 0 &&
( matchedCount + setMatchers.length ) > 1 ) {
Sizzle.uniqueSort( results );
}
}
// Override manipulation of globals by nested matchers
if ( outermost ) {
dirruns = dirrunsUnique;
outermostContext = contextBackup;
}
return unmatched;
};
return bySet ?
markFunction( superMatcher ) :
superMatcher;
}
compile = Sizzle.compile = function( selector, group /* Internal Use Only */ ) {
var i,
setMatchers = [],
elementMatchers = [],
cached = compilerCache[ selector + " " ];
if ( !cached ) {
// Generate a function of recursive functions that can be used to check each element
if ( !group ) {
group = tokenize( selector );
}
i = group.length;
while ( i-- ) {
cached = matcherFromTokens( group[i] );
if ( cached[ expando ] ) {
setMatchers.push( cached );
} else {
elementMatchers.push( cached );
}
}
// Cache the compiled function
cached = compilerCache( selector, matcherFromGroupMatchers( elementMatchers, setMatchers ) );
}
return cached;
};
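// Illustrative usage (comment only): compile( "div.red > p" ) returns a matcher
// function; compiled matchers are memoized in compilerCache keyed by the selector
// (plus a trailing space), so repeated selections with the same selector skip
// tokenizing and matcher construction.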
function multipleContexts( selector, contexts, results ) {
var i = 0,
len = contexts.length;
for ( ; i < len; i++ ) {
Sizzle( selector, contexts[i], results );
}
return results;
}
function select( selector, context, results, seed ) {
var i, tokens, token, type, find,
match = tokenize( selector );
if ( !seed ) {
// Try to minimize operations if there is only one group
if ( match.length === 1 ) {
// Take a shortcut and set the context if the root selector is an ID
tokens = match[0] = match[0].slice( 0 );
if ( tokens.length > 2 && (token = tokens[0]).type === "ID" &&
support.getById && context.nodeType === 9 && documentIsHTML &&
Expr.relative[ tokens[1].type ] ) {
context = ( Expr.find["ID"]( token.matches[0].replace(runescape, funescape), context ) || [] )[0];
if ( !context ) {
return results;
}
selector = selector.slice( tokens.shift().value.length );
}
// Fetch a seed set for right-to-left matching
i = matchExpr["needsContext"].test( selector ) ? 0 : tokens.length;
while ( i-- ) {
token = tokens[i];
// Abort if we hit a combinator
if ( Expr.relative[ (type = token.type) ] ) {
break;
}
if ( (find = Expr.find[ type ]) ) {
// Search, expanding context for leading sibling combinators
if ( (seed = find(
token.matches[0].replace( runescape, funescape ),
rsibling.test( tokens[0].type ) && testContext( context.parentNode ) || context
)) ) {
// If seed is empty or no tokens remain, we can return early
tokens.splice( i, 1 );
selector = seed.length && toSelector( tokens );
if ( !selector ) {
push.apply( results, seed );
return results;
}
break;
}
}
}
}
}
// Compile and execute a filtering function
// Provide `match` to avoid retokenization if we modified the selector above
compile( selector, match )(
seed,
context,
!documentIsHTML,
results,
rsibling.test( selector ) && testContext( context.parentNode ) || context
);
return results;
}
// One-time assignments
// Sort stability
support.sortStable = expando.split("").sort( sortOrder ).join("") === expando;
// Support: Chrome<14
// Always assume duplicates if they aren't passed to the comparison function
support.detectDuplicates = !!hasDuplicate;
// Initialize against the default document
setDocument();
// Support: Webkit<537.32 - Safari 6.0.3/Chrome 25 (fixed in Chrome 27)
// Detached nodes confoundingly follow *each other*
support.sortDetached = assert(function( div1 ) {
// Should return 1, but returns 4 (following)
return div1.compareDocumentPosition( document.createElement("div") ) & 1;
});
// Support: IE<8
// Prevent attribute/property "interpolation"
// http://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx
if ( !assert(function( div ) {
div.innerHTML = "<a href='#'></a>";
return div.firstChild.getAttribute("href") === "#" ;
}) ) {
addHandle( "type|href|height|width", function( elem, name, isXML ) {
if ( !isXML ) {
return elem.getAttribute( name, name.toLowerCase() === "type" ? 1 : 2 );
}
});
}
// Support: IE<9
// Use defaultValue in place of getAttribute("value")
if ( !support.attributes || !assert(function( div ) {
div.innerHTML = "<input/>";
div.firstChild.setAttribute( "value", "" );
return div.firstChild.getAttribute( "value" ) === "";
}) ) {
addHandle( "value", function( elem, name, isXML ) {
if ( !isXML && elem.nodeName.toLowerCase() === "input" ) {
return elem.defaultValue;
}
});
}
// Support: IE<9
// Use getAttributeNode to fetch booleans when getAttribute lies
if ( !assert(function( div ) {
return div.getAttribute("disabled") == null;
}) ) {
addHandle( booleans, function( elem, name, isXML ) {
var val;
if ( !isXML ) {
return elem[ name ] === true ? name.toLowerCase() :
(val = elem.getAttributeNode( name )) && val.specified ?
val.value :
null;
}
});
}
return Sizzle;
})( window );
jQuery.find = Sizzle;
jQuery.expr = Sizzle.selectors;
jQuery.expr[":"] = jQuery.expr.pseudos;
jQuery.unique = Sizzle.uniqueSort;
jQuery.text = Sizzle.getText;
jQuery.isXMLDoc = Sizzle.isXML;
jQuery.contains = Sizzle.contains;
var rneedsContext = jQuery.expr.match.needsContext;
var rsingleTag = (/^<(\w+)\s*\/?>(?:<\/\1>|)$/);
var risSimple = /^.[^:#\[\.,]*$/;
// Implement the identical functionality for filter and not
function winnow( elements, qualifier, not ) {
if ( jQuery.isFunction( qualifier ) ) {
return jQuery.grep( elements, function( elem, i ) {
/* jshint -W018 */
return !!qualifier.call( elem, i, elem ) !== not;
});
}
if ( qualifier.nodeType ) {
return jQuery.grep( elements, function( elem ) {
return ( elem === qualifier ) !== not;
});
}
if ( typeof qualifier === "string" ) {
if ( risSimple.test( qualifier ) ) {
return jQuery.filter( qualifier, elements, not );
}
qualifier = jQuery.filter( qualifier, elements );
}
return jQuery.grep( elements, function( elem ) {
return ( jQuery.inArray( elem, qualifier ) >= 0 ) !== not;
});
}
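// Illustrative usage (comment only): winnow( elems, "li.active", false ) keeps the
// elements matching the selector, while passing true as `not` keeps the ones that do
// not match; functions and DOM nodes are accepted as qualifiers as well.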
jQuery.filter = function( expr, elems, not ) {
var elem = elems[ 0 ];
if ( not ) {
expr = ":not(" + expr + ")";
}
return elems.length === 1 && elem.nodeType === 1 ?
jQuery.find.matchesSelector( elem, expr ) ? [ elem ] : [] :
jQuery.find.matches( expr, jQuery.grep( elems, function( elem ) {
return elem.nodeType === 1;
}));
};
jQuery.fn.extend({
find: function( selector ) {
var i,
ret = [],
self = this,
len = self.length;
if ( typeof selector !== "string" ) {
return this.pushStack( jQuery( selector ).filter(function() {
for ( i = 0; i < len; i++ ) {
if ( jQuery.contains( self[ i ], this ) ) {
return true;
}
}
}) );
}
for ( i = 0; i < len; i++ ) {
jQuery.find( selector, self[ i ], ret );
}
// Needed because $( selector, context ) becomes $( context ).find( selector )
ret = this.pushStack( len > 1 ? jQuery.unique( ret ) : ret );
ret.selector = this.selector ? this.selector + " " + selector : selector;
return ret;
},
filter: function( selector ) {
return this.pushStack( winnow(this, selector || [], false) );
},
not: function( selector ) {
return this.pushStack( winnow(this, selector || [], true) );
},
is: function( selector ) {
return !!winnow(
this,
// If this is a positional/relative selector, check membership in the returned set
// so $("p:first").is("p:last") won't return true for a doc with two "p".
typeof selector === "string" && rneedsContext.test( selector ) ?
jQuery( selector ) :
selector || [],
false
).length;
}
});
// Initialize a jQuery object
// A central reference to the root jQuery(document)
var rootjQuery,
// Use the correct document accordingly with window argument (sandbox)
document = window.document,
// A simple way to check for HTML strings
// Prioritize #id over <tag> to avoid XSS via location.hash (#9521)
// Strict HTML recognition (#11290: must start with <)
rquickExpr = /^(?:\s*(<[\w\W]+>)[^>]*|#([\w-]*))$/,
init = jQuery.fn.init = function( selector, context ) {
var match, elem;
// HANDLE: $(""), $(null), $(undefined), $(false)
if ( !selector ) {
return this;
}
// Handle HTML strings
if ( typeof selector === "string" ) {
if ( selector.charAt(0) === "<" && selector.charAt( selector.length - 1 ) === ">" && selector.length >= 3 ) {
// Assume that strings that start and end with <> are HTML and skip the regex check
match = [ null, selector, null ];
} else {
match = rquickExpr.exec( selector );
}
// Match html or make sure no context is specified for #id
if ( match && (match[1] || !context) ) {
// HANDLE: $(html) -> $(array)
if ( match[1] ) {
context = context instanceof jQuery ? context[0] : context;
// scripts is true for back-compat
// Intentionally let the error be thrown if parseHTML is not present
jQuery.merge( this, jQuery.parseHTML(
match[1],
context && context.nodeType ? context.ownerDocument || context : document,
true
) );
// HANDLE: $(html, props)
if ( rsingleTag.test( match[1] ) && jQuery.isPlainObject( context ) ) {
for ( match in context ) {
// Properties of context are called as methods if possible
if ( jQuery.isFunction( this[ match ] ) ) {
this[ match ]( context[ match ] );
// ...and otherwise set as attributes
} else {
this.attr( match, context[ match ] );
}
}
}
return this;
// HANDLE: $(#id)
} else {
elem = document.getElementById( match[2] );
// Check parentNode to catch when Blackberry 4.6 returns
// nodes that are no longer in the document #6963
if ( elem && elem.parentNode ) {
// Handle the case where IE and Opera return items
// by name instead of ID
if ( elem.id !== match[2] ) {
return rootjQuery.find( selector );
}
// Otherwise, we inject the element directly into the jQuery object
this.length = 1;
this[0] = elem;
}
this.context = document;
this.selector = selector;
return this;
}
// HANDLE: $(expr, $(...))
} else if ( !context || context.jquery ) {
return ( context || rootjQuery ).find( selector );
// HANDLE: $(expr, context)
// (which is just equivalent to: $(context).find(expr))
} else {
return this.constructor( context ).find( selector );
}
// HANDLE: $(DOMElement)
} else if ( selector.nodeType ) {
this.context = this[0] = selector;
this.length = 1;
return this;
// HANDLE: $(function)
// Shortcut for document ready
} else if ( jQuery.isFunction( selector ) ) {
return typeof rootjQuery.ready !== "undefined" ?
rootjQuery.ready( selector ) :
// Execute immediately if ready is not present
selector( jQuery );
}
if ( selector.selector !== undefined ) {
this.selector = selector.selector;
this.context = selector.context;
}
return jQuery.makeArray( selector, this );
};
// Give the init function the jQuery prototype for later instantiation
init.prototype = jQuery.fn;
// Initialize central reference
rootjQuery = jQuery( document );
var rparentsprev = /^(?:parents|prev(?:Until|All))/,
// methods guaranteed to produce a unique set when starting from a unique set
guaranteedUnique = {
children: true,
contents: true,
next: true,
prev: true
};
jQuery.extend({
dir: function( elem, dir, until ) {
var matched = [],
cur = elem[ dir ];
while ( cur && cur.nodeType !== 9 && (until === undefined || cur.nodeType !== 1 || !jQuery( cur ).is( until )) ) {
if ( cur.nodeType === 1 ) {
matched.push( cur );
}
cur = cur[dir];
}
return matched;
},
sibling: function( n, elem ) {
var r = [];
for ( ; n; n = n.nextSibling ) {
if ( n.nodeType === 1 && n !== elem ) {
r.push( n );
}
}
return r;
}
});
jQuery.fn.extend({
has: function( target ) {
var i,
targets = jQuery( target, this ),
len = targets.length;
return this.filter(function() {
for ( i = 0; i < len; i++ ) {
if ( jQuery.contains( this, targets[i] ) ) {
return true;
}
}
});
},
closest: function( selectors, context ) {
var cur,
i = 0,
l = this.length,
matched = [],
pos = rneedsContext.test( selectors ) || typeof selectors !== "string" ?
jQuery( selectors, context || this.context ) :
0;
for ( ; i < l; i++ ) {
for ( cur = this[i]; cur && cur !== context; cur = cur.parentNode ) {
// Always skip document fragments
if ( cur.nodeType < 11 && (pos ?
pos.index(cur) > -1 :
// Don't pass non-elements to Sizzle
cur.nodeType === 1 &&
jQuery.find.matchesSelector(cur, selectors)) ) {
matched.push( cur );
break;
}
}
}
return this.pushStack( matched.length > 1 ? jQuery.unique( matched ) : matched );
},
// Determine the position of an element within
// the matched set of elements
index: function( elem ) {
// No argument, return index in parent
if ( !elem ) {
return ( this[0] && this[0].parentNode ) ? this.first().prevAll().length : -1;
}
// index in selector
if ( typeof elem === "string" ) {
return jQuery.inArray( this[0], jQuery( elem ) );
}
// Locate the position of the desired element
return jQuery.inArray(
// If it receives a jQuery object, the first element is used
elem.jquery ? elem[0] : elem, this );
},
add: function( selector, context ) {
return this.pushStack(
jQuery.unique(
jQuery.merge( this.get(), jQuery( selector, context ) )
)
);
},
addBack: function( selector ) {
return this.add( selector == null ?
this.prevObject : this.prevObject.filter(selector)
);
}
});
function sibling( cur, dir ) {
do {
cur = cur[ dir ];
} while ( cur && cur.nodeType !== 1 );
return cur;
}
jQuery.each({
parent: function( elem ) {
var parent = elem.parentNode;
return parent && parent.nodeType !== 11 ? parent : null;
},
parents: function( elem ) {
return jQuery.dir( elem, "parentNode" );
},
parentsUntil: function( elem, i, until ) {
return jQuery.dir( elem, "parentNode", until );
},
next: function( elem ) {
return sibling( elem, "nextSibling" );
},
prev: function( elem ) {
return sibling( elem, "previousSibling" );
},
nextAll: function( elem ) {
return jQuery.dir( elem, "nextSibling" );
},
prevAll: function( elem ) {
return jQuery.dir( elem, "previousSibling" );
},
nextUntil: function( elem, i, until ) {
return jQuery.dir( elem, "nextSibling", until );
},
prevUntil: function( elem, i, until ) {
return jQuery.dir( elem, "previousSibling", until );
},
siblings: function( elem ) {
return jQuery.sibling( ( elem.parentNode || {} ).firstChild, elem );
},
children: function( elem ) {
return jQuery.sibling( elem.firstChild );
},
contents: function( elem ) {
return jQuery.nodeName( elem, "iframe" ) ?
elem.contentDocument || elem.contentWindow.document :
jQuery.merge( [], elem.childNodes );
}
}, function( name, fn ) {
jQuery.fn[ name ] = function( until, selector ) {
var ret = jQuery.map( this, fn, until );
if ( name.slice( -5 ) !== "Until" ) {
selector = until;
}
if ( selector && typeof selector === "string" ) {
ret = jQuery.filter( selector, ret );
}
if ( this.length > 1 ) {
// Remove duplicates
if ( !guaranteedUnique[ name ] ) {
ret = jQuery.unique( ret );
}
// Reverse order for parents* and prev-derivatives
if ( rparentsprev.test( name ) ) {
ret = ret.reverse();
}
}
return this.pushStack( ret );
};
});
var rnotwhite = (/\S+/g);
// String to Object options format cache
var optionsCache = {};
// Convert String-formatted options into Object-formatted ones and store in cache
function createOptions( options ) {
var object = optionsCache[ options ] = {};
jQuery.each( options.match( rnotwhite ) || [], function( _, flag ) {
object[ flag ] = true;
});
return object;
}
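// Illustrative example (comment only): createOptions( "once memory" ) returns
// { once: true, memory: true } and stores it in optionsCache under the same string.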
/*
* Create a callback list using the following parameters:
*
* options: an optional list of space-separated options that will change how
* the callback list behaves or a more traditional option object
*
* By default a callback list will act like an event callback list and can be
* "fired" multiple times.
*
* Possible options:
*
* once: will ensure the callback list can only be fired once (like a Deferred)
*
* memory: will keep track of previous values and will call any callback added
* after the list has been fired right away with the latest "memorized"
* values (like a Deferred)
*
* unique: will ensure a callback can only be added once (no duplicate in the list)
*
* stopOnFalse: interrupt firing when a callback returns false
*
*/
jQuery.Callbacks = function( options ) {
// Convert options from String-formatted to Object-formatted if needed
// (we check in cache first)
options = typeof options === "string" ?
( optionsCache[ options ] || createOptions( options ) ) :
jQuery.extend( {}, options );
var // Flag to know if list is currently firing
firing,
// Last fire value (for non-forgettable lists)
memory,
// Flag to know if list was already fired
fired,
// End of the loop when firing
firingLength,
// Index of currently firing callback (modified by remove if needed)
firingIndex,
// First callback to fire (used internally by add and fireWith)
firingStart,
// Actual callback list
list = [],
// Stack of fire calls for repeatable lists
stack = !options.once && [],
// Fire callbacks
fire = function( data ) {
memory = options.memory && data;
fired = true;
firingIndex = firingStart || 0;
firingStart = 0;
firingLength = list.length;
firing = true;
for ( ; list && firingIndex < firingLength; firingIndex++ ) {
if ( list[ firingIndex ].apply( data[ 0 ], data[ 1 ] ) === false && options.stopOnFalse ) {
memory = false; // To prevent further calls using add
break;
}
}
firing = false;
if ( list ) {
if ( stack ) {
if ( stack.length ) {
fire( stack.shift() );
}
} else if ( memory ) {
list = [];
} else {
self.disable();
}
}
},
// Actual Callbacks object
self = {
// Add a callback or a collection of callbacks to the list
add: function() {
if ( list ) {
// First, we save the current length
var start = list.length;
(function add( args ) {
jQuery.each( args, function( _, arg ) {
var type = jQuery.type( arg );
if ( type === "function" ) {
if ( !options.unique || !self.has( arg ) ) {
list.push( arg );
}
} else if ( arg && arg.length && type !== "string" ) {
// Inspect recursively
add( arg );
}
});
})( arguments );
// Do we need to add the callbacks to the
// current firing batch?
if ( firing ) {
firingLength = list.length;
// With memory, if we're not firing then
// we should call right away
} else if ( memory ) {
firingStart = start;
fire( memory );
}
}
return this;
},
// Remove a callback from the list
remove: function() {
if ( list ) {
jQuery.each( arguments, function( _, arg ) {
var index;
while ( ( index = jQuery.inArray( arg, list, index ) ) > -1 ) {
list.splice( index, 1 );
// Handle firing indexes
if ( firing ) {
if ( index <= firingLength ) {
firingLength--;
}
if ( index <= firingIndex ) {
firingIndex--;
}
}
}
});
}
return this;
},
// Check if a given callback is in the list.
// If no argument is given, return whether or not list has callbacks attached.
has: function( fn ) {
return fn ? jQuery.inArray( fn, list ) > -1 : !!( list && list.length );
},
// Remove all callbacks from the list
empty: function() {
list = [];
firingLength = 0;
return this;
},
// Have the list do nothing anymore
disable: function() {
list = stack = memory = undefined;
return this;
},
// Is it disabled?
disabled: function() {
return !list;
},
// Lock the list in its current state
lock: function() {
stack = undefined;
if ( !memory ) {
self.disable();
}
return this;
},
// Is it locked?
locked: function() {
return !stack;
},
// Call all callbacks with the given context and arguments
fireWith: function( context, args ) {
if ( list && ( !fired || stack ) ) {
args = args || [];
args = [ context, args.slice ? args.slice() : args ];
if ( firing ) {
stack.push( args );
} else {
fire( args );
}
}
return this;
},
// Call all the callbacks with the given arguments
fire: function() {
self.fireWith( this, arguments );
return this;
},
// To know if the callbacks have already been called at least once
fired: function() {
return !!fired;
}
};
return self;
};
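// Illustrative usage (comment only, assuming a hypothetical `log` callback):
//   var callbacks = jQuery.Callbacks( "memory" );
//   callbacks.fire( "hello" );  // nothing registered yet, but the value is memorized
//   callbacks.add( log );       // `log` runs immediately with "hello"
//   callbacks.fire( "world" );  // `log` runs again with "world"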
jQuery.extend({
Deferred: function( func ) {
var tuples = [
// action, add listener, listener list, final state
[ "resolve", "done", jQuery.Callbacks("once memory"), "resolved" ],
[ "reject", "fail", jQuery.Callbacks("once memory"), "rejected" ],
[ "notify", "progress", jQuery.Callbacks("memory") ]
],
state = "pending",
promise = {
state: function() {
return state;
},
always: function() {
deferred.done( arguments ).fail( arguments );
return this;
},
then: function( /* fnDone, fnFail, fnProgress */ ) {
var fns = arguments;
return jQuery.Deferred(function( newDefer ) {
jQuery.each( tuples, function( i, tuple ) {
var fn = jQuery.isFunction( fns[ i ] ) && fns[ i ];
// deferred[ done | fail | progress ] for forwarding actions to newDefer
deferred[ tuple[1] ](function() {
var returned = fn && fn.apply( this, arguments );
if ( returned && jQuery.isFunction( returned.promise ) ) {
returned.promise()
.done( newDefer.resolve )
.fail( newDefer.reject )
.progress( newDefer.notify );
} else {
newDefer[ tuple[ 0 ] + "With" ]( this === promise ? newDefer.promise() : this, fn ? [ returned ] : arguments );
}
});
});
fns = null;
}).promise();
},
// Get a promise for this deferred
// If obj is provided, the promise aspect is added to the object
promise: function( obj ) {
return obj != null ? jQuery.extend( obj, promise ) : promise;
}
},
deferred = {};
// Keep pipe for back-compat
promise.pipe = promise.then;
// Add list-specific methods
jQuery.each( tuples, function( i, tuple ) {
var list = tuple[ 2 ],
stateString = tuple[ 3 ];
// promise[ done | fail | progress ] = list.add
promise[ tuple[1] ] = list.add;
// Handle state
if ( stateString ) {
list.add(function() {
// state = [ resolved | rejected ]
state = stateString;
// [ reject_list | resolve_list ].disable; progress_list.lock
}, tuples[ i ^ 1 ][ 2 ].disable, tuples[ 2 ][ 2 ].lock );
}
// deferred[ resolve | reject | notify ]
deferred[ tuple[0] ] = function() {
deferred[ tuple[0] + "With" ]( this === deferred ? promise : this, arguments );
return this;
};
deferred[ tuple[0] + "With" ] = list.fireWith;
});
// Make the deferred a promise
promise.promise( deferred );
// Call given func if any
if ( func ) {
func.call( deferred, deferred );
}
// All done!
return deferred;
},
// Deferred helper
when: function( subordinate /* , ..., subordinateN */ ) {
var i = 0,
resolveValues = slice.call( arguments ),
length = resolveValues.length,
// the count of uncompleted subordinates
remaining = length !== 1 || ( subordinate && jQuery.isFunction( subordinate.promise ) ) ? length : 0,
// the master Deferred. If resolveValues consist of only a single Deferred, just use that.
deferred = remaining === 1 ? subordinate : jQuery.Deferred(),
// Update function for both resolve and progress values
updateFunc = function( i, contexts, values ) {
return function( value ) {
contexts[ i ] = this;
values[ i ] = arguments.length > 1 ? slice.call( arguments ) : value;
if ( values === progressValues ) {
deferred.notifyWith( contexts, values );
} else if ( !(--remaining) ) {
deferred.resolveWith( contexts, values );
}
};
},
progressValues, progressContexts, resolveContexts;
// add listeners to Deferred subordinates; treat others as resolved
if ( length > 1 ) {
progressValues = new Array( length );
progressContexts = new Array( length );
resolveContexts = new Array( length );
for ( ; i < length; i++ ) {
if ( resolveValues[ i ] && jQuery.isFunction( resolveValues[ i ].promise ) ) {
resolveValues[ i ].promise()
.done( updateFunc( i, resolveContexts, resolveValues ) )
.fail( deferred.reject )
.progress( updateFunc( i, progressContexts, progressValues ) );
} else {
--remaining;
}
}
}
// if we're not waiting on anything, resolve the master
if ( !remaining ) {
deferred.resolveWith( resolveContexts, resolveValues );
}
return deferred.promise();
}
});
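// Illustrative usage (comment only, assuming two hypothetical deferreds d1 and d2):
//   jQuery.when( d1, d2 ).done(function( r1, r2 ) {
//       // runs once both resolve; arguments arrive in the order the deferreds
//       // were passed, not the order in which they resolved
//   }).fail(function( err ) {
//       // runs as soon as either one is rejected
//   });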
// The deferred used on DOM ready
var readyList;
jQuery.fn.ready = function( fn ) {
// Add the callback
jQuery.ready.promise().done( fn );
return this;
};
jQuery.extend({
// Is the DOM ready to be used? Set to true once it occurs.
isReady: false,
// A counter to track how many items to wait for before
// the ready event fires. See #6781
readyWait: 1,
// Hold (or release) the ready event
holdReady: function( hold ) {
if ( hold ) {
jQuery.readyWait++;
} else {
jQuery.ready( true );
}
},
// Handle when the DOM is ready
ready: function( wait ) {
// Abort if there are pending holds or we're already ready
if ( wait === true ? --jQuery.readyWait : jQuery.isReady ) {
return;
}
// Make sure body exists, at least, in case IE gets a little overzealous (ticket #5443).
if ( !document.body ) {
return setTimeout( jQuery.ready );
}
// Remember that the DOM is ready
jQuery.isReady = true;
// If a normal DOM Ready event fired, decrement, and wait if need be
if ( wait !== true && --jQuery.readyWait > 0 ) {
return;
}
// If there are functions bound, execute them
readyList.resolveWith( document, [ jQuery ] );
// Trigger any bound ready events
if ( jQuery.fn.trigger ) {
jQuery( document ).trigger("ready").off("ready");
}
}
});
/**
* Clean-up method for dom ready events
*/
function detach() {
if ( document.addEventListener ) {
document.removeEventListener( "DOMContentLoaded", completed, false );
window.removeEventListener( "load", completed, false );
} else {
document.detachEvent( "onreadystatechange", completed );
window.detachEvent( "onload", completed );
}
}
/**
* The ready event handler and self cleanup method
*/
function completed() {
// readyState === "complete" is good enough for us to call the dom ready in oldIE
if ( document.addEventListener || event.type === "load" || document.readyState === "complete" ) {
detach();
jQuery.ready();
}
}
jQuery.ready.promise = function( obj ) {
if ( !readyList ) {
readyList = jQuery.Deferred();
// Catch cases where $(document).ready() is called after the browser event has already occurred.
// we once tried to use readyState "interactive" here, but it caused issues like the one
// discovered by ChrisS here: http://bugs.jquery.com/ticket/12282#comment:15
if ( document.readyState === "complete" ) {
// Handle it asynchronously to allow scripts the opportunity to delay ready
setTimeout( jQuery.ready );
// Standards-based browsers support DOMContentLoaded
} else if ( document.addEventListener ) {
// Use the handy event callback
document.addEventListener( "DOMContentLoaded", completed, false );
// A fallback to window.onload, that will always work
window.addEventListener( "load", completed, false );
// If IE event model is used
} else {
// Ensure firing before onload, maybe late but safe also for iframes
document.attachEvent( "onreadystatechange", completed );
// A fallback to window.onload, that will always work
window.attachEvent( "onload", completed );
// If IE and not a frame
// continually check to see if the document is ready
var top = false;
try {
top = window.frameElement == null && document.documentElement;
} catch(e) {}
if ( top && top.doScroll ) {
(function doScrollCheck() {
if ( !jQuery.isReady ) {
try {
// Use the trick by Diego Perini
// http://javascript.nwbox.com/IEContentLoaded/
top.doScroll("left");
} catch(e) {
return setTimeout( doScrollCheck, 50 );
}
// detach all dom ready events
detach();
// and execute any waiting functions
jQuery.ready();
}
})();
}
}
}
return readyList.promise( obj );
};
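// Illustrative usage (editor's note): the ready state is exposed as a promise,
// so it composes with other Deferreds. The callback receives jQuery, mirroring
// jQuery(function( $ ) { ... }); the class name below is hypothetical:
//
//     jQuery.ready.promise().done(function( $ ) {
//         $( "body" ).addClass( "dom-ready" );
//     });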
var strundefined = typeof undefined;
// Support: IE<9
// Iteration over object's inherited properties before its own
var i;
for ( i in jQuery( support ) ) {
break;
}
support.ownLast = i !== "0";
// Note: most support tests are defined in their respective modules.
// false until the test is run
support.inlineBlockNeedsLayout = false;
jQuery(function() {
// We need to execute this one support test ASAP because we need to know
// if body.style.zoom needs to be set.
var container, div,
body = document.getElementsByTagName("body")[0];
if ( !body ) {
// Return for frameset docs that don't have a body
return;
}
// Setup
container = document.createElement( "div" );
container.style.cssText = "border:0;width:0;height:0;position:absolute;top:0;left:-9999px;margin-top:1px";
div = document.createElement( "div" );
body.appendChild( container ).appendChild( div );
if ( typeof div.style.zoom !== strundefined ) {
// Support: IE<8
// Check if natively block-level elements act like inline-block
// elements when setting their display to 'inline' and giving
// them layout
div.style.cssText = "border:0;margin:0;width:1px;padding:1px;display:inline;zoom:1";
if ( (support.inlineBlockNeedsLayout = ( div.offsetWidth === 3 )) ) {
// Prevent IE 6 from affecting layout for positioned elements #11048
// Prevent IE from shrinking the body in IE 7 mode #12869
// Support: IE<8
body.style.zoom = 1;
}
}
body.removeChild( container );
// Null elements to avoid leaks in IE
container = div = null;
});
(function() {
var div = document.createElement( "div" );
// Execute the test only if not already executed in another module.
if (support.deleteExpando == null) {
// Support: IE<9
support.deleteExpando = true;
try {
delete div.test;
} catch( e ) {
support.deleteExpando = false;
}
}
// Null elements to avoid leaks in IE.
div = null;
})();
/**
* Determines whether an object can have data
*/
jQuery.acceptData = function( elem ) {
var noData = jQuery.noData[ (elem.nodeName + " ").toLowerCase() ],
nodeType = +elem.nodeType || 1;
// Do not set data on non-element DOM nodes because it will not be cleared (#8335).
return nodeType !== 1 && nodeType !== 9 ?
false :
// Nodes accept data unless otherwise specified; rejection can be conditional
!noData || noData !== true && elem.getAttribute("classid") === noData;
};
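// Illustrative behaviour (editor's note, not part of the library):
//
//     jQuery.acceptData( document.createElement( "div" ) );   // true  (regular element)
//     jQuery.acceptData( document.createComment( "x" ) );     // false (nodeType 8)
//     jQuery.acceptData( document.createElement( "embed" ) ); // false (listed in jQuery.noData)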
var rbrace = /^(?:\{[\w\W]*\}|\[[\w\W]*\])$/,
rmultiDash = /([A-Z])/g;
function dataAttr( elem, key, data ) {
// If nothing was found internally, try to fetch any
// data from the HTML5 data-* attribute
if ( data === undefined && elem.nodeType === 1 ) {
var name = "data-" + key.replace( rmultiDash, "-$1" ).toLowerCase();
data = elem.getAttribute( name );
if ( typeof data === "string" ) {
try {
data = data === "true" ? true :
data === "false" ? false :
data === "null" ? null :
// Only convert to a number if it doesn't change the string
+data + "" === data ? +data :
rbrace.test( data ) ? jQuery.parseJSON( data ) :
data;
} catch( e ) {}
// Make sure we set the data so it isn't changed later
jQuery.data( elem, key, data );
} else {
data = undefined;
}
}
return data;
}
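// Illustrative conversions (editor's note). Given hypothetical markup
// <div id="box" data-count="3" data-active="true" data-config='{"x":1}'>,
// dataAttr() coerces the attribute strings on first read:
//
//     jQuery( "#box" ).data( "count" );   // 3        (numeric string -> number)
//     jQuery( "#box" ).data( "active" );  // true     ("true"/"false" -> boolean)
//     jQuery( "#box" ).data( "config" );  // { x: 1 } (JSON-like string -> jQuery.parseJSON)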
// checks a cache object for emptiness
function isEmptyDataObject( obj ) {
var name;
for ( name in obj ) {
// if the public data object is empty, the private is still empty
if ( name === "data" && jQuery.isEmptyObject( obj[name] ) ) {
continue;
}
if ( name !== "toJSON" ) {
return false;
}
}
return true;
}
function internalData( elem, name, data, pvt /* Internal Use Only */ ) {
if ( !jQuery.acceptData( elem ) ) {
return;
}
var ret, thisCache,
internalKey = jQuery.expando,
// We have to handle DOM nodes and JS objects differently because IE6-7
// can't GC object references properly across the DOM-JS boundary
isNode = elem.nodeType,
// Only DOM nodes need the global jQuery cache; JS object data is
// attached directly to the object so GC can occur automatically
cache = isNode ? jQuery.cache : elem,
// Only defining an ID for JS objects if its cache already exists allows
// the code to shortcut on the same path as a DOM node with no cache
id = isNode ? elem[ internalKey ] : elem[ internalKey ] && internalKey;
// Avoid doing any more work than we need to when trying to get data on an
// object that has no data at all
if ( (!id || !cache[id] || (!pvt && !cache[id].data)) && data === undefined && typeof name === "string" ) {
return;
}
if ( !id ) {
// Only DOM nodes need a new unique ID for each element since their data
// ends up in the global cache
if ( isNode ) {
id = elem[ internalKey ] = deletedIds.pop() || jQuery.guid++;
} else {
id = internalKey;
}
}
if ( !cache[ id ] ) {
// Avoid exposing jQuery metadata on plain JS objects when the object
// is serialized using JSON.stringify
cache[ id ] = isNode ? {} : { toJSON: jQuery.noop };
}
// An object can be passed to jQuery.data instead of a key/value pair; this gets
// shallow copied over onto the existing cache
if ( typeof name === "object" || typeof name === "function" ) {
if ( pvt ) {
cache[ id ] = jQuery.extend( cache[ id ], name );
} else {
cache[ id ].data = jQuery.extend( cache[ id ].data, name );
}
}
thisCache = cache[ id ];
// jQuery data() is stored in a separate object inside the object's internal data
// cache in order to avoid key collisions between internal data and user-defined
// data.
if ( !pvt ) {
if ( !thisCache.data ) {
thisCache.data = {};
}
thisCache = thisCache.data;
}
if ( data !== undefined ) {
thisCache[ jQuery.camelCase( name ) ] = data;
}
// Check for both converted-to-camel and non-converted data property names
// If a data property was specified
if ( typeof name === "string" ) {
// First Try to find as-is property data
ret = thisCache[ name ];
// Test for null|undefined property data
if ( ret == null ) {
// Try to find the camelCased property
ret = thisCache[ jQuery.camelCase( name ) ];
}
} else {
ret = thisCache;
}
return ret;
}
function internalRemoveData( elem, name, pvt ) {
if ( !jQuery.acceptData( elem ) ) {
return;
}
var thisCache, i,
isNode = elem.nodeType,
// See jQuery.data for more information
cache = isNode ? jQuery.cache : elem,
id = isNode ? elem[ jQuery.expando ] : jQuery.expando;
// If there is already no cache entry for this object, there is no
// purpose in continuing
if ( !cache[ id ] ) {
return;
}
if ( name ) {
thisCache = pvt ? cache[ id ] : cache[ id ].data;
if ( thisCache ) {
// Support array or space separated string names for data keys
if ( !jQuery.isArray( name ) ) {
// try the string as a key before any manipulation
if ( name in thisCache ) {
name = [ name ];
} else {
// split the camel cased version by spaces unless a key with the spaces exists
name = jQuery.camelCase( name );
if ( name in thisCache ) {
name = [ name ];
} else {
name = name.split(" ");
}
}
} else {
// If "name" is an array of keys...
// When data is initially created, via ("key", "val") signature,
// keys will be converted to camelCase.
// Since there is no way to tell _how_ a key was added, remove
// both plain key and camelCase key. #12786
// This will only penalize the array argument path.
name = name.concat( jQuery.map( name, jQuery.camelCase ) );
}
i = name.length;
while ( i-- ) {
delete thisCache[ name[i] ];
}
// If there is no data left in the cache, we want to continue
// and let the cache object itself get destroyed
if ( pvt ? !isEmptyDataObject(thisCache) : !jQuery.isEmptyObject(thisCache) ) {
return;
}
}
}
// See jQuery.data for more information
if ( !pvt ) {
delete cache[ id ].data;
// Don't destroy the parent cache unless the internal data object
// had been the only thing left in it
if ( !isEmptyDataObject( cache[ id ] ) ) {
return;
}
}
// Destroy the cache
if ( isNode ) {
jQuery.cleanData( [ elem ], true );
// Use delete when supported for expandos or `cache` is not a window per isWindow (#10080)
/* jshint eqeqeq: false */
} else if ( support.deleteExpando || cache != cache.window ) {
/* jshint eqeqeq: true */
delete cache[ id ];
// When all else fails, null
} else {
cache[ id ] = null;
}
}
jQuery.extend({
cache: {},
// The following elements (space-suffixed to avoid Object.prototype collisions)
// throw uncatchable exceptions if you attempt to set expando properties
noData: {
"applet ": true,
"embed ": true,
// ...but Flash objects (which have this classid) *can* handle expandos
"object ": "clsid:D27CDB6E-AE6D-11cf-96B8-444553540000"
},
hasData: function( elem ) {
elem = elem.nodeType ? jQuery.cache[ elem[jQuery.expando] ] : elem[ jQuery.expando ];
return !!elem && !isEmptyDataObject( elem );
},
data: function( elem, name, data ) {
return internalData( elem, name, data );
},
removeData: function( elem, name ) {
return internalRemoveData( elem, name );
},
// For internal use only.
_data: function( elem, name, data ) {
return internalData( elem, name, data, true );
},
_removeData: function( elem, name ) {
return internalRemoveData( elem, name, true );
}
});
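// Illustrative usage (editor's note). Public data lives under cache[ id ].data,
// while the underscore-prefixed helpers write directly onto cache[ id ], keeping
// library bookkeeping separate from user data:
//
//     var el = document.createElement( "div" );
//     jQuery.data( el, "role", "header" );      // user data
//     jQuery._data( el, "parsedAttrs", true );  // internal data, invisible to jQuery.data()
//     jQuery.hasData( el );                     // true
//     jQuery.removeData( el, "role" );          // removes only the public entry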
jQuery.fn.extend({
data: function( key, value ) {
var i, name, data,
elem = this[0],
attrs = elem && elem.attributes;
// Special expectations of .data basically thwart jQuery.access,
// so implement the relevant behavior ourselves
// Gets all values
if ( key === undefined ) {
if ( this.length ) {
data = jQuery.data( elem );
if ( elem.nodeType === 1 && !jQuery._data( elem, "parsedAttrs" ) ) {
i = attrs.length;
while ( i-- ) {
name = attrs[i].name;
if ( name.indexOf("data-") === 0 ) {
name = jQuery.camelCase( name.slice(5) );
dataAttr( elem, name, data[ name ] );
}
}
jQuery._data( elem, "parsedAttrs", true );
}
}
return data;
}
// Sets multiple values
if ( typeof key === "object" ) {
return this.each(function() {
jQuery.data( this, key );
});
}
return arguments.length > 1 ?
// Sets one value
this.each(function() {
jQuery.data( this, key, value );
}) :
// Gets one value
// Try to fetch any internally stored data first
elem ? dataAttr( elem, key, jQuery.data( elem, key ) ) : undefined;
},
removeData: function( key ) {
return this.each(function() {
jQuery.removeData( this, key );
});
}
});
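// Illustrative usage (editor's note); "#box" is a hypothetical element:
//
//     jQuery( "#box" ).data();                // parses every data-* attribute once ("parsedAttrs")
//     jQuery( "#box" ).data( "count", 5 );    // stored in jQuery.cache; the attribute itself is untouched
//     jQuery( "#box" ).removeData( "count" );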
jQuery.extend({
queue: function( elem, type, data ) {
var queue;
if ( elem ) {
type = ( type || "fx" ) + "queue";
queue = jQuery._data( elem, type );
// Speed up dequeue by getting out quickly if this is just a lookup
if ( data ) {
if ( !queue || jQuery.isArray(data) ) {
queue = jQuery._data( elem, type, jQuery.makeArray(data) );
} else {
queue.push( data );
}
}
return queue || [];
}
},
dequeue: function( elem, type ) {
type = type || "fx";
var queue = jQuery.queue( elem, type ),
startLength = queue.length,
fn = queue.shift(),
hooks = jQuery._queueHooks( elem, type ),
next = function() {
jQuery.dequeue( elem, type );
};
// If the fx queue is dequeued, always remove the progress sentinel
if ( fn === "inprogress" ) {
fn = queue.shift();
startLength--;
}
if ( fn ) {
// Add a progress sentinel to prevent the fx queue from being
// automatically dequeued
if ( type === "fx" ) {
queue.unshift( "inprogress" );
}
// clear up the last queue stop function
delete hooks.stop;
fn.call( elem, next, hooks );
}
if ( !startLength && hooks ) {
hooks.empty.fire();
}
},
// not intended for public consumption - generates a queueHooks object, or returns the current one
_queueHooks: function( elem, type ) {
var key = type + "queueHooks";
return jQuery._data( elem, key ) || jQuery._data( elem, key, {
empty: jQuery.Callbacks("once memory").add(function() {
jQuery._removeData( elem, type + "queue" );
jQuery._removeData( elem, key );
})
});
}
});
jQuery.fn.extend({
queue: function( type, data ) {
var setter = 2;
if ( typeof type !== "string" ) {
data = type;
type = "fx";
setter--;
}
if ( arguments.length < setter ) {
return jQuery.queue( this[0], type );
}
return data === undefined ?
this :
this.each(function() {
var queue = jQuery.queue( this, type, data );
// ensure a queueHooks object exists for this queue
jQuery._queueHooks( this, type );
if ( type === "fx" && queue[0] !== "inprogress" ) {
jQuery.dequeue( this, type );
}
});
},
dequeue: function( type ) {
return this.each(function() {
jQuery.dequeue( this, type );
});
},
clearQueue: function( type ) {
return this.queue( type || "fx", [] );
},
// Get a promise resolved when queues of a certain type
// are emptied (fx is the type by default)
promise: function( type, obj ) {
var tmp,
count = 1,
defer = jQuery.Deferred(),
elements = this,
i = this.length,
resolve = function() {
if ( !( --count ) ) {
defer.resolveWith( elements, [ elements ] );
}
};
if ( typeof type !== "string" ) {
obj = type;
type = undefined;
}
type = type || "fx";
while ( i-- ) {
tmp = jQuery._data( elements[ i ], type + "queueHooks" );
if ( tmp && tmp.empty ) {
count++;
tmp.empty.add( resolve );
}
}
resolve();
return defer.promise( obj );
}
});
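// Illustrative usage (editor's note); "#box" and the queue name are hypothetical:
//
//     var $box = jQuery( "#box" );
//     $box.queue( "steps", function( next ) { next(); } );   // step one
//     $box.queue( "steps", function( next ) { next(); } );   // step two
//     $box.dequeue( "steps" );                                // runs step one; next() starts step two
//     $box.promise( "steps" ).done(function() {
//         // resolved once the "steps" queue has emptied
//     });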
var pnum = (/[+-]?(?:\d*\.|)\d+(?:[eE][+-]?\d+|)/).source;
var cssExpand = [ "Top", "Right", "Bottom", "Left" ];
var isHidden = function( elem, el ) {
// isHidden might be called from jQuery#filter function;
// in that case, element will be second argument
elem = el || elem;
return jQuery.css( elem, "display" ) === "none" || !jQuery.contains( elem.ownerDocument, elem );
};
// Multifunctional method to get and set values of a collection
// The value/s can optionally be executed if it's a function
var access = jQuery.access = function( elems, fn, key, value, chainable, emptyGet, raw ) {
var i = 0,
length = elems.length,
bulk = key == null;
// Sets many values
if ( jQuery.type( key ) === "object" ) {
chainable = true;
for ( i in key ) {
jQuery.access( elems, fn, i, key[i], true, emptyGet, raw );
}
// Sets one value
} else if ( value !== undefined ) {
chainable = true;
if ( !jQuery.isFunction( value ) ) {
raw = true;
}
if ( bulk ) {
// Bulk operations run against the entire set
if ( raw ) {
fn.call( elems, value );
fn = null;
// ...except when executing function values
} else {
bulk = fn;
fn = function( elem, key, value ) {
return bulk.call( jQuery( elem ), value );
};
}
}
if ( fn ) {
for ( ; i < length; i++ ) {
fn( elems[i], key, raw ? value : value.call( elems[i], i, fn( elems[i], key ) ) );
}
}
}
return chainable ?
elems :
// Gets
bulk ?
fn.call( elems ) :
length ? fn( elems[0], key ) : emptyGet;
};
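// Editor's sketch of how a multiplexed getter/setter can be built on top of
// access(); .myProp is a hypothetical plugin method, not a jQuery API:
//
//     jQuery.fn.myProp = function( name, value ) {
//         return access( this, function( elem, name, value ) {
//             return value === undefined ? elem[ name ] : ( elem[ name ] = value );
//         }, name, value, arguments.length > 1 );
//     };
//
//     jQuery( "input" ).myProp( "disabled" );        // get from the first element
//     jQuery( "input" ).myProp( "disabled", true );  // set on all elements, returns the set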
var rcheckableType = (/^(?:checkbox|radio)$/i);
(function() {
var fragment = document.createDocumentFragment(),
div = document.createElement("div"),
input = document.createElement("input");
// Setup
div.setAttribute( "className", "t" );
div.innerHTML = " <link/><table></table><a href='/a'>a</a>";
// IE strips leading whitespace when .innerHTML is used
support.leadingWhitespace = div.firstChild.nodeType === 3;
// Make sure that tbody elements aren't automatically inserted
// IE will insert them into empty tables
support.tbody = !div.getElementsByTagName( "tbody" ).length;
// Make sure that link elements get serialized correctly by innerHTML
// This requires a wrapper element in IE
support.htmlSerialize = !!div.getElementsByTagName( "link" ).length;
// Makes sure cloning an html5 element does not cause problems
// Where outerHTML is undefined, this still works
support.html5Clone =
document.createElement( "nav" ).cloneNode( true ).outerHTML !== "<:nav></:nav>";
// Check if a disconnected checkbox will retain its checked
// value of true after appended to the DOM (IE6/7)
input.type = "checkbox";
input.checked = true;
fragment.appendChild( input );
support.appendChecked = input.checked;
// Make sure textarea (and checkbox) defaultValue is properly cloned
// Support: IE6-IE11+
div.innerHTML = "<textarea>x</textarea>";
support.noCloneChecked = !!div.cloneNode( true ).lastChild.defaultValue;
// #11217 - WebKit loses check when the name is after the checked attribute
fragment.appendChild( div );
div.innerHTML = "<input type='radio' checked='checked' name='t'/>";
// Support: Safari 5.1, iOS 5.1, Android 4.x, Android 2.3
// old WebKit doesn't clone checked state correctly in fragments
support.checkClone = div.cloneNode( true ).cloneNode( true ).lastChild.checked;
// Support: IE<9
// Opera does not clone events (and typeof div.attachEvent === undefined).
// IE9-10 clones events bound via attachEvent, but they don't trigger with .click()
support.noCloneEvent = true;
if ( div.attachEvent ) {
div.attachEvent( "onclick", function() {
support.noCloneEvent = false;
});
div.cloneNode( true ).click();
}
// Execute the test only if not already executed in another module.
if (support.deleteExpando == null) {
// Support: IE<9
support.deleteExpando = true;
try {
delete div.test;
} catch( e ) {
support.deleteExpando = false;
}
}
// Null elements to avoid leaks in IE.
fragment = div = input = null;
})();
(function() {
var i, eventName,
div = document.createElement( "div" );
// Support: IE<9 (lack submit/change bubble), Firefox 23+ (lack focusin event)
for ( i in { submit: true, change: true, focusin: true }) {
eventName = "on" + i;
if ( !(support[ i + "Bubbles" ] = eventName in window) ) {
// Beware of CSP restrictions (https://developer.mozilla.org/en/Security/CSP)
div.setAttribute( eventName, "t" );
support[ i + "Bubbles" ] = div.attributes[ eventName ].expando === false;
}
}
// Null elements to avoid leaks in IE.
div = null;
})();
var rformElems = /^(?:input|select|textarea)$/i,
rkeyEvent = /^key/,
rmouseEvent = /^(?:mouse|contextmenu)|click/,
rfocusMorph = /^(?:focusinfocus|focusoutblur)$/,
rtypenamespace = /^([^.]*)(?:\.(.+)|)$/;
function returnTrue() {
return true;
}
function returnFalse() {
return false;
}
function safeActiveElement() {
try {
return document.activeElement;
} catch ( err ) { }
}
/*
* Helper functions for managing events -- not part of the public interface.
* Props to Dean Edwards' addEvent library for many of the ideas.
*/
jQuery.event = {
global: {},
add: function( elem, types, handler, data, selector ) {
var tmp, events, t, handleObjIn,
special, eventHandle, handleObj,
handlers, type, namespaces, origType,
elemData = jQuery._data( elem );
// Don't attach events to noData or text/comment nodes (but allow plain objects)
if ( !elemData ) {
return;
}
// Caller can pass in an object of custom data in lieu of the handler
if ( handler.handler ) {
handleObjIn = handler;
handler = handleObjIn.handler;
selector = handleObjIn.selector;
}
// Make sure that the handler has a unique ID, used to find/remove it later
if ( !handler.guid ) {
handler.guid = jQuery.guid++;
}
// Init the element's event structure and main handler, if this is the first
if ( !(events = elemData.events) ) {
events = elemData.events = {};
}
if ( !(eventHandle = elemData.handle) ) {
eventHandle = elemData.handle = function( e ) {
// Discard the second event of a jQuery.event.trigger() and
// when an event is called after a page has unloaded
return typeof jQuery !== strundefined && (!e || jQuery.event.triggered !== e.type) ?
jQuery.event.dispatch.apply( eventHandle.elem, arguments ) :
undefined;
};
// Add elem as a property of the handle fn to prevent a memory leak with IE non-native events
eventHandle.elem = elem;
}
// Handle multiple events separated by a space
types = ( types || "" ).match( rnotwhite ) || [ "" ];
t = types.length;
while ( t-- ) {
tmp = rtypenamespace.exec( types[t] ) || [];
type = origType = tmp[1];
namespaces = ( tmp[2] || "" ).split( "." ).sort();
// There *must* be a type, no attaching namespace-only handlers
if ( !type ) {
continue;
}
// If event changes its type, use the special event handlers for the changed type
special = jQuery.event.special[ type ] || {};
// If selector defined, determine special event api type, otherwise given type
type = ( selector ? special.delegateType : special.bindType ) || type;
// Update special based on newly reset type
special = jQuery.event.special[ type ] || {};
// handleObj is passed to all event handlers
handleObj = jQuery.extend({
type: type,
origType: origType,
data: data,
handler: handler,
guid: handler.guid,
selector: selector,
needsContext: selector && jQuery.expr.match.needsContext.test( selector ),
namespace: namespaces.join(".")
}, handleObjIn );
// Init the event handler queue if we're the first
if ( !(handlers = events[ type ]) ) {
handlers = events[ type ] = [];
handlers.delegateCount = 0;
// Only use addEventListener/attachEvent if the special events handler returns false
if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) {
// Bind the global event handler to the element
if ( elem.addEventListener ) {
elem.addEventListener( type, eventHandle, false );
} else if ( elem.attachEvent ) {
elem.attachEvent( "on" + type, eventHandle );
}
}
}
if ( special.add ) {
special.add.call( elem, handleObj );
if ( !handleObj.handler.guid ) {
handleObj.handler.guid = handler.guid;
}
}
// Add to the element's handler list, delegates in front
if ( selector ) {
handlers.splice( handlers.delegateCount++, 0, handleObj );
} else {
handlers.push( handleObj );
}
// Keep track of which events have ever been used, for event optimization
jQuery.event.global[ type ] = true;
}
// Nullify elem to prevent memory leaks in IE
elem = null;
},
// Detach an event or set of events from an element
remove: function( elem, types, handler, selector, mappedTypes ) {
var j, handleObj, tmp,
origCount, t, events,
special, handlers, type,
namespaces, origType,
elemData = jQuery.hasData( elem ) && jQuery._data( elem );
if ( !elemData || !(events = elemData.events) ) {
return;
}
// Once for each type.namespace in types; type may be omitted
types = ( types || "" ).match( rnotwhite ) || [ "" ];
t = types.length;
while ( t-- ) {
tmp = rtypenamespace.exec( types[t] ) || [];
type = origType = tmp[1];
namespaces = ( tmp[2] || "" ).split( "." ).sort();
// Unbind all events (on this namespace, if provided) for the element
if ( !type ) {
for ( type in events ) {
jQuery.event.remove( elem, type + types[ t ], handler, selector, true );
}
continue;
}
special = jQuery.event.special[ type ] || {};
type = ( selector ? special.delegateType : special.bindType ) || type;
handlers = events[ type ] || [];
tmp = tmp[2] && new RegExp( "(^|\\.)" + namespaces.join("\\.(?:.*\\.|)") + "(\\.|$)" );
// Remove matching events
origCount = j = handlers.length;
while ( j-- ) {
handleObj = handlers[ j ];
if ( ( mappedTypes || origType === handleObj.origType ) &&
( !handler || handler.guid === handleObj.guid ) &&
( !tmp || tmp.test( handleObj.namespace ) ) &&
( !selector || selector === handleObj.selector || selector === "**" && handleObj.selector ) ) {
handlers.splice( j, 1 );
if ( handleObj.selector ) {
handlers.delegateCount--;
}
if ( special.remove ) {
special.remove.call( elem, handleObj );
}
}
}
// Remove generic event handler if we removed something and no more handlers exist
// (avoids potential for endless recursion during removal of special event handlers)
if ( origCount && !handlers.length ) {
if ( !special.teardown || special.teardown.call( elem, namespaces, elemData.handle ) === false ) {
jQuery.removeEvent( elem, type, elemData.handle );
}
delete events[ type ];
}
}
// Remove the expando if it's no longer used
if ( jQuery.isEmptyObject( events ) ) {
delete elemData.handle;
// removeData also checks for emptiness and clears the expando if empty
// so use it instead of delete
jQuery._removeData( elem, "events" );
}
},
trigger: function( event, data, elem, onlyHandlers ) {
var handle, ontype, cur,
bubbleType, special, tmp, i,
eventPath = [ elem || document ],
type = hasOwn.call( event, "type" ) ? event.type : event,
namespaces = hasOwn.call( event, "namespace" ) ? event.namespace.split(".") : [];
cur = tmp = elem = elem || document;
// Don't do events on text and comment nodes
if ( elem.nodeType === 3 || elem.nodeType === 8 ) {
return;
}
// focus/blur morphs to focusin/out; ensure we're not firing them right now
if ( rfocusMorph.test( type + jQuery.event.triggered ) ) {
return;
}
if ( type.indexOf(".") >= 0 ) {
// Namespaced trigger; create a regexp to match event type in handle()
namespaces = type.split(".");
type = namespaces.shift();
namespaces.sort();
}
ontype = type.indexOf(":") < 0 && "on" + type;
// Caller can pass in a jQuery.Event object, Object, or just an event type string
event = event[ jQuery.expando ] ?
event :
new jQuery.Event( type, typeof event === "object" && event );
// Trigger bitmask: & 1 for native handlers; & 2 for jQuery (always true)
event.isTrigger = onlyHandlers ? 2 : 3;
event.namespace = namespaces.join(".");
event.namespace_re = event.namespace ?
new RegExp( "(^|\\.)" + namespaces.join("\\.(?:.*\\.|)") + "(\\.|$)" ) :
null;
// Clean up the event in case it is being reused
event.result = undefined;
if ( !event.target ) {
event.target = elem;
}
// Clone any incoming data and prepend the event, creating the handler arg list
data = data == null ?
[ event ] :
jQuery.makeArray( data, [ event ] );
// Allow special events to draw outside the lines
special = jQuery.event.special[ type ] || {};
if ( !onlyHandlers && special.trigger && special.trigger.apply( elem, data ) === false ) {
return;
}
// Determine event propagation path in advance, per W3C events spec (#9951)
// Bubble up to document, then to window; watch for a global ownerDocument var (#9724)
if ( !onlyHandlers && !special.noBubble && !jQuery.isWindow( elem ) ) {
bubbleType = special.delegateType || type;
if ( !rfocusMorph.test( bubbleType + type ) ) {
cur = cur.parentNode;
}
for ( ; cur; cur = cur.parentNode ) {
eventPath.push( cur );
tmp = cur;
}
// Only add window if we got to document (e.g., not plain obj or detached DOM)
if ( tmp === (elem.ownerDocument || document) ) {
eventPath.push( tmp.defaultView || tmp.parentWindow || window );
}
}
// Fire handlers on the event path
i = 0;
while ( (cur = eventPath[i++]) && !event.isPropagationStopped() ) {
event.type = i > 1 ?
bubbleType :
special.bindType || type;
// jQuery handler
handle = ( jQuery._data( cur, "events" ) || {} )[ event.type ] && jQuery._data( cur, "handle" );
if ( handle ) {
handle.apply( cur, data );
}
// Native handler
handle = ontype && cur[ ontype ];
if ( handle && handle.apply && jQuery.acceptData( cur ) ) {
event.result = handle.apply( cur, data );
if ( event.result === false ) {
event.preventDefault();
}
}
}
event.type = type;
// If nobody prevented the default action, do it now
if ( !onlyHandlers && !event.isDefaultPrevented() ) {
if ( (!special._default || special._default.apply( eventPath.pop(), data ) === false) &&
jQuery.acceptData( elem ) ) {
// Call a native DOM method on the target with the same name as the event.
// Can't use an .isFunction() check here because IE6/7 fails that test.
// Don't do default actions on window, that's where global variables be (#6170)
if ( ontype && elem[ type ] && !jQuery.isWindow( elem ) ) {
// Don't re-trigger an onFOO event when we call its FOO() method
tmp = elem[ ontype ];
if ( tmp ) {
elem[ ontype ] = null;
}
// Prevent re-triggering of the same event, since we already bubbled it above
jQuery.event.triggered = type;
try {
elem[ type ]();
} catch ( e ) {
// IE<9 dies on focus/blur to hidden element (#1486,#12518)
// only reproducible on winXP IE8 native, not IE9 in IE8 mode
}
jQuery.event.triggered = undefined;
if ( tmp ) {
elem[ ontype ] = tmp;
}
}
}
}
return event.result;
},
dispatch: function( event ) {
// Make a writable jQuery.Event from the native event object
event = jQuery.event.fix( event );
var i, ret, handleObj, matched, j,
handlerQueue = [],
args = slice.call( arguments ),
handlers = ( jQuery._data( this, "events" ) || {} )[ event.type ] || [],
special = jQuery.event.special[ event.type ] || {};
// Use the fix-ed jQuery.Event rather than the (read-only) native event
args[0] = event;
event.delegateTarget = this;
// Call the preDispatch hook for the mapped type, and let it bail if desired
if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) {
return;
}
// Determine handlers
handlerQueue = jQuery.event.handlers.call( this, event, handlers );
// Run delegates first; they may want to stop propagation beneath us
i = 0;
while ( (matched = handlerQueue[ i++ ]) && !event.isPropagationStopped() ) {
event.currentTarget = matched.elem;
j = 0;
while ( (handleObj = matched.handlers[ j++ ]) && !event.isImmediatePropagationStopped() ) {
// Triggered event must either 1) have no namespace, or
// 2) have namespace(s) a subset or equal to those in the bound event (both can have no namespace).
if ( !event.namespace_re || event.namespace_re.test( handleObj.namespace ) ) {
event.handleObj = handleObj;
event.data = handleObj.data;
ret = ( (jQuery.event.special[ handleObj.origType ] || {}).handle || handleObj.handler )
.apply( matched.elem, args );
if ( ret !== undefined ) {
if ( (event.result = ret) === false ) {
event.preventDefault();
event.stopPropagation();
}
}
}
}
}
// Call the postDispatch hook for the mapped type
if ( special.postDispatch ) {
special.postDispatch.call( this, event );
}
return event.result;
},
handlers: function( event, handlers ) {
var sel, handleObj, matches, i,
handlerQueue = [],
delegateCount = handlers.delegateCount,
cur = event.target;
// Find delegate handlers
// Black-hole SVG <use> instance trees (#13180)
// Avoid non-left-click bubbling in Firefox (#3861)
if ( delegateCount && cur.nodeType && (!event.button || event.type !== "click") ) {
/* jshint eqeqeq: false */
for ( ; cur != this; cur = cur.parentNode || this ) {
/* jshint eqeqeq: true */
// Don't check non-elements (#13208)
// Don't process clicks on disabled elements (#6911, #8165, #11382, #11764)
if ( cur.nodeType === 1 && (cur.disabled !== true || event.type !== "click") ) {
matches = [];
for ( i = 0; i < delegateCount; i++ ) {
handleObj = handlers[ i ];
// Don't conflict with Object.prototype properties (#13203)
sel = handleObj.selector + " ";
if ( matches[ sel ] === undefined ) {
matches[ sel ] = handleObj.needsContext ?
jQuery( sel, this ).index( cur ) >= 0 :
jQuery.find( sel, this, null, [ cur ] ).length;
}
if ( matches[ sel ] ) {
matches.push( handleObj );
}
}
if ( matches.length ) {
handlerQueue.push({ elem: cur, handlers: matches });
}
}
}
}
// Add the remaining (directly-bound) handlers
if ( delegateCount < handlers.length ) {
handlerQueue.push({ elem: this, handlers: handlers.slice( delegateCount ) });
}
return handlerQueue;
},
fix: function( event ) {
if ( event[ jQuery.expando ] ) {
return event;
}
// Create a writable copy of the event object and normalize some properties
var i, prop, copy,
type = event.type,
originalEvent = event,
fixHook = this.fixHooks[ type ];
if ( !fixHook ) {
this.fixHooks[ type ] = fixHook =
rmouseEvent.test( type ) ? this.mouseHooks :
rkeyEvent.test( type ) ? this.keyHooks :
{};
}
copy = fixHook.props ? this.props.concat( fixHook.props ) : this.props;
event = new jQuery.Event( originalEvent );
i = copy.length;
while ( i-- ) {
prop = copy[ i ];
event[ prop ] = originalEvent[ prop ];
}
// Support: IE<9
// Fix target property (#1925)
if ( !event.target ) {
event.target = originalEvent.srcElement || document;
}
// Support: Chrome 23+, Safari?
// Target should not be a text node (#504, #13143)
if ( event.target.nodeType === 3 ) {
event.target = event.target.parentNode;
}
// Support: IE<9
// For mouse/key events, metaKey==false if it's undefined (#3368, #11328)
event.metaKey = !!event.metaKey;
return fixHook.filter ? fixHook.filter( event, originalEvent ) : event;
},
// Includes some event props shared by KeyEvent and MouseEvent
props: "altKey bubbles cancelable ctrlKey currentTarget eventPhase metaKey relatedTarget shiftKey target timeStamp view which".split(" "),
fixHooks: {},
keyHooks: {
props: "char charCode key keyCode".split(" "),
filter: function( event, original ) {
// Add which for key events
if ( event.which == null ) {
event.which = original.charCode != null ? original.charCode : original.keyCode;
}
return event;
}
},
mouseHooks: {
props: "button buttons clientX clientY fromElement offsetX offsetY pageX pageY screenX screenY toElement".split(" "),
filter: function( event, original ) {
var body, eventDoc, doc,
button = original.button,
fromElement = original.fromElement;
// Calculate pageX/Y if missing and clientX/Y available
if ( event.pageX == null && original.clientX != null ) {
eventDoc = event.target.ownerDocument || document;
doc = eventDoc.documentElement;
body = eventDoc.body;
event.pageX = original.clientX + ( doc && doc.scrollLeft || body && body.scrollLeft || 0 ) - ( doc && doc.clientLeft || body && body.clientLeft || 0 );
event.pageY = original.clientY + ( doc && doc.scrollTop || body && body.scrollTop || 0 ) - ( doc && doc.clientTop || body && body.clientTop || 0 );
}
// Add relatedTarget, if necessary
if ( !event.relatedTarget && fromElement ) {
event.relatedTarget = fromElement === event.target ? original.toElement : fromElement;
}
// Add which for click: 1 === left; 2 === middle; 3 === right
// Note: button is not normalized, so don't use it
if ( !event.which && button !== undefined ) {
event.which = ( button & 1 ? 1 : ( button & 2 ? 3 : ( button & 4 ? 2 : 0 ) ) );
}
return event;
}
},
special: {
load: {
// Prevent triggered image.load events from bubbling to window.load
noBubble: true
},
focus: {
// Fire native event if possible so blur/focus sequence is correct
trigger: function() {
if ( this !== safeActiveElement() && this.focus ) {
try {
this.focus();
return false;
} catch ( e ) {
// Support: IE<9
// If we error on focus to hidden element (#1486, #12518),
// let .trigger() run the handlers
}
}
},
delegateType: "focusin"
},
blur: {
trigger: function() {
if ( this === safeActiveElement() && this.blur ) {
this.blur();
return false;
}
},
delegateType: "focusout"
},
click: {
// For checkbox, fire native event so checked state will be right
trigger: function() {
if ( jQuery.nodeName( this, "input" ) && this.type === "checkbox" && this.click ) {
this.click();
return false;
}
},
// For cross-browser consistency, don't fire native .click() on links
_default: function( event ) {
return jQuery.nodeName( event.target, "a" );
}
},
beforeunload: {
postDispatch: function( event ) {
// Even when returnValue is undefined, Firefox will still show the alert
if ( event.result !== undefined ) {
event.originalEvent.returnValue = event.result;
}
}
}
},
simulate: function( type, elem, event, bubble ) {
// Piggyback on a donor event to simulate a different one.
// Fake originalEvent to avoid donor's stopPropagation, but if the
// simulated event prevents default then we do the same on the donor.
var e = jQuery.extend(
new jQuery.Event(),
event,
{
type: type,
isSimulated: true,
originalEvent: {}
}
);
if ( bubble ) {
jQuery.event.trigger( e, null, elem );
} else {
jQuery.event.dispatch.call( elem, e );
}
if ( e.isDefaultPrevented() ) {
event.preventDefault();
}
}
};
jQuery.removeEvent = document.removeEventListener ?
function( elem, type, handle ) {
if ( elem.removeEventListener ) {
elem.removeEventListener( type, handle, false );
}
} :
function( elem, type, handle ) {
var name = "on" + type;
if ( elem.detachEvent ) {
// #8545, #7054, preventing memory leaks for custom events in IE6-8
// detachEvent needs a property on the element, named after the event, to properly expose it to GC
if ( typeof elem[ name ] === strundefined ) {
elem[ name ] = null;
}
elem.detachEvent( name, handle );
}
};
jQuery.Event = function( src, props ) {
// Allow instantiation without the 'new' keyword
if ( !(this instanceof jQuery.Event) ) {
return new jQuery.Event( src, props );
}
// Event object
if ( src && src.type ) {
this.originalEvent = src;
this.type = src.type;
// Events bubbling up the document may have been marked as prevented
// by a handler lower down the tree; reflect the correct value.
this.isDefaultPrevented = src.defaultPrevented ||
src.defaultPrevented === undefined && (
// Support: IE < 9
src.returnValue === false ||
// Support: Android < 4.0
src.getPreventDefault && src.getPreventDefault() ) ?
returnTrue :
returnFalse;
// Event type
} else {
this.type = src;
}
// Put explicitly provided properties onto the event object
if ( props ) {
jQuery.extend( this, props );
}
// Create a timestamp if incoming event doesn't have one
this.timeStamp = src && src.timeStamp || jQuery.now();
// Mark it as fixed
this[ jQuery.expando ] = true;
};
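// Illustrative usage (editor's note); "#box" is a hypothetical target:
//
//     var e = jQuery.Event( "keydown", { which: 13, ctrlKey: true } );
//     jQuery( "#box" ).trigger( e );
//     if ( e.isDefaultPrevented() ) {
//         // some handler called e.preventDefault()
//     }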
// jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding
// http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html
jQuery.Event.prototype = {
isDefaultPrevented: returnFalse,
isPropagationStopped: returnFalse,
isImmediatePropagationStopped: returnFalse,
preventDefault: function() {
var e = this.originalEvent;
this.isDefaultPrevented = returnTrue;
if ( !e ) {
return;
}
// If preventDefault exists, run it on the original event
if ( e.preventDefault ) {
e.preventDefault();
// Support: IE
// Otherwise set the returnValue property of the original event to false
} else {
e.returnValue = false;
}
},
stopPropagation: function() {
var e = this.originalEvent;
this.isPropagationStopped = returnTrue;
if ( !e ) {
return;
}
// If stopPropagation exists, run it on the original event
if ( e.stopPropagation ) {
e.stopPropagation();
}
// Support: IE
// Set the cancelBubble property of the original event to true
e.cancelBubble = true;
},
stopImmediatePropagation: function() {
this.isImmediatePropagationStopped = returnTrue;
this.stopPropagation();
}
};
// Create mouseenter/leave events using mouseover/out and event-time checks
jQuery.each({
mouseenter: "mouseover",
mouseleave: "mouseout"
}, function( orig, fix ) {
jQuery.event.special[ orig ] = {
delegateType: fix,
bindType: fix,
handle: function( event ) {
var ret,
target = this,
related = event.relatedTarget,
handleObj = event.handleObj;
// For mouseenter/leave, call the handler if related is outside the target.
// NB: No relatedTarget if the mouse left/entered the browser window
if ( !related || (related !== target && !jQuery.contains( target, related )) ) {
event.type = handleObj.origType;
ret = handleObj.handler.apply( this, arguments );
event.type = fix;
}
return ret;
}
};
});
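// Illustrative usage (editor's note); selectors are hypothetical. A delegated
// mouseenter binding is attached as "mouseover" (the fix type), and the handle
// above only runs the handler when relatedTarget lies outside the matched element:
//
//     jQuery( "#list" ).on( "mouseenter", "li", function() {
//         jQuery( this ).addClass( "hover" );
//     });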
// IE submit delegation
if ( !support.submitBubbles ) {
jQuery.event.special.submit = {
setup: function() {
// Only need this for delegated form submit events
if ( jQuery.nodeName( this, "form" ) ) {
return false;
}
// Lazy-add a submit handler when a descendant form may potentially be submitted
jQuery.event.add( this, "click._submit keypress._submit", function( e ) {
// Node name check avoids a VML-related crash in IE (#9807)
var elem = e.target,
form = jQuery.nodeName( elem, "input" ) || jQuery.nodeName( elem, "button" ) ? elem.form : undefined;
if ( form && !jQuery._data( form, "submitBubbles" ) ) {
jQuery.event.add( form, "submit._submit", function( event ) {
event._submit_bubble = true;
});
jQuery._data( form, "submitBubbles", true );
}
});
// return undefined since we don't need an event listener
},
postDispatch: function( event ) {
// If form was submitted by the user, bubble the event up the tree
if ( event._submit_bubble ) {
delete event._submit_bubble;
if ( this.parentNode && !event.isTrigger ) {
jQuery.event.simulate( "submit", this.parentNode, event, true );
}
}
},
teardown: function() {
// Only need this for delegated form submit events
if ( jQuery.nodeName( this, "form" ) ) {
return false;
}
// Remove delegated handlers; cleanData eventually reaps submit handlers attached above
jQuery.event.remove( this, "._submit" );
}
};
}
// IE change delegation and checkbox/radio fix
if ( !support.changeBubbles ) {
jQuery.event.special.change = {
setup: function() {
if ( rformElems.test( this.nodeName ) ) {
// IE doesn't fire change on a check/radio until blur; trigger it on click
// after a propertychange. Eat the blur-change in special.change.handle.
// This still fires onchange a second time for check/radio after blur.
if ( this.type === "checkbox" || this.type === "radio" ) {
jQuery.event.add( this, "propertychange._change", function( event ) {
if ( event.originalEvent.propertyName === "checked" ) {
this._just_changed = true;
}
});
jQuery.event.add( this, "click._change", function( event ) {
if ( this._just_changed && !event.isTrigger ) {
this._just_changed = false;
}
// Allow triggered, simulated change events (#11500)
jQuery.event.simulate( "change", this, event, true );
});
}
return false;
}
// Delegated event; lazy-add a change handler on descendant inputs
jQuery.event.add( this, "beforeactivate._change", function( e ) {
var elem = e.target;
if ( rformElems.test( elem.nodeName ) && !jQuery._data( elem, "changeBubbles" ) ) {
jQuery.event.add( elem, "change._change", function( event ) {
if ( this.parentNode && !event.isSimulated && !event.isTrigger ) {
jQuery.event.simulate( "change", this.parentNode, event, true );
}
});
jQuery._data( elem, "changeBubbles", true );
}
});
},
handle: function( event ) {
var elem = event.target;
// Swallow native change events from checkbox/radio, we already triggered them above
if ( this !== elem || event.isSimulated || event.isTrigger || (elem.type !== "radio" && elem.type !== "checkbox") ) {
return event.handleObj.handler.apply( this, arguments );
}
},
teardown: function() {
jQuery.event.remove( this, "._change" );
return !rformElems.test( this.nodeName );
}
};
}
// Create "bubbling" focus and blur events
if ( !support.focusinBubbles ) {
jQuery.each({ focus: "focusin", blur: "focusout" }, function( orig, fix ) {
// Attach a single capturing handler on the document while someone wants focusin/focusout
var handler = function( event ) {
jQuery.event.simulate( fix, event.target, jQuery.event.fix( event ), true );
};
jQuery.event.special[ fix ] = {
setup: function() {
var doc = this.ownerDocument || this,
attaches = jQuery._data( doc, fix );
if ( !attaches ) {
doc.addEventListener( orig, handler, true );
}
jQuery._data( doc, fix, ( attaches || 0 ) + 1 );
},
teardown: function() {
var doc = this.ownerDocument || this,
attaches = jQuery._data( doc, fix ) - 1;
if ( !attaches ) {
doc.removeEventListener( orig, handler, true );
jQuery._removeData( doc, fix );
} else {
jQuery._data( doc, fix, attaches );
}
}
};
});
}
jQuery.fn.extend({
on: function( types, selector, data, fn, /*INTERNAL*/ one ) {
var type, origFn;
// Types can be a map of types/handlers
if ( typeof types === "object" ) {
// ( types-Object, selector, data )
if ( typeof selector !== "string" ) {
// ( types-Object, data )
data = data || selector;
selector = undefined;
}
for ( type in types ) {
this.on( type, selector, data, types[ type ], one );
}
return this;
}
if ( data == null && fn == null ) {
// ( types, fn )
fn = selector;
data = selector = undefined;
} else if ( fn == null ) {
if ( typeof selector === "string" ) {
// ( types, selector, fn )
fn = data;
data = undefined;
} else {
// ( types, data, fn )
fn = data;
data = selector;
selector = undefined;
}
}
if ( fn === false ) {
fn = returnFalse;
} else if ( !fn ) {
return this;
}
if ( one === 1 ) {
origFn = fn;
fn = function( event ) {
// Can use an empty set, since event contains the info
jQuery().off( event );
return origFn.apply( this, arguments );
};
// Use same guid so caller can remove using origFn
fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ );
}
return this.each( function() {
jQuery.event.add( this, types, fn, data, selector );
});
},
one: function( types, selector, data, fn ) {
return this.on( types, selector, data, fn, 1 );
},
off: function( types, selector, fn ) {
var handleObj, type;
if ( types && types.preventDefault && types.handleObj ) {
// ( event ) dispatched jQuery.Event
handleObj = types.handleObj;
jQuery( types.delegateTarget ).off(
handleObj.namespace ? handleObj.origType + "." + handleObj.namespace : handleObj.origType,
handleObj.selector,
handleObj.handler
);
return this;
}
if ( typeof types === "object" ) {
// ( types-object [, selector] )
for ( type in types ) {
this.off( type, selector, types[ type ] );
}
return this;
}
if ( selector === false || typeof selector === "function" ) {
// ( types [, fn] )
fn = selector;
selector = undefined;
}
if ( fn === false ) {
fn = returnFalse;
}
return this.each(function() {
jQuery.event.remove( this, types, fn, selector );
});
},
trigger: function( type, data ) {
return this.each(function() {
jQuery.event.trigger( type, data, this );
});
},
triggerHandler: function( type, data ) {
var elem = this[0];
if ( elem ) {
return jQuery.event.trigger( type, data, elem, true );
}
}
});
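// Illustrative usage (editor's note); selectors, namespace, and data are hypothetical:
//
//     jQuery( "#list" ).on( "click.menu", "li", { color: "red" }, function( event ) {
//         // event.data.color === "red"; `this` is the clicked <li>
//     });
//     jQuery( "#list" ).off( ".menu" );                 // removes everything bound in the .menu namespace
//     jQuery( "#list li" ).first().trigger( "click" );  // bubbles, so the delegated handler above runs
//     jQuery( "#list" ).triggerHandler( "click" );      // directly-bound handlers only; no bubbling, no default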
function createSafeFragment( document ) {
var list = nodeNames.split( "|" ),
safeFrag = document.createDocumentFragment();
if ( safeFrag.createElement ) {
while ( list.length ) {
safeFrag.createElement(
list.pop()
);
}
}
return safeFrag;
}
var nodeNames = "abbr|article|aside|audio|bdi|canvas|data|datalist|details|figcaption|figure|footer|" +
"header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",
rinlinejQuery = / jQuery\d+="(?:null|\d+)"/g,
rnoshimcache = new RegExp("<(?:" + nodeNames + ")[\\s/>]", "i"),
rleadingWhitespace = /^\s+/,
rxhtmlTag = /<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,
rtagName = /<([\w:]+)/,
rtbody = /<tbody/i,
rhtml = /<|&#?\w+;/,
rnoInnerhtml = /<(?:script|style|link)/i,
// checked="checked" or checked
rchecked = /checked\s*(?:[^=]|=\s*.checked.)/i,
rscriptType = /^$|\/(?:java|ecma)script/i,
rscriptTypeMasked = /^true\/(.*)/,
rcleanScript = /^\s*<!(?:\[CDATA\[|--)|(?:\]\]|--)>\s*$/g,
// We have to close these tags to support XHTML (#13200)
wrapMap = {
option: [ 1, "<select multiple='multiple'>", "</select>" ],
legend: [ 1, "<fieldset>", "</fieldset>" ],
area: [ 1, "<map>", "</map>" ],
param: [ 1, "<object>", "</object>" ],
thead: [ 1, "<table>", "</table>" ],
tr: [ 2, "<table><tbody>", "</tbody></table>" ],
col: [ 2, "<table><tbody></tbody><colgroup>", "</colgroup></table>" ],
td: [ 3, "<table><tbody><tr>", "</tr></tbody></table>" ],
// IE6-8 can't serialize link, script, style, or any html5 (NoScope) tags,
// unless wrapped in a div with non-breaking characters in front of it.
_default: support.htmlSerialize ? [ 0, "", "" ] : [ 1, "X<div>", "</div>" ]
},
safeFragment = createSafeFragment( document ),
fragmentDiv = safeFragment.appendChild( document.createElement("div") );
wrapMap.optgroup = wrapMap.option;
wrapMap.tbody = wrapMap.tfoot = wrapMap.colgroup = wrapMap.caption = wrapMap.thead;
wrapMap.th = wrapMap.td;
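// Editor's note: wrapMap is what lets markup be parsed that cannot live directly
// inside a <div>. For a hypothetical call like
//
//     jQuery( "<td>cell</td>" );
//
// buildFragment wraps the string as "<table><tbody><tr><td>cell</td></tr></tbody></table>"
// and then walks wrap[0] (here 3) levels of lastChild back down to the <td>.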
function getAll( context, tag ) {
var elems, elem,
i = 0,
found = typeof context.getElementsByTagName !== strundefined ? context.getElementsByTagName( tag || "*" ) :
typeof context.querySelectorAll !== strundefined ? context.querySelectorAll( tag || "*" ) :
undefined;
if ( !found ) {
for ( found = [], elems = context.childNodes || context; (elem = elems[i]) != null; i++ ) {
if ( !tag || jQuery.nodeName( elem, tag ) ) {
found.push( elem );
} else {
jQuery.merge( found, getAll( elem, tag ) );
}
}
}
return tag === undefined || tag && jQuery.nodeName( context, tag ) ?
jQuery.merge( [ context ], found ) :
found;
}
// Used in buildFragment, fixes the defaultChecked property
function fixDefaultChecked( elem ) {
if ( rcheckableType.test( elem.type ) ) {
elem.defaultChecked = elem.checked;
}
}
// Support: IE<8
// Manipulating tables requires a tbody
function manipulationTarget( elem, content ) {
return jQuery.nodeName( elem, "table" ) &&
jQuery.nodeName( content.nodeType !== 11 ? content : content.firstChild, "tr" ) ?
elem.getElementsByTagName("tbody")[0] ||
elem.appendChild( elem.ownerDocument.createElement("tbody") ) :
elem;
}
// Replace/restore the type attribute of script elements for safe DOM manipulation
function disableScript( elem ) {
elem.type = (jQuery.find.attr( elem, "type" ) !== null) + "/" + elem.type;
return elem;
}
function restoreScript( elem ) {
var match = rscriptTypeMasked.exec( elem.type );
if ( match ) {
elem.type = match[1];
} else {
elem.removeAttribute("type");
}
return elem;
}
// Mark scripts as having already been evaluated
function setGlobalEval( elems, refElements ) {
var elem,
i = 0;
for ( ; (elem = elems[i]) != null; i++ ) {
jQuery._data( elem, "globalEval", !refElements || jQuery._data( refElements[i], "globalEval" ) );
}
}
function cloneCopyEvent( src, dest ) {
if ( dest.nodeType !== 1 || !jQuery.hasData( src ) ) {
return;
}
var type, i, l,
oldData = jQuery._data( src ),
curData = jQuery._data( dest, oldData ),
events = oldData.events;
if ( events ) {
delete curData.handle;
curData.events = {};
for ( type in events ) {
for ( i = 0, l = events[ type ].length; i < l; i++ ) {
jQuery.event.add( dest, type, events[ type ][ i ] );
}
}
}
// make the cloned public data object a copy from the original
if ( curData.data ) {
curData.data = jQuery.extend( {}, curData.data );
}
}
function fixCloneNodeIssues( src, dest ) {
var nodeName, e, data;
// We do not need to do anything for non-Elements
if ( dest.nodeType !== 1 ) {
return;
}
nodeName = dest.nodeName.toLowerCase();
// IE6-8 copies events bound via attachEvent when using cloneNode.
if ( !support.noCloneEvent && dest[ jQuery.expando ] ) {
data = jQuery._data( dest );
for ( e in data.events ) {
jQuery.removeEvent( dest, e, data.handle );
}
// Event data gets referenced instead of copied if the expando gets copied too
dest.removeAttribute( jQuery.expando );
}
// IE blanks contents when cloning scripts, and tries to evaluate newly-set text
if ( nodeName === "script" && dest.text !== src.text ) {
disableScript( dest ).text = src.text;
restoreScript( dest );
// IE6-10 improperly clones children of object elements using classid.
// IE10 throws NoModificationAllowedError if parent is null, #12132.
} else if ( nodeName === "object" ) {
if ( dest.parentNode ) {
dest.outerHTML = src.outerHTML;
}
// This path appears unavoidable for IE9. When cloning an object
// element in IE9, the outerHTML strategy above is not sufficient.
// If the src has innerHTML and the destination does not,
// copy the src.innerHTML into the dest.innerHTML. #10324
if ( support.html5Clone && ( src.innerHTML && !jQuery.trim(dest.innerHTML) ) ) {
dest.innerHTML = src.innerHTML;
}
} else if ( nodeName === "input" && rcheckableType.test( src.type ) ) {
// IE6-8 fails to persist the checked state of a cloned checkbox
// or radio button. Worse, IE6-7 fail to give the cloned element
// a checked appearance if the defaultChecked value isn't also set
dest.defaultChecked = dest.checked = src.checked;
// IE6-7 get confused and end up setting the value of a cloned
// checkbox/radio button to an empty string instead of "on"
if ( dest.value !== src.value ) {
dest.value = src.value;
}
// IE6-8 fails to return the selected option to the default selected
// state when cloning options
} else if ( nodeName === "option" ) {
dest.defaultSelected = dest.selected = src.defaultSelected;
// IE6-8 fails to set the defaultValue to the correct value when
// cloning other types of input fields
} else if ( nodeName === "input" || nodeName === "textarea" ) {
dest.defaultValue = src.defaultValue;
}
}
jQuery.extend({
clone: function( elem, dataAndEvents, deepDataAndEvents ) {
var destElements, node, clone, i, srcElements,
inPage = jQuery.contains( elem.ownerDocument, elem );
if ( support.html5Clone || jQuery.isXMLDoc(elem) || !rnoshimcache.test( "<" + elem.nodeName + ">" ) ) {
clone = elem.cloneNode( true );
// IE<=8 does not properly clone detached, unknown element nodes
} else {
fragmentDiv.innerHTML = elem.outerHTML;
fragmentDiv.removeChild( clone = fragmentDiv.firstChild );
}
if ( (!support.noCloneEvent || !support.noCloneChecked) &&
(elem.nodeType === 1 || elem.nodeType === 11) && !jQuery.isXMLDoc(elem) ) {
// We eschew Sizzle here for performance reasons: http://jsperf.com/getall-vs-sizzle/2
destElements = getAll( clone );
srcElements = getAll( elem );
// Fix all IE cloning issues
for ( i = 0; (node = srcElements[i]) != null; ++i ) {
// Ensure that the destination node is not null; Fixes #9587
if ( destElements[i] ) {
fixCloneNodeIssues( node, destElements[i] );
}
}
}
// Copy the events from the original to the clone
if ( dataAndEvents ) {
if ( deepDataAndEvents ) {
srcElements = srcElements || getAll( elem );
destElements = destElements || getAll( clone );
for ( i = 0; (node = srcElements[i]) != null; i++ ) {
cloneCopyEvent( node, destElements[i] );
}
} else {
cloneCopyEvent( elem, clone );
}
}
// Preserve script evaluation history
destElements = getAll( clone, "script" );
if ( destElements.length > 0 ) {
setGlobalEval( destElements, !inPage && getAll( elem, "script" ) );
}
destElements = srcElements = node = null;
// Return the cloned set
return clone;
},
buildFragment: function( elems, context, scripts, selection ) {
var j, elem, contains,
tmp, tag, tbody, wrap,
l = elems.length,
// Ensure a safe fragment
safe = createSafeFragment( context ),
nodes = [],
i = 0;
for ( ; i < l; i++ ) {
elem = elems[ i ];
if ( elem || elem === 0 ) {
// Add nodes directly
if ( jQuery.type( elem ) === "object" ) {
jQuery.merge( nodes, elem.nodeType ? [ elem ] : elem );
// Convert non-html into a text node
} else if ( !rhtml.test( elem ) ) {
nodes.push( context.createTextNode( elem ) );
// Convert html into DOM nodes
} else {
tmp = tmp || safe.appendChild( context.createElement("div") );
// Deserialize a standard representation
tag = (rtagName.exec( elem ) || [ "", "" ])[ 1 ].toLowerCase();
wrap = wrapMap[ tag ] || wrapMap._default;
tmp.innerHTML = wrap[1] + elem.replace( rxhtmlTag, "<$1></$2>" ) + wrap[2];
// Descend through wrappers to the right content
j = wrap[0];
while ( j-- ) {
tmp = tmp.lastChild;
}
// Manually add leading whitespace removed by IE
if ( !support.leadingWhitespace && rleadingWhitespace.test( elem ) ) {
nodes.push( context.createTextNode( rleadingWhitespace.exec( elem )[0] ) );
}
// Remove IE's autoinserted <tbody> from table fragments
if ( !support.tbody ) {
// String was a <table>, *may* have spurious <tbody>
elem = tag === "table" && !rtbody.test( elem ) ?
tmp.firstChild :
// String was a bare <thead> or <tfoot>
wrap[1] === "<table>" && !rtbody.test( elem ) ?
tmp :
0;
j = elem && elem.childNodes.length;
while ( j-- ) {
if ( jQuery.nodeName( (tbody = elem.childNodes[j]), "tbody" ) && !tbody.childNodes.length ) {
elem.removeChild( tbody );
}
}
}
jQuery.merge( nodes, tmp.childNodes );
// Fix #12392 for WebKit and IE > 9
tmp.textContent = "";
// Fix #12392 for oldIE
while ( tmp.firstChild ) {
tmp.removeChild( tmp.firstChild );
}
// Remember the top-level container for proper cleanup
tmp = safe.lastChild;
}
}
}
// Fix #11356: Clear elements from fragment
if ( tmp ) {
safe.removeChild( tmp );
}
// Reset defaultChecked for any radios and checkboxes
// about to be appended to the DOM in IE 6/7 (#8060)
if ( !support.appendChecked ) {
jQuery.grep( getAll( nodes, "input" ), fixDefaultChecked );
}
i = 0;
while ( (elem = nodes[ i++ ]) ) {
// #4087 - If origin and destination elements are the same, and this is
// that element, do not do anything
if ( selection && jQuery.inArray( elem, selection ) !== -1 ) {
continue;
}
contains = jQuery.contains( elem.ownerDocument, elem );
// Append to fragment
tmp = getAll( safe.appendChild( elem ), "script" );
// Preserve script evaluation history
if ( contains ) {
setGlobalEval( tmp );
}
// Capture executables
if ( scripts ) {
j = 0;
while ( (elem = tmp[ j++ ]) ) {
if ( rscriptType.test( elem.type || "" ) ) {
scripts.push( elem );
}
}
}
}
tmp = null;
return safe;
},
cleanData: function( elems, /* internal */ acceptData ) {
var elem, type, id, data,
i = 0,
internalKey = jQuery.expando,
cache = jQuery.cache,
deleteExpando = support.deleteExpando,
special = jQuery.event.special;
for ( ; (elem = elems[i]) != null; i++ ) {
if ( acceptData || jQuery.acceptData( elem ) ) {
id = elem[ internalKey ];
data = id && cache[ id ];
if ( data ) {
if ( data.events ) {
for ( type in data.events ) {
if ( special[ type ] ) {
jQuery.event.remove( elem, type );
// This is a shortcut to avoid jQuery.event.remove's overhead
} else {
jQuery.removeEvent( elem, type, data.handle );
}
}
}
// Remove cache only if it was not already removed by jQuery.event.remove
if ( cache[ id ] ) {
delete cache[ id ];
// IE does not allow us to delete expando properties from nodes,
// nor does it have a removeAttribute function on Document nodes;
// we must handle all of these cases
if ( deleteExpando ) {
delete elem[ internalKey ];
} else if ( typeof elem.removeAttribute !== strundefined ) {
elem.removeAttribute( internalKey );
} else {
elem[ internalKey ] = null;
}
deletedIds.push( id );
}
}
}
}
}
});
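// Illustrative usage (editor's note); the element id is hypothetical:
//
//     var node = document.getElementById( "box" );
//     var copy = jQuery.clone( node, true, true );   // deep copy, including data/events on descendants
//     document.body.appendChild( copy );
//     jQuery.cleanData( [ copy ] );                  // later: strip data/events before discarding the node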
jQuery.fn.extend({
text: function( value ) {
return access( this, function( value ) {
return value === undefined ?
jQuery.text( this ) :
this.empty().append( ( this[0] && this[0].ownerDocument || document ).createTextNode( value ) );
}, null, value, arguments.length );
},
append: function() {
return this.domManip( arguments, function( elem ) {
if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) {
var target = manipulationTarget( this, elem );
target.appendChild( elem );
}
});
},
prepend: function() {
return this.domManip( arguments, function( elem ) {
if ( this.nodeType === 1 || this.nodeType === 11 || this.nodeType === 9 ) {
var target = manipulationTarget( this, elem );
target.insertBefore( elem, target.firstChild );
}
});
},
before: function() {
return this.domManip( arguments, function( elem ) {
if ( this.parentNode ) {
this.parentNode.insertBefore( elem, this );
}
});
},
after: function() {
return this.domManip( arguments, function( elem ) {
if ( this.parentNode ) {
this.parentNode.insertBefore( elem, this.nextSibling );
}
});
},
remove: function( selector, keepData /* Internal Use Only */ ) {
var elem,
elems = selector ? jQuery.filter( selector, this ) : this,
i = 0;
for ( ; (elem = elems[i]) != null; i++ ) {
if ( !keepData && elem.nodeType === 1 ) {
jQuery.cleanData( getAll( elem ) );
}
if ( elem.parentNode ) {
if ( keepData && jQuery.contains( elem.ownerDocument, elem ) ) {
setGlobalEval( getAll( elem, "script" ) );
}
elem.parentNode.removeChild( elem );
}
}
return this;
},
empty: function() {
var elem,
i = 0;
for ( ; (elem = this[i]) != null; i++ ) {
// Remove element nodes and prevent memory leaks
if ( elem.nodeType === 1 ) {
jQuery.cleanData( getAll( elem, false ) );
}
// Remove any remaining nodes
while ( elem.firstChild ) {
elem.removeChild( elem.firstChild );
}
// If this is a select, ensure that it displays empty (#12336)
// Support: IE<9
if ( elem.options && jQuery.nodeName( elem, "select" ) ) {
elem.options.length = 0;
}
}
return this;
},
clone: function( dataAndEvents, deepDataAndEvents ) {
dataAndEvents = dataAndEvents == null ? false : dataAndEvents;
deepDataAndEvents = deepDataAndEvents == null ? dataAndEvents : deepDataAndEvents;
return this.map(function() {
return jQuery.clone( this, dataAndEvents, deepDataAndEvents );
});
},
html: function( value ) {
return access( this, function( value ) {
var elem = this[ 0 ] || {},
i = 0,
l = this.length;
if ( value === undefined ) {
return elem.nodeType === 1 ?
elem.innerHTML.replace( rinlinejQuery, "" ) :
undefined;
}
// See if we can take a shortcut and just use innerHTML
if ( typeof value === "string" && !rnoInnerhtml.test( value ) &&
( support.htmlSerialize || !rnoshimcache.test( value ) ) &&
( support.leadingWhitespace || !rleadingWhitespace.test( value ) ) &&
!wrapMap[ (rtagName.exec( value ) || [ "", "" ])[ 1 ].toLowerCase() ] ) {
value = value.replace( rxhtmlTag, "<$1></$2>" );
try {
for (; i < l; i++ ) {
// Remove element nodes and prevent memory leaks
elem = this[i] || {};
if ( elem.nodeType === 1 ) {
jQuery.cleanData( getAll( elem, false ) );
elem.innerHTML = value;
}
}
elem = 0;
// If using innerHTML throws an exception, use the fallback method
} catch(e) {}
}
if ( elem ) {
this.empty().append( value );
}
}, null, value, arguments.length );
},
replaceWith: function() {
var arg = arguments[ 0 ];
// Make the changes, replacing each context element with the new content
this.domManip( arguments, function( elem ) {
arg = this.parentNode;
jQuery.cleanData( getAll( this ) );
if ( arg ) {
arg.replaceChild( elem, this );
}
});
// Force removal if there was no new content (e.g., from empty arguments)
return arg && (arg.length || arg.nodeType) ? this : this.remove();
},
detach: function( selector ) {
return this.remove( selector, true );
},
domManip: function( args, callback ) {
// Flatten any nested arrays
args = concat.apply( [], args );
var first, node, hasScripts,
scripts, doc, fragment,
i = 0,
l = this.length,
set = this,
iNoClone = l - 1,
value = args[0],
isFunction = jQuery.isFunction( value );
// We can't cloneNode fragments that contain checked, in WebKit
if ( isFunction ||
( l > 1 && typeof value === "string" &&
!support.checkClone && rchecked.test( value ) ) ) {
return this.each(function( index ) {
var self = set.eq( index );
if ( isFunction ) {
args[0] = value.call( this, index, self.html() );
}
self.domManip( args, callback );
});
}
if ( l ) {
fragment = jQuery.buildFragment( args, this[ 0 ].ownerDocument, false, this );
first = fragment.firstChild;
if ( fragment.childNodes.length === 1 ) {
fragment = first;
}
if ( first ) {
scripts = jQuery.map( getAll( fragment, "script" ), disableScript );
hasScripts = scripts.length;
// Use the original fragment for the last item instead of the first because it can end up
// being emptied incorrectly in certain situations (#8070).
for ( ; i < l; i++ ) {
node = fragment;
if ( i !== iNoClone ) {
node = jQuery.clone( node, true, true );
// Keep references to cloned scripts for later restoration
if ( hasScripts ) {
jQuery.merge( scripts, getAll( node, "script" ) );
}
}
callback.call( this[i], node, i );
}
if ( hasScripts ) {
doc = scripts[ scripts.length - 1 ].ownerDocument;
// Reenable scripts
jQuery.map( scripts, restoreScript );
// Evaluate executable scripts on first document insertion
for ( i = 0; i < hasScripts; i++ ) {
node = scripts[ i ];
if ( rscriptType.test( node.type || "" ) &&
!jQuery._data( node, "globalEval" ) && jQuery.contains( doc, node ) ) {
if ( node.src ) {
// Optional AJAX dependency, but won't run scripts if not present
if ( jQuery._evalUrl ) {
jQuery._evalUrl( node.src );
}
} else {
jQuery.globalEval( ( node.text || node.textContent || node.innerHTML || "" ).replace( rcleanScript, "" ) );
}
}
}
}
// Fix #11809: Avoid leaking memory
fragment = first = null;
}
}
return this;
}
});
jQuery.each({
appendTo: "append",
prependTo: "prepend",
insertBefore: "before",
insertAfter: "after",
replaceAll: "replaceWith"
}, function( name, original ) {
jQuery.fn[ name ] = function( selector ) {
var elems,
i = 0,
ret = [],
insert = jQuery( selector ),
last = insert.length - 1;
for ( ; i <= last; i++ ) {
elems = i === last ? this : this.clone(true);
jQuery( insert[i] )[ original ]( elems );
// Modern browsers can apply jQuery collections as arrays, but oldIE needs a .get()
push.apply( ret, elems.get() );
}
return this.pushStack( ret );
};
});
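// Illustrative usage sketch (hypothetical selectors, not part of the library code path):
// each generated method is the "flipped" form of its source method; the receiver becomes
// the content and the selector becomes the target, e.g.
//   jQuery( "<li>new</li>" ).appendTo( "#list" );  // same DOM result as jQuery( "#list" ).append( "<li>new</li>" )
//   jQuery( ".note" ).insertAfter( "h1" );         // clones .note for every <h1> target except the last
// The returned collection is the inserted elements (via pushStack), not the targets.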
var iframe,
elemdisplay = {};
/**
* Retrieve the actual display of an element
* @param {String} name nodeName of the element
* @param {Object} doc Document object
*/
// Called only from within defaultDisplay
function actualDisplay( name, doc ) {
var elem = jQuery( doc.createElement( name ) ).appendTo( doc.body ),
// getDefaultComputedStyle might be reliably used only on an attached element
display = window.getDefaultComputedStyle ?
// Use of this method is a temporary fix (more like an optimization) until something better comes along,
// since it was removed from specification and supported only in FF
window.getDefaultComputedStyle( elem[ 0 ] ).display : jQuery.css( elem[ 0 ], "display" );
// We don't have any data stored on the element,
// so use "detach" method as fast way to get rid of the element
elem.detach();
return display;
}
/**
* Try to determine the default display value of an element
* @param {String} nodeName
*/
function defaultDisplay( nodeName ) {
var doc = document,
display = elemdisplay[ nodeName ];
if ( !display ) {
display = actualDisplay( nodeName, doc );
// If the simple way fails, read from inside an iframe
if ( display === "none" || !display ) {
// Use the already-created iframe if possible
iframe = (iframe || jQuery( "<iframe frameborder='0' width='0' height='0'/>" )).appendTo( doc.documentElement );
// Always write a new HTML skeleton so Webkit and Firefox don't choke on reuse
doc = ( iframe[ 0 ].contentWindow || iframe[ 0 ].contentDocument ).document;
// Support: IE
doc.write();
doc.close();
display = actualDisplay( nodeName, doc );
iframe.detach();
}
// Store the correct default display
elemdisplay[ nodeName ] = display;
}
return display;
}
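// Illustrative behaviour sketch (typical values, not guaranteed for every browser):
//   defaultDisplay( "div" );   // "block"
//   defaultDisplay( "span" );  // "inline"
//   defaultDisplay( "li" );    // "list-item"
// Results are cached per nodeName, so repeated calls are cheap; when a stylesheet hides the
// element (display:none), the value is read from a throwaway iframe so the browser default
// is still returned.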
var rmargin = (/^margin/);
var rnumnonpx = new RegExp( "^(" + pnum + ")(?!px)[a-z%]+$", "i" );
var getStyles, curCSS,
rposition = /^(top|right|bottom|left)$/;
if ( window.getComputedStyle ) {
getStyles = function( elem ) {
return elem.ownerDocument.defaultView.getComputedStyle( elem, null );
};
curCSS = function( elem, name, computed ) {
var width, minWidth, maxWidth, ret,
style = elem.style;
computed = computed || getStyles( elem );
// getPropertyValue is only needed for .css('filter') in IE9, see #12537
ret = computed ? computed.getPropertyValue( name ) || computed[ name ] : undefined;
if ( computed ) {
if ( ret === "" && !jQuery.contains( elem.ownerDocument, elem ) ) {
ret = jQuery.style( elem, name );
}
// A tribute to the "awesome hack by Dean Edwards"
// Chrome < 17 and Safari 5.0 use "computed value" instead of "used value" for margin-right
// Safari 5.1.7 (at least) returns percentage for a larger set of values, but width seems to be reliably pixels
// this is against the CSSOM draft spec: http://dev.w3.org/csswg/cssom/#resolved-values
if ( rnumnonpx.test( ret ) && rmargin.test( name ) ) {
// Remember the original values
width = style.width;
minWidth = style.minWidth;
maxWidth = style.maxWidth;
// Put in the new values to get a computed value out
style.minWidth = style.maxWidth = style.width = ret;
ret = computed.width;
// Revert the changed values
style.width = width;
style.minWidth = minWidth;
style.maxWidth = maxWidth;
}
}
// Support: IE
// IE returns zIndex value as an integer.
return ret === undefined ?
ret :
ret + "";
};
} else if ( document.documentElement.currentStyle ) {
getStyles = function( elem ) {
return elem.currentStyle;
};
curCSS = function( elem, name, computed ) {
var left, rs, rsLeft, ret,
style = elem.style;
computed = computed || getStyles( elem );
ret = computed ? computed[ name ] : undefined;
// Avoid setting ret to empty string here
// so we don't default to auto
if ( ret == null && style && style[ name ] ) {
ret = style[ name ];
}
// From the awesome hack by Dean Edwards
// http://erik.eae.net/archives/2007/07/27/18.54.15/#comment-102291
// If we're not dealing with a regular pixel number
// but a number that has a weird ending, we need to convert it to pixels
// but not position css attributes, as those are proportional to the parent element instead
// and we can't measure the parent instead because it might trigger a "stacking dolls" problem
if ( rnumnonpx.test( ret ) && !rposition.test( name ) ) {
// Remember the original values
left = style.left;
rs = elem.runtimeStyle;
rsLeft = rs && rs.left;
// Put in the new values to get a computed value out
if ( rsLeft ) {
rs.left = elem.currentStyle.left;
}
style.left = name === "fontSize" ? "1em" : ret;
ret = style.pixelLeft + "px";
// Revert the changed values
style.left = left;
if ( rsLeft ) {
rs.left = rsLeft;
}
}
// Support: IE
// IE returns zIndex value as an integer.
return ret === undefined ?
ret :
ret + "" || "auto";
};
}
function addGetHookIf( conditionFn, hookFn ) {
// Define the hook, we'll check on the first run if it's really needed.
return {
get: function() {
var condition = conditionFn();
if ( condition == null ) {
// The test was not ready at this point; screw the hook this time
// but check again when needed next time.
return;
}
if ( condition ) {
// Hook not needed (or it's not possible to use it due to missing dependency),
// remove it.
// Since there are no other hooks for marginRight, remove the whole object.
delete this.get;
return;
}
// Hook needed; redefine it so that the support test is not executed again.
return (this.get = hookFn).apply( this, arguments );
}
};
}
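// Illustrative usage sketch (behaviour summary, no new names introduced):
// conditionFn acts as a lazy support test: null/undefined means "test not ready, try again
// on the next call", truthy means "hook unnecessary, delete it", falsy means "hook needed,
// replace get with hookFn from now on" -- this is how jQuery.cssHooks.marginRight is wired
// up further below via support.reliableMarginRight.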
(function() {
var a, reliableHiddenOffsetsVal, boxSizingVal, boxSizingReliableVal,
pixelPositionVal, reliableMarginRightVal,
div = document.createElement( "div" ),
containerStyles = "border:0;width:0;height:0;position:absolute;top:0;left:-9999px",
divReset =
"-webkit-box-sizing:content-box;-moz-box-sizing:content-box;box-sizing:content-box;" +
"display:block;padding:0;margin:0;border:0";
// Setup
div.innerHTML = " <link/><table></table><a href='/a'>a</a><input type='checkbox'/>";
a = div.getElementsByTagName( "a" )[ 0 ];
a.style.cssText = "float:left;opacity:.5";
// Make sure that element opacity exists
// (IE uses filter instead)
// Use a regex to work around a WebKit issue. See #5145
support.opacity = /^0.5/.test( a.style.opacity );
// Verify style float existence
// (IE uses styleFloat instead of cssFloat)
support.cssFloat = !!a.style.cssFloat;
div.style.backgroundClip = "content-box";
div.cloneNode( true ).style.backgroundClip = "";
support.clearCloneStyle = div.style.backgroundClip === "content-box";
// Null elements to avoid leaks in IE.
a = div = null;
jQuery.extend(support, {
reliableHiddenOffsets: function() {
if ( reliableHiddenOffsetsVal != null ) {
return reliableHiddenOffsetsVal;
}
var container, tds, isSupported,
div = document.createElement( "div" ),
body = document.getElementsByTagName( "body" )[ 0 ];
if ( !body ) {
// Return for frameset docs that don't have a body
return;
}
// Setup
div.setAttribute( "className", "t" );
div.innerHTML = " <link/><table></table><a href='/a'>a</a><input type='checkbox'/>";
container = document.createElement( "div" );
container.style.cssText = containerStyles;
body.appendChild( container ).appendChild( div );
// Support: IE8
// Check if table cells still have offsetWidth/Height when they are set
// to display:none and there are still other visible table cells in a
// table row; if so, offsetWidth/Height are not reliable for use when
// determining if an element has been hidden directly using
// display:none (it is still safe to use offsets if a parent element is
// hidden; don safety goggles and see bug #4512 for more information).
div.innerHTML = "<table><tr><td></td><td>t</td></tr></table>";
tds = div.getElementsByTagName( "td" );
tds[ 0 ].style.cssText = "padding:0;margin:0;border:0;display:none";
isSupported = ( tds[ 0 ].offsetHeight === 0 );
tds[ 0 ].style.display = "";
tds[ 1 ].style.display = "none";
// Support: IE8
// Check if empty table cells still have offsetWidth/Height
reliableHiddenOffsetsVal = isSupported && ( tds[ 0 ].offsetHeight === 0 );
body.removeChild( container );
// Null elements to avoid leaks in IE.
div = body = null;
return reliableHiddenOffsetsVal;
},
boxSizing: function() {
if ( boxSizingVal == null ) {
computeStyleTests();
}
return boxSizingVal;
},
boxSizingReliable: function() {
if ( boxSizingReliableVal == null ) {
computeStyleTests();
}
return boxSizingReliableVal;
},
pixelPosition: function() {
if ( pixelPositionVal == null ) {
computeStyleTests();
}
return pixelPositionVal;
},
reliableMarginRight: function() {
var body, container, div, marginDiv;
// Use window.getComputedStyle because jsdom on node.js will break without it.
if ( reliableMarginRightVal == null && window.getComputedStyle ) {
body = document.getElementsByTagName( "body" )[ 0 ];
if ( !body ) {
// Test fired too early or in an unsupported environment, exit.
return;
}
container = document.createElement( "div" );
div = document.createElement( "div" );
container.style.cssText = containerStyles;
body.appendChild( container ).appendChild( div );
// Check if div with explicit width and no margin-right incorrectly
// gets computed margin-right based on width of container. (#3333)
// Fails in WebKit before Feb 2011 nightlies
// WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right
marginDiv = div.appendChild( document.createElement( "div" ) );
marginDiv.style.cssText = div.style.cssText = divReset;
marginDiv.style.marginRight = marginDiv.style.width = "0";
div.style.width = "1px";
reliableMarginRightVal =
!parseFloat( ( window.getComputedStyle( marginDiv, null ) || {} ).marginRight );
body.removeChild( container );
}
return reliableMarginRightVal;
}
});
function computeStyleTests() {
var container, div,
body = document.getElementsByTagName( "body" )[ 0 ];
if ( !body ) {
// Test fired too early or in an unsupported environment, exit.
return;
}
container = document.createElement( "div" );
div = document.createElement( "div" );
container.style.cssText = containerStyles;
body.appendChild( container ).appendChild( div );
div.style.cssText =
"-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box;" +
"position:absolute;display:block;padding:1px;border:1px;width:4px;" +
"margin-top:1%;top:1%";
// Workaround failing boxSizing test due to offsetWidth returning wrong value
// with some non-1 values of body zoom, ticket #13543
jQuery.swap( body, body.style.zoom != null ? { zoom: 1 } : {}, function() {
boxSizingVal = div.offsetWidth === 4;
});
// Will be changed later if needed.
boxSizingReliableVal = true;
pixelPositionVal = false;
reliableMarginRightVal = true;
// Use window.getComputedStyle because jsdom on node.js will break without it.
if ( window.getComputedStyle ) {
pixelPositionVal = ( window.getComputedStyle( div, null ) || {} ).top !== "1%";
boxSizingReliableVal =
( window.getComputedStyle( div, null ) || { width: "4px" } ).width === "4px";
}
body.removeChild( container );
// Null elements to avoid leaks in IE.
div = body = null;
}
})();
// A method for quickly swapping in/out CSS properties to get correct calculations.
jQuery.swap = function( elem, options, callback, args ) {
var ret, name,
old = {};
// Remember the old values, and insert the new ones
for ( name in options ) {
old[ name ] = elem.style[ name ];
elem.style[ name ] = options[ name ];
}
ret = callback.apply( elem, args || [] );
// Revert the old values
for ( name in options ) {
elem.style[ name ] = old[ name ];
}
return ret;
};
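// Illustrative usage sketch (hypothetical element, not part of the library code path):
//   var w = jQuery.swap( elem, { position: "absolute", visibility: "hidden", display: "block" }, function() {
//     return elem.offsetWidth;
//   });
// The original inline styles are restored before the measured value is returned.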
var
ralpha = /alpha\([^)]*\)/i,
ropacity = /opacity\s*=\s*([^)]*)/,
// swappable if display is none or starts with table except "table", "table-cell", or "table-caption"
// see here for display values: https://developer.mozilla.org/en-US/docs/CSS/display
rdisplayswap = /^(none|table(?!-c[ea]).+)/,
rnumsplit = new RegExp( "^(" + pnum + ")(.*)$", "i" ),
rrelNum = new RegExp( "^([+-])=(" + pnum + ")", "i" ),
cssShow = { position: "absolute", visibility: "hidden", display: "block" },
cssNormalTransform = {
letterSpacing: 0,
fontWeight: 400
},
cssPrefixes = [ "Webkit", "O", "Moz", "ms" ];
// return a css property mapped to a potentially vendor prefixed property
function vendorPropName( style, name ) {
// shortcut for names that are not vendor prefixed
if ( name in style ) {
return name;
}
// check for vendor prefixed names
var capName = name.charAt(0).toUpperCase() + name.slice(1),
origName = name,
i = cssPrefixes.length;
while ( i-- ) {
name = cssPrefixes[ i ] + capName;
if ( name in style ) {
return name;
}
}
return origName;
}
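// Illustrative behaviour sketch (hypothetical engine that only implements a prefixed property):
//   vendorPropName( elem.style, "transform" );  // "WebkitTransform" (or "msTransform", "MozTransform", ...)
//   vendorPropName( elem.style, "width" );      // "width" (already present, returned unchanged)
// If no prefixed variant exists either, the original name is returned as a fallback.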
function showHide( elements, show ) {
var display, elem, hidden,
values = [],
index = 0,
length = elements.length;
for ( ; index < length; index++ ) {
elem = elements[ index ];
if ( !elem.style ) {
continue;
}
values[ index ] = jQuery._data( elem, "olddisplay" );
display = elem.style.display;
if ( show ) {
// Reset the inline display of this element to learn if it is
// being hidden by cascaded rules or not
if ( !values[ index ] && display === "none" ) {
elem.style.display = "";
}
// Set elements which have been overridden with display: none
// in a stylesheet to whatever the default browser style is
// for such an element
if ( elem.style.display === "" && isHidden( elem ) ) {
values[ index ] = jQuery._data( elem, "olddisplay", defaultDisplay(elem.nodeName) );
}
} else {
if ( !values[ index ] ) {
hidden = isHidden( elem );
if ( display && display !== "none" || !hidden ) {
jQuery._data( elem, "olddisplay", hidden ? display : jQuery.css( elem, "display" ) );
}
}
}
}
// Set the display of most of the elements in a second loop
// to avoid the constant reflow
for ( index = 0; index < length; index++ ) {
elem = elements[ index ];
if ( !elem.style ) {
continue;
}
if ( !show || elem.style.display === "none" || elem.style.display === "" ) {
elem.style.display = show ? values[ index ] || "" : "none";
}
}
return elements;
}
function setPositiveNumber( elem, value, subtract ) {
var matches = rnumsplit.exec( value );
return matches ?
// Guard against undefined "subtract", e.g., when used as in cssHooks
Math.max( 0, matches[ 1 ] - ( subtract || 0 ) ) + ( matches[ 2 ] || "px" ) :
value;
}
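// Illustrative behaviour sketch (example values only):
//   setPositiveNumber( elem, "12.5px", 2 );  // "10.5px" (unit preserved)
//   setPositiveNumber( elem, "1em", 5 );     // "0em"    (clamped, never negative)
//   setPositiveNumber( elem, "auto" );       // "auto"   (non-numeric values pass through)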
function augmentWidthOrHeight( elem, name, extra, isBorderBox, styles ) {
var i = extra === ( isBorderBox ? "border" : "content" ) ?
// If we already have the right measurement, avoid augmentation
4 :
// Otherwise initialize for horizontal or vertical properties
name === "width" ? 1 : 0,
val = 0;
for ( ; i < 4; i += 2 ) {
// both box models exclude margin, so add it if we want it
if ( extra === "margin" ) {
val += jQuery.css( elem, extra + cssExpand[ i ], true, styles );
}
if ( isBorderBox ) {
// border-box includes padding, so remove it if we want content
if ( extra === "content" ) {
val -= jQuery.css( elem, "padding" + cssExpand[ i ], true, styles );
}
// at this point, extra isn't border or margin, so remove border
if ( extra !== "margin" ) {
val -= jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles );
}
} else {
// at this point, extra isn't content, so add padding
val += jQuery.css( elem, "padding" + cssExpand[ i ], true, styles );
// at this point, extra isn't content nor padding, so add border
if ( extra !== "padding" ) {
val += jQuery.css( elem, "border" + cssExpand[ i ] + "Width", true, styles );
}
}
}
return val;
}
function getWidthOrHeight( elem, name, extra ) {
// Start with offset property, which is equivalent to the border-box value
var valueIsBorderBox = true,
val = name === "width" ? elem.offsetWidth : elem.offsetHeight,
styles = getStyles( elem ),
isBorderBox = support.boxSizing() && jQuery.css( elem, "boxSizing", false, styles ) === "border-box";
// some non-html elements return undefined for offsetWidth, so check for null/undefined
// svg - https://bugzilla.mozilla.org/show_bug.cgi?id=649285
// MathML - https://bugzilla.mozilla.org/show_bug.cgi?id=491668
if ( val <= 0 || val == null ) {
// Fall back to computed then uncomputed css if necessary
val = curCSS( elem, name, styles );
if ( val < 0 || val == null ) {
val = elem.style[ name ];
}
// Computed unit is not pixels. Stop here and return.
if ( rnumnonpx.test(val) ) {
return val;
}
// we need the check for style in case a browser which returns unreliable values
// for getComputedStyle silently falls back to the reliable elem.style
valueIsBorderBox = isBorderBox && ( support.boxSizingReliable() || val === elem.style[ name ] );
// Normalize "", auto, and prepare for extra
val = parseFloat( val ) || 0;
}
// use the active box-sizing model to add/subtract irrelevant styles
return ( val +
augmentWidthOrHeight(
elem,
name,
extra || ( isBorderBox ? "border" : "content" ),
valueIsBorderBox,
styles
)
) + "px";
}
jQuery.extend({
// Add in style property hooks for overriding the default
// behavior of getting and setting a style property
cssHooks: {
opacity: {
get: function( elem, computed ) {
if ( computed ) {
// We should always get a number back from opacity
var ret = curCSS( elem, "opacity" );
return ret === "" ? "1" : ret;
}
}
}
},
// Don't automatically add "px" to these possibly-unitless properties
cssNumber: {
"columnCount": true,
"fillOpacity": true,
"fontWeight": true,
"lineHeight": true,
"opacity": true,
"order": true,
"orphans": true,
"widows": true,
"zIndex": true,
"zoom": true
},
// Add in properties whose names you wish to fix before
// setting or getting the value
cssProps: {
// normalize float css property
"float": support.cssFloat ? "cssFloat" : "styleFloat"
},
// Get and set the style property on a DOM Node
style: function( elem, name, value, extra ) {
// Don't set styles on text and comment nodes
if ( !elem || elem.nodeType === 3 || elem.nodeType === 8 || !elem.style ) {
return;
}
// Make sure that we're working with the right name
var ret, type, hooks,
origName = jQuery.camelCase( name ),
style = elem.style;
name = jQuery.cssProps[ origName ] || ( jQuery.cssProps[ origName ] = vendorPropName( style, origName ) );
// gets hook for the prefixed version
// followed by the unprefixed version
hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ];
// Check if we're setting a value
if ( value !== undefined ) {
type = typeof value;
// convert relative number strings (+= or -=) to relative numbers. #7345
if ( type === "string" && (ret = rrelNum.exec( value )) ) {
value = ( ret[1] + 1 ) * ret[2] + parseFloat( jQuery.css( elem, name ) );
// Fixes bug #9237
type = "number";
}
// Make sure that null and NaN values aren't set. See: #7116
if ( value == null || value !== value ) {
return;
}
// If a number was passed in, add 'px' to the value (except for certain CSS properties)
if ( type === "number" && !jQuery.cssNumber[ origName ] ) {
value += "px";
}
// Fixes #8908, it can be done more correctly by specifying setters in cssHooks,
// but it would mean to define eight (for every problematic property) identical functions
if ( !support.clearCloneStyle && value === "" && name.indexOf("background") === 0 ) {
style[ name ] = "inherit";
}
// If a hook was provided, use that value, otherwise just set the specified value
if ( !hooks || !("set" in hooks) || (value = hooks.set( elem, value, extra )) !== undefined ) {
// Support: IE
// Swallow errors from 'invalid' CSS values (#5509)
try {
// Support: Chrome, Safari
// Setting style to blank string required to delete "style: x !important;"
style[ name ] = "";
style[ name ] = value;
} catch(e) {}
}
} else {
// If a hook was provided get the non-computed value from there
if ( hooks && "get" in hooks && (ret = hooks.get( elem, false, extra )) !== undefined ) {
return ret;
}
// Otherwise just get the value from the style object
return style[ name ];
}
},
css: function( elem, name, extra, styles ) {
var num, val, hooks,
origName = jQuery.camelCase( name );
// Make sure that we're working with the right name
name = jQuery.cssProps[ origName ] || ( jQuery.cssProps[ origName ] = vendorPropName( elem.style, origName ) );
// gets hook for the prefixed version
// followed by the unprefixed version
hooks = jQuery.cssHooks[ name ] || jQuery.cssHooks[ origName ];
// If a hook was provided get the computed value from there
if ( hooks && "get" in hooks ) {
val = hooks.get( elem, true, extra );
}
// Otherwise, if a way to get the computed value exists, use that
if ( val === undefined ) {
val = curCSS( elem, name, styles );
}
//convert "normal" to computed value
if ( val === "normal" && name in cssNormalTransform ) {
val = cssNormalTransform[ name ];
}
// Return, converting to number if forced or a qualifier was provided and val looks numeric
if ( extra === "" || extra ) {
num = parseFloat( val );
return extra === true || jQuery.isNumeric( num ) ? num || 0 : val;
}
return val;
}
});
jQuery.each([ "height", "width" ], function( i, name ) {
jQuery.cssHooks[ name ] = {
get: function( elem, computed, extra ) {
if ( computed ) {
// certain elements can have dimension info if we invisibly show them
// however, it must have a current display style that would benefit from this
return elem.offsetWidth === 0 && rdisplayswap.test( jQuery.css( elem, "display" ) ) ?
jQuery.swap( elem, cssShow, function() {
return getWidthOrHeight( elem, name, extra );
}) :
getWidthOrHeight( elem, name, extra );
}
},
set: function( elem, value, extra ) {
var styles = extra && getStyles( elem );
return setPositiveNumber( elem, value, extra ?
augmentWidthOrHeight(
elem,
name,
extra,
support.boxSizing() && jQuery.css( elem, "boxSizing", false, styles ) === "border-box",
styles
) : 0
);
}
};
});
if ( !support.opacity ) {
jQuery.cssHooks.opacity = {
get: function( elem, computed ) {
// IE uses filters for opacity
return ropacity.test( (computed && elem.currentStyle ? elem.currentStyle.filter : elem.style.filter) || "" ) ?
( 0.01 * parseFloat( RegExp.$1 ) ) + "" :
computed ? "1" : "";
},
set: function( elem, value ) {
var style = elem.style,
currentStyle = elem.currentStyle,
opacity = jQuery.isNumeric( value ) ? "alpha(opacity=" + value * 100 + ")" : "",
filter = currentStyle && currentStyle.filter || style.filter || "";
// IE has trouble with opacity if it does not have layout
// Force it by setting the zoom level
style.zoom = 1;
// if setting opacity to 1, and no other filters exist - attempt to remove filter attribute #6652
// if value === "", then remove inline opacity #12685
if ( ( value >= 1 || value === "" ) &&
jQuery.trim( filter.replace( ralpha, "" ) ) === "" &&
style.removeAttribute ) {
// Setting style.filter to null, "" & " " still leave "filter:" in the cssText
// if "filter:" is present at all, clearType is disabled, we want to avoid this
// style.removeAttribute is IE Only, but so apparently is this code path...
style.removeAttribute( "filter" );
// if there is no filter style applied in a css rule or unset inline opacity, we are done
if ( value === "" || currentStyle && !currentStyle.filter ) {
return;
}
}
// otherwise, set new filter values
style.filter = ralpha.test( filter ) ?
filter.replace( ralpha, opacity ) :
filter + " " + opacity;
}
};
}
jQuery.cssHooks.marginRight = addGetHookIf( support.reliableMarginRight,
function( elem, computed ) {
if ( computed ) {
// WebKit Bug 13343 - getComputedStyle returns wrong value for margin-right
// Work around by temporarily setting element display to inline-block
return jQuery.swap( elem, { "display": "inline-block" },
curCSS, [ elem, "marginRight" ] );
}
}
);
// These hooks are used by animate to expand properties
jQuery.each({
margin: "",
padding: "",
border: "Width"
}, function( prefix, suffix ) {
jQuery.cssHooks[ prefix + suffix ] = {
expand: function( value ) {
var i = 0,
expanded = {},
// assumes a single number if not a string
parts = typeof value === "string" ? value.split(" ") : [ value ];
for ( ; i < 4; i++ ) {
expanded[ prefix + cssExpand[ i ] + suffix ] =
parts[ i ] || parts[ i - 2 ] || parts[ 0 ];
}
return expanded;
}
};
if ( !rmargin.test( prefix ) ) {
jQuery.cssHooks[ prefix + suffix ].set = setPositiveNumber;
}
});
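// Illustrative behaviour sketch (example values only); the expand hook applies the usual
// CSS shorthand order (top, right, bottom, left):
//   jQuery.cssHooks.margin.expand( "1px 2px" );
//   // => { marginTop: "1px", marginRight: "2px", marginBottom: "1px", marginLeft: "2px" }
//   jQuery.cssHooks.borderWidth.expand( "3px" );
//   // => { borderTopWidth: "3px", borderRightWidth: "3px", borderBottomWidth: "3px", borderLeftWidth: "3px" }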
jQuery.fn.extend({
css: function( name, value ) {
return access( this, function( elem, name, value ) {
var styles, len,
map = {},
i = 0;
if ( jQuery.isArray( name ) ) {
styles = getStyles( elem );
len = name.length;
for ( ; i < len; i++ ) {
map[ name[ i ] ] = jQuery.css( elem, name[ i ], false, styles );
}
return map;
}
return value !== undefined ?
jQuery.style( elem, name, value ) :
jQuery.css( elem, name );
}, name, value, arguments.length > 1 );
},
show: function() {
return showHide( this, true );
},
hide: function() {
return showHide( this );
},
toggle: function( state ) {
if ( typeof state === "boolean" ) {
return state ? this.show() : this.hide();
}
return this.each(function() {
if ( isHidden( this ) ) {
jQuery( this ).show();
} else {
jQuery( this ).hide();
}
});
}
});
// Based off of the plugin by Clint Helfers, with permission.
// http://blindsignals.com/index.php/2009/07/jquery-delay/
jQuery.fn.delay = function( time, type ) {
time = jQuery.fx ? jQuery.fx.speeds[ time ] || time : time;
type = type || "fx";
return this.queue( type, function( next, hooks ) {
var timeout = setTimeout( next, time );
hooks.stop = function() {
clearTimeout( timeout );
};
});
};
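// Illustrative usage sketch (hypothetical selector, not part of the library code path):
//   jQuery( "#banner" ).slideDown().delay( 800 ).fadeOut();  // 800ms pause on the "fx" queue
//   jQuery( "#banner" ).delay( "slow" );                     // named speeds resolve via jQuery.fx.speeds
// The stored hooks.stop clears the pending timeout, so a later .stop() can cancel the delay early.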
(function() {
var a, input, select, opt,
div = document.createElement("div" );
// Setup
div.setAttribute( "className", "t" );
div.innerHTML = " <link/><table></table><a href='/a'>a</a><input type='checkbox'/>";
a = div.getElementsByTagName("a")[ 0 ];
// First batch of tests.
select = document.createElement("select");
opt = select.appendChild( document.createElement("option") );
input = div.getElementsByTagName("input")[ 0 ];
a.style.cssText = "top:1px";
// Test setAttribute on camelCase class. If it works, we need attrFixes when doing get/setAttribute (ie6/7)
support.getSetAttribute = div.className !== "t";
// Get the style information from getAttribute
// (IE uses .cssText instead)
support.style = /top/.test( a.getAttribute("style") );
// Make sure that URLs aren't manipulated
// (IE normalizes it by default)
support.hrefNormalized = a.getAttribute("href") === "/a";
// Check the default checkbox/radio value ("" on WebKit; "on" elsewhere)
support.checkOn = !!input.value;
// Make sure that a selected-by-default option has a working selected property.
// (WebKit defaults to false instead of true, IE too, if it's in an optgroup)
support.optSelected = opt.selected;
// Tests for enctype support on a form (#6743)
support.enctype = !!document.createElement("form").enctype;
// Make sure that the options inside disabled selects aren't marked as disabled
// (WebKit marks them as disabled)
select.disabled = true;
support.optDisabled = !opt.disabled;
// Support: IE8 only
// Check if we can trust getAttribute("value")
input = document.createElement( "input" );
input.setAttribute( "value", "" );
support.input = input.getAttribute( "value" ) === "";
// Check if an input maintains its value after becoming a radio
input.value = "t";
input.setAttribute( "type", "radio" );
support.radioValue = input.value === "t";
// Null elements to avoid leaks in IE.
a = input = select = opt = div = null;
})();
var rreturn = /\r/g;
jQuery.fn.extend({
val: function( value ) {
var hooks, ret, isFunction,
elem = this[0];
if ( !arguments.length ) {
if ( elem ) {
hooks = jQuery.valHooks[ elem.type ] || jQuery.valHooks[ elem.nodeName.toLowerCase() ];
if ( hooks && "get" in hooks && (ret = hooks.get( elem, "value" )) !== undefined ) {
return ret;
}
ret = elem.value;
return typeof ret === "string" ?
// handle most common string cases
ret.replace(rreturn, "") :
// handle cases where value is null/undef or number
ret == null ? "" : ret;
}
return;
}
isFunction = jQuery.isFunction( value );
return this.each(function( i ) {
var val;
if ( this.nodeType !== 1 ) {
return;
}
if ( isFunction ) {
val = value.call( this, i, jQuery( this ).val() );
} else {
val = value;
}
// Treat null/undefined as ""; convert numbers to string
if ( val == null ) {
val = "";
} else if ( typeof val === "number" ) {
val += "";
} else if ( jQuery.isArray( val ) ) {
val = jQuery.map( val, function( value ) {
return value == null ? "" : value + "";
});
}
hooks = jQuery.valHooks[ this.type ] || jQuery.valHooks[ this.nodeName.toLowerCase() ];
// If set returns undefined, fall back to normal setting
if ( !hooks || !("set" in hooks) || hooks.set( this, val, "value" ) === undefined ) {
this.value = val;
}
});
}
});
jQuery.extend({
valHooks: {
option: {
get: function( elem ) {
var val = jQuery.find.attr( elem, "value" );
return val != null ?
val :
jQuery.text( elem );
}
},
select: {
get: function( elem ) {
var value, option,
options = elem.options,
index = elem.selectedIndex,
one = elem.type === "select-one" || index < 0,
values = one ? null : [],
max = one ? index + 1 : options.length,
i = index < 0 ?
max :
one ? index : 0;
// Loop through all the selected options
for ( ; i < max; i++ ) {
option = options[ i ];
// oldIE doesn't update selected after form reset (#2551)
if ( ( option.selected || i === index ) &&
// Don't return options that are disabled or in a disabled optgroup
( support.optDisabled ? !option.disabled : option.getAttribute("disabled") === null ) &&
( !option.parentNode.disabled || !jQuery.nodeName( option.parentNode, "optgroup" ) ) ) {
// Get the specific value for the option
value = jQuery( option ).val();
// We don't need an array for one selects
if ( one ) {
return value;
}
// Multi-Selects return an array
values.push( value );
}
}
return values;
},
set: function( elem, value ) {
var optionSet, option,
options = elem.options,
values = jQuery.makeArray( value ),
i = options.length;
while ( i-- ) {
option = options[ i ];
if ( jQuery.inArray( jQuery.valHooks.option.get( option ), values ) >= 0 ) {
// Support: IE6
// When a new option element is added to a select box we need to
// force a reflow of the newly added node in order to work around a delay
// in the initialization of its properties
try {
option.selected = optionSet = true;
} catch ( _ ) {
// Will be executed only in IE6
option.scrollHeight;
}
} else {
option.selected = false;
}
}
// Force browsers to behave consistently when non-matching value is set
if ( !optionSet ) {
elem.selectedIndex = -1;
}
return options;
}
}
}
});
// Radios and checkboxes getter/setter
jQuery.each([ "radio", "checkbox" ], function() {
jQuery.valHooks[ this ] = {
set: function( elem, value ) {
if ( jQuery.isArray( value ) ) {
return ( elem.checked = jQuery.inArray( jQuery(elem).val(), value ) >= 0 );
}
}
};
if ( !support.checkOn ) {
jQuery.valHooks[ this ].get = function( elem ) {
// Support: Webkit
// "" is returned instead of "on" if a value isn't specified
return elem.getAttribute("value") === null ? "on" : elem.value;
};
}
});
var nodeHook, boolHook,
attrHandle = jQuery.expr.attrHandle,
ruseDefault = /^(?:checked|selected)$/i,
getSetAttribute = support.getSetAttribute,
getSetInput = support.input;
jQuery.fn.extend({
attr: function( name, value ) {
return access( this, jQuery.attr, name, value, arguments.length > 1 );
},
removeAttr: function( name ) {
return this.each(function() {
jQuery.removeAttr( this, name );
});
}
});
jQuery.extend({
attr: function( elem, name, value ) {
var hooks, ret,
nType = elem.nodeType;
// don't get/set attributes on text, comment and attribute nodes
if ( !elem || nType === 3 || nType === 8 || nType === 2 ) {
return;
}
// Fallback to prop when attributes are not supported
if ( typeof elem.getAttribute === strundefined ) {
return jQuery.prop( elem, name, value );
}
// All attributes are lowercase
// Grab necessary hook if one is defined
if ( nType !== 1 || !jQuery.isXMLDoc( elem ) ) {
name = name.toLowerCase();
hooks = jQuery.attrHooks[ name ] ||
( jQuery.expr.match.bool.test( name ) ? boolHook : nodeHook );
}
if ( value !== undefined ) {
if ( value === null ) {
jQuery.removeAttr( elem, name );
} else if ( hooks && "set" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ) {
return ret;
} else {
elem.setAttribute( name, value + "" );
return value;
}
} else if ( hooks && "get" in hooks && (ret = hooks.get( elem, name )) !== null ) {
return ret;
} else {
ret = jQuery.find.attr( elem, name );
// Non-existent attributes return null, we normalize to undefined
return ret == null ?
undefined :
ret;
}
},
removeAttr: function( elem, value ) {
var name, propName,
i = 0,
attrNames = value && value.match( rnotwhite );
if ( attrNames && elem.nodeType === 1 ) {
while ( (name = attrNames[i++]) ) {
propName = jQuery.propFix[ name ] || name;
// Boolean attributes get special treatment (#10870)
if ( jQuery.expr.match.bool.test( name ) ) {
// Set corresponding property to false
if ( getSetInput && getSetAttribute || !ruseDefault.test( name ) ) {
elem[ propName ] = false;
// Support: IE<9
// Also clear defaultChecked/defaultSelected (if appropriate)
} else {
elem[ jQuery.camelCase( "default-" + name ) ] =
elem[ propName ] = false;
}
// See #9699 for explanation of this approach (setting first, then removal)
} else {
jQuery.attr( elem, name, "" );
}
elem.removeAttribute( getSetAttribute ? name : propName );
}
}
},
attrHooks: {
type: {
set: function( elem, value ) {
if ( !support.radioValue && value === "radio" && jQuery.nodeName(elem, "input") ) {
// Setting the type on a radio button after the value resets the value in IE6-9
// Reset value to default in case type is set after value during creation
var val = elem.value;
elem.setAttribute( "type", value );
if ( val ) {
elem.value = val;
}
return value;
}
}
}
}
});
// Hook for boolean attributes
boolHook = {
set: function( elem, value, name ) {
if ( value === false ) {
// Remove boolean attributes when set to false
jQuery.removeAttr( elem, name );
} else if ( getSetInput && getSetAttribute || !ruseDefault.test( name ) ) {
// IE<8 needs the *property* name
elem.setAttribute( !getSetAttribute && jQuery.propFix[ name ] || name, name );
// Use defaultChecked and defaultSelected for oldIE
} else {
elem[ jQuery.camelCase( "default-" + name ) ] = elem[ name ] = true;
}
return name;
}
};
// Retrieve booleans specially
jQuery.each( jQuery.expr.match.bool.source.match( /\w+/g ), function( i, name ) {
var getter = attrHandle[ name ] || jQuery.find.attr;
attrHandle[ name ] = getSetInput && getSetAttribute || !ruseDefault.test( name ) ?
function( elem, name, isXML ) {
var ret, handle;
if ( !isXML ) {
// Avoid an infinite loop by temporarily removing this function from the getter
handle = attrHandle[ name ];
attrHandle[ name ] = ret;
ret = getter( elem, name, isXML ) != null ?
name.toLowerCase() :
null;
attrHandle[ name ] = handle;
}
return ret;
} :
function( elem, name, isXML ) {
if ( !isXML ) {
return elem[ jQuery.camelCase( "default-" + name ) ] ?
name.toLowerCase() :
null;
}
};
});
// fix oldIE attroperties
if ( !getSetInput || !getSetAttribute ) {
jQuery.attrHooks.value = {
set: function( elem, value, name ) {
if ( jQuery.nodeName( elem, "input" ) ) {
// Does not return so that setAttribute is also used
elem.defaultValue = value;
} else {
// Use nodeHook if defined (#1954); otherwise setAttribute is fine
return nodeHook && nodeHook.set( elem, value, name );
}
}
};
}
// IE6/7 do not support getting/setting some attributes with get/setAttribute
if ( !getSetAttribute ) {
// Use this for any attribute in IE6/7
// This fixes almost every IE6/7 issue
nodeHook = {
set: function( elem, value, name ) {
// Set the existing or create a new attribute node
var ret = elem.getAttributeNode( name );
if ( !ret ) {
elem.setAttributeNode(
(ret = elem.ownerDocument.createAttribute( name ))
);
}
ret.value = value += "";
// Break association with cloned elements by also using setAttribute (#9646)
if ( name === "value" || value === elem.getAttribute( name ) ) {
return value;
}
}
};
// Some attributes are constructed with empty-string values when not defined
attrHandle.id = attrHandle.name = attrHandle.coords =
function( elem, name, isXML ) {
var ret;
if ( !isXML ) {
return (ret = elem.getAttributeNode( name )) && ret.value !== "" ?
ret.value :
null;
}
};
// Fixing value retrieval on a button requires this module
jQuery.valHooks.button = {
get: function( elem, name ) {
var ret = elem.getAttributeNode( name );
if ( ret && ret.specified ) {
return ret.value;
}
},
set: nodeHook.set
};
// Set contenteditable to false on removals(#10429)
// Setting to empty string throws an error as an invalid value
jQuery.attrHooks.contenteditable = {
set: function( elem, value, name ) {
nodeHook.set( elem, value === "" ? false : value, name );
}
};
// Set width and height to auto instead of 0 on empty string( Bug #8150 )
// This is for removals
jQuery.each([ "width", "height" ], function( i, name ) {
jQuery.attrHooks[ name ] = {
set: function( elem, value ) {
if ( value === "" ) {
elem.setAttribute( name, "auto" );
return value;
}
}
};
});
}
if ( !support.style ) {
jQuery.attrHooks.style = {
get: function( elem ) {
// Return undefined in the case of empty string
// Note: IE uppercases css property names, but if we were to .toLowerCase()
// .cssText, that would destroy case sensitivity in URLs, like in "background"
return elem.style.cssText || undefined;
},
set: function( elem, value ) {
return ( elem.style.cssText = value + "" );
}
};
}
var rfocusable = /^(?:input|select|textarea|button|object)$/i,
rclickable = /^(?:a|area)$/i;
jQuery.fn.extend({
prop: function( name, value ) {
return access( this, jQuery.prop, name, value, arguments.length > 1 );
},
removeProp: function( name ) {
name = jQuery.propFix[ name ] || name;
return this.each(function() {
// try/catch handles cases where IE balks (such as removing a property on window)
try {
this[ name ] = undefined;
delete this[ name ];
} catch( e ) {}
});
}
});
jQuery.extend({
propFix: {
"for": "htmlFor",
"class": "className"
},
prop: function( elem, name, value ) {
var ret, hooks, notxml,
nType = elem.nodeType;
// don't get/set properties on text, comment and attribute nodes
if ( !elem || nType === 3 || nType === 8 || nType === 2 ) {
return;
}
notxml = nType !== 1 || !jQuery.isXMLDoc( elem );
if ( notxml ) {
// Fix name and attach hooks
name = jQuery.propFix[ name ] || name;
hooks = jQuery.propHooks[ name ];
}
if ( value !== undefined ) {
return hooks && "set" in hooks && (ret = hooks.set( elem, value, name )) !== undefined ?
ret :
( elem[ name ] = value );
} else {
return hooks && "get" in hooks && (ret = hooks.get( elem, name )) !== null ?
ret :
elem[ name ];
}
},
propHooks: {
tabIndex: {
get: function( elem ) {
// elem.tabIndex doesn't always return the correct value when it hasn't been explicitly set
// http://fluidproject.org/blog/2008/01/09/getting-setting-and-removing-tabindex-values-with-javascript/
// Use proper attribute retrieval(#12072)
var tabindex = jQuery.find.attr( elem, "tabindex" );
return tabindex ?
parseInt( tabindex, 10 ) :
rfocusable.test( elem.nodeName ) || rclickable.test( elem.nodeName ) && elem.href ?
0 :
-1;
}
}
}
});
// Some attributes require a special call on IE
// http://msdn.microsoft.com/en-us/library/ms536429%28VS.85%29.aspx
if ( !support.hrefNormalized ) {
// href/src property should get the full normalized URL (#10299/#12915)
jQuery.each([ "href", "src" ], function( i, name ) {
jQuery.propHooks[ name ] = {
get: function( elem ) {
return elem.getAttribute( name, 4 );
}
};
});
}
// Support: Safari, IE9+
// mis-reports the default selected property of an option
// Accessing the parent's selectedIndex property fixes it
if ( !support.optSelected ) {
jQuery.propHooks.selected = {
get: function( elem ) {
var parent = elem.parentNode;
if ( parent ) {
parent.selectedIndex;
// Make sure that it also works with optgroups, see #5701
if ( parent.parentNode ) {
parent.parentNode.selectedIndex;
}
}
return null;
}
};
}
jQuery.each([
"tabIndex",
"readOnly",
"maxLength",
"cellSpacing",
"cellPadding",
"rowSpan",
"colSpan",
"useMap",
"frameBorder",
"contentEditable"
], function() {
jQuery.propFix[ this.toLowerCase() ] = this;
});
// IE6/7 call the enctype property "encoding"
if ( !support.enctype ) {
jQuery.propFix.enctype = "encoding";
}
var rclass = /[\t\r\n\f]/g;
jQuery.fn.extend({
addClass: function( value ) {
var classes, elem, cur, clazz, j, finalValue,
i = 0,
len = this.length,
proceed = typeof value === "string" && value;
if ( jQuery.isFunction( value ) ) {
return this.each(function( j ) {
jQuery( this ).addClass( value.call( this, j, this.className ) );
});
}
if ( proceed ) {
// The disjunction here is for better compressibility (see removeClass)
classes = ( value || "" ).match( rnotwhite ) || [];
for ( ; i < len; i++ ) {
elem = this[ i ];
cur = elem.nodeType === 1 && ( elem.className ?
( " " + elem.className + " " ).replace( rclass, " " ) :
" "
);
if ( cur ) {
j = 0;
while ( (clazz = classes[j++]) ) {
if ( cur.indexOf( " " + clazz + " " ) < 0 ) {
cur += clazz + " ";
}
}
// only assign if different to avoid unneeded rendering.
finalValue = jQuery.trim( cur );
if ( elem.className !== finalValue ) {
elem.className = finalValue;
}
}
}
}
return this;
},
removeClass: function( value ) {
var classes, elem, cur, clazz, j, finalValue,
i = 0,
len = this.length,
proceed = arguments.length === 0 || typeof value === "string" && value;
if ( jQuery.isFunction( value ) ) {
return this.each(function( j ) {
jQuery( this ).removeClass( value.call( this, j, this.className ) );
});
}
if ( proceed ) {
classes = ( value || "" ).match( rnotwhite ) || [];
for ( ; i < len; i++ ) {
elem = this[ i ];
// This expression is here for better compressibility (see addClass)
cur = elem.nodeType === 1 && ( elem.className ?
( " " + elem.className + " " ).replace( rclass, " " ) :
""
);
if ( cur ) {
j = 0;
while ( (clazz = classes[j++]) ) {
// Remove *all* instances
while ( cur.indexOf( " " + clazz + " " ) >= 0 ) {
cur = cur.replace( " " + clazz + " ", " " );
}
}
// only assign if different to avoid unneeded rendering.
finalValue = value ? jQuery.trim( cur ) : "";
if ( elem.className !== finalValue ) {
elem.className = finalValue;
}
}
}
}
return this;
},
toggleClass: function( value, stateVal ) {
var type = typeof value;
if ( typeof stateVal === "boolean" && type === "string" ) {
return stateVal ? this.addClass( value ) : this.removeClass( value );
}
if ( jQuery.isFunction( value ) ) {
return this.each(function( i ) {
jQuery( this ).toggleClass( value.call(this, i, this.className, stateVal), stateVal );
});
}
return this.each(function() {
if ( type === "string" ) {
// toggle individual class names
var className,
i = 0,
self = jQuery( this ),
classNames = value.match( rnotwhite ) || [];
while ( (className = classNames[ i++ ]) ) {
// check each className given, space separated list
if ( self.hasClass( className ) ) {
self.removeClass( className );
} else {
self.addClass( className );
}
}
// Toggle whole class name
} else if ( type === strundefined || type === "boolean" ) {
if ( this.className ) {
// store className if set
jQuery._data( this, "__className__", this.className );
}
// If the element has a class name or if we're passed "false",
// then remove the whole classname (if there was one, the above saved it).
// Otherwise bring back whatever was previously saved (if anything),
// falling back to the empty string if nothing was stored.
this.className = this.className || value === false ? "" : jQuery._data( this, "__className__" ) || "";
}
});
},
hasClass: function( selector ) {
var className = " " + selector + " ",
i = 0,
l = this.length;
for ( ; i < l; i++ ) {
if ( this[i].nodeType === 1 && (" " + this[i].className + " ").replace(rclass, " ").indexOf( className ) >= 0 ) {
return true;
}
}
return false;
}
});
// Return jQuery for attributes-only inclusion
jQuery.each( ("blur focus focusin focusout load resize scroll unload click dblclick " +
"mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave " +
"change select submit keydown keypress keyup error contextmenu").split(" "), function( i, name ) {
// Handle event binding
jQuery.fn[ name ] = function( data, fn ) {
return arguments.length > 0 ?
this.on( name, null, data, fn ) :
this.trigger( name );
};
});
jQuery.fn.extend({
hover: function( fnOver, fnOut ) {
return this.mouseenter( fnOver ).mouseleave( fnOut || fnOver );
},
bind: function( types, data, fn ) {
return this.on( types, null, data, fn );
},
unbind: function( types, fn ) {
return this.off( types, null, fn );
},
delegate: function( selector, types, data, fn ) {
return this.on( types, selector, data, fn );
},
undelegate: function( selector, types, fn ) {
// ( namespace ) or ( selector, types [, fn] )
return arguments.length === 1 ? this.off( selector, "**" ) : this.off( types, selector || "**", fn );
}
});
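// Illustrative equivalences (hypothetical selectors and handlers):
//   jQuery( "#menu" ).delegate( ".item", "click", fn );  // same as .on( "click", ".item", fn )
//   jQuery( "#menu" ).bind( "click", fn );                // same as .on( "click", fn )
//   jQuery( "#menu" ).hover( over, out );                 // mouseenter -> over, mouseleave -> out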
jQuery.fn.extend({
wrapAll: function( html ) {
if ( jQuery.isFunction( html ) ) {
return this.each(function(i) {
jQuery(this).wrapAll( html.call(this, i) );
});
}
if ( this[0] ) {
// The elements to wrap the target around
var wrap = jQuery( html, this[0].ownerDocument ).eq(0).clone(true);
if ( this[0].parentNode ) {
wrap.insertBefore( this[0] );
}
wrap.map(function() {
var elem = this;
while ( elem.firstChild && elem.firstChild.nodeType === 1 ) {
elem = elem.firstChild;
}
return elem;
}).append( this );
}
return this;
},
wrapInner: function( html ) {
if ( jQuery.isFunction( html ) ) {
return this.each(function(i) {
jQuery(this).wrapInner( html.call(this, i) );
});
}
return this.each(function() {
var self = jQuery( this ),
contents = self.contents();
if ( contents.length ) {
contents.wrapAll( html );
} else {
self.append( html );
}
});
},
wrap: function( html ) {
var isFunction = jQuery.isFunction( html );
return this.each(function(i) {
jQuery( this ).wrapAll( isFunction ? html.call(this, i) : html );
});
},
unwrap: function() {
return this.parent().each(function() {
if ( !jQuery.nodeName( this, "body" ) ) {
jQuery( this ).replaceWith( this.childNodes );
}
}).end();
}
});
jQuery.expr.filters.hidden = function( elem ) {
// Support: Opera <= 12.12
// Opera reports offsetWidths and offsetHeights less than zero on some elements
return elem.offsetWidth <= 0 && elem.offsetHeight <= 0 ||
(!support.reliableHiddenOffsets() &&
((elem.style && elem.style.display) || jQuery.css( elem, "display" )) === "none");
};
jQuery.expr.filters.visible = function( elem ) {
return !jQuery.expr.filters.hidden( elem );
};
var r20 = /%20/g,
rbracket = /\[\]$/,
rCRLF = /\r?\n/g,
rsubmitterTypes = /^(?:submit|button|image|reset|file)$/i,
rsubmittable = /^(?:input|select|textarea|keygen)/i;
function buildParams( prefix, obj, traditional, add ) {
var name;
if ( jQuery.isArray( obj ) ) {
// Serialize array item.
jQuery.each( obj, function( i, v ) {
if ( traditional || rbracket.test( prefix ) ) {
// Treat each array item as a scalar.
add( prefix, v );
} else {
// Item is non-scalar (array or object), encode its numeric index.
buildParams( prefix + "[" + ( typeof v === "object" ? i : "" ) + "]", v, traditional, add );
}
});
} else if ( !traditional && jQuery.type( obj ) === "object" ) {
// Serialize object item.
for ( name in obj ) {
buildParams( prefix + "[" + name + "]", obj[ name ], traditional, add );
}
} else {
// Serialize scalar item.
add( prefix, obj );
}
}
// Serialize an array of form elements or a set of
// key/values into a query string
jQuery.param = function( a, traditional ) {
var prefix,
s = [],
add = function( key, value ) {
// If value is a function, invoke it and return its value
value = jQuery.isFunction( value ) ? value() : ( value == null ? "" : value );
s[ s.length ] = encodeURIComponent( key ) + "=" + encodeURIComponent( value );
};
// Set traditional to true for jQuery <= 1.3.2 behavior.
if ( traditional === undefined ) {
traditional = jQuery.ajaxSettings && jQuery.ajaxSettings.traditional;
}
// If an array was passed in, assume that it is an array of form elements.
if ( jQuery.isArray( a ) || ( a.jquery && !jQuery.isPlainObject( a ) ) ) {
// Serialize the form elements
jQuery.each( a, function() {
add( this.name, this.value );
});
} else {
// If traditional, encode the "old" way (the way 1.3.2 or older
// did it), otherwise encode params recursively.
for ( prefix in a ) {
buildParams( prefix, a[ prefix ], traditional, add );
}
}
// Return the resulting serialization
return s.join( "&" ).replace( r20, "+" );
};
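// Illustrative behaviour sketch (example values only):
//   jQuery.param( { a: [ 1, 2 ], b: { c: 3 } } );
//   // => "a%5B%5D=1&a%5B%5D=2&b%5Bc%5D=3"   (i.e. a[]=1&a[]=2&b[c]=3)
//   jQuery.param( { a: [ 1, 2 ] }, true );
//   // => "a=1&a=2"                          (traditional, shallow serialization)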
jQuery.fn.extend({
serialize: function() {
return jQuery.param( this.serializeArray() );
},
serializeArray: function() {
return this.map(function() {
// Can add propHook for "elements" to filter or add form elements
var elements = jQuery.prop( this, "elements" );
return elements ? jQuery.makeArray( elements ) : this;
})
.filter(function() {
var type = this.type;
// Use .is(":disabled") so that fieldset[disabled] works
return this.name && !jQuery( this ).is( ":disabled" ) &&
rsubmittable.test( this.nodeName ) && !rsubmitterTypes.test( type ) &&
( this.checked || !rcheckableType.test( type ) );
})
.map(function( i, elem ) {
var val = jQuery( this ).val();
return val == null ?
null :
jQuery.isArray( val ) ?
jQuery.map( val, function( val ) {
return { name: elem.name, value: val.replace( rCRLF, "\r\n" ) };
}) :
{ name: elem.name, value: val.replace( rCRLF, "\r\n" ) };
}).get();
}
});
// data: string of html
// context (optional): If specified, the fragment will be created in this context, defaults to document
// keepScripts (optional): If true, will include scripts passed in the html string
jQuery.parseHTML = function( data, context, keepScripts ) {
if ( !data || typeof data !== "string" ) {
return null;
}
if ( typeof context === "boolean" ) {
keepScripts = context;
context = false;
}
context = context || document;
var parsed = rsingleTag.exec( data ),
scripts = !keepScripts && [];
// Single tag
if ( parsed ) {
return [ context.createElement( parsed[1] ) ];
}
parsed = jQuery.buildFragment( [ data ], context, scripts );
if ( scripts && scripts.length ) {
jQuery( scripts ).remove();
}
return jQuery.merge( [], parsed.childNodes );
};
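// Illustrative usage sketch (hypothetical markup strings):
//   jQuery.parseHTML( "<li>one</li><li>two</li>" );  // array of two detached <li> nodes
//   jQuery.parseHTML( "<div></div>" );               // single-tag fast path: [ <div> ]
//   jQuery.parseHTML( markupWithScripts );           // script elements are removed unless keepScripts is true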
var docElem = window.document.documentElement;
/**
* Gets a window from an element
*/
function getWindow( elem ) {
return jQuery.isWindow( elem ) ?
elem :
elem.nodeType === 9 ?
elem.defaultView || elem.parentWindow :
false;
}
jQuery.offset = {
setOffset: function( elem, options, i ) {
var curPosition, curLeft, curCSSTop, curTop, curOffset, curCSSLeft, calculatePosition,
position = jQuery.css( elem, "position" ),
curElem = jQuery( elem ),
props = {};
// set position first, in-case top/left are set even on static elem
if ( position === "static" ) {
elem.style.position = "relative";
}
curOffset = curElem.offset();
curCSSTop = jQuery.css( elem, "top" );
curCSSLeft = jQuery.css( elem, "left" );
calculatePosition = ( position === "absolute" || position === "fixed" ) &&
jQuery.inArray("auto", [ curCSSTop, curCSSLeft ] ) > -1;
// need to be able to calculate position if either top or left is auto and position is either absolute or fixed
if ( calculatePosition ) {
curPosition = curElem.position();
curTop = curPosition.top;
curLeft = curPosition.left;
} else {
curTop = parseFloat( curCSSTop ) || 0;
curLeft = parseFloat( curCSSLeft ) || 0;
}
if ( jQuery.isFunction( options ) ) {
options = options.call( elem, i, curOffset );
}
if ( options.top != null ) {
props.top = ( options.top - curOffset.top ) + curTop;
}
if ( options.left != null ) {
props.left = ( options.left - curOffset.left ) + curLeft;
}
if ( "using" in options ) {
options.using.call( elem, props );
} else {
curElem.css( props );
}
}
};
jQuery.fn.extend({
offset: function( options ) {
if ( arguments.length ) {
return options === undefined ?
this :
this.each(function( i ) {
jQuery.offset.setOffset( this, options, i );
});
}
var docElem, win,
box = { top: 0, left: 0 },
elem = this[ 0 ],
doc = elem && elem.ownerDocument;
if ( !doc ) {
return;
}
docElem = doc.documentElement;
// Make sure it's not a disconnected DOM node
if ( !jQuery.contains( docElem, elem ) ) {
return box;
}
// If we don't have gBCR, just use 0,0 rather than error
// BlackBerry 5, iOS 3 (original iPhone)
if ( typeof elem.getBoundingClientRect !== strundefined ) {
box = elem.getBoundingClientRect();
}
win = getWindow( doc );
return {
top: box.top + ( win.pageYOffset || docElem.scrollTop ) - ( docElem.clientTop || 0 ),
left: box.left + ( win.pageXOffset || docElem.scrollLeft ) - ( docElem.clientLeft || 0 )
};
},
position: function() {
if ( !this[ 0 ] ) {
return;
}
var offsetParent, offset,
parentOffset = { top: 0, left: 0 },
elem = this[ 0 ];
// fixed elements are offset from window (parentOffset = { top: 0, left: 0 }), because it is its only offset parent
if ( jQuery.css( elem, "position" ) === "fixed" ) {
// we assume that getBoundingClientRect is available when computed position is fixed
offset = elem.getBoundingClientRect();
} else {
// Get *real* offsetParent
offsetParent = this.offsetParent();
// Get correct offsets
offset = this.offset();
if ( !jQuery.nodeName( offsetParent[ 0 ], "html" ) ) {
parentOffset = offsetParent.offset();
}
// Add offsetParent borders
parentOffset.top += jQuery.css( offsetParent[ 0 ], "borderTopWidth", true );
parentOffset.left += jQuery.css( offsetParent[ 0 ], "borderLeftWidth", true );
}
// Subtract parent offsets and element margins
// note: when an element has margin: auto the offsetLeft and marginLeft
// are the same in Safari causing offset.left to incorrectly be 0
return {
top: offset.top - parentOffset.top - jQuery.css( elem, "marginTop", true ),
left: offset.left - parentOffset.left - jQuery.css( elem, "marginLeft", true)
};
},
offsetParent: function() {
return this.map(function() {
var offsetParent = this.offsetParent || docElem;
while ( offsetParent && ( !jQuery.nodeName( offsetParent, "html" ) && jQuery.css( offsetParent, "position" ) === "static" ) ) {
offsetParent = offsetParent.offsetParent;
}
return offsetParent || docElem;
});
}
});
// Create scrollLeft and scrollTop methods
jQuery.each( { scrollLeft: "pageXOffset", scrollTop: "pageYOffset" }, function( method, prop ) {
var top = /Y/.test( prop );
jQuery.fn[ method ] = function( val ) {
return access( this, function( elem, method, val ) {
var win = getWindow( elem );
if ( val === undefined ) {
return win ? (prop in win) ? win[ prop ] :
win.document.documentElement[ method ] :
elem[ method ];
}
if ( win ) {
win.scrollTo(
!top ? val : jQuery( win ).scrollLeft(),
top ? val : jQuery( win ).scrollTop()
);
} else {
elem[ method ] = val;
}
}, method, val, arguments.length, null );
};
});
// Add the top/left cssHooks using jQuery.fn.position
// Webkit bug: https://bugs.webkit.org/show_bug.cgi?id=29084
// getComputedStyle returns percent when specified for top/left/bottom/right
// rather than make the css module depend on the offset module, we just check for it here
jQuery.each( [ "top", "left" ], function( i, prop ) {
jQuery.cssHooks[ prop ] = addGetHookIf( support.pixelPosition,
function( elem, computed ) {
if ( computed ) {
computed = curCSS( elem, prop );
// if curCSS returns percentage, fallback to offset
return rnumnonpx.test( computed ) ?
jQuery( elem ).position()[ prop ] + "px" :
computed;
}
}
);
});
// Create innerHeight, innerWidth, height, width, outerHeight and outerWidth methods
jQuery.each( { Height: "height", Width: "width" }, function( name, type ) {
jQuery.each( { padding: "inner" + name, content: type, "": "outer" + name }, function( defaultExtra, funcName ) {
// margin is only for outerHeight, outerWidth
jQuery.fn[ funcName ] = function( margin, value ) {
var chainable = arguments.length && ( defaultExtra || typeof margin !== "boolean" ),
extra = defaultExtra || ( margin === true || value === true ? "margin" : "border" );
return access( this, function( elem, type, value ) {
var doc;
if ( jQuery.isWindow( elem ) ) {
// As of 5/8/2012 this will yield incorrect results for Mobile Safari, but there
// isn't a whole lot we can do. See pull request at this URL for discussion:
// https://github.com/jquery/jquery/pull/764
return elem.document.documentElement[ "client" + name ];
}
// Get document width or height
if ( elem.nodeType === 9 ) {
doc = elem.documentElement;
// Either scroll[Width/Height] or offset[Width/Height] or client[Width/Height], whichever is greatest
// unfortunately, this causes bug #3838 in IE6/8 only, but there is currently no good, small way to fix it.
return Math.max(
elem.body[ "scroll" + name ], doc[ "scroll" + name ],
elem.body[ "offset" + name ], doc[ "offset" + name ],
doc[ "client" + name ]
);
}
return value === undefined ?
// Get width or height on the element, requesting but not forcing parseFloat
jQuery.css( elem, type, extra ) :
// Set width or height on the element
jQuery.style( elem, type, value, extra );
}, type, chainable ? margin : undefined, chainable, null );
};
});
});
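// Illustrative call shapes (not part of the original source):
//   jQuery( "#box" ).innerHeight();      // content + padding
//   jQuery( "#box" ).outerWidth();       // content + padding + border
//   jQuery( "#box" ).outerWidth( true ); // content + padding + border + margin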
// The number of elements contained in the matched element set
jQuery.fn.size = function() {
return this.length;
};
jQuery.fn.andSelf = jQuery.fn.addBack;
// Register as a named AMD module, since jQuery can be concatenated with other
// files that may use define, but not via a proper concatenation script that
// understands anonymous AMD modules. A named AMD module is the safest and most
// robust way to register. Lowercase jquery is used because AMD module names are
// derived from file names, and jQuery is normally delivered in a lowercase
// file name. Do this after creating the global so that if an AMD module wants
// to call noConflict to hide this version of jQuery, it will work.
if ( typeof define === "function" && define.amd ) {
define( "jquery", [], function() {
return jQuery;
});
}
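// Illustrative consumer-side usage (hypothetical page setup, not part of the
// original source): with an AMD loader such as RequireJS on the page,
//   require( [ "jquery" ], function( $ ) {
//     $( document.body ).addClass( "jquery-loaded" );
//   });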
var
// Map over jQuery in case of overwrite
_jQuery = window.jQuery,
// Map over the $ in case of overwrite
_$ = window.$;
jQuery.noConflict = function( deep ) {
if ( window.$ === jQuery ) {
window.$ = _$;
}
if ( deep && window.jQuery === jQuery ) {
window.jQuery = _jQuery;
}
return jQuery;
};
// Expose jQuery and $ identifiers, even in
// AMD (#7102#comment:10, https://github.com/jquery/jquery/pull/557)
// and CommonJS for browser emulators (#13566)
if ( typeof noGlobal === strundefined ) {
window.jQuery = window.$ = jQuery;
}
return jQuery;
}));
|
{
"content_hash": "de9f1e24e2b16f9dbfbbe5b47fe6c3d7",
"timestamp": "",
"source": "github",
"line_count": 8192,
"max_line_length": 239,
"avg_line_length": 27.580810546875,
"alnum_prop": 0.6158704446273823,
"repo_name": "jgallen23/jquery-builder",
"id": "3d06b5593cc8c2d1641ae3a52053732bc644ba06",
"size": "226443",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "dist/1.11.0/jquery-ajax-effects-event-alias.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "12296"
}
],
"symlink_target": ""
}
|
#[=======================================================================[.rst:
FindMPEG2
---------
Find the native MPEG2 includes and library
This module defines
::
MPEG2_INCLUDE_DIR, path to mpeg2dec/mpeg2.h, etc.
MPEG2_LIBRARIES, the libraries required to use MPEG2.
MPEG2_FOUND, If false, do not try to use MPEG2.
also defined, but not for general use are
::
MPEG2_mpeg2_LIBRARY, where to find the MPEG2 library.
MPEG2_vo_LIBRARY, where to find the vo library.
#]=======================================================================]
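# Illustrative usage from a consuming project (the target name is hypothetical,
# not part of this module):
#   find_package(MPEG2)
#   if(MPEG2_FOUND)
#     target_include_directories(my_player PRIVATE ${MPEG2_INCLUDE_DIR})
#     target_link_libraries(my_player PRIVATE ${MPEG2_LIBRARIES})
#   endif()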
find_path(MPEG2_INCLUDE_DIR
NAMES mpeg2.h mpeg2dec/mpeg2.h)
find_library(MPEG2_mpeg2_LIBRARY mpeg2)
find_library(MPEG2_vo_LIBRARY vo)
include(${CMAKE_CURRENT_LIST_DIR}/FindPackageHandleStandardArgs.cmake)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(MPEG2 DEFAULT_MSG MPEG2_mpeg2_LIBRARY MPEG2_INCLUDE_DIR)
if(MPEG2_FOUND)
set(MPEG2_LIBRARIES ${MPEG2_mpeg2_LIBRARY})
if(MPEG2_vo_LIBRARY)
list(APPEND MPEG2_LIBRARIES ${MPEG2_vo_LIBRARY})
endif()
# Some native mpeg2 installations will depend
# on libSDL; if found, add it in.
find_package(SDL)
if(SDL_FOUND)
set( MPEG2_LIBRARIES ${MPEG2_LIBRARIES} ${SDL_LIBRARY})
endif()
endif()
mark_as_advanced(MPEG2_INCLUDE_DIR MPEG2_mpeg2_LIBRARY MPEG2_vo_LIBRARY)
|
{
"content_hash": "e514cda8deedccc2316f1ff6121b5d8f",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 90,
"avg_line_length": 27.666666666666668,
"alnum_prop": 0.6317771084337349,
"repo_name": "jpvanoosten/VolumeTiledForwardShading",
"id": "f51836352878b4f045fc669aa562b74c0a85395b",
"size": "1472",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "Tools/cmake-3.22.1-windows-x86_64/share/cmake-3.22/Modules/FindMPEG2.cmake",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "7431"
},
{
"name": "C",
"bytes": "38077"
},
{
"name": "C++",
"bytes": "1232198"
},
{
"name": "CMake",
"bytes": "4144863"
},
{
"name": "Cuda",
"bytes": "412"
},
{
"name": "Fortran",
"bytes": "3165"
},
{
"name": "HLSL",
"bytes": "150308"
},
{
"name": "Objective-C",
"bytes": "578"
},
{
"name": "Objective-C++",
"bytes": "579"
},
{
"name": "Shell",
"bytes": "9256"
},
{
"name": "Tcl",
"bytes": "3343"
},
{
"name": "TeX",
"bytes": "385393"
}
],
"symlink_target": ""
}
|
namespace pe {
namespace {
namespace cci = Microsoft_Cci_Pdb;
using base::AutoReset;
using block_graph::BlockGraph;
using block_graph::ConstTypedBlock;
using core::AbsoluteAddress;
using core::RelativeAddress;
typedef BlockGraph::Block Block;
typedef BlockGraph::BlockType BlockType;
typedef BlockGraph::Reference Reference;
typedef BlockGraph::ReferenceType ReferenceType;
typedef std::map<size_t, const char*> ComdatMap;
const char kHeadersBlockName[] = "<headers>";
const char kSymbolsBlockName[] = "<symbols>";
const char kStringsBlockName[] = "<strings>";
const char kRelocsBlockName[] = "<relocs>";
const size_t kDebugSubsectionAlignment = 4;
// Retrieve the relocation type and size for the specified COFF relocation.
//
// @param reloc the relocation.
// @param ref_type where to store the resulting reference type.
// @param ref_size where to store the resulting reference size.
// @returns true on success, or false if the information cannot be determined
// for the specified relocation.
bool GetRelocationTypeAndSize(const IMAGE_RELOCATION& reloc,
ReferenceType* ref_type,
BlockGraph::Size* ref_size) {
DCHECK(ref_type != NULL);
DCHECK(ref_size != NULL);
switch (reloc.Type) {
case IMAGE_REL_I386_ABSOLUTE:
// Ignored, as per the specifications.
return false;
case IMAGE_REL_I386_DIR32:
*ref_type = BlockGraph::RELOC_ABSOLUTE_REF;
*ref_size = sizeof(uint32);
return true;
case IMAGE_REL_I386_DIR32NB:
*ref_type = BlockGraph::RELOC_RELATIVE_REF;
*ref_size = sizeof(uint32);
return true;
case IMAGE_REL_I386_SECTION:
*ref_type = BlockGraph::RELOC_SECTION_REF;
*ref_size = sizeof(uint16);
return true;
case IMAGE_REL_I386_SECREL:
*ref_type = BlockGraph::RELOC_SECTION_OFFSET_REF;
*ref_size = sizeof(uint32);
return true;
case IMAGE_REL_I386_SECREL7:
*ref_type = BlockGraph::RELOC_SECTION_OFFSET_REF;
// TODO(chrisha): This is actually a 7-bit offset;
// BlockGraph::Reference only represents byte sizes. We pass as a 1-byte
// reference as there are no actual 8-bit references in COFF files.
*ref_size = 1;
return true;
case IMAGE_REL_I386_REL32:
*ref_type = BlockGraph::RELOC_PC_RELATIVE_REF;
*ref_size = sizeof(uint32);
return true;
default:
// Ignore other types; they are either explicitly mentioned as unsupported
// in the specifications, or for managed code.
LOG(WARNING) << "Unexpected COFF relocation type.";
return false;
}
}
// Retrieve the relocation encoded value at the relocated location.
//
// @tparam ValueType the type of data to read.
// @param source the source block.
// @param src_offset the offset of the relocated location.
// @param extra_offset where to place the additional offset read.
// @returns true on success, or false on failure.
template <typename ValueType>
bool ReadRelocationValue(Block* source,
BlockGraph::Offset src_offset,
BlockGraph::Offset* extra_offset) {
ConstTypedBlock<ValueType> value;
if (!value.Init(src_offset, source)) {
LOG(ERROR) << "Unable to read relocation location value.";
return false;
}
*extra_offset = *value;
return true;
}
// Parse a CodeView debug symbol subsection, adding references and attributes as
// needed to @p block.
//
// @param version The CodeView version to use in parsing the symbol stream.
// @param start the offset to the beginning of the contents (excluding type and
// length) of the subsection inside @p block.
// @param size the size of the subsection.
// @param block the debug section block.
// @returns true on success, or false on failure.
bool ParseDebugSymbols(
cci::CV_SIGNATURE version, size_t start, size_t size, Block* block) {
DCHECK(version == cci::C11 || version == cci::C13);
DCHECK_NE(reinterpret_cast<Block*>(NULL), block);
DCHECK_GT(block->data_size(), start);
DCHECK_GE(block->data_size(), start + size);
// We assume that functions do not nest, hence dependent debug symbols should
// all refer to the last function symbol, whose block is stored in
// current_func.
size_t section_index = block->section();
Block* current_func = NULL;
size_t cursor = start;
while (cursor < size) {
ConstTypedBlock<cci::SYMTYPE> dsym;
if (!dsym.Init(cursor, block)) {
LOG(ERROR) << "Unable to read debug symbol header at offset "
<< cursor << " in .debug$S section " << section_index << ".";
return false;
}
cursor += sizeof(*dsym);
switch (dsym->rectyp) {
case cci::S_GPROC32:
case cci::S_LPROC32:
case cci::S_GPROC32_VS2013:
case cci::S_LPROC32_VS2013: {
ConstTypedBlock<cci::ProcSym32> proc;
if (!proc.Init(cursor, block)) {
LOG(ERROR) << "Unable to read debug procedure (" << dsym->rectyp
<< ") symbol at offset " << cursor
<< " in .debug$S section " << section_index << ".";
return false;
}
// Get the existing relocation reference that points to the correct
// function block.
Reference reloc_ref;
if (!block->GetReference(cursor + offsetof(cci::ProcSym32, off),
&reloc_ref)) {
LOG(ERROR) << "No relocation reference in ProcSym32 "
<< "(missing COFF relocation?) at offset "
<< cursor + offsetof(cci::ProcSym32, off)
<< " in .debug$S section " << section_index << ".";
return false;
}
current_func = reloc_ref.referenced();
// These are function-relative offsets; we can use section offsets as we
// assume function-level linking.
if (!block->SetReference(cursor + offsetof(cci::ProcSym32, dbgStart),
Reference(BlockGraph::SECTION_OFFSET_REF,
sizeof(proc->dbgStart),
current_func,
proc->dbgStart, proc->dbgStart))) {
LOG(ERROR) << "Unable to create reference at offset "
<< cursor + offsetof(cci::ProcSym32, dbgStart)
<< " in .debug$S section " << section_index << ".";
return false;
}
if (!block->SetReference(cursor + offsetof(cci::ProcSym32, dbgEnd),
Reference(BlockGraph::SECTION_OFFSET_REF,
sizeof(proc->dbgEnd),
current_func,
proc->dbgEnd, proc->dbgEnd))) {
LOG(ERROR) << "Unable to create reference at offset "
<< cursor + offsetof(cci::ProcSym32, dbgEnd)
<< " in .debug$S section " << section_index << ".";
return false;
}
break;
}
case cci::S_FRAMEPROC: {
ConstTypedBlock<cci::FrameProcSym> frame;
if (!frame.Init(cursor, block)) {
LOG(ERROR) << "Unable to read debug frame (" << dsym->rectyp
<< ") symbol at offset " << cursor
<< " in .debug$S section " << section_index << ".";
return false;
}
DCHECK(current_func != NULL);
if ((frame->flags & cci::fHasInlAsm) != 0)
current_func->set_attribute(BlockGraph::HAS_INLINE_ASSEMBLY);
if ((frame->flags & cci::fHasSEH) != 0)
current_func->set_attribute(BlockGraph::HAS_EXCEPTION_HANDLING);
break;
}
case cci::S_ANNOTATION:
case cci::S_BLOCK32:
case cci::S_BPREL32:
case cci::S_CALLSITEINFO:
case cci::S_CONSTANT:
case cci::S_END:
case cci::S_FRAMECOOKIE:
case cci::S_GDATA32:
case cci::S_GDATA32_ST:
case cci::S_GTHREAD32:
case cci::S_LABEL32:
case cci::S_LDATA32:
case cci::S_LDATA32_ST:
case cci::S_LTHREAD32:
case cci::S_OBJNAME:
case cci::S_REGISTER:
case cci::S_REGREL32:
case cci::S_THUNK32:
case cci::S_UDT:
case cci::S_UDT_ST:
break;
case cci::S_COMPILE3:
case cci::S_MSTOOLENV_V3:
break;
// CodeView2 symbols that we can safely ignore.
case cci::S_COMPILE_CV2:
case cci::S_COMPILE2_ST:
case cci::S_OBJNAME_CV2:
break;
// These are unknown symbol types, but currently seen. From inspection
// they don't appear to contain references that need to be parsed.
// TODO(chrisha): Figure out what these symbols are. Many of them appear
// to have been added only as of VS2013.
case 0x113E:
case 0x1141:
case 0x1142:
case 0x1143:
case 0x1144:
case 0x1145:
case 0x114D:
case 0x114E:
case 0x1153:
case 0x115A:
break;
default:
LOG(ERROR) << "Unsupported debug symbol type 0x"
<< std::hex << dsym->rectyp << std::dec
<< " at offset "
<< cursor - sizeof(*dsym) + offsetof(cci::SYMTYPE, rectyp)
<< " in .debug$S section " << section_index << ".";
return false;
}
cursor += dsym->reclen - sizeof(*dsym) + sizeof(dsym->reclen);
}
return true;
}
// Parse a CodeView debug line number subsection, adding references as needed
// to @p block.
//
// @param start the offset to the beginning of the contents (excluding type and
// length) of the subsection inside @p block.
// @param size the size of the subsection.
// @param block the debug section block.
// @returns true on success, or false on failure.
bool ParseDebugLines(size_t start, size_t size, Block* block) {
DCHECK(block != NULL);
size_t section_index = block->section();
size_t cursor = start;
// Parse the section info.
ConstTypedBlock<cci::CV_LineSection> line_section;
if (!line_section.Init(cursor, block)) {
LOG(ERROR) << "Unable to read debug line section header at offset "
<< cursor << " in .debug$S section " << section_index << ".";
return false;
}
// Get the existing relocation reference that points to the function block
// these lines are for.
Reference reloc_ref;
if (!block->GetReference(cursor + offsetof(cci::CV_LineSection, off),
&reloc_ref)) {
LOG(ERROR) << "No relocation reference in CV_LineSection "
<< "(missing COFF relocation?) at offset "
<< cursor + offsetof(cci::CV_LineSection, off)
<< " in .debug$S section " << section_index << ".";
return false;
}
Block* func = reloc_ref.referenced();
cursor += sizeof(*line_section);
// Parse the source info.
ConstTypedBlock<cci::CV_SourceFile> line_file;
if (!line_file.Init(cursor, block)) {
LOG(ERROR) << "Unable to read debug line file header at offset "
<< cursor << " in .debug$S section " << section_index << ".";
return false;
}
DCHECK_GE(size, line_file->linsiz);
cursor += sizeof(*line_file);
// The rest of the subsection is an array of CV_Line structures.
ConstTypedBlock<cci::CV_Line> lines;
if (!lines.Init(cursor, block)) {
LOG(ERROR) << "Unable to read debug line file header at offset "
<< cursor << " in .debug$S section " << section_index << ".";
return false;
}
DCHECK_GE(lines.ElementCount(), line_file->count);
for (size_t i = 0; i < line_file->count; ++i) {
// This should be a function-relative offset; we can use a section offset as
// we assume function-level linking.
Reference ref(BlockGraph::SECTION_OFFSET_REF, sizeof(lines[i].offset),
func, lines[i].offset, lines[i].offset);
if (!block->SetReference(cursor + offsetof(cci::CV_Line, offset), ref)) {
LOG(ERROR) << "Unable to create reference at offset "
<< cursor + offsetof(cci::CV_Line, offset)
<< " in .debug$S section " << section_index << ".";
return false;
}
cursor += sizeof(lines[i]);
}
return true;
}
// Parse all CodeView4 debug subsections in the specified debug section block.
//
// @param block the debug section block.
// @returns true on success, or false on failure.
bool ParseDebugSubsections4(Block* block) {
DCHECK(block != NULL);
size_t section_index = block->section();
size_t cursor = sizeof(uint32);
while (cursor < block->data_size()) {
ConstTypedBlock<uint32> type;
if (!type.Init(cursor, block)) {
LOG(ERROR) << "Unable to read debug subsection type at offset "
<< cursor << " in .debug$S section " << section_index << ".";
return false;
}
cursor += sizeof(*type);
ConstTypedBlock<uint32> size;
if (!size.Init(cursor, block)) {
LOG(ERROR) << "Unable to read debug subsection size at offset "
<< cursor << " in .debug$S section " << section_index << ".";
return false;
}
cursor += sizeof(*size);
// A sentinel bit marks some subsections as ignored; we parse them anyway.
switch (*type & ~cci::DEBUG_S_IGNORE) {
case cci::DEBUG_S_SYMBOLS:
if (!ParseDebugSymbols(cci::C13, cursor, *size, block))
return false;
break;
case cci::DEBUG_S_LINES:
if (!ParseDebugLines(cursor, *size, block))
return false;
break;
case cci::DEBUG_S_STRINGTABLE:
case cci::DEBUG_S_FILECHKSMS:
case cci::DEBUG_S_FRAMEDATA:
break;
// This is a new debug symbol type as of VS2013.
// TODO(chrisha): Figure out the contents of this subsection type.
case 0xF6:
break;
default:
LOG(ERROR) << "Unsupported debug subsection type " << std::hex << *type
<< std::dec << " at offset " << cursor
<< " in .debug$S section " << section_index << ".";
return false;
}
cursor += common::AlignUp(*size, kDebugSubsectionAlignment);
}
return true;
}
// Parse all CodeView2 debug subsections in the specified debug section block.
// This is simply a raw stream of code view symbols, like a CodeView4
// DEBUG_S_SYMBOLS subsection.
//
// @param block the debug section block.
// @returns true on success, or false on failure.
bool ParseDebugSubsections2(Block* block) {
DCHECK(block != NULL);
size_t section_index = block->section();
size_t cursor = sizeof(uint32);
if (!ParseDebugSymbols(cci::C11, cursor, block->size() - cursor, block))
return false;
return true;
}
} // namespace
const char CoffDecomposer::kSectionComdatSep[] = "; COMDAT=";
CoffDecomposer::CoffDecomposer(const CoffFile& image_file)
: image_file_(image_file),
image_layout_(NULL),
image_(NULL) {
}
bool CoffDecomposer::Decompose(ImageLayout* image_layout) {
DCHECK_NE(reinterpret_cast<ImageLayout*>(NULL), image_layout);
// Internal temporaries.
DCHECK_EQ(reinterpret_cast<ImageLayout*>(NULL), image_layout_);
DCHECK_EQ(reinterpret_cast<BlockGraph::AddressSpace*>(NULL), image_);
AutoReset<ImageLayout*> auto_reset_image_layout(&image_layout_, image_layout);
AutoReset<BlockGraph::AddressSpace*> auto_reset_image(&image_,
&image_layout->blocks);
// Set the image format.
image_layout->blocks.graph()->set_image_format(BlockGraph::COFF_IMAGE);
// Copy the image headers to the layout.
CopySectionHeadersToImageLayout(
image_file_.file_header()->NumberOfSections,
image_file_.section_headers(),
&image_layout_->sections);
if (!CopySectionInfoToBlockGraph(image_file_, image_->graph()))
return false;
if (!CreateBlocksFromSections())
return false;
if (!CreateBlocksAndReferencesFromNonSections())
return false;
if (!CreateReferencesFromRelocations())
return false;
if (!CreateReferencesFromDebugInfo())
return false;
if (!CreateLabelsFromSymbols())
return false;
return true;
}
bool CoffDecomposer::CreateBlocksAndReferencesFromNonSections() {
DCHECK(image_ != NULL);
if (!CreateBlocksAndReferencesFromSymbolAndStringTables())
return false;
if (!CreateBlocksFromRelocationTables())
return false;
if (!CreateBlocksAndReferencesFromHeaders())
return false;
return true;
}
bool CoffDecomposer::CreateBlocksAndReferencesFromHeaders() {
const IMAGE_FILE_HEADER* file_header = image_file_.file_header();
DCHECK(file_header != NULL);
// Create a block for COFF and section headers.
size_t headers_size =
sizeof(*file_header) +
file_header->NumberOfSections * sizeof(IMAGE_SECTION_HEADER);
Block* block = CreateBlock(BlockGraph::DATA_BLOCK, FileOffsetAddress(0),
headers_size, kHeadersBlockName);
if (block == NULL) {
LOG(ERROR) << "Unable to create block for headers.";
return false;
}
block->set_attribute(BlockGraph::COFF_HEADERS);
// Create a reference for the symbol table pointer.
FileOffsetAddress symbols_ptr_addr(
offsetof(IMAGE_FILE_HEADER, PointerToSymbolTable));
if (!CreateFileOffsetReference(
symbols_ptr_addr,
BlockGraph::FILE_OFFSET_REF,
sizeof(file_header->PointerToSymbolTable),
FileOffsetAddress(file_header->PointerToSymbolTable))) {
return false;
}
// Create a reference for the section and relocation pointers in each section
// header.
FileOffsetAddress section_headers_start(
sizeof(*file_header) + file_header->SizeOfOptionalHeader);
size_t num_sections = image_file_.file_header()->NumberOfSections;
for (size_t i = 0; i < num_sections; ++i) {
const IMAGE_SECTION_HEADER* header = image_file_.section_header(i);
DCHECK(header != NULL);
FileOffsetAddress start(section_headers_start + i * sizeof(*header));
FileOffsetAddress data_ptr_addr(
start + offsetof(IMAGE_SECTION_HEADER, PointerToRawData));
if (!CreateFileOffsetReference(
data_ptr_addr,
BlockGraph::FILE_OFFSET_REF,
sizeof(header->PointerToRawData),
FileOffsetAddress(header->PointerToRawData))) {
return false;
}
FileOffsetAddress relocs_ptr_addr(
start + offsetof(IMAGE_SECTION_HEADER, PointerToRelocations));
if (!CreateFileOffsetReference(
relocs_ptr_addr,
BlockGraph::FILE_OFFSET_REF,
sizeof(header->PointerToRelocations),
FileOffsetAddress(header->PointerToRelocations))) {
return false;
}
}
return true;
}
bool CoffDecomposer::CreateBlocksAndReferencesFromSymbolAndStringTables() {
// Create a block for the symbol table.
FileOffsetAddress symbols_start(image_file_.symbols_address());
size_t symbols_size = image_file_.symbols_size();
Block* block = CreateBlock(BlockGraph::DATA_BLOCK,
symbols_start, symbols_size, kSymbolsBlockName);
if (block == NULL) {
LOG(ERROR) << "Unable to create block for symbols.";
return false;
}
block->set_attribute(BlockGraph::COFF_SYMBOL_TABLE);
// Create a block for the strings table that follows.
FileOffsetAddress strings_start(image_file_.strings_address());
size_t strings_size = image_file_.strings_size();
block = CreateBlock(BlockGraph::DATA_BLOCK,
strings_start, strings_size, kStringsBlockName);
if (block == NULL) {
LOG(ERROR) << "Unable to create block for strings.";
return false;
}
block->set_attribute(BlockGraph::COFF_STRING_TABLE);
// Add references.
size_t num_symbols = image_file_.file_header()->NumberOfSymbols;
const IMAGE_SYMBOL* symbol = NULL;
for (size_t i = 0; i < num_symbols; i += 1 + symbol->NumberOfAuxSymbols) {
symbol = image_file_.symbol(i);
// Ignore external symbols (no references to blocks) and other kinds of
// non-reference symbols.
if (symbol->SectionNumber <= 0)
continue;
FileOffsetAddress start(symbols_start + i * sizeof(*symbol));
// Symbols with section storage class simply provide the characteristics
// of the section in the symbol value. Other symbols store an actual offset
// into a section in the value field.
if (symbol->StorageClass != IMAGE_SYM_CLASS_SECTION) {
FileOffsetAddress value_addr(start + offsetof(IMAGE_SYMBOL, Value));
if (!CreateSymbolOffsetReference(
value_addr,
BlockGraph::SECTION_OFFSET_REF,
sizeof(symbol->Value),
symbol,
symbol->Value)) {
return false;
}
}
FileOffsetAddress section_addr(
start + offsetof(IMAGE_SYMBOL, SectionNumber));
if (!CreateSymbolOffsetReference(
section_addr,
BlockGraph::SECTION_REF,
sizeof(symbol->SectionNumber),
symbol,
0)) {
return false;
}
// Section definitions for associative COMDAT sections require an additional
// section reference within the auxiliary symbol.
if (symbol->StorageClass == IMAGE_SYM_CLASS_STATIC &&
symbol->Type >> 4 != IMAGE_SYM_DTYPE_FUNCTION &&
symbol->NumberOfAuxSymbols == 1) {
const IMAGE_AUX_SYMBOL* aux =
reinterpret_cast<const IMAGE_AUX_SYMBOL*>(image_file_.symbol(i + 1));
DCHECK(aux != NULL);
if (aux->Section.Selection == IMAGE_COMDAT_SELECT_ASSOCIATIVE) {
FileOffsetAddress number_addr(
start + sizeof(IMAGE_SYMBOL) +
offsetof(IMAGE_AUX_SYMBOL, Section.Number));
if (!CreateSectionOffsetReference(
number_addr,
BlockGraph::SECTION_REF,
sizeof(short),
aux->Section.Number - 1,
0)) {
return false;
}
}
}
}
return true;
}
bool CoffDecomposer::CreateBlocksFromRelocationTables() {
size_t num_sections = image_file_.file_header()->NumberOfSections;
for (size_t i = 0; i < num_sections; ++i) {
const IMAGE_SECTION_HEADER* header = image_file_.section_header(i);
DCHECK(header != NULL);
if (header->NumberOfRelocations == 0)
continue;
FileOffsetAddress relocs_start(header->PointerToRelocations);
size_t relocs_size(header->NumberOfRelocations * sizeof(IMAGE_RELOCATION));
// Create a block for this relocation table.
Block* block =
CreateBlock(BlockGraph::DATA_BLOCK,
relocs_start, relocs_size, kRelocsBlockName);
if (block == NULL)
return false;
block->set_attribute(BlockGraph::COFF_RELOC_DATA);
}
return true;
}
bool CoffDecomposer::CreateBlocksFromSections() {
DCHECK(image_ != NULL);
// Build COMDAT symbol map, which associates each COMDAT section
// with the COMDAT (secondary) symbol. When compiling with
// function-level linking (/Gy for MSVC), all data and code lives in
// COMDAT sections. Each COMDAT section is associated with at least
// one symbol in the symbol table (the primary symbol), but usually
// two or more.
//
// The primary symbol must always be the section symbol, which
// indicates which final executable section the COMDAT section will
// need to be merged into (e.g., .text or .data).
//
// The secondary symbol, when it exists, is the first symbol bound
// to the COMDAT section that comes after the primary (usually but
// not necessarily right after). With function-level linking, the
// secondary symbol is always the name of the function or variable
// defined in the section.
//
// The COFF decomposer assumes functions live in their own sections,
// which is guaranteed by the MSVC compiler documentation for /Gy,
// but is more forgiving when it comes to variables, which may be
// grouped together in one or multiple data sections.
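//
// For illustration only (a hypothetical /Gy object file, not a real dump):
// a COMDAT function foo typically shows up in the symbol table as
//   [n]   .text$mn  SECT5  STATIC    primary: the section symbol; its aux
//                                    record carries the COMDAT selection.
//   [n+2] _foo      SECT5  EXTERNAL  secondary: the name that ends up in
//                                    comdat_map for section 5.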
ComdatMap comdat_map;
size_t num_symbols = image_file_.file_header()->NumberOfSymbols;
const IMAGE_SYMBOL* symbol = NULL;
for (size_t i = 0; i < num_symbols; i += 1 + symbol->NumberOfAuxSymbols) {
symbol = image_file_.symbol(i);
DCHECK(symbol != NULL);
if (symbol->SectionNumber <= 0)
continue;
size_t section_index = symbol->SectionNumber - 1;
// Skip non-COMDAT sections.
const IMAGE_SECTION_HEADER* header =
image_file_.section_header(section_index);
DCHECK(header != NULL);
if ((header->Characteristics & IMAGE_SCN_LNK_COMDAT) == 0)
continue;
// Skip primary section symbols.
ComdatMap::iterator it = comdat_map.find(section_index);
if (it == comdat_map.end()) {
comdat_map.insert(std::make_pair(section_index,
static_cast<const char*>(0)));
} else {
// Skip symbols after the second one.
if (it->second != NULL)
continue;
// This should be the second symbol (assuming the first one is
// the section symbol, as mandated by the specifications), that
// is, the COMDAT symbol.
it->second = image_file_.GetSymbolName(i);
}
}
// Build a block for each data or code section.
size_t num_sections = image_file_.file_header()->NumberOfSections;
for (size_t i = 0; i < num_sections; ++i) {
const IMAGE_SECTION_HEADER* header = image_file_.section_header(i);
DCHECK(header != NULL);
BlockType block_type = GetSectionType(*header) == kSectionCode ?
BlockGraph::CODE_BLOCK : BlockGraph::DATA_BLOCK;
// Retrieve or make up a suitable name for the block.
std::string name(image_file_.GetSectionName(*header));
ComdatMap::iterator it = comdat_map.find(i);
if (it != comdat_map.end()) {
name.append(kSectionComdatSep);
if (it->second != NULL)
name.append(it->second);
}
// Compute the address of the block; when using function-level linking,
// each function begins at offset zero. Unmapped sections (BSS) get an
// unmapped block with an invalid address.
FileOffsetAddress addr(FileOffsetAddress::kInvalidAddress);
if (image_file_.IsSectionMapped(i)) {
CHECK(image_file_.SectionOffsetToFileOffset(i, 0, &addr));
}
// Put everything together into a block.
Block* block = CreateBlock(block_type,
addr, header->SizeOfRawData, name.c_str());
if (block == NULL) {
LOG(ERROR) << "Unable to create block for section " << i << " \""
<< name << "\".";
return false;
}
// Assuming block graph section IDs match those of the image file.
block->set_section(i);
block->set_attribute(image_file_.IsSectionMapped(i) ?
BlockGraph::SECTION_CONTRIB : BlockGraph::COFF_BSS);
// Add to section-block map so we can find it later.
section_block_map_.insert(std::make_pair(i, block));
}
return true;
}
bool CoffDecomposer::CreateReferencesFromDebugInfo() {
DCHECK(image_ != NULL);
// Read debug data directly from the block graph, since debug section
// blocks have already been inserted.
BlockGraph::BlockMap& blocks = image_->graph()->blocks_mutable();
BlockGraph::BlockMap::iterator it = blocks.begin();
for (; it != blocks.end(); ++it) {
size_t section_index = it->second.section();
BlockGraph::Section* section =
image_->graph()->GetSectionById(section_index);
if (section == NULL || section->name() != ".debug$S")
continue;
Block* block = &it->second;
ConstTypedBlock<uint32> magic;
if (!magic.Init(0, block)) {
LOG(ERROR) << "Unable to read magic number from .debug$S section "
<< section_index << ".";
return false;
}
// Parse subsections.
switch (*magic) {
case cci::C11: {
if (!ParseDebugSubsections2(block))
return false;
break;
}
case cci::C13: {
if (!ParseDebugSubsections4(block))
return false;
break;
}
default: {
LOG(ERROR) << "Unsupported CV version " << *magic
<< " in .debug$S section " << section_index << ".";
return false;
}
}
}
return true;
}
bool CoffDecomposer::CreateReferencesFromRelocations() {
DCHECK(image_ != NULL);
CoffFile::RelocMap reloc_map;
image_file_.DecodeRelocs(&reloc_map);
CoffFile::RelocMap::iterator it = reloc_map.begin();
for (; it != reloc_map.end(); ++it) {
DCHECK(it->second != NULL);
const IMAGE_SYMBOL* symbol =
image_file_.symbol(it->second->SymbolTableIndex);
DCHECK(symbol != NULL);
// Compute reference attributes.
ReferenceType ref_type = BlockGraph::REFERENCE_TYPE_MAX;
BlockGraph::Size ref_size = 0;
if (!GetRelocationTypeAndSize(*it->second, &ref_type, &ref_size))
continue;
DCHECK_LT(ref_type, BlockGraph::REFERENCE_TYPE_MAX);
DCHECK_GT(ref_size, 0u);
// Add reference.
size_t offset = symbol->SectionNumber == 0 ? 0 : symbol->Value;
if (!CreateSymbolOffsetReference(it->first, ref_type, ref_size,
symbol, offset)) {
return false;
}
}
return true;
}
bool CoffDecomposer::CreateLabelsFromSymbols() {
DCHECK(image_ != NULL);
size_t num_symbols = image_file_.file_header()->NumberOfSymbols;
const IMAGE_SYMBOL* symbol;
for (size_t i = 0; i < num_symbols; i += 1 + symbol->NumberOfAuxSymbols) {
symbol = image_file_.symbol(i);
// Data labels should reference a valid section, have storage
// class STATIC, a non-function type (contrary to static
// functions), and no auxiliary record (contrary to section
// definitions). Skip the rest.
//
// MSVC records section descriptions in the symbol table as STATIC
// data symbols; hence a section symbol and the first data symbol
// at offset zero will have the same storage class and offset;
// data symbols, however, occupy a single entry in the table,
// whereas section symbols take two records (hence one auxiliary
// record with class-specific data in addition to the main
// record).
if (!(symbol->SectionNumber > 0 &&
symbol->StorageClass == IMAGE_SYM_CLASS_STATIC &&
symbol->Type >> 4 != IMAGE_SYM_DTYPE_FUNCTION &&
symbol->NumberOfAuxSymbols == 0)) {
continue;
}
size_t section_index = symbol->SectionNumber - 1;
// Skip labels in non-code sections.
const IMAGE_SECTION_HEADER* header =
image_file_.section_header(section_index);
DCHECK(header != NULL);
if (GetSectionType(*header) != kSectionCode)
continue;
// Get block and offset.
SectionBlockMap::iterator it = section_block_map_.find(section_index);
DCHECK(it != section_block_map_.end());
Block* block = it->second;
DCHECK(block != NULL);
BlockGraph::Offset offset = symbol->Value;
// Tables only appear in code blocks; ignore others.
if (block->type() != BlockGraph::CODE_BLOCK)
continue;
// Compute label attributes. Jump tables are always an array of
// pointers, thus they coincide exactly with a reference. Case
// tables are simple arrays of integer values, thus do not
// coincide with a reference.
BlockGraph::LabelAttributes attrs = BlockGraph::DATA_LABEL;
if (block->references().find(offset) != block->references().end()) {
attrs |= BlockGraph::JUMP_TABLE_LABEL;
} else {
attrs |= BlockGraph::CASE_TABLE_LABEL;
}
// Add label.
const char* name = image_file_.GetSymbolName(i);
if (!AddLabelToBlock(offset, name, attrs, block))
return false;
}
return true;
}
Block* CoffDecomposer::CreateBlock(BlockType type,
FileOffsetAddress addr,
BlockGraph::Size size,
const base::StringPiece& name) {
DCHECK(image_ != NULL);
if (addr == FileOffsetAddress::kInvalidAddress) {
// Unmapped block.
Block* block = image_->graph()->AddBlock(type, size, name);
if (block == NULL) {
LOG(ERROR) << "Unable to add unmapped block \"" << name.as_string()
<< "\" with size " << size << ".";
return NULL;
}
return block;
}
// Otherwise, we have a normal mapped block.
BlockGraphAddress block_addr(FileOffsetToBlockGraphAddress(addr));
Block* block = image_->AddBlock(type, block_addr, size, name);
if (block == NULL) {
LOG(ERROR) << "Unable to add block \"" << name.as_string() << "\" at "
<< block_addr << " with size " << size << ".";
return NULL;
}
// Mark the source range from whence this block originates.
if (size > 0) {
bool pushed = block->source_ranges().Push(
Block::DataRange(0, size),
Block::SourceRange(block_addr, size));
DCHECK(pushed);
}
const uint8* data = image_file_.GetImageData(addr, size);
if (data != NULL)
block->SetData(data, size);
return block;
}
bool CoffDecomposer::CreateReference(FileOffsetAddress src_addr,
ReferenceType ref_type,
BlockGraph::Size ref_size,
Block* target,
BlockGraph::Offset offset) {
DCHECK(image_ != NULL);
// Get source block and offset.
Block* source = NULL;
BlockGraph::Offset src_offset = -1;
if (!FileOffsetToBlockOffset(src_addr, &source, &src_offset))
return false;
DCHECK(source != NULL);
DCHECK_GE(src_offset, 0);
// Read additional offset for relocations.
BlockGraph::Offset extra_offset = 0;
if ((ref_type & BlockGraph::RELOC_REF_BIT) != 0) {
switch (ref_size) {
case sizeof(uint32):
if (!ReadRelocationValue<uint32>(source, src_offset, &extra_offset))
return false;
break;
case sizeof(uint16):
if (!ReadRelocationValue<uint16>(source, src_offset, &extra_offset))
return false;
break;
case sizeof(uint8):
// TODO(chrisha): This is really a special 7-bit relocation; we do
// not touch these, for now.
break;
default:
LOG(ERROR) << "Unsupported relocation value size (" << ref_size << ").";
return false;
}
}
// Find an existing reference, or insert a new one.
Reference ref(ref_type, ref_size, target, offset + extra_offset, offset);
Block::ReferenceMap::const_iterator ref_it =
source->references().find(src_offset);
if (ref_it == source->references().end()) {
// New reference.
CHECK(source->SetReference(src_offset, ref));
} else {
// Collisions are only allowed if the references are identical.
if (!(ref == ref_it->second)) {
LOG(ERROR) << "Block \"" << source->name() << "\" has a conflicting "
<< "reference at offset " << src_offset << ".";
return false;
}
}
return true;
}
bool CoffDecomposer::CreateFileOffsetReference(FileOffsetAddress src_addr,
ReferenceType ref_type,
BlockGraph::Size ref_size,
FileOffsetAddress dst_addr) {
DCHECK(image_ != NULL);
// Get target section and offset.
Block* target = NULL;
BlockGraph::Offset offset = -1;
if (!FileOffsetToBlockOffset(dst_addr, &target, &offset))
return false;
DCHECK(target != NULL);
DCHECK_GE(offset, 0);
// Add reference.
if (!CreateReference(src_addr, ref_type, ref_size, target, offset))
return false;
return true;
}
bool CoffDecomposer::CreateSectionOffsetReference(FileOffsetAddress src_addr,
ReferenceType ref_type,
BlockGraph::Size ref_size,
size_t section_index,
size_t section_offset) {
DCHECK(image_ != NULL);
// Get target section and offset.
Block* target = NULL;
BlockGraph::Offset offset = -1;
if (!SectionOffsetToBlockOffset(section_index, section_offset,
&target, &offset)) {
return false;
}
DCHECK(target != NULL);
DCHECK_GE(offset, 0);
// Add reference.
if (!CreateReference(src_addr, ref_type, ref_size, target, offset))
return false;
return true;
}
bool CoffDecomposer::CreateSymbolOffsetReference(FileOffsetAddress src_addr,
ReferenceType ref_type,
BlockGraph::Size ref_size,
const IMAGE_SYMBOL* symbol,
size_t offset) {
DCHECK(image_ != NULL);
DCHECK(symbol != NULL);
if (symbol->SectionNumber < 0) {
LOG(ERROR) << "Symbol cannot be converted to a reference.";
return false;
}
if (symbol->SectionNumber != 0) {
// Section symbol.
return CreateSectionOffsetReference(src_addr, ref_type, ref_size,
symbol->SectionNumber - 1, offset);
} else {
// External symbol. As a convention, we use a reference to the symbol
// table, since there is no corresponding block. The offset is ignored
// (will be inferred from the symbol value and reference type).
size_t symbol_index = symbol - image_file_.symbols();
return CreateFileOffsetReference(
src_addr, ref_type, ref_size,
image_file_.symbols_address() + symbol_index * sizeof(*symbol));
}
}
bool CoffDecomposer::FileOffsetToBlockOffset(FileOffsetAddress addr,
Block** block,
BlockGraph::Offset* offset) {
DCHECK(image_ != NULL);
DCHECK(block != NULL);
DCHECK(offset != NULL);
// Get block and offset.
BlockGraphAddress actual_addr(FileOffsetToBlockGraphAddress(addr));
Block* containing_block = image_->GetBlockByAddress(actual_addr);
if (containing_block == NULL) {
LOG(ERROR) << "File offset " << addr << " does not lie within a block.";
return false;
}
BlockGraphAddress block_addr;
CHECK(image_->GetAddressOf(containing_block, &block_addr));
*block = containing_block;
*offset = actual_addr - block_addr;
return true;
}
bool CoffDecomposer::SectionOffsetToBlockOffset(size_t section_index,
size_t section_offset,
Block** block,
BlockGraph::Offset* offset) {
DCHECK(image_ != NULL);
DCHECK_NE(BlockGraph::kInvalidSectionId, section_index);
DCHECK_LT(section_index, image_file_.file_header()->NumberOfSections);
DCHECK_LE(section_offset,
image_file_.section_header(section_index)->SizeOfRawData);
DCHECK(block != NULL);
DCHECK(offset != NULL);
// Get block and offset.
SectionBlockMap::iterator it = section_block_map_.find(section_index);
if (it == section_block_map_.end()) {
LOG(ERROR) << "Section " << section_index << " is not mapped to a block.";
return false;
}
DCHECK(it->second != NULL);
DCHECK_LE(section_offset, it->second->size());
*block = it->second;
*offset = section_offset;
return true;
}
CoffDecomposer::BlockGraphAddress CoffDecomposer::FileOffsetToBlockGraphAddress(
FileOffsetAddress addr) {
return BlockGraphAddress(addr.value());
}
} // namespace pe
|
{
"content_hash": "eabf5943f1b358829998034de0bd719d",
"timestamp": "",
"source": "github",
"line_count": 1113,
"max_line_length": 80,
"avg_line_length": 35.498652291105124,
"alnum_prop": 0.6160971905846621,
"repo_name": "pombreda/syzygy",
"id": "4660c3a2da9d7cb4c0f2cf5e1497247ffb3db81f",
"size": "40372",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "syzygy/pe/coff_decomposer.cc",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "13748"
},
{
"name": "C",
"bytes": "8422"
},
{
"name": "C++",
"bytes": "7587976"
},
{
"name": "CSS",
"bytes": "1333"
},
{
"name": "HTML",
"bytes": "3182"
},
{
"name": "Protocol Buffer",
"bytes": "6472"
},
{
"name": "Python",
"bytes": "841811"
},
{
"name": "Shell",
"bytes": "19040"
}
],
"symlink_target": ""
}
|
# step 1
function before_deploy {
echo "before deploy ..."
}
# step 2
function before_link {
# run remote command
echo "before link"
remote_cmd "cd ${CURRENT_RELEASE} && php clean_apc_cache.php"
}
# step 3
function after_link {
echo "after link ..."
}
# step 4
function after_deploy {
echo "after deploy ..."
}
|
{
"content_hash": "741fd5bdaa68286a7fdfa4564455d66f",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 65,
"avg_line_length": 16.45,
"alnum_prop": 0.6291793313069909,
"repo_name": "xianhuazhou/pac",
"id": "d63474f07a90ecab4bdf1884eb7f9ddd2c6deee0",
"size": "351",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": ".pac/hooks/deploy.sh",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Shell",
"bytes": "13171"
}
],
"symlink_target": ""
}
|
class GitHubLanguageFix { };
#endif
|
{
"content_hash": "d3005d4d63ebfc51899f65ebfb7f288c",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 28,
"avg_line_length": 18,
"alnum_prop": 0.75,
"repo_name": "IzaanSiddiqi/blight_engine",
"id": "ea8d10dea28ecee9e14efff115eb958bdb4ca5ab",
"size": "215",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source/event_handling/event_handling.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1639"
},
{
"name": "C",
"bytes": "251"
},
{
"name": "C++",
"bytes": "46309"
}
],
"symlink_target": ""
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace WebSockets.Server
{
public interface IService : IDisposable
{
/// <summary>
/// Sends data back to the client. This is built using the IConnectionFactory
/// </summary>
void Respond();
}
}
|
{
"content_hash": "37d9aa615401ef5a9c0bc30e3001906e",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 85,
"avg_line_length": 22.066666666666666,
"alnum_prop": 0.649546827794562,
"repo_name": "ninjasource/websocket-server",
"id": "396407af3f314690d75068d72858bb30093d4cad",
"size": "333",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "WebSockets/Server/IService.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "136"
},
{
"name": "C#",
"bytes": "71385"
},
{
"name": "HTML",
"bytes": "2477"
}
],
"symlink_target": ""
}
|
<div class="chart-template">
<h3 class="cohort-name-header">{{cohort.name}}</h3>
<div class="row equal">
<div class="panel panel-default panel-table">
<div class="panel-heading">
Condition Prevalence
</div>
<div class="panel-body">
<div class="col-sm-12">
<ul class="nav nav-tabs" id="myTab" role="tablist">
<li class="active" id="treemap-tab"><a href="#condition-treemap-panel" role="tab" data-toggle="tab">Treemap</a>
</li>
<li><a href="#condition-table-panel" role="tab" data-toggle="tab">Table</a>
</li>
</ul>
<div id='content' class="tab-content">
<div role="tabpanel" class="tab-pane active" id="condition-treemap-panel">
<div id="treemap_container">
<div class="treemap_zoomtarget"></div>
</div>
<div class="treemap_legend">Box Size: Prevalence, Color: Records per Person (Blue to Green = Low to High), Use Ctrl-Click to Zoom, Alt-Click to Reset Zoom</div>
</div>
<div role="tabpanel" class="tab-pane" id="condition-table-panel">
<table id="condition_table" class="display">
<thead>
<tr>
<th>Concept Id</th>
<th>SOC</th>
<th>HLGT</th>
<th>HLT</th>
<th>PT</th>
<th>SNOMED</th>
<th>Person Count</th>
<th>Prevalence</th>
<th>Records per Person</th>
</tr>
</thead>
</table>
</div>
</div>
</div>
</div>
</div>
</div>
<div id="reportConditionOccurrencesDrilldown" class="hidden reportDrilldown">
<div id="conditionDrilldownTitle" class="reportTitle"></div>
<div class="row">
<div class="col-sm-12">
<div id="panelConditionPrevalence" class="panel panel-default">
<div class="panel-heading">
Condition Prevalence
</div>
<div class="panel-body">
<div class="drilldown" id="trellisLinePlot"></div>
</div>
</div>
</div>
</div>
<div class="row">
<div class="col-sm-12">
<div id="panelConditionOccurrencePrevalenceByMonth" class="panel panel-default">
<div class="panel-heading">
Condition Prevalence by Month
</div>
<div class="panel-body">
<div class="drilldown" id="conditionPrevalenceByMonth"></div>
</div>
</div>
</div>
</div>
<div class="row equal">
<div class="col-sm-6">
<div id="panelConditionsByType" class="panel panel-default">
<div class="panel-heading">
Conditions by Type
</div>
<div class="panel-body">
<div class="drilldown" id="conditionsByType"></div>
</div>
</div>
</div>
<div class="col-sm-6">
<div id="panelAgeAtFirstDiagnosis" class="panel panel-default">
<div class="panel-heading">
Age at First Diagnosis
</div>
<div class="panel-body">
<div class="drilldown" id="ageAtFirstDiagnosis"></div>
</div>
</div>
</div>
</div>
</div>
</div>
|
{
"content_hash": "a5e01d688cd96814aff02c81a3a69dd5",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 188,
"avg_line_length": 45.46808510638298,
"alnum_prop": 0.40009358914365933,
"repo_name": "OHDSI/Olympus",
"id": "db722890d0930253134920d85a017acc98f1236c",
"size": "4274",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/main/webapp/Heracles/src/templates/condition.html",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "16386"
},
{
"name": "CSS",
"bytes": "347419"
},
{
"name": "HTML",
"bytes": "451524"
},
{
"name": "Java",
"bytes": "79690"
},
{
"name": "JavaScript",
"bytes": "4113982"
},
{
"name": "Shell",
"bytes": "1853"
}
],
"symlink_target": ""
}
|
module Nanoc
module Core
# @private
class ActionProvider
extend DDPlugin::Plugin
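# Illustrative sketch of a concrete subclass (hypothetical, not shipped with
# nanoc); concrete providers register through the DDPlugin identifier and
# implement the abstract methods below:
#
#   class MyActionProvider < Nanoc::Core::ActionProvider
#     identifier :my_provider
#
#     def self.for(_site)
#       new
#     end
#   end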
def self.for(_site)
raise NotImplementedError
end
def rep_names_for(_item)
raise NotImplementedError
end
def action_sequence_for(_obj)
raise NotImplementedError
end
def need_preprocessing?
raise NotImplementedError
end
def preprocess(_site)
raise NotImplementedError
end
def postprocess(_site, _reps)
raise NotImplementedError
end
end
end
end
|
{
"content_hash": "0a81e0b735f108f76e568bb85186b6e8",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 35,
"avg_line_length": 17.875,
"alnum_prop": 0.6153846153846154,
"repo_name": "nanoc/nanoc",
"id": "39bdacef8ca1d8c8bf5afb6f4564f4eb13414d7a",
"size": "603",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "nanoc-core/lib/nanoc/core/action_provider.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "1635200"
}
],
"symlink_target": ""
}
|
'use strict';
var WebGLMyApps = WebGLMyApps || {};
WebGLMyApps.Module = WebGLMyApps.Module || {};
WebGLMyApps.Module.View = WebGLMyApps.Module.View || {};
WebGLMyApps.Module.Utils = WebGLMyApps.Module.Utils || {};
(() =>
{
class App
{
constructor()
{
this.threejsViewController = null;
this.threejsProps = {};
this.actorsController = null;
this.audioController = null;
this.timerController = null;
this.infomationController = null;
document.addEventListener('DOMContentLoaded', e => this.onLoad(e) );
document.addEventListener('keydown', e => this.onKeyDown(e) );
document.addEventListener('touchstart', e => this.onTouchStart(e) );
window.addEventListener('resize', e => this.onResize(e) );
}
init()
{
this.threejsProps = this.threejsViewController.init();
this.actorsController = this.threejsProps.actors;
this.update();
}
update()
{
let _props = {
level: this.audioController.update(),
time: this.timerController.getCurrentTime()
};
this.threejsViewController.update(_props);
this.infomationController.update(_props);
this.render();
}
render()
{
this.threejsViewController.render();
requestAnimationFrame(this.update.bind(this));
}
onLoad(e)
{
this.audioController = new WebGLMyApps.Module.Utils.AudioController();
this.infomationController = new WebGLMyApps.Module.OverlayInformationViewController();
this.audioController.init(()=>
{
this.audioController.addMusicChangedEvent(this.infomationController, this.infomationController.onMusicChangedHandler);
this.threejsViewController = WebGLMyApps.Module.ThreejsViewController.getInstance();
this.timerController = new WebGLMyApps.Module.Utils.TimerController();
this.init();
});
}
onResize(e)
{
this.threejsViewController.onResize(e);
}
onKeyDown(e)
{
this.threejsViewController.onKeyDown(e);
this.actorsController.onKeyDown(e);
this.audioController.onKeyDown(e);
}
onTouchStart(e)
{
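// Map a touch to the space-bar key code (32) so taps go through the same
// audio key handler as the keyboard.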
e.keyCode = 32;
this.audioController.onKeyDown(e);
}
}
return new App();
})();
/*
* ModuleBase
*/
WebGLMyApps.Module.ModuleBase = (()=>
{
return class ModuleBase
{
constructor(){
this.view = null;
}
init(){}
update(){}
render(){}
getView(){
return this.view;
}
onResize(e){}
onLoad(e){}
onKeyDown(e){}
}
})();
/*
* ThreejsViewController
*/
WebGLMyApps.Module.ThreejsViewController = (()=>
{
let _instance = null;
let _symbol = Symbol();
return class ThreejsViewController extends WebGLMyApps.Module.ModuleBase
{
constructor(target)
{
super();
if ( _symbol !== target || _instance !== null ) {
throw new Error('You can get this instance by using getInstance()');
}
this.run = true;
this.viewport = {
width: window.innerWidth,
height: window.innerHeight,
targetDOM: document.getElementById('webgl')
};
this.props = {
actors: null,
scene: null,
camera: null,
controls: null,
renderer: null,
geometry: null,
material: null,
directional: null,
ambient: null
};
this.params = {
CAMERA_PARAMETER: {
fovy: 60,
aspect: this.viewport.width / this.viewport.height,
near: 0.1,
far: 800.0,
x: 0.0,
y: 10.0,
z: 70.0,
lookAt: new THREE.Vector3(0.0, 0.0, 0.0)
},
RENDERER_PARAMETER: {
clearColor: 0x000000,
width: this.viewport.width,
height: this.viewport.height
}
};
_instance = this;
return _instance;
}
static getInstance()
{
if ( _instance === null ) {
_instance = new ThreejsViewController(_symbol);
}
return _instance;
}
init()
{
let { actors, scene, camera, controls, renderer, geometry, material, directional, ambient } = this.props;
const { CAMERA_PARAMETER, RENDERER_PARAMETER } = this.params;
scene = new THREE.Scene();
// initialize camera
camera = new THREE.PerspectiveCamera(
CAMERA_PARAMETER.fovy,
CAMERA_PARAMETER.aspect,
CAMERA_PARAMETER.near,
CAMERA_PARAMETER.far
);
camera.position.x = CAMERA_PARAMETER.x;
camera.position.y = CAMERA_PARAMETER.y;
camera.position.z = CAMERA_PARAMETER.z;
camera.lookAt(CAMERA_PARAMETER.lookAt);
// initialize renderer
renderer = new THREE.WebGLRenderer();
renderer.setClearColor(new THREE.Color(RENDERER_PARAMETER.clearColor));
renderer.setSize(RENDERER_PARAMETER.width, RENDERER_PARAMETER.height);
this.viewport.targetDOM.appendChild(renderer.domElement);
controls = new THREE.OrbitControls(camera, renderer.domElement);
actors = new WebGLMyApps.Module.ActorsController();
actors.init(scene);
// initialize light
directional = new THREE.DirectionalLight(0xffffff);
ambient = new THREE.AmbientLight(0xffffff, 0.2);
scene.add(directional);
scene.add(ambient);
this.props = { actors, scene, camera, controls, renderer, geometry, material, directional, ambient };
return this.props;
}
update(_props)
{
if(!this.run){ return; }
this.props.actors.update(_props);
}
render()
{
const { scene, camera, renderer } = this.props;
renderer.render(scene, camera);
}
onResize(e)
{
this.viewport = Object.assign({}, this.viewport, {
width: window.innerWidth,
height: window.innerHeight
});
this.props.renderer.setSize(this.viewport.width, this.viewport.height, true);
}
onKeyDown(e)
{
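// The Escape key (keyCode 27) toggles this.run, pausing or resuming actor updates.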
this.run = e.keyCode === 27 ? !this.run : this.run;
}
}
})();
/*
* ActorsController
*/
WebGLMyApps.Module.ActorsController = (()=>
{
return class ActorsController extends WebGLMyApps.Module.ModuleBase
{
constructor()
{
super();
this.scene = null;
this.actorTimer = null;
this.actors = [];
this.initial = {
MiscellaneousAmount: 30,
MiscellaneousAmountLimit: 50
};
}
init(_scene)
{
let { actors } = this;
let _amount = 0;
this.scene = _scene;
this.actorTimer = new WebGLMyApps.Module.View.TimerView();
this.actorTimer.init();
this.scene.add(this.actorTimer.getView());
while(_amount++ < this.initial.MiscellaneousAmount)
{
setTimeout(()=>{
this.addActor();
}, 2000 + (_amount + 1) * 1000);
}
}
addActor()
{
if(this.actors.length >= this.initial.MiscellaneousAmountLimit){ return; }
const _actor = new WebGLMyApps.Module.View.MiscellaneousView();
_actor.init();
this.actors.push(_actor);
this.scene.add(_actor.getView());
}
removeActor()
{
if(this.actors.length < 1){ return; }
const _actor = this.actors[0];
this.scene.remove(_actor.getView());
_actor.destroy();
this.actors.shift();
}
update(_props)
{
this.actorTimer.update(_props);
this.actors.forEach(_actor => _actor.update(_props) );
}
onKeyDown(e)
{
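// The 'A' key (keyCode 65) adds an actor; the 'D' key (keyCode 68) removes the oldest one.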
if(e.keyCode === 65)
{
this.addActor();
}
if(e.keyCode === 68)
{
this.removeActor();
}
}
}
})();
/*
* ThreejsActorsViewBase
*/
WebGLMyApps.Module.View.ThreejsActorsViewBase = (()=>
{
return class ThreejsViewBase extends WebGLMyApps.Module.ModuleBase
{
constructor()
{
super();
this.enable = true;
this.lifeTime = {
current: 0,
limit: 360
};
this.acceleration = 1;
this.transform = {
position: new THREE.Vector3(0, 0, 0),
rotation: new THREE.Vector3(0, 0, 0),
scale: new THREE.Vector3(1, 1, 1)
};
this.transformDefault = {};
this.view = null;
this.geometry = null;
this.material = null;
this.texture = null;
}
init()
{
super.init();
this.transformDefault = Object.assign({}, this.transform);
}
getRandomRange(_min = 0, _max = 1)
{
return _min + Math.random() * (Math.abs(_min) + _max);
}
update(_props)
{
super.update(_props);
const { position, rotation, scale } = this.transform;
this.updateMotion(_props, position, rotation, scale);
this.lifeTime.current = ++this.lifeTime.current > this.lifeTime.limit ? 0 : this.lifeTime.current;
}
updateMotion(_props, position, rotation, scale)
{
const _level = _props.level / 1000;
const _velocity = {
position: new THREE.Vector3(
Math.sin(this.lifeTime.current * this.acceleration / 40) * _level * this.acceleration,
0,
Math.cos(this.lifeTime.current * this.acceleration / 40) * _level * this.acceleration
),
rotation: new THREE.Vector3(
this.acceleration / 20,
this.acceleration / 20,
0
),
scale: new THREE.Vector3(_level, _level, _level)
};
const _updatedTransform = this.setTransformToView(this.view, this.transform, _velocity);
this.transform.rotation = _updatedTransform.rotation;
}
setTransformToView(_view, _transform, _velocity)
{
const { position, rotation, scale } = _transform;
['x', 'y', 'z'].forEach(_key =>{
_view.position[_key] = position[_key] + _velocity.position[_key];
_view.rotation[_key] = rotation[_key] + _velocity.rotation[_key];
_view.scale[_key] = scale[_key] + _velocity.scale[_key];
});
return _view;
}
destroy()
{
if(this.geometry != null){ this.geometry.dispose(); }
if(this.material != null){ this.material.dispose(); }
if(this.texture != null){ this.texture.dispose(); }
// note: no "mesh" property is ever assigned on this class; the view itself is removed from the scene by the caller
}
}
})();
/*
* TimerView
*/
WebGLMyApps.Module.View.TimerView = (()=>
{
return class TimerView extends WebGLMyApps.Module.View.ThreejsActorsViewBase
{
constructor()
{
super();
const MATERIAL_PARAMETER = {
blending: THREE.NormalBlending,
color: 0xffeedd,
opacity: 0.8,
transparent: true
};
const _scale = 4;
this.geometry = new THREE.BoxGeometry(_scale, _scale, _scale);
this.material = new THREE.MeshBasicMaterial(MATERIAL_PARAMETER);
this.view = new THREE.Mesh(this.geometry, this.material);
this.acceleration = 0.5;
this.transform = {
position: new THREE.Vector3(0, 0, 0),
rotation: new THREE.Vector3(0, 0, 0),
scale: new THREE.Vector3(_scale, _scale, _scale),
};
this.timerController = null;
this.enable = false;
}
init()
{
super.init();
this.timerController = new WebGLMyApps.Module.Utils.TimerController();
setTimeout(()=>{ this.enable = true; }, 4000);
}
update(_props)
{
super.update(_props);
let _texture = null;
this.timerController.update();
_texture = this.timerController.getViewAsTexture();
this.view.material.opacity -= 0.04;
if(_texture != null)
{
this.view.material.opacity = 0.8;
this.view.material.map = _texture;
}
}
updateMotion(_props, position, rotation, scale)
{
const _level = this.enable ? _props.level / 1000 : 3;
const _velocity = {
position: new THREE.Vector3(0, 0, 0),
rotation: new THREE.Vector3(
// this.acceleration / 20,
0,
this.acceleration / 20,
0
),
scale: new THREE.Vector3(_level, _level, _level)
};
const _updatedTransform = this.setTransformToView(this.view, this.transform, _velocity);
this.transform.rotation = _updatedTransform.rotation;
}
}
})();
/*
* MiscellaneousView
*/
WebGLMyApps.Module.View.MiscellaneousView = (()=>
{
return class MiscellaneousView extends WebGLMyApps.Module.View.ThreejsActorsViewBase
{
constructor()
{
super();
const _r = this.getRandomRange;
const MATERIAL_PARAMETER = {
color: 0xffffff * _r(),
blending: THREE.AdditiveBlending,
opacity: 0.9,
transparent: true
};
const _scale = _r() + 1;
this.geometry = new THREE.BoxGeometry(_scale, _scale, _scale);
this.material = new THREE.MeshStandardMaterial(MATERIAL_PARAMETER);
this.view = new THREE.Mesh(this.geometry, this.material);
this.acceleration = _r() + 1;
this.transform = {
position: new THREE.Vector3(_r(-10, 10), _r(-10, 10), _r(-10, 10)),
rotation: new THREE.Vector3(_r(0, 360), _r(0, 360), _r(0, 360)),
scale: new THREE.Vector3(_scale, _scale, _scale),
}
}
}
})();
/*
* TimeController
*/
WebGLMyApps.Module.Utils.TimerController = (()=>
{
return class TimerController extends WebGLMyApps.Module.ModuleBase
{
constructor()
{
super();
this.current = "";
this.isUpdated = false;
this.props = {
view: {
canvas: null,
context: null,
font: 'Times new roman',
fillStyle: 'rgba(255, 255, 255, 255)',
textBaseline: 'top',
size: 60,
x: 0,
y: 0,
width: 256,
height: 256
}
}
this.init();
}
init()
{
let { canvas, context, font, fillStyle, textBaseline, size, x, y, width, height } = this.props.view;
canvas = document.createElement('canvas');
canvas.width = width;
canvas.height = height;
context = canvas.getContext('2d');
context.font = `${size}px ${font}`;
context.textBaseline = textBaseline;
context.fillStyle = fillStyle;
x = width / 2 - size * 1.75;
y = height / 2 - size / 2;
this.props.view = { canvas, context, font, fillStyle, textBaseline, size, x, y, width, height };
}
update()
{
const { context, x, y, width, height } = this.props.view;
const _text = this.getCurrentTime();
if(this.current === _text){ return; }
this.current = _text;
this.isUpdated = true;
context.clearRect(0, 0, width, height);
context.fillText(_text, x, y);
}
getCurrentTime()
{
const _date = new Date();
const _hour = (_date.getHours() > 9)? _date.getHours() : '0'+_date.getHours();
const _minute = (_date.getMinutes() > 9)? _date.getMinutes() : '0'+_date.getMinutes();
const _second = (_date.getSeconds() > 9)? _date.getSeconds() : '0'+_date.getSeconds();
return `${_hour}:${_minute}:${_second}`;
}
getViewAsTexture()
{
if(!this.isUpdated){ return null; }
const _texture = new THREE.CanvasTexture(this.props.view.canvas);
_texture.needsUpdate = true;
this.isUpdated = false;
return _texture;
}
}
})();
/*
* AudioController
*/
WebGLMyApps.Module.Utils.AudioController = ((AudioContext) =>
{
return class AudioController extends WebGLMyApps.Module.ModuleBase
{
constructor()
{
super();
this.enable = false;
this.isLoading = true;
this.level = 0;
this.audioProps = {
sourceRoot: './assets/audio/',
musicList: [
{
name: 'Ringin’',
author: 'Mr.Kimy',
url: 'http://dova-s.jp/bgm/play4331.html',
source: 'Ringin-f.mp3'
},
{
name: 'The Cope Of Night',
author: 'taron',
url: 'http://dova-s.jp/bgm/play4980.html',
source: 'The_Cope_Of_Night.mp3'
},
{
name: 'haze',
author: 'Choco Mint',
url: 'http://dova-s.jp/bgm/play6376.html',
source: 'haze.mp3'
},
{
name: 'chill the sun',
author: 'gimgigam',
url: 'http://dova-s.jp/bgm/play6521.html',
source: 'chill_the_sun.mp3'
},
],
current: 0
};
this.context = new AudioContext();
this.source = null;
this.analyser = this.context.createAnalyser();
this.analyser.fftSize = 256;
this.analyser.connect(this.context.destination);
this.onMusicChanged = ()=>{};
}
init(_callback)
{
this.loadAsset(0, true, _callback);
}
getLevel()
{
return this.level;
}
getAudioBuffer(_url, _callback)
{
const _request = new XMLHttpRequest();
_request.responseType = 'arraybuffer';
_request.onreadystatechange = ()=>
{
if (_request.readyState === 4) {
if (_request.status === 0 || _request.status === 200) {
this.context.decodeAudioData(_request.response, (_buffer)=>
{
_callback(_buffer);
});
}
}
};
_request.open('GET', _url, true);
_request.send('');
}
addMusicChangedEvent(_sender, _callback)
{
this.onMusicChanged = _callback.bind(_sender);
}
play(_buffer)
{
if(this.source != null)
{
this.source.stop();
this.source = null;
}
this.source = this.context.createBufferSource();
this.source.buffer = _buffer;
this.source.loop = true;
this.source.connect(this.analyser);
this.source.start(0);
}
update()
{
if(!this.enable){ return 0; }
let _spectrums = new Uint8Array(this.analyser.frequencyBinCount);
let _level = 0;
this.analyser.getByteFrequencyData(_spectrums);
_spectrums.forEach(_value => _level += _value);
this.level = _level;
return _level;
}
loadAsset(_index = 0, _isAutoPlay = true, _callback = null)
{
const { sourceRoot, musicList } = this.audioProps;
if(_index >= musicList.length)
{
this.audioProps.current = _index = 0;
}
this.isLoading = true;
this.getAudioBuffer(sourceRoot + musicList[_index].source, (_buffer)=>
{
this.enable = true;
this.isLoading = false;
if(_isAutoPlay)
{
this.play(_buffer);
}
if(_callback != null)
{
_callback();
}
this.onMusicChanged(this.audioProps.musicList[this.audioProps.current]);
});
}
getInfo()
{
return this.audioProps.musicList[this.audioProps.current];
}
onKeyDown(e)
{
if(e.keyCode === 32 && !this.isLoading)
{
this.loadAsset(++this.audioProps.current);
}
}
};
})(window.AudioContext || window.webkitAudioContext);
/*
* OverlayInformationViewController
*/
WebGLMyApps.Module.OverlayInformationViewController = (()=>
{
return class ActorsContOverlayInformationViewController extends WebGLMyApps.Module.ModuleBase
{
constructor()
{
super();
this.container = null;
this.info = {
music: null,
time: null
}
this.init();
}
init()
{
this.container = document.getElementById('info__container');
this.info.music = document.getElementById('info__music');
this.info.time = document.getElementById('info__time');
}
update(_props = null)
{
if(_props == null){ return; }
this.info.time.textContent = _props.time;
}
onMusicChangedHandler(e)
{
this.info.music.innerHTML = `${e.name} / ${e.author} <a href="${e.url}" target="_blank">(Source from DOVA-SYNDROME)</a>`;
}
}
})();
|
{
"content_hash": "aa0bd7c86dbec642b25843a1efc9f308",
"timestamp": "",
"source": "github",
"line_count": 839,
"max_line_length": 132,
"avg_line_length": 23.828367103694873,
"alnum_prop": 0.5661764705882353,
"repo_name": "tspringk/webgl-audio-clock",
"id": "ebd3eb7f32619527499887cae914349f3dd9ce52",
"size": "20128",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "script.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1082"
},
{
"name": "JavaScript",
"bytes": "2065492"
}
],
"symlink_target": ""
}
|
package gor
import (
raw "github.com/buger/gor/raw_socket_listener"
"log"
"net"
)
type RAWInput struct {
data chan []byte
address string
}
func NewRAWInput(address string) (i *RAWInput) {
i = new(RAWInput)
i.data = make(chan []byte)
i.address = address
go i.listen(address)
return
}
func (i *RAWInput) Read(data []byte) (int, error) {
buf := <-i.data
copy(data, buf)
return len(buf), nil
}
func (i *RAWInput) listen(address string) {
host, port, err := net.SplitHostPort(address)
if err != nil {
log.Fatal("input-raw: error while parsing address", err)
}
listener := raw.NewListener(host, port)
for {
// Receiving TCPMessage object
m := listener.Receive()
i.data <- m.Bytes()
}
}
func (i *RAWInput) String() string {
return "RAW Socket input: " + i.address
}
|
{
"content_hash": "0f4b5aa491f809eefa0d06ad744806ee",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 58,
"avg_line_length": 16.06,
"alnum_prop": 0.6562889165628891,
"repo_name": "alphagov/gor",
"id": "82b8a639aa39d33c22feb9643a77be33a4f73386",
"size": "803",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "input_raw.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "44078"
}
],
"symlink_target": ""
}
|
<HTML><HEAD>
<TITLE>Review for Doors, The (1991)</TITLE>
<LINK REL="STYLESHEET" TYPE="text/css" HREF="/ramr.css">
</HEAD>
<BODY BGCOLOR="#FFFFFF" TEXT="#000000">
<H1 ALIGN="CENTER" CLASS="title"><A HREF="/Title?0101761">Doors, The (1991)</A></H1><H3 ALIGN=CENTER>reviewed by<BR><A HREF="/ReviewsBy?Sandra+J.+Grossmann">Sandra J. Grossmann</A></H3><HR WIDTH="40%" SIZE="4">
<PRE> THE DOORS
A film review by Sandy Grossmann
Copyright 1991 by Sandra J. Grossmann</PRE>
<P>Cast: Val Kilmer, Meg Ryan, Kyle MacLachlan, Frank Whaley, Kevin Dillon
Director: Oliver Stone
Screenplay: Jay Randal Johnson and Oliver Stone</P>
<PRE>Synopsis: A brilliant, eerie, difficult-to-watch recreation of Jim
Morrison's inexorable dance toward death. Stone's work is
mature and restrained, and Kilmer is outstanding. Worth
seeing at full price on a big screen, but remember to bring
ear plugs: the sound will probably be way too loud. </PRE>
<P> "The show is about to begin." We look around at the audience, my
husband and I, wondering how many people in the theatre were old enough
to form sentences when Morrison died. Half? Less than half? Will the
young ones get it or will they be mesmerized?</P>
<P> By now you've probably seen pictures of Kilmer-as-Morrison. It's
uncanny. Tumbled locks of hair, slightly parted lips, the eyes
bemused... What was there about that face, about Morrison? Like Rudolf
Valentino, he had a femininity that attracted females. Like James Dean,
he had a rebellious streak that attracted males. And like Janis Joplin,
he had a self-destructive urge that attracted and repelled would-be
saviors.</P>
<P> THE DOORS is a disturbing film. A relentless soundtrack, visually
skewed images, warped colors, and shamanistic shapes combine into an
assault on the senses. Fascinated, we watch. Just as his fans did.
Like moths drawn to a bonfire. </P>
<P> Who lit the fire that consumed Morrison? Was it his fans? His
band? His girlfriend? His parents? His own visions? Was he an
overrated rock star? An underrated poet? Stone's film is like a
documentary: scene after scene replayed, vaguely familiar. We want to
hear Morrison explain what happened to him, but he doesn't. We want to
turn him away from that bathtub in Paris, but we can't. Morrison is
beautiful and he is hateful. He gives his soul to his fans and he
attacks his friends. He is naive and innocent one moment, brutal and
pretentious the next.</P>
<P> This film is about excesses and pushing past limits. In a sense,
the 60's were about that, too. Break the old boundaries: they no
longer apply. They were our parents' rules, but we are free of their
restrictions. We can do anything.</P>
<P> The limits, though, weren't the problem. Morrison and drugs and
death weren't the solution. </P>
<P> It is said that every generation must define meaning for itself,
and fortunately, most of us survive the experience, even if we fail the
test. Perhaps we use our leaders as scouts, cheering them as they blaze
the trail. We follow much later, if at all. In Morrison's case, well,
we watched Icarus fall from the sky, his wings melted. He plunges into
the sea -- in this case, a bathtub, in Paris, in 1971.</P>
<P> The closing scene is at Pere Lachaise, a cemetery in Paris. We see
the quiet graves of Chopin, Berlioz, Moliere. The last shot is of
Morrison's grave, graffiti-strewn and candlelit. It's an unquiet grave,
anointed by the adoration of fans who still worship his self-consuming
fire. </P>
<P> Yet some have learned the lesson. Will this film renew the lesson
or renew the blind passion? To his credit, Stone doesn't hit us in the
face with A Message. He sends us clippings instead and forces us to tie
the pieces together. He turns the camera on Morrison/Kilmer, frequently
showing us Kilmer's back so that we are, literally, following Morrison
to his death. </P>
<P> Kilmer is magnificent. He speaks pure babble as if it were
Shakespeare and he absolutely commands a crowd even when he can't focus
on it. (Kilmer wore black contact lenses to make his pupils look
dilated. The lenses had the added effect of screwing up his
equilibrium.) All of the concert shots feature Kilmer's vocals, which
means that he not only looks and acts like Morrison, he sounds like him,
too.</P>
<P> Kilmer is so good that the original band members had trouble
distinguishing some of the cuts Val sings from ones sung by Jim. When
viewing some of the footage, guitarist Robby Krieger said, "I'm really
glad that we finally got 'The End.' We never got a recording of that
live with Jim. Now we've got it." Kilmer has recreated Morrison. It's
eerie. Frightening, like a voice from the grave. I ask again: will the
young ones get it or will they be mesmerized?</P>
<P> The camera, you see, is in love with Morrison/Kilmer--it can't
resist him, and neither can the audience. Morrison's dazed eyes have
come back to haunt another generation. Let's hope this generation knows
a dead-end when they see one.</P>
<P>Sandy Grossmann <A HREF="mailto:sandyg@tekchips.labs.tek.com">sandyg@tekchips.labs.tek.com</A></P>
<PRE>.
</PRE>
<HR><P CLASS=flush><SMALL>The review above was posted to the
<A HREF="news:rec.arts.movies.reviews">rec.arts.movies.reviews</A> newsgroup (<A HREF="news:de.rec.film.kritiken">de.rec.film.kritiken</A> for German reviews).<BR>
The Internet Movie Database accepts no responsibility for the contents of the
review and has no editorial control. Unless stated otherwise, the copyright
belongs to the author.<BR>
Please direct comments/criticisms of the review to relevant newsgroups.<BR>
Broken URLs in the reviews are the responsibility of the author.<BR>
The formatting of the review is likely to differ from the original due
to ASCII to HTML conversion.
</SMALL></P>
<P ALIGN=CENTER>Related links: <A HREF="/Reviews/">index of all rec.arts.movies.reviews reviews</A></P>
</P></BODY></HTML>
|
{
"content_hash": "59cad05c11cf6c0e40a586e09dbe94fd",
"timestamp": "",
"source": "github",
"line_count": 100,
"max_line_length": 210,
"avg_line_length": 62.07,
"alnum_prop": 0.7214435314966973,
"repo_name": "xianjunzhengbackup/code",
"id": "e3ec15213f80400ab1b134f2a4a10673a651e101",
"size": "6207",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data science/machine_learning_for_the_web/chapter_4/movie/0945.html",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "BitBake",
"bytes": "113"
},
{
"name": "BlitzBasic",
"bytes": "256"
},
{
"name": "CSS",
"bytes": "49827"
},
{
"name": "HTML",
"bytes": "157006325"
},
{
"name": "JavaScript",
"bytes": "14029"
},
{
"name": "Jupyter Notebook",
"bytes": "4875399"
},
{
"name": "Mako",
"bytes": "2060"
},
{
"name": "Perl",
"bytes": "716"
},
{
"name": "Python",
"bytes": "874414"
},
{
"name": "R",
"bytes": "454"
},
{
"name": "Shell",
"bytes": "3984"
}
],
"symlink_target": ""
}
|
package edu.cmu.iadss.wrap_monitor;
import java.io.IOException;
import java.nio.file.FileSystems;
import java.nio.file.Path;
/**
* Hello world!
*
*/
public class App
{
public static void main( String[] args ) throws IOException, InterruptedException
{
// start the file path watcher
String taskQueueName = "dicom_wrap_queue";
String hostName = "localhost";
FileWatcher watcher = new FileWatcher(taskQueueName, hostName);
Path pathToWatch = FileSystems.getDefault().getPath("/home/vagrant/Data/wrap_queue");
String extensionToWatch = ".json";
watcher.Watch(pathToWatch, extensionToWatch);
}
}
|
{
"content_hash": "b68c88df0172f07c115a06e984200daa",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 93,
"avg_line_length": 28.125,
"alnum_prop": 0.6755555555555556,
"repo_name": "BrianKolowitz/IADSS",
"id": "b5035621020aec81c708f046d7a9aadcd1458dea",
"size": "675",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "wrap_monitor/src/main/java/edu/cmu/iadss/wrap_monitor/App.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "179120"
},
{
"name": "Java",
"bytes": "25588"
},
{
"name": "JavaScript",
"bytes": "112124"
},
{
"name": "Python",
"bytes": "6078"
},
{
"name": "Ruby",
"bytes": "301"
},
{
"name": "Shell",
"bytes": "95372"
}
],
"symlink_target": ""
}
|
---
uid: SolidEdgeFramework.RibbonBarControl.Visible
summary:
remarks:
---
|
{
"content_hash": "e7b3450d3e021ad3f37f9c05ead66f6c",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 48,
"avg_line_length": 15.6,
"alnum_prop": 0.7435897435897436,
"repo_name": "SolidEdgeCommunity/docs",
"id": "058496a8db163d725e4a7b08564e5bd3beda1ace",
"size": "80",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "docfx_project/apidoc/SolidEdgeFramework.RibbonBarControl.Visible.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "38"
},
{
"name": "C#",
"bytes": "5048212"
},
{
"name": "C++",
"bytes": "2265"
},
{
"name": "CSS",
"bytes": "148"
},
{
"name": "PowerShell",
"bytes": "180"
},
{
"name": "Smalltalk",
"bytes": "1996"
},
{
"name": "Visual Basic",
"bytes": "10236277"
}
],
"symlink_target": ""
}
|
A Circle is similar to a Polygon in that you can define custom colors, weights, and opacities for the edge of the
circle (the "stroke") and custom colors and opacities for the area within the enclosed region (the "fill"). Unlike a
Polygon, you do not define paths for a Circle. Instead, a circle has two additional properties which define its shape:
the center of the circle and the radius of the circle, in meters.
## Build your circle
### By configuration file
By default, the bundle doesn't need any configuration. Most of the services have a default configuration which allows
you to use the given objects as they are. The ``fungio_google_map.circle`` service is one of them. The configuration
described below is this default configuration.
```yaml
# app/config/config.yml
fungio_google_map:
circle:
# Your own circle class
class: "My\Fucking\Circle"
# Your own circle helper class
helper_class: "My\Fucking\CircleHelper"
# Prefix used for the generation of the circle javascript variable
prefix_javascript_variable: "circle_"
# Circle center
center:
latitude: 0
longitude: 0
no_wrap: true
# Circle radius
radius: 1
# Custom circle options
# By default, there is no options
options:
clickable: false
strokeWeight: 2
```
``` php
<?php
// Requests the fungio google map circle service
$circle = $this->get('fungio_google_map.circle');
```
### By coding
If you want to learn more, you can read
[this documentation](https://github.com/fungio/fungio-google-map/blob/master/doc/usage/overlays/circle.md).
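For a quick orientation, here is a minimal sketch of building a circle by hand instead of fetching it from the container.
The namespaces and method names below (`Circle`, `Coordinate`, `setCenter`, `setRadius`, `setOptions`,
`setPrefixJavascriptVariable`) are assumptions mirroring the configuration keys above; they are not verified against the
current fungio/fungio-google-map API, so check them against the linked documentation before relying on them.
``` php
<?php

// Hedged sketch: class and method names are assumed from the configuration
// keys shown above and may differ in the actual library.
use Fungio\GoogleMap\Base\Coordinate;
use Fungio\GoogleMap\Overlays\Circle;

$circle = new Circle();
$circle->setPrefixJavascriptVariable('circle_');
$circle->setCenter(new Coordinate(0, 0, true)); // latitude, longitude, no_wrap
$circle->setRadius(1);                          // radius in meters
$circle->setOptions([
    'clickable'    => false,
    'strokeWeight' => 2,
]);
```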
|
{
"content_hash": "f1ddecf9e602a157def11aed43865027",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 118,
"avg_line_length": 30.796296296296298,
"alnum_prop": 0.6843054720384847,
"repo_name": "fungio/FungioGoogleMapBundle",
"id": "6b2528b2483ddf535a8f2e311c1ec4403fb0726f",
"size": "1673",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Resources/doc/usage/overlays/circle.md",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "590219"
},
{
"name": "Twig",
"bytes": "103"
}
],
"symlink_target": ""
}
|
/* eslint import/no-named-as-default: off */
import { init } from './snabbdom/snabbdom';
import attributesModule from './snabbdom/modules/attributes';
import propsModule from './snabbdom/modules/props';
import eventListenersModule from './eventslisteners';
const patch = init([
attributesModule,
propsModule,
eventListenersModule,
]);
export default patch;
|
{
"content_hash": "1a256448730718e8e0138a0e69bed2da",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 61,
"avg_line_length": 28.076923076923077,
"alnum_prop": 0.7589041095890411,
"repo_name": "AdrianV/Framework7",
"id": "740c3db7beff102e40477376d9bf5b27550dd527",
"size": "365",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "packages/core/modules/component/patch.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1016734"
},
{
"name": "HTML",
"bytes": "1022735"
},
{
"name": "JavaScript",
"bytes": "1334861"
},
{
"name": "PHP",
"bytes": "1056"
}
],
"symlink_target": ""
}
|
/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE122_Heap_Based_Buffer_Overflow__c_CWE805_wchar_t_loop_41.c
Label Definition File: CWE122_Heap_Based_Buffer_Overflow__c_CWE805.string.label.xml
Template File: sources-sink-41.tmpl.c
*/
/*
* @description
* CWE: 122 Heap Based Buffer Overflow
* BadSource: Allocate using malloc() and set data pointer to a small buffer
* GoodSource: Allocate using malloc() and set data pointer to a large buffer
* Sink: loop
* BadSink : Copy string to data using a loop
* Flow Variant: 41 Data flow: data passed as an argument from one function to another in the same source file
*
* */
#include "std_testcase.h"
#include <wchar.h>
#ifndef OMITBAD
void CWE122_Heap_Based_Buffer_Overflow__c_CWE805_wchar_t_loop_41_badSink(wchar_t * data)
{
{
size_t i;
wchar_t source[100];
wmemset(source, L'C', 100-1); /* fill with L'C's */
source[100-1] = L'\0'; /* null terminate */
/* POTENTIAL FLAW: Possible buffer overflow if source is larger than data */
for (i = 0; i < 100; i++)
{
data[i] = source[i];
}
data[100-1] = L'\0'; /* Ensure the destination buffer is null terminated */
printWLine(data);
free(data);
}
}
void CWE122_Heap_Based_Buffer_Overflow__c_CWE805_wchar_t_loop_41_bad()
{
wchar_t * data;
data = NULL;
/* FLAW: Allocate and point data to a small buffer that is smaller than the large buffer used in the sinks */
data = (wchar_t *)malloc(50*sizeof(wchar_t));
data[0] = L'\0'; /* null terminate */
CWE122_Heap_Based_Buffer_Overflow__c_CWE805_wchar_t_loop_41_badSink(data);
}
#endif /* OMITBAD */
#ifndef OMITGOOD
void CWE122_Heap_Based_Buffer_Overflow__c_CWE805_wchar_t_loop_41_goodG2BSink(wchar_t * data)
{
{
size_t i;
wchar_t source[100];
wmemset(source, L'C', 100-1); /* fill with L'C's */
source[100-1] = L'\0'; /* null terminate */
/* POTENTIAL FLAW: Possible buffer overflow if source is larger than data */
for (i = 0; i < 100; i++)
{
data[i] = source[i];
}
data[100-1] = L'\0'; /* Ensure the destination buffer is null terminated */
printWLine(data);
free(data);
}
}
/* goodG2B uses the GoodSource with the BadSink */
static void goodG2B()
{
wchar_t * data;
data = NULL;
/* FIX: Allocate and point data to a large buffer that is at least as large as the large buffer used in the sink */
data = (wchar_t *)malloc(100*sizeof(wchar_t));
data[0] = L'\0'; /* null terminate */
CWE122_Heap_Based_Buffer_Overflow__c_CWE805_wchar_t_loop_41_goodG2BSink(data);
}
void CWE122_Heap_Based_Buffer_Overflow__c_CWE805_wchar_t_loop_41_good()
{
goodG2B();
}
#endif /* OMITGOOD */
/* Below is the main(). It is only used when building this testcase on
* its own for testing or for building a binary to use in testing binary
* analysis tools. It is not used when compiling all the testcases as one
* application, which is how source code analysis tools are tested.
*/
#ifdef INCLUDEMAIN
int main(int argc, char * argv[])
{
/* seed randomness */
srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
printLine("Calling good()...");
CWE122_Heap_Based_Buffer_Overflow__c_CWE805_wchar_t_loop_41_good();
printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
printLine("Calling bad()...");
CWE122_Heap_Based_Buffer_Overflow__c_CWE805_wchar_t_loop_41_bad();
printLine("Finished bad()");
#endif /* OMITBAD */
return 0;
}
#endif
|
{
"content_hash": "6cedabae1484946f17fae56175395428",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 119,
"avg_line_length": 31.913793103448278,
"alnum_prop": 0.6207455429497569,
"repo_name": "maurer/tiamat",
"id": "0d462fdb162a4e6ed25298a6c6a3076e64583f1f",
"size": "3702",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "samples/Juliet/testcases/CWE122_Heap_Based_Buffer_Overflow/s08/CWE122_Heap_Based_Buffer_Overflow__c_CWE805_wchar_t_loop_41.c",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
ENV["RAILS_ENV"] = "test"
# instead of trying to control all logging, just delete the file on start
dummy_app_log_pathname = 'test/dummy/log/test.log'
if File.exist?(dummy_app_log_pathname)
puts "Deleting #{dummy_app_log_pathname}"
File.delete(dummy_app_log_pathname)
end
#$:.unshift File.dirname(__FILE__)
#require 'dummy/config/environment'
#require 'dummy/db/schema'
require 'rubygems'
require 'bundler/setup'
require 'combustion'
Combustion.path = 'test/dummy'
Combustion.initialize!(:all) do
config.active_support.test_order = :sorted
end
ActiveRecord::Base.class_eval do
include ActiveModel::ForbiddenAttributesProtection, CanCan::ModelAdditions
end
ActionController::Parameters.action_on_unpermitted_parameters = :raise
Irie.debug = false
require 'rails/test_help'
#$:.unshift File.expand_path('../support', __FILE__)
#Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each { |f| require f }
puts "Testing Rails v#{Rails.version}"
Rails.backtrace_cleaner.remove_silencers!
require 'database_cleaner'
DatabaseCleaner.strategy = :transaction
require 'irie'
#Irie.debug = true
#ActiveRecord::Base.logger = Logger.new(STDOUT)
#ActionController::Base.logger = Logger.new(STDOUT)
#ActionController::Base.logger.level = Logger::DEBUG
# important: we want to ensure that if there is any problem with one class load affecting another
# (e.g. with helper_method usage for url and path helpers) that we expose that by loading all
# controller bodies in the beginning via eager loading everything
Rails.application.eager_load!
# Debug routes in Appraisals, since can't just `rake routes`.
#all_routes = Rails.application.routes.routes
#require 'action_dispatch/routing/inspector'
#inspector = ActionDispatch::Routing::RoutesInspector.new(all_routes)
#puts inspector.format(ActionDispatch::Routing::ConsoleFormatter.new)
class SomeSubtypeOfStandardError < StandardError
end
def xtest(*args, &block); end
# based on http://stackoverflow.com/a/5492207/178651
class QueryCollector
cattr_accessor :list
self.list = []
def call(*args)
self.class.list << args
end
def self.reset
self.list = []
end
def self.all
self.list
end
end
ActiveSupport::Notifications.subscribe('sql.active_record', QueryCollector.new)
|
{
"content_hash": "df6cf823851455ec81174eff74b850bf",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 97,
"avg_line_length": 28.1625,
"alnum_prop": 0.7549933422103862,
"repo_name": "FineLinePrototyping/irie",
"id": "b5a6def45e1775530fd6440484e9bf2bbdb7682d",
"size": "2253",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_helper.rb",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "295"
},
{
"name": "Ruby",
"bytes": "88817"
}
],
"symlink_target": ""
}
|
import { namespace as ns } from '../namespace';
export const namespace = `${ns}_personal-notes`;
|
{
"content_hash": "76f96e6b0582bc14d576480c27c569eb",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 48,
"avg_line_length": 48.5,
"alnum_prop": 0.6907216494845361,
"repo_name": "ofzza/github-projects-extension",
"id": "a4024c0d7427a0d429a7fa23939737dcef2cfdc5",
"size": "339",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/content/github/issue-personal-notes/namespace.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "14944"
},
{
"name": "JavaScript",
"bytes": "38646"
}
],
"symlink_target": ""
}
|
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.apimanagement.models;
import com.azure.resourcemanager.apimanagement.fluent.models.ApiManagementServiceGetSsoTokenResultInner;
/** An immutable client-side representation of ApiManagementServiceGetSsoTokenResult. */
public interface ApiManagementServiceGetSsoTokenResult {
/**
* Gets the redirectUri property: Redirect URL to the Publisher Portal containing the SSO token.
*
* @return the redirectUri value.
*/
String redirectUri();
/**
* Gets the inner com.azure.resourcemanager.apimanagement.fluent.models.ApiManagementServiceGetSsoTokenResultInner
* object.
*
* @return the inner object.
*/
ApiManagementServiceGetSsoTokenResultInner innerModel();
}
|
{
"content_hash": "fc6999d407c746be51b20d00edf4ff43",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 118,
"avg_line_length": 36.32,
"alnum_prop": 0.7599118942731278,
"repo_name": "Azure/azure-sdk-for-java",
"id": "6502d76f74bc9ebf59e66c109be73d1fdf5653bc",
"size": "908",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/apimanagement/azure-resourcemanager-apimanagement/src/main/java/com/azure/resourcemanager/apimanagement/models/ApiManagementServiceGetSsoTokenResult.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "8762"
},
{
"name": "Bicep",
"bytes": "15055"
},
{
"name": "CSS",
"bytes": "7676"
},
{
"name": "Dockerfile",
"bytes": "2028"
},
{
"name": "Groovy",
"bytes": "3237482"
},
{
"name": "HTML",
"bytes": "42090"
},
{
"name": "Java",
"bytes": "432409546"
},
{
"name": "JavaScript",
"bytes": "36557"
},
{
"name": "Jupyter Notebook",
"bytes": "95868"
},
{
"name": "PowerShell",
"bytes": "737517"
},
{
"name": "Python",
"bytes": "240542"
},
{
"name": "Scala",
"bytes": "1143898"
},
{
"name": "Shell",
"bytes": "18488"
},
{
"name": "XSLT",
"bytes": "755"
}
],
"symlink_target": ""
}
|
<?php
use Cake\Core\Configure;
$this->layout = 'dev_error';
$pluginPath = Configure::read('App.paths.plugins.0');
$this->assign('title', 'Missing Plugin');
$this->assign('templateName', 'missing_plugin.php');
$this->start('subheading');
?>
<strong>Error</strong>
The application is trying to load a file from the <em><?= h($plugin) ?></em> plugin.
<br>
<br>
Make sure your plugin <em><?= h($plugin) ?></em> is in the <?= h($pluginPath) ?> directory and was loaded.
<?php $this->end() ?>
<?php $this->start('file') ?>
<?php
$code = <<<PHP
<?php
// src/Application.php
public function bootstrap()
{
parent::bootstrap();
\$this->addPlugin('{$plugin}');
}
PHP;
?>
<div class="code-dump"><?php highlight_string($code) ?></div>
<?php $this->end() ?>
|
{
"content_hash": "4e68d10ba576f6d28ee0dac976237ee4",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 110,
"avg_line_length": 21.10810810810811,
"alnum_prop": 0.6120358514724712,
"repo_name": "dakota/cakephp",
"id": "1d6f3fdfa50b5e6d2da0354b356816dfe489a3e4",
"size": "1366",
"binary": false,
"copies": "2",
"ref": "refs/heads/4.x",
"path": "templates/Error/missing_plugin.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "15730"
},
{
"name": "HTML",
"bytes": "405"
},
{
"name": "Hack",
"bytes": "752"
},
{
"name": "JavaScript",
"bytes": "171"
},
{
"name": "Makefile",
"bytes": "6407"
},
{
"name": "PHP",
"bytes": "10180619"
},
{
"name": "Shell",
"bytes": "723"
}
],
"symlink_target": ""
}
|
package org.qi4j.entitystore.sql;
import org.apache.derby.iapi.services.io.FileUtil;
import org.junit.BeforeClass;
import org.qi4j.api.common.Visibility;
import org.qi4j.bootstrap.AssemblyException;
import org.qi4j.bootstrap.ModuleAssembly;
import org.qi4j.entitystore.sql.assembly.SQLiteEntityStoreAssembler;
import org.qi4j.library.sql.assembly.DataSourceAssembler;
import org.qi4j.library.sql.dbcp.DBCPDataSourceServiceAssembler;
import org.qi4j.test.EntityTestAssembler;
import org.qi4j.test.entity.AbstractEntityStoreTest;
import org.qi4j.valueserialization.orgjson.OrgJsonValueSerializationAssembler;
import static org.qi4j.test.util.Assume.assumeNoIbmJdk;
public class SQLiteEntityStoreTest
extends AbstractEntityStoreTest
{
@BeforeClass
public static void beforeClass_IBMJDK()
{
assumeNoIbmJdk();
}
@Override
// START SNIPPET: assembly
public void assemble( ModuleAssembly module )
throws AssemblyException
{
// END SNIPPET: assembly
super.assemble( module );
ModuleAssembly config = module.layer().module( "config" );
new EntityTestAssembler().assemble( config );
new OrgJsonValueSerializationAssembler().assemble( module );
// START SNIPPET: assembly
// DataSourceService
new DBCPDataSourceServiceAssembler().
identifiedBy( "sqlite-datasource-service" ).
visibleIn( Visibility.module ).
withConfig( config ).
withConfigVisibility( Visibility.layer ).
assemble( module );
// DataSource
new DataSourceAssembler().
withDataSourceServiceIdentity( "sqlite-datasource-service" ).
identifiedBy( "sqlite-datasource" ).
visibleIn( Visibility.module ).
withCircuitBreaker().
assemble( module );
// SQL EntityStore
new SQLiteEntityStoreAssembler().
visibleIn( Visibility.application ).
withConfig( config ).
withConfigVisibility( Visibility.layer ).
assemble( module );
}
// END SNIPPET: assembly
@Override
public void tearDown()
throws Exception
{
try
{
FileUtil.removeDirectory( "target/qi4j-data" );
}
finally
{
super.tearDown();
}
}
}
|
{
"content_hash": "aac34c0943d27c7d7f68a003a42448f5",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 78,
"avg_line_length": 30.358974358974358,
"alnum_prop": 0.6621621621621622,
"repo_name": "ramtej/Qi4j.Feature.Spatial",
"id": "3ce46a0b8958922e3b8b1c92997f947f325dec4c",
"size": "2958",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "extensions/entitystore-sql/src/test/java/org/qi4j/entitystore/sql/SQLiteEntityStoreTest.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
module CsrHelper
end
|
{
"content_hash": "5ba818e7bd9c8240c708f0fb100b28da",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 16,
"avg_line_length": 10.5,
"alnum_prop": 0.8571428571428571,
"repo_name": "dei79/sslfun",
"id": "d7611d10f730a03069ebdbcea28623b8cb7bdd74",
"size": "21",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/dummy/app/helpers/csr_helper.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "935"
},
{
"name": "Ruby",
"bytes": "28089"
}
],
"symlink_target": ""
}
|
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="ssLanguageName" comment="User interface language name in the UI language">Español</string>
<string name="ssApplicationTitle">Instalador de $APPNAME $VERSION</string>
<string name="ssTitle">Instalar $APPNAME $VERSION</string>
<string name="ssInstallSuccess">$APPNAME $VERSION se ha instalado correctamente.</string>
<string name="ssCancelQuery">¿Estás seguro de que deseas cancelar la instalación de $APPNAME?</string>
<string name="ssBootstrapExtractingBundle">Extrayendo paquete...</string>
<string name="ssBootstrapCheckingPackages">Comprobando paquetes...</string>
<string name="ssBootstrapCheckingForUpdates">Buscando actualizaciones en línea...</string>
<string name="ssBootstrapCheckingInstalledVersions">Comprobando versiones instaladas...</string>
<string name="ssBootstrapReady">Finalizando...</string>
<!-- Parameters: %0:s: version, %1:s: ssActionDownload -->
<string name="ssActionInstallKeyman">• $APPNAME %0:s %1:s</string>
<!-- Parameters: %0:s: package name %1:s: version %2:s: ssActionDownload -->
<string name="ssActionInstallPackage">• %0:s %1:s %2:s</string>
<!-- Parameters: %0:s: package name %1:s: version %2:s: language %3:s: ssActionDownload -->
<string name="ssActionInstallPackageLanguage">• %0:s %1:s para %2:s %3:s</string>
<string name="ssActionNothingToInstall">No hay nada para instalar.</string>
<!-- Parameters: %0:s download size -->
<string name="ssActionDownload">(Descarga de %0:s)</string>
<string name="ssActionInstall">Se instalará:</string>
<string name="ssFreeCaption">$APPNAME $VERSION es gratis y de código abierto</string>
<string name="ssLicenseLink">Lee&r la licencia</string>
<string name="ssInstallOptionsLink">&Opciones de instalación</string>
<string name="ssMessageBoxTitle">Instalador de $APPNAME</string>
<string name="ssOkButton">Aceptar</string>
<string name="ssInstallButton">&Instalar</string>
<string name="ssCancelButton">Cancelar</string>
<string name="ssExitButton">Sa&lir</string>
<string name="ssStatusInstalling">Instalando $APPNAME</string>
<string name="ssStatusRemovingOlderVersions">Eliminando versiones anteriores</string>
<string name="ssStatusComplete">Instalación completa</string>
<string name="ssQueryRestart">Debes reiniciar Windows antes de que se complete la instalación. Cuando reinicies Windows, la instalación continuará.
¿Reiniciar ahora?</string>
<string name="ssErrorUnableToAutomaticallyRestart">No se pudo reiniciar Windows automáticamente. Deberías reiniciar Windows antes de intentar iniciar $APPNAME.</string>
<string name="ssMustRestart">Debes reiniciar Windows para completar la instalación. Cuando reinicies Windows, la instalación finalizará.</string>
<string name="ssOldOsVersionInstallKeyboards">$APPNAME $VERSION requiere Windows 7 o posterior para instalar, pero se ha detectado Keyman Desktop 7, 8 o 9. ¿Deseas instalar los teclados incluidos en este instalador en la versión instalada de Keyman Desktop?</string>
<string name="ssOldOsVersionDownload">Esta versión de $APPNAME requiere Windows 7 o posterior para instalarse. ¿Deseas descargar Keyman Desktop 8?</string>
<string name="ssOptionsTitle">Opciones de instalación</string>
<string name="ssOptionsTitleInstallOptions">Opciones de instalación</string>
<string name="ssOptionsTitleDefaultKeymanSettings">Configuración predeterminada de $APPNAME</string>
<string name="ssOptionsTitleSelectModulesToInstall">Módulos a instalar o actualizar</string>
<string name="ssOptionsTitleAssociatedKeyboardLanguage">Idioma asociado al teclado</string>
<string name="ssOptionsTitleLocation">Versión a instalar</string>
<string name="ssOptionsStartWithWindows">Iniciar $APPNAME cuando inicie Windows</string>
<string name="ssOptionsStartAfterInstall">Iniciar $APPNAME cuando finalice la instalación</string>
<string name="ssOptionsCheckForUpdates">Buscar actualizaciones en línea periódicamente</string>
<string name="ssOptionsUpgradeKeyboards">Actualizar teclados instalados con versiones anteriores a la versión $VERSION</string>
<string name="ssOptionsAutomaticallyReportUsage">Compartir estadísticas de uso anónimas con keyman.com</string>
<!-- Parameters: %0:s: version of installer (may differ from Keyman version) -->
<string name="ssInstallerVersion">Versión del instalador: %0:s</string>
<string name="ssOptionsInstallKeyman">Instalar $APPNAME</string>
<string name="ssOptionsUpgradeKeyman">Actualizar $APPNAME</string>
<!-- Parameters: %0:s: installed version -->
<string name="ssOptionsKeymanAlreadyInstalled">$APPNAME %0:s ya está instalado.</string>
<!-- Parameters: %0:s: version, %1:s: size -->
<string name="ssOptionsDownloadKeymanVersion">Descargar versión %0:s (%1:s)</string>
<!-- Parameters: %0:s: version -->
<string name="ssOptionsInstallKeymanVersion">Versión %0:s</string>
<!-- Parameters: %0:s: package name %1:s -->
<string name="ssOptionsInstallPackage">Instalar %0:s</string>
<!-- Parameters: %0:s: package version %1:s package size -->
<string name="ssOptionsDownloadPackageVersion">Descargar versión %0:s (%1:s)</string>
<!-- Parameters: %0:s: package version -->
<string name="ssOptionsInstallPackageVersion">Versión %0:s</string>
<!-- Parameters: %0:s package name -->
<string name="ssOptionsPackageLanguageAssociation">Selecciona el idioma que deseas asociar con el teclado %0:s</string>
<string name="ssOptionsDefaultLanguage">Idioma predeterminado</string>
<!-- Parameters: %0:s: filename -->
<string name="ssDownloadingTitle">Descargando %0:s</string>
<!-- Parameters: %0:s: filename -->
<string name="ssDownloadingText">Descargando %0:s</string>
<string name="ssOffline">El instalador de $APPNAME no pudo conectarse a keyman.com para descargar los recursos adicionales.
Comprueba que estás en línea y da permiso al instalador de $APPNAME para acceder a Internet en la configuración de tu cortafuegos.
Haz clic en Abortar para salir del instalador, en Reintentar para descargar los recursos de nuevo o en Ignorar para continuar sin conexión.</string>
</resources>
|
{
"content_hash": "9262deecb084e2d283aa8c8d90cde630",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 268,
"avg_line_length": 77.0875,
"alnum_prop": 0.7653640343765202,
"repo_name": "tavultesoft/keymanweb",
"id": "b6a634d654c53a205e07cb506bb7cd18be039c55",
"size": "6216",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "windows/src/desktop/setup/locale/es-419/strings.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "3974"
},
{
"name": "CSS",
"bytes": "29800"
},
{
"name": "HTML",
"bytes": "1145"
},
{
"name": "JavaScript",
"bytes": "556520"
}
],
"symlink_target": ""
}
|
package com.techhounds.robot.commands.driving;
import com.techhounds.robot.commands.CommandBase;
import com.techhounds.robot.subsystems.DriveSubsystem;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
/**
*
* @author Tiger Huang
*/
public class HomeModules extends CommandBase {
private DriveSubsystem drive;
public HomeModules() {
super("HomeModules");
drive = DriveSubsystem.getInstance();
requires(drive);
setInterruptible(true);
}
// Called just before this Command runs the first time
protected void initialize() {
SmartDashboard.putBoolean("Homing", true);
drive.homeModulesInit();
}
// Called repeatedly when this Command is scheduled to run
protected void execute() {
drive.homeModules();
}
// Make this return true when this Command no longer needs to run execute()
protected boolean isFinished() {
return drive.doneHomeModules();
}
// Called once after isFinished returns true
protected void end() {
SmartDashboard.putBoolean("Homing", false);
}
// Called when another command which requires one or more of the same
// subsystems is scheduled to run
protected void interrupted() {
end();
}
}
|
{
"content_hash": "8f48718c9536d0fd109e4219098b721b",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 79,
"avg_line_length": 26.854166666666668,
"alnum_prop": 0.674166020170675,
"repo_name": "frc868/swerve-2014",
"id": "2be30474661d9c5deb543f5597de2f25e21b55b3",
"size": "1289",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/com/techhounds/robot/commands/driving/HomeModules.java",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "50313"
}
],
"symlink_target": ""
}
|
<?xml version="1.0" encoding="utf-8"?>
<!--
~ Copyright (C) 2015 Simon Vig Therkildsen
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<net.simonvt.cathode.common.widget.AppBarScrollParent xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:layout_width="match_parent"
android:layout_height="match_parent">
<include layout="@layout/appbar_backdrop"/>
<androidx.swiperefreshlayout.widget.SwipeRefreshLayout
android:id="@+id/swipeRefresh"
android:layout_width="match_parent"
android:layout_height="match_parent">
<net.simonvt.cathode.common.widget.ObservableScrollView
android:id="@+id/scrollView"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:fillViewport="true">
<net.simonvt.cathode.common.widget.AppBarRelativeLayout
android:id="@+id/appBarLayout"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:fitsSystemWindows="@bool/drawsBehindStatusBar"
android:orientation="vertical"
app:expandedTitleMarginEnd="@dimen/appBarTitleMarginEnd"
app:expandedTitleMarginStart="@dimen/appBarTitleMarginStart">
<FrameLayout
android:id="@id/appBarContent"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_alignParentTop="true"
android:layout_marginEnd="@dimen/appBarContentMargin"
android:layout_marginStart="@dimen/appBarContentMargin"
android:layout_marginTop="@dimen/appBarContentOffset"
android:background="@color/contentBackground"
android:orientation="vertical"
android:paddingBottom="16dp"/>
<include layout="@layout/appbar_toolbar"/>
</net.simonvt.cathode.common.widget.AppBarRelativeLayout>
</net.simonvt.cathode.common.widget.ObservableScrollView>
</androidx.swiperefreshlayout.widget.SwipeRefreshLayout>
</net.simonvt.cathode.common.widget.AppBarScrollParent>
|
{
"content_hash": "7b92da867020bb1fe0e73fca15db599a",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 112,
"avg_line_length": 44.40983606557377,
"alnum_prop": 0.7032115171650055,
"repo_name": "SimonVT/cathode",
"id": "4dbefd2bb0765946b3b4283521438245747836d9",
"size": "2709",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cathode-common/src/main/res/layout-land/fragment_appbar_refreshable.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1411948"
},
{
"name": "Kotlin",
"bytes": "1180414"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from .models import AboutMe, ContactType, EmailContact, IMContact, PhoneContact, Resume, WebContact
class TypeAdmin(admin.ModelAdmin):
fields = ["display_order", "type"]
admin.site.register(EmailContact)
admin.site.register(PhoneContact)
admin.site.register(WebContact)
admin.site.register(IMContact)
admin.site.register(Resume)
admin.site.register(ContactType, TypeAdmin)
admin.site.register(AboutMe)
|
{
"content_hash": "292f8881c535f9510f69b09ff007681c",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 99,
"avg_line_length": 27.75,
"alnum_prop": 0.8040540540540541,
"repo_name": "lopopolo/hyperbola",
"id": "e7a1c8abb095f38673224b239ba5d08976e45edc",
"size": "444",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hyperbola/contact/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6635"
},
{
"name": "HCL",
"bytes": "46861"
},
{
"name": "HTML",
"bytes": "11186"
},
{
"name": "JavaScript",
"bytes": "1755"
},
{
"name": "Python",
"bytes": "56014"
},
{
"name": "Shell",
"bytes": "1297"
}
],
"symlink_target": ""
}
|
import itertools
import os
import unittest
import warnings
import pronto
from pronto.relationship import Relationship, RelationshipData
from .utils import DATADIR
from .test_entity import _TestEntitySet
class TestRelationship(unittest.TestCase):
@classmethod
def setUpClass(cls):
warnings.simplefilter('error')
@classmethod
def tearDownClass(cls):
warnings.simplefilter(warnings.defaultaction)
def test_properties(self):
"""Assert the data stored in data layer can be accessed in the view.
"""
for r in RelationshipData.__slots__:
self.assertTrue(hasattr(Relationship, r), f"no property for {r}")
def test_superproperties(self):
ont = pronto.Ontology()
friend_of = ont.create_relationship("friend_of")
best_friend_of = ont.create_relationship("best_friend_of")
best_friend_of.superproperties().add(friend_of)
self.assertIn(friend_of, sorted(best_friend_of.superproperties()))
def test_subproperties(self):
ont = pronto.Ontology()
best_friend_of = ont.create_relationship("best_friend_of")
friend_of = ont.create_relationship("friend_of")
friend_of.subproperties().add(best_friend_of)
self.assertIn(best_friend_of, sorted(friend_of.subproperties()))
class TestRelationshipSet(_TestEntitySet, unittest.TestCase):
def create_entity(self, ont, id):
return ont.create_relationship(id)
|
{
"content_hash": "39753382795e70bb71840a3b296975c2",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 77,
"avg_line_length": 31.804347826086957,
"alnum_prop": 0.69377990430622,
"repo_name": "althonos/pronto",
"id": "5c91104b502377e3c88befa4e355d1cd442d55ca",
"size": "1463",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_relationship.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "291657"
}
],
"symlink_target": ""
}
|
#ifndef OSMIUM_OSM_CHANGESET_HPP
#define OSMIUM_OSM_CHANGESET_HPP
#include <cstring>
#include <osmium/memory/collection.hpp>
#include <osmium/memory/item.hpp>
#include <osmium/osm/box.hpp>
#include <osmium/osm/entity.hpp>
#include <osmium/osm/item_type.hpp>
#include <osmium/osm/tag.hpp>
#include <osmium/osm/timestamp.hpp>
#include <osmium/osm/types.hpp>
#include <osmium/osm/types_from_string.hpp>
namespace osmium {
namespace builder {
class ChangesetDiscussionBuilder;
template <typename T> class ObjectBuilder;
} // namespace builder
class Changeset;
class ChangesetComment : public osmium::memory::detail::ItemHelper {
friend class osmium::builder::ChangesetDiscussionBuilder;
osmium::Timestamp m_date;
osmium::user_id_type m_uid {0};
string_size_type m_user_size;
string_size_type m_text_size;
ChangesetComment(const ChangesetComment&) = delete;
ChangesetComment(ChangesetComment&&) = delete;
ChangesetComment& operator=(const ChangesetComment&) = delete;
ChangesetComment& operator=(ChangesetComment&&) = delete;
unsigned char* endpos() {
return data() + osmium::memory::padded_length(sizeof(ChangesetComment) + m_user_size + m_text_size);
}
const unsigned char* endpos() const {
return data() + osmium::memory::padded_length(sizeof(ChangesetComment) + m_user_size + m_text_size);
}
template <typename TMember>
friend class osmium::memory::CollectionIterator;
unsigned char* next() {
return endpos();
}
        const unsigned char* next() const {
return endpos();
}
void set_user_size(string_size_type size) noexcept {
m_user_size = size;
}
void set_text_size(string_size_type size) noexcept {
m_text_size = size;
}
public:
static constexpr item_type collection_type = item_type::changeset_discussion;
ChangesetComment(osmium::Timestamp date, osmium::user_id_type uid) noexcept :
m_date(date),
m_uid(uid),
m_user_size(0),
m_text_size(0) {
}
osmium::Timestamp date() const noexcept {
return m_date;
}
osmium::user_id_type uid() const noexcept {
return m_uid;
}
const char* user() const noexcept {
return reinterpret_cast<const char*>(data() + sizeof(ChangesetComment));
}
const char* text() const noexcept {
return reinterpret_cast<const char*>(data() + sizeof(ChangesetComment) + m_user_size);
}
}; // class ChangesetComment
class ChangesetDiscussion : public osmium::memory::Collection<ChangesetComment, osmium::item_type::changeset_discussion> {
friend class osmium::builder::ObjectBuilder<osmium::Changeset>;
public:
typedef size_t size_type;
ChangesetDiscussion() :
osmium::memory::Collection<ChangesetComment, osmium::item_type::changeset_discussion>() {
}
size_type size() const noexcept {
return static_cast<size_type>(std::distance(begin(), end()));
}
}; // class ChangesetDiscussion
static_assert(sizeof(ChangesetDiscussion) % osmium::memory::align_bytes == 0, "Class osmium::ChangesetDiscussion has wrong size to be aligned properly!");
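    // Illustrative usage sketch (not part of the original header). A Changeset
    // is normally created through a builder writing into a Buffer; the exact
    // helper calls below are an assumption and may differ between libosmium
    // versions, so treat this as a hedged example rather than canonical usage:
    //
    //     osmium::memory::Buffer buffer{1024, osmium::memory::Buffer::auto_grow::yes};
    //     {
    //         osmium::builder::ObjectBuilder<osmium::Changeset> builder{buffer};
    //         builder.object().set_id(42).set_uid(1).set_num_changes(3);
    //         builder.add_user("example_user");
    //     }
    //     buffer.commit();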
/**
* \brief An OSM Changeset, a group of changes made by a single user over
* a short period of time.
*
* You can not create Changeset objects directly. Use the ChangesetBuilder
* class to create Changesets in a Buffer.
*/
class Changeset : public osmium::OSMEntity {
friend class osmium::builder::ObjectBuilder<osmium::Changeset>;
osmium::Box m_bounds;
osmium::Timestamp m_created_at;
osmium::Timestamp m_closed_at;
changeset_id_type m_id {0};
num_changes_type m_num_changes {0};
num_comments_type m_num_comments {0};
user_id_type m_uid {0};
string_size_type m_user_size;
int16_t m_padding1 {0};
int32_t m_padding2 {0};
Changeset() :
OSMEntity(sizeof(Changeset), osmium::item_type::changeset) {
}
void set_user_size(string_size_type size) {
m_user_size = size;
}
unsigned char* subitems_position() {
return data() + osmium::memory::padded_length(sizeof(Changeset) + m_user_size);
}
const unsigned char* subitems_position() const {
return data() + osmium::memory::padded_length(sizeof(Changeset) + m_user_size);
}
public:
/// Get ID of this changeset
changeset_id_type id() const noexcept {
return m_id;
}
/**
* Set ID of this changeset
*
* @param id The id.
* @returns Reference to changeset to make calls chainable.
*/
Changeset& set_id(changeset_id_type id) noexcept {
m_id = id;
return *this;
}
/**
* Set ID of this changeset.
*
* @param id The id.
* @returns Reference to object to make calls chainable.
*/
Changeset& set_id(const char* id) {
return set_id(osmium::string_to_changeset_id(id));
}
/// Get user id.
user_id_type uid() const noexcept {
return m_uid;
}
/**
* Set user id.
*
* @param uid The user id.
* @returns Reference to changeset to make calls chainable.
*/
Changeset& set_uid(user_id_type uid) noexcept {
m_uid = uid;
return *this;
}
/**
* Set user id to given uid or to 0 (anonymous user) if the given
* uid is smaller than 0.
*
* @param uid The user id.
* @returns Reference to changeset to make calls chainable.
*/
Changeset& set_uid_from_signed(signed_user_id_type uid) noexcept {
m_uid = uid < 0 ? 0 : static_cast<user_id_type>(uid);
return *this;
}
/**
* Set user id to given uid or to 0 (anonymous user) if the given
* uid is smaller than 0.
*
* @returns Reference to changeset to make calls chainable.
*/
Changeset& set_uid(const char* uid) {
return set_uid_from_signed(string_to_user_id(uid));
}
/// Is this user anonymous?
bool user_is_anonymous() const noexcept {
return m_uid == 0;
}
/// Get timestamp when this changeset was created.
osmium::Timestamp created_at() const noexcept {
return m_created_at;
}
/**
* Get timestamp when this changeset was closed.
*
* @returns Timestamp. Will return the empty Timestamp when the
* changeset is not yet closed.
*/
osmium::Timestamp closed_at() const noexcept {
return m_closed_at;
}
/// Is this changeset open?
bool open() const noexcept {
return m_closed_at == osmium::Timestamp();
}
/// Is this changeset closed?
bool closed() const noexcept {
return !open();
}
/**
* Set the timestamp when this changeset was created.
*
* @param timestamp Timestamp
* @returns Reference to changeset to make calls chainable.
*/
Changeset& set_created_at(const osmium::Timestamp& timestamp) {
m_created_at = timestamp;
return *this;
}
/**
* Set the timestamp when this changeset was closed.
*
* @param timestamp Timestamp
* @returns Reference to changeset to make calls chainable.
*/
Changeset& set_closed_at(const osmium::Timestamp& timestamp) {
m_closed_at = timestamp;
return *this;
}
/// Get the number of changes in this changeset
num_changes_type num_changes() const noexcept {
return m_num_changes;
}
/// Set the number of changes in this changeset
Changeset& set_num_changes(num_changes_type num_changes) noexcept {
m_num_changes = num_changes;
return *this;
}
/// Set the number of changes in this changeset
Changeset& set_num_changes(const char* num_changes) {
return set_num_changes(osmium::string_to_num_changes(num_changes));
}
/// Get the number of comments in this changeset
num_comments_type num_comments() const noexcept {
return m_num_comments;
}
/// Set the number of comments in this changeset
Changeset& set_num_comments(num_comments_type num_comments) noexcept {
m_num_comments = num_comments;
return *this;
}
/// Set the number of comments in this changeset
Changeset& set_num_comments(const char* num_comments) {
return set_num_comments(osmium::string_to_num_comments(num_comments));
}
/**
* Get the bounding box of this changeset.
*
* @returns Bounding box. Can be empty.
*/
osmium::Box& bounds() noexcept {
return m_bounds;
}
/**
* Get the bounding box of this changeset.
*
* @returns Bounding box. Can be empty.
*/
const osmium::Box& bounds() const noexcept {
return m_bounds;
}
/// Get user name.
const char* user() const {
return reinterpret_cast<const char*>(data() + sizeof(Changeset));
}
/// Get the list of tags.
const TagList& tags() const {
return osmium::detail::subitem_of_type<const TagList>(cbegin(), cend());
}
/**
* Set named attribute.
*
* @param attr Name of the attribute (must be one of "id", "version",
* "changeset", "timestamp", "uid", "visible")
* @param value Value of the attribute
*/
void set_attribute(const char* attr, const char* value) {
if (!strcmp(attr, "id")) {
set_id(value);
} else if (!strcmp(attr, "num_changes")) {
set_num_changes(value);
} else if (!strcmp(attr, "comments_count")) {
set_num_comments(value);
} else if (!strcmp(attr, "created_at")) {
set_created_at(osmium::Timestamp(value));
} else if (!strcmp(attr, "closed_at")) {
set_closed_at(osmium::Timestamp(value));
} else if (!strcmp(attr, "uid")) {
set_uid(value);
}
}
typedef osmium::memory::CollectionIterator<Item> iterator;
typedef osmium::memory::CollectionIterator<const Item> const_iterator;
iterator begin() {
return iterator(subitems_position());
}
iterator end() {
return iterator(data() + padded_size());
}
const_iterator cbegin() const {
return const_iterator(subitems_position());
}
const_iterator cend() const {
return const_iterator(data() + padded_size());
}
const_iterator begin() const {
return cbegin();
}
const_iterator end() const {
return cend();
}
ChangesetDiscussion& discussion() {
return osmium::detail::subitem_of_type<ChangesetDiscussion>(begin(), end());
}
const ChangesetDiscussion& discussion() const {
return osmium::detail::subitem_of_type<const ChangesetDiscussion>(cbegin(), cend());
}
}; // class Changeset
static_assert(sizeof(Changeset) % osmium::memory::align_bytes == 0, "Class osmium::Changeset has wrong size to be aligned properly!");
/**
* Changesets are equal if their IDs are equal.
*/
inline bool operator==(const Changeset& lhs, const Changeset& rhs) {
return lhs.id() == rhs.id();
}
inline bool operator!=(const Changeset& lhs, const Changeset& rhs) {
return ! (lhs == rhs);
}
/**
* Changesets can be ordered by id.
*/
inline bool operator<(const Changeset& lhs, const Changeset& rhs) {
return lhs.id() < rhs.id();
}
inline bool operator>(const Changeset& lhs, const Changeset& rhs) {
return rhs < lhs;
}
inline bool operator<=(const Changeset& lhs, const Changeset& rhs) {
return ! (rhs < lhs);
}
inline bool operator>=(const Changeset& lhs, const Changeset& rhs) {
return ! (lhs < rhs);
}
} // namespace osmium
#endif // OSMIUM_OSM_CHANGESET_HPP
|
{
"content_hash": "4f2e4c15c5a4299dc4e80aef768af114",
"timestamp": "",
"source": "github",
"line_count": 428,
"max_line_length": 158,
"avg_line_length": 30.397196261682243,
"alnum_prop": 0.5609531129900077,
"repo_name": "arnekaiser/osrm-backend",
"id": "f59db48084572b86160190236fbd8d0bc6551edf",
"size": "14492",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "third_party/libosmium/include/osmium/osm/changeset.hpp",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "5650"
},
{
"name": "C++",
"bytes": "1506228"
},
{
"name": "CMake",
"bytes": "66332"
},
{
"name": "Gherkin",
"bytes": "475337"
},
{
"name": "JavaScript",
"bytes": "103843"
},
{
"name": "Lua",
"bytes": "47104"
},
{
"name": "Makefile",
"bytes": "1700"
},
{
"name": "Python",
"bytes": "2886"
},
{
"name": "Shell",
"bytes": "5183"
}
],
"symlink_target": ""
}
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/metadata_service.proto
package com.google.cloud.aiplatform.v1;
public interface QueryExecutionInputsAndOutputsRequestOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.cloud.aiplatform.v1.QueryExecutionInputsAndOutputsRequest)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* Required. The resource name of the Execution whose input and output Artifacts should
* be retrieved as a LineageSubgraph.
* Format:
* `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
* </pre>
*
* <code>
* string execution = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The execution.
*/
java.lang.String getExecution();
/**
*
*
* <pre>
* Required. The resource name of the Execution whose input and output Artifacts should
* be retrieved as a LineageSubgraph.
* Format:
* `projects/{project}/locations/{location}/metadataStores/{metadatastore}/executions/{execution}`
* </pre>
*
* <code>
* string execution = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for execution.
*/
com.google.protobuf.ByteString getExecutionBytes();
}
|
{
"content_hash": "b67c5adbd099063e78be3a597d409f24",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 115,
"avg_line_length": 31.02173913043478,
"alnum_prop": 0.6853538892782061,
"repo_name": "googleapis/google-cloud-java",
"id": "f9c4a77559cc528f7469558cd632bd2d6feaab61",
"size": "2021",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/QueryExecutionInputsAndOutputsRequestOrBuilder.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2614"
},
{
"name": "HCL",
"bytes": "28592"
},
{
"name": "Java",
"bytes": "826434232"
},
{
"name": "Jinja",
"bytes": "2292"
},
{
"name": "Python",
"bytes": "200408"
},
{
"name": "Shell",
"bytes": "97954"
}
],
"symlink_target": ""
}
|
var turf = require('@turf/turf');
var COMBuilding = function () {
// this.name = name;
const gridsize = 0.03;
const elevationoffset = 1;
const footprintsize = 0.015;
const comHeights = [14, 25, 30, 22, 28];
const units = {
units: 'kilometers'
};
    const bufferWidth = gridsize - 0.01; // 20 meter buffer (gridsize minus 10 m)
const nearestSearch = [0, 1, 2];
const floorHeight = 5;
const avgUnitsize = 50;
var featProps;
var featExtent;
this.genGrid = function (curFeat) {
featProps = curFeat.properties;
featExtent = turf.bbox(curFeat);
var diagJSON = {
"type": "FeatureCollection",
"features": [curFeat]
};
var grid = turf.pointGrid(featExtent, gridsize, units);
var ptsWithin = turf.within(grid, diagJSON);
return [ptsWithin, featExtent];
};
this.generateBuildingFootprints = function (ptsWithin) {
var allGeneratedFeats = [];
var color = featProps.color;
var systag = featProps.systag;
var sysname = featProps.sysname;
var ptslen = ptsWithin.features.length;
var alreadyAdded = {
"type": "FeatureCollection",
"features": []
};
// create a unique ID for each feature.
var availablePts = {};
for (var k = 0; k < ptslen; k++) {
var id = makeid();
ptsWithin.features[k].properties.id = id;
availablePts[id] = ptsWithin.features[k];
}
ptslen = (ptslen > 7500) ? 7500 : ptslen;
// console.log(ptslen);
        // every point is available
for (var k1 = 0; k1 < ptslen; k1++) {
// console.log(k1);
var ifeat;
var curalreadyadded;
var alreadyaddedlen;
// how many nearest to find?
var nearest = nearestSearch[Math.floor(Math.random() * nearestSearch.length)];
            // initialize all points
            var allPts = [];
            // get the current point
var curPt = ptsWithin.features[k1];
delete availablePts[curPt.properties.id];
allPts.push(curPt.geometry.coordinates);
if (nearest) {
for (var k6 = 0; k6 < nearest; k6++) {
// already added
var availPts = {
"type": "FeatureCollection",
"features": []
};
                    for (var key in availablePts) {
var cpt = availablePts[key];
availPts.features.push(cpt);
}
var nearestpt = false;
if (availPts.features.length > 0) {
nearestpt = turf.nearestPoint(curPt, availPts);
}
if (nearestpt) {
delete availablePts[nearestpt.properties.id];
allPts.push(nearestpt.geometry.coordinates);
}
}
if (allPts.length > 1) {
var ls = turf.lineString(allPts);
var buf = turf.buffer(ls, 0.0075, {
units: 'kilometers'
});
// console.log(JSON.stringify(bldg));
var bb = turf.bbox(buf);
var bldg = turf.bboxPolygon(bb);
var area = turf.area(bldg);
var hasIntersect = false;
var alreadyaddedlen = alreadyAdded.features.length;
for (var x1 = 0; x1 < alreadyaddedlen; x1++) {
                        curalreadyadded = alreadyAdded.features[x1];
                        ifeat = null; // reset so a thrown intersect() call cannot leave a stale result
                        try {
                            ifeat = turf.intersect(curalreadyadded, bldg);
} catch (err) {
// console.log(JSON.stringify(err));
}
if (ifeat) {
hasIntersect = true;
break;
}
}
if (hasIntersect === false) {
var height = elevationoffset + comHeights[Math.floor(Math.random() * comHeights.length)];
var numFloors = Math.round(height / floorHeight); // 5 meter per floor
var numUnitsperFloor = Math.round(area / avgUnitsize);
var totalUnits = numUnitsperFloor * numFloors;
var p = {
'totalunits': totalUnits,
'height': height,
'color': "#d0d0d0",
'roofColor': color,
'isStreet': 0,
'isBuilding': 1,
'sysname': sysname,
};
bldg.properties = p;
alreadyAdded.features.push(bldg);
allGeneratedFeats.push(bldg);
}
}
// put the list in the seen one
// build a bbounds polygon
} else {
                var buffered = turf.buffer(curPt, bufferWidth, units); // buffer by bufferWidth kilometers (20 m here)
var bds = turf.bbox(buffered); // get the extent of the buffered features
var bfrdextPlgn = turf.bboxPolygon(bds);
var bldgfootprint = 0.015;
var centrepoint = turf.centroid(bfrdextPlgn);
var bldg = turf.buffer(centrepoint, bldgfootprint, units);
var bdgply = turf.bbox(bldg); // get the extent of the buffered features
var bpoly = turf.bboxPolygon(bdgply);
var area = turf.area(bpoly);
alreadyaddedlen = alreadyAdded.features.length;
var hasIntersect = false;
for (var x2 = 0; x2 < alreadyaddedlen; x2++) {
                    curalreadyadded = alreadyAdded.features[x2];
                    ifeat = null; // reset so a thrown intersect() call cannot leave a stale result
                    try {
                        ifeat = turf.intersect(curalreadyadded, bldg);
} catch (err) {
// console.log(JSON.stringify(err));
}
if (ifeat) {
hasIntersect = true;
break;
}
}
if (hasIntersect === false) {
var height = elevationoffset + comHeights[Math.floor(Math.random() * comHeights.length)];
var numFloors = Math.round(height / floorHeight); // 5 meter per floor
var numUnitsperFloor = Math.round(area / avgUnitsize);
var totalUnits = numUnitsperFloor * numFloors;
                    var chosenValue = true; // the earlier random 50% selection is disabled; every candidate is kept
if (chosenValue) {
var p = {
'totalunits': totalUnits,
'height': height,
'color': "#d0d0d0",
'roofColor': color,
'isStreet': 0,
'isBuilding': 1,
'sysname': sysname
};
bpoly.properties = p;
alreadyAdded.features.push(bpoly);
allGeneratedFeats.push(bpoly);
}
}
}
}
return allGeneratedFeats;
}
};
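// Illustrative usage sketch (added for documentation; not called anywhere in this
// module): shows how the genGrid / generateBuildingFootprints pattern shared by
// COMBuilding and the other generators below is driven. The input feature here is
// hypothetical; real features come from the Geodesignhub diagrams handled by
// generateFinal3DGeoms further down.
function exampleCOMBuildingUsage() {
    var com = new COMBuilding();
    var zone = {
        "type": "Feature",
        "properties": {
            "color": "#c0392b",
            "systag": "Large buildings, Industry, commerce",
            "sysname": "COM",
            "areatype": "project"
        },
        "geometry": {
            "type": "Polygon",
            "coordinates": [[[0, 0], [0.005, 0], [0.005, 0.005], [0, 0.005], [0, 0]]]
        }
    };
    // genGrid returns [points inside the polygon, bounding box of the polygon]
    var gridAndExtent = com.genGrid(zone);
    // each returned feature is an extruded footprint polygon with height/colour properties
    return com.generateBuildingFootprints(gridAndExtent[0]);
}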
var LDHousing = function () {
// this.name = name;
const density = 30; // dwellings / hectare
const buildingsperhectare = 20;
const gridsize = 0.04;
const footprintsize = 0.012;
const ldhheights = [1, 2, 3]; // in meters
const units = {
units: 'kilometers'
};
const elevationoffset = 1;
const floorHeight = 5;
const avgUnitsize = 100;
var featProps;
var featExtent;
this.genGrid = function (curFeat) {
featProps = curFeat.properties;
featExtent = turf.bbox(curFeat);
var diagJSON = {
"type": "FeatureCollection",
"features": [curFeat]
};
var grid = turf.pointGrid(featExtent, gridsize, units);
var ptsWithin = turf.within(grid, diagJSON);
return [ptsWithin, featExtent];
};
this.generateBuildingFootprints = function (ptsWithin) {
var allGeneratedFeats = [];
var color = featProps.color;
var systag = featProps.systag;
var sysname = featProps.sysname;
var ptslen = ptsWithin.features.length;
var bufferWidth = gridsize - 0.01; //30 meter buffer
ptslen = (ptslen > 7500) ? 7500 : ptslen;
// console.log(ptslen);
// if it is HDH type feature
for (var k = 0; k < ptslen; k++) {
// console.log(k)
var curPt = ptsWithin.features[k];
            var buffered = turf.buffer(curPt, bufferWidth, units); // buffer by bufferWidth kilometers (30 m here)
var bds = turf.bbox(buffered); // get the extent of the buffered features
var bfrdextPlgn = turf.bboxPolygon(bds);
var bldgfootprint = 0.015;
var centrepoint = turf.centroid(bfrdextPlgn);
var bldg = turf.buffer(centrepoint, bldgfootprint, units);
var bdgply = turf.bbox(bldg); // get the extent of the buffered features
var bpoly = turf.bboxPolygon(bdgply);
var area = turf.area(bpoly);
var height = elevationoffset + ldhheights[Math.floor(Math.random() * ldhheights.length)];
var numFloors = Math.round(height / floorHeight); // 5 meter per floor
var numUnitsperFloor = Math.round(area / avgUnitsize);
var totalUnits = numUnitsperFloor * numFloors;
var p = {
'totalunits': totalUnits,
'height': height,
'color': "#d0d0d0",
'roofColor': color,
'isStreet': 0,
'isBuilding': 1,
'sysname': sysname
};
bpoly.properties = p;
allGeneratedFeats.push(bpoly);
}
return allGeneratedFeats;
}
};
var HDHousing = function () {
// this.name = name;
const density = 80; // dwellings / hectare
const buildingsperhectare = 2;
const gridsize = 0.05; // changes the maximum area
const footprintsize = 0.015;
const heights = [36, 60, 90]; // in meters
const units = {
units: 'kilometers'
};
const elevationoffset = 1;
var featProps;
const floorHeight = 5;
const avgUnitsize = 50;
this.generateSquareGridandConstrain = function (featureGeometry) {
var featarea = turf.area(featureGeometry);
var numberofextrusions = Math.round((featarea * 0.0001) * buildingsperhectare);
featProps = featureGeometry.properties;
var featExtent = turf.bbox(featureGeometry);
var sqgrid = turf.squareGrid(featExtent, gridsize, units);
// constrain grid.
var constrainedgrid = {
"type": "FeatureCollection",
"features": []
};
var sqfeatslen = sqgrid.features.length;
// number of extrusions is counted.
// console.log(numberofextrusions, sqfeatslen);
var ratio = (numberofextrusions / sqfeatslen);
var extrudedfeaturescount = 0;
if (ratio < 0.20 || numberofextrusions < 15) {
for (var x = 0; x < sqfeatslen; x++) {
if (extrudedfeaturescount < numberofextrusions) {
var cursqfeat = sqgrid.features[x];
                    var ifeat = null;
                    try {
                        ifeat = turf.intersect(cursqfeat, featureGeometry);
} catch (err) {
// console.log(JSON.stringify(err));
}
if (ifeat) {
constrainedgrid.features.push(ifeat);
} else {
constrainedgrid.features.push(cursqfeat);
}
extrudedfeaturescount += 1;
}
}
} else {
var gridStore = {};
var gridid = 0;
for (var x1 = 0; x1 < sqfeatslen; x1++) {
var cursqgrid = sqgrid.features[x1];
gridStore[gridid] = cursqgrid;
gridid += 1;
}
while (extrudedfeaturescount < numberofextrusions + 1) {
                var randomgridid = Math.floor(Math.random() * sqfeatslen); // pick an existing grid cell (0 .. sqfeatslen - 1)
// get the id from gridStore
var cursqfeat = gridStore[randomgridid];
// have the feature
                var ifeat = null;
                try {
                    ifeat = turf.intersect(cursqfeat, featureGeometry);
} catch (err) {
// console.log(JSON.stringify(err));
}
if (ifeat) {
constrainedgrid.features.push(ifeat);
} else {
constrainedgrid.features.push(cursqfeat);
}
extrudedfeaturescount += 1;
}
}
return constrainedgrid;
};
this.generateBuildings = function (constrainedgrid) {
var consgridlen = constrainedgrid.features.length;
var generatedGeoJSON = {
"type": "FeatureCollection",
"features": []
};
// find centroid
consgridlen = (consgridlen > 7500) ? 7500 : consgridlen;
// console.log(consgridlen);
var extrusionconter = 0;
for (var k1 = 0; k1 < consgridlen; k1++) {
// console.log(k1)
var curconsfeat = constrainedgrid.features[k1];
var curarea;
try {
curarea = turf.area(curconsfeat);
} catch (err) {
curarea = 0;
}
            if (curarea > 2000) { // max cell area is 2500 m^2 (50 m grid squared)
var chosenValue = Math.random() > 0.6 ? true : false;
if (chosenValue) {
var centroid = turf.centroid(curconsfeat);
var bufferedCentroid = turf.buffer(centroid, footprintsize, {
units: 'kilometers'
});
var bbox = turf.bbox(bufferedCentroid);
var bboxpoly = turf.bboxPolygon(bbox);
var height = elevationoffset + heights[Math.floor(Math.random() * heights.length)];
var area = turf.area(bboxpoly);
var numFloors = Math.round(height / floorHeight); // 5 meter per floor
var numUnitsperFloor = Math.round(area / avgUnitsize);
var totalUnits = numUnitsperFloor * numFloors;
var props = {
"totalunits": totalUnits,
"height": height,
"color": "#d0d0d0",
"roofColor": featProps.color,
"sysname": featProps.sysname,
"isStreet": 0,
'isBuilding': 1,
};
bboxpoly.properties = props;
generatedGeoJSON.features.push(bboxpoly);
}
}
}
return generatedGeoJSON;
}
};
var MXDBuildings = function () {
const density = 40; // dwellings per hectare.
const outerringradius = 0.04;
const middleringradius = 0.02;
const innerringradius = 0.01;
// this.name = name;
const gridsize = 0.08;
const elevationoffset = 1;
const innergridsize = 0.02;
const heights = [9, 12, 8, 11]; // in meters
const units = {
units: 'kilometers'
}
const floorHeight = 5;
const avgUnitsize = 75;
var featProps;
this.generateSquareGridandConstrain = function (featureGeometry) {
featProps = featureGeometry.properties;
var featExtent = turf.bbox(featureGeometry);
var sqgrid = turf.squareGrid(featExtent, gridsize, units);
// constrain grid.
var constrainedgrid = {
"type": "FeatureCollection",
"features": []
};
var sqfeatslen = sqgrid.features.length;
for (var x = 0; x < sqfeatslen; x++) {
var cursqfeat = sqgrid.features[x];
            var ifeat = null;
            try {
                ifeat = turf.intersect(cursqfeat, featureGeometry);
} catch (err) {
// console.log(JSON.stringify(err));
}
if (ifeat) {
constrainedgrid.features.push(ifeat);
} else {
constrainedgrid.features.push(cursqfeat);
}
}
return constrainedgrid;
};
this.generateBuildings = function (constrainedgrid) {
var consgridlen = constrainedgrid.features.length;
var generatedGeoJSON = {
"type": "FeatureCollection",
"features": []
};
consgridlen = (consgridlen > 7500) ? 7500 : consgridlen;
// console.log(consgridlen);
// find centroid
for (var k1 = 0; k1 < consgridlen; k1++) {
// console.log(k1);
var curconsfeat = constrainedgrid.features[k1];
var curarea;
try {
curarea = turf.area(curconsfeat);
} catch (err) {
curarea = 0;
}
var center = turf.centroid(curconsfeat);
            if (curarea > 6300) { // max cell area is 6400 m^2 (80 m grid squared); require nearly the entire parcel
var cv = Math.random() < 0.5 ? true : false;
if (cv) {
var outerring = turf.buffer(center, outerringradius, units);
var innerring = turf.buffer(center, innerringradius, units);
var middlering = turf.buffer(center, middleringradius, units);
// get bbox
var outerringbbox = turf.bbox(outerring);
var innerringbbox = turf.bbox(innerring);
var middleringbbox = turf.bbox(middlering);
//get bbox polygon
var outerringpoly = turf.bboxPolygon(outerringbbox);
var innerringpoly = turf.bboxPolygon(innerringbbox);
var middleringpoly = turf.bboxPolygon(middleringbbox);
//erase inner from outerring to get hybrid hole
var hybridhole = turf.difference(outerringpoly, innerringpoly);
// erease middle from hybrid hole
var buildingpoly = turf.difference(hybridhole, middleringpoly);
var height = elevationoffset + heights[Math.floor(Math.random() * heights.length)];
var numFloors = Math.round(height / floorHeight); // 5 meter per floor
var numUnitsperFloor = Math.round(curarea / avgUnitsize);
var totalUnits = numUnitsperFloor * numFloors;
var props = {
"totalunits": totalUnits,
"height": height,
"color": "#d0d0d0",
"roofColor": featProps.color,
"isStreet": 0,
'isBuilding': 1,
"sysname": featProps.sysname
};
buildingpoly.properties = props;
generatedGeoJSON.features.push(buildingpoly);
}
// generate square grid
// var sqrgrid = turf.squareGrid(outerringbbox, innergridsize, units);
// // interserct squre grid with hole.
// console.log(JSON.stringify(buildingpoly));
// // for each feature in the hole.
// for (var j1 = 0; j1 < sqrgrid.features.length; j1++) {
// var cursqgrid = sqrgrid.features[j1];
// var blgdfeat = turf.intersect(buildingpoly, cursqgrid);
// if (blgdfeat) {
// var area = turf.area(blgdfeat); // max area is 400
// // var cv = Math.random() < 0.5 ? true : false;
// if (area > 300) {
// var props = {
// "height": heights[Math.floor(Math.random() * heights.length)],
// "color": "#d0d0d0",
// "roofColor": featProps.color
// };
// blgdfeat.properties = props;
// generatedGeoJSON.features.push(blgdfeat);
// }
// }
// }
}
}
return generatedGeoJSON;
}
}
var LABBuildings = function () {
var reqtype;
var labHeights = [10, 15];
const nearestSearch = [0, 1, 2];
const units = {
units: 'kilometers'
}
const cellWidth = 0.03;
const elevationoffset = 1;
var availablePts = {};
var featProps;
var featExtent;
const floorHeight = 5;
const avgUnitsize = 100;
this.genGrid = function (curFeat) {
featProps = curFeat.properties;
featExtent = turf.bbox(curFeat);
var diagJSON = {
"type": "FeatureCollection",
"features": [curFeat]
};
var grid = turf.pointGrid(featExtent, cellWidth, units);
var ptsWithin = turf.within(grid, diagJSON);
return [ptsWithin, featExtent];
};
this.generateBuildingFootprints = function (ptsWithin) {
var allGeneratedFeats = [];
var color = featProps.color;
var roofColor = color;
var systag = featProps.systag;
var sysname = featProps.sysname;
var alreadyAdded = {
"type": "FeatureCollection",
"features": []
};
// if it is HDH type feature
// create a unique ID for each feature.
var availablePts = {};
var ptslen = ptsWithin.features.length;
ptslen = (ptslen > 7500) ? 7500 : ptslen;
// console.log(ptslen);
for (var k = 0; k < ptslen; k++) {
var id = makeid();
ptsWithin.features[k].properties.id = id;
availablePts[id] = ptsWithin.features[k];
}
        // every point is available
for (var k1 = 0; k1 < ptslen; k1++) {
var ifeat;
var curalreadyadded;
var alreadyaddedlen;
// how many nearest to find?
var nearest = nearestSearch[Math.floor(Math.random() * nearestSearch.length)];
            // initialize all points
            var allPts = [];
            // get the current point
var curPt = ptsWithin.features[k1];
delete availablePts[curPt.properties.id];
allPts.push(curPt.geometry.coordinates);
if (nearest) {
for (var k6 = 0; k6 < nearest; k6++) {
// already added
var availPts = {
"type": "FeatureCollection",
"features": []
};
                    for (var key in availablePts) {
var cpt = availablePts[key];
availPts.features.push(cpt);
}
var nearestpt = false;
if (availPts.features.length > 0) {
nearestpt = turf.nearestPoint(curPt, availPts);
}
if (nearestpt) {
delete availablePts[nearestpt.properties.id];
allPts.push(nearestpt.geometry.coordinates);
}
}
if (allPts.length > 1) {
try {
var ls = turf.lineString(allPts);
var buf = turf.buffer(ls, 0.0075, {
units: 'kilometers'
});
} catch (err) {
// console.log("Test" + JSON.stringify(err));
}
try {
var bb = turf.bbox(buf);
var bldg = turf.bboxPolygon(bb);
var area = turf.area(bldg);
} catch (err) {
// console.log("Test" + JSON.stringify(err));
}
var hasIntersect = false;
var alreadyaddedlen = alreadyAdded.features.length;
for (var x1 = 0; x1 < alreadyaddedlen; x1++) {
                        curalreadyadded = alreadyAdded.features[x1];
                        ifeat = null; // reset so a thrown intersect() call cannot leave a stale result
                        try {
                            ifeat = turf.intersect(curalreadyadded, bldg);
} catch (err) {
console.log(JSON.stringify(err));
}
if (ifeat) {
hasIntersect = true;
break;
}
}
if (hasIntersect === false) {
var height = elevationoffset + labHeights[Math.floor(Math.random() * labHeights.length)];
var numFloors = Math.round(height / floorHeight); // 5 meter per floor
var numUnitsperFloor = Math.round(area / avgUnitsize);
var totalUnits = numUnitsperFloor * numFloors;
var p = {
'totalunits': totalUnits,
'height': height,
'color': "#d0d0d0",
'roofColor': featProps.color,
'isStreet': 0,
'isBuilding': 1,
'sysname': featProps.sysname
};
bldg.properties = p;
alreadyAdded.features.push(bldg);
allGeneratedFeats.push(bldg);
}
}
}
}
return allGeneratedFeats;
}
};
var SMBBuildings = function () {
var reqtype;
var smbHeights = [3, 5, 6, 7, 10];
const gridsize = 0.04;
const footprintsize = 0.012;
const units = {
units: 'kilometers'
}
const nearestSearch = [0, 1, 2];
var featProps;
const elevationoffset = 1;
var featExtent;
const bufferWidth = gridsize - 0.015;
const bldgfootprint = 0.015;
this.genGrid = function (curFeat) {
featProps = curFeat.properties;
featExtent = turf.bbox(curFeat);
var diagJSON = {
"type": "FeatureCollection",
"features": [curFeat]
};
var grid = turf.pointGrid(featExtent, gridsize, units);
var ptsWithin = turf.within(grid, diagJSON);
return [ptsWithin, featExtent];
};
const floorHeight = 5;
const avgUnitsize = 75;
this.generateUnits = function (area) {
var height = elevationoffset + smbHeights[Math.floor(Math.random() * smbHeights.length)];
var numFloors = Math.round(height / floorHeight); // 5 meter per floor
var numUnitsperFloor = Math.round(area / avgUnitsize);
var totalUnits = numUnitsperFloor * numFloors;
return totalUnits;
};
this.generateBuildingFootprints = function (ptsWithin) {
var allGeneratedFeats = [];
var color = featProps.color;
var roofColor = color;
var systag = featProps.systag;
var sysname = featProps.sysname;
var alreadyAdded = {
"type": "FeatureCollection",
"features": []
};
var ptslen = ptsWithin.features.length;
ptslen = (ptslen > 7500) ? 7500 : ptslen;
for (var k = 0; k < ptslen; k++) {
// console.log(k);
var chosenValue = Math.random() < 0.5 ? true : false;
if (chosenValue) {
var curPt = ptsWithin.features[k];
                var buffered = turf.buffer(curPt, bufferWidth, units); // buffer by bufferWidth kilometers (25 m here)
var bds = turf.bbox(buffered); // get the extent of the buffered features
var bfrdextPlgn = turf.bboxPolygon(bds);
var centrepoint = turf.centroid(bfrdextPlgn);
var bldg = turf.buffer(centrepoint, bldgfootprint, units);
var bdgply = turf.bbox(bldg); // get the extent of the buffered features
var bpoly = turf.bboxPolygon(bdgply);
var area = turf.area(bpoly);
var height = elevationoffset + smbHeights[Math.floor(Math.random() * smbHeights.length)];
var numFloors = Math.round(height / floorHeight); // 5 meter per floor
var numUnitsperFloor = Math.round(area / avgUnitsize);
var totalUnits = numUnitsperFloor * numFloors;
var p = {
"totalunits": totalUnits,
"height": height,
"color": "#d0d0d0",
"roofColor": color,
"isStreet": 0,
'isBuilding': 1,
"sysname": featProps.sysname
};
bpoly.properties = p;
allGeneratedFeats.push(bpoly);
}
}
return allGeneratedFeats;
}
};
var StreetsHelper = function () {
this.genStreetsGrid = function (pointsWithin, extent) {
        // This module generates streets, given a grid of points.
var rows = [];
var elevationoffset = 1;
var columns = [];
var buildingPoints = [];
var roadPoints = [];
var buildingPointsVert = [];
var roadPointsVert = [];
for (var k = 0, ptslen = pointsWithin.features.length; k < ptslen; k++) {
var curPt = pointsWithin.features[k];
var curLng = curPt.geometry.coordinates[0];
var curLat = curPt.geometry.coordinates[1];
            if (!rows[curLng]) {
                rows[curLng] = [];
            }
            if (!columns[curLat]) {
                columns[curLat] = [];
            }
rows[curLng].push(curPt);
columns[curLat].push(curPt);
}
var allCols = [];
var allRows = [];
        for (var key in columns) {
allCols.push({
'key': key,
'points': columns[key]
});
}
        for (var key in rows) {
allRows.push({
'key': key,
'points': rows[key]
});
}
var rCounter = 0;
var cCounter = 0;
var sortedCols = allCols.sort(function (a, b) {
return parseFloat(a.key) - parseFloat(b.key);
});
var sortedRows = allRows.sort(function (a, b) {
return parseFloat(a.key) - parseFloat(b.key);
});
for (var x2 = 0, collen = sortedCols.length; x2 < collen; x2++) {
var feattype = (rCounter % 3 === 0) ? "road" : "building";
// var pts = sortedCols[x2].points;
(feattype === 'road') ? roadPoints.push(sortedCols[x2]): buildingPoints.push(sortedCols[x2]);
rCounter += 1;
}
for (var x3 = 0, rowlen = sortedRows.length; x3 < rowlen; x3++) {
var feattype = (cCounter % 5 === 0) ? "road" : "building";
// var pts = sortedCols[x2].points;
(feattype === 'road') ? roadPointsVert.push(sortedRows[x3]): buildingPointsVert.push(sortedRows[x3]);
cCounter += 1;
}
// var allLines = [];
var streets = [];
var distance = 0;
for (var k1 = 0, numRoads = roadPoints.length; k1 < numRoads; k1++) {
var curRoad = roadPoints[k1];
var tmpPts = [];
for (var p1 = 0, ptsLen = curRoad.points.length; p1 < ptsLen; p1++) {
tmpPts.push(curRoad.points[p1].geometry.coordinates);
}
if (tmpPts.length > 1) {
var linestring = turf.lineString(tmpPts);
// allLines.push(linestring);
var d = turf.length(linestring, {
units: 'kilometers'
});
distance = (distance > Math.round(d)) ? distance : Math.round(d);
var street = turf.buffer(linestring, 0.0075, {
units: 'kilometers'
});
if (street['type'] === "Feature") {
street = {
"type": "FeatureCollection",
"features": [street]
}
}
var height = elevationoffset + 0.1;
street.features[0].properties = {
"color": "#202020",
"roofColor": "#202020",
"height": height,
"isStreet": 1,
'isBuilding': 0,
};
streets.push.apply(streets, street.features);
}
}
        if (distance >= 0.7) { // there is a road longer than 0.7 km, so we need vertical streets.
for (var k2 = 0, numRoads = roadPointsVert.length; k2 < numRoads; k2++) {
var curRoad = roadPointsVert[k2];
var tmpPts = [];
for (var p2 = 0, ptsLen = curRoad.points.length; p2 < ptsLen; p2++) {
tmpPts.push(curRoad.points[p2].geometry.coordinates);
}
if (tmpPts.length > 1) { // valid line
var linestring = turf.lineString(tmpPts);
var street = turf.buffer(linestring, 0.0075, {
units: 'kilometers'
});
if (street['type'] === "Feature") {
street = {
"type": "FeatureCollection",
"features": [street]
}
}
var height = elevationoffset + 0.1;
street.features[0].properties = {
"color": "#202020",
"roofColor": "#202020",
"height": height,
"isStreet": 1,
'isBuilding': 0,
};
streets.push.apply(streets, street.features);
}
}
}
var s = {
"type": "FeatureCollection",
"features": streets
};
return s;
}
this.filterStreets = function (streetgrid, inputFeats) {
var filteredFeatures = [];
for (var l = 0; l < inputFeats.length; l++) {
var curF1 = inputFeats[l];
var intersects = false;
for (var p = 0, stLen = streetgrid.features.length; p < stLen; p++) {
var curStF = streetgrid.features[p];
                var intersect = null;
                try {
                    intersect = turf.intersect(curF1, curStF);
} catch (err) {
// console.log(JSON.stringify(err));
}
// chop road
if (intersect) {
intersects = true;
}
}
            if (!intersects) {
                filteredFeatures.push(curF1);
            }
}
return filteredFeatures;
}
}
// TODO: Refactor Building generator
class BuildingsFactory {
}
function makeid() {
var text = "";
var possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
for (var i = 0; i < 5; i++)
text += possible.charAt(Math.floor(Math.random() * possible.length));
return text;
}
// function bufferExistingRoads(inputroads) {
// var streets = [];
// for (var x = 0; x < inputroads.features.length; x++) {
// var linestring = inputroads.features[x];
// var street = turf.buffer(linestring, 0.0075, 'kilometers');
// if (street['type'] === "Feature") {
// streets.push(street);
// }
// }
// return {
// "type": "FeatureCollection",
// "features": streets
// }
// }
function generatePolicyFeatures(curFeat) {
var curFeatprops = curFeat.properties;
const elevationoffset = 1;
function getCW(d) {
return d > 10000000 ? 1 :
d > 6000000 ? 0.75 :
d > 5000000 ? 0.5 :
d > 3000000 ? 0.3 :
d > 2000000 ? 0.15 :
d > 1000000 ? 0.08 :
0.04;
}
var policyFeats = [];
var fe = turf.bbox(curFeat);
var area = Math.round(turf.area(curFeat));
var cw = getCW(area);
// var cw = 0.05;
const units = {
units: 'kilometers'
}
var dJSON = {
"type": "FeatureCollection",
"features": [curFeat]
};
// make the grid of 50 meter points
var grd = turf.pointGrid(fe, cw, units);
var pW = turf.within(grd, dJSON);
var pwLen = pW.features.length;
var height = elevationoffset + 0.01;
var prop = {
"roofColor": curFeatprops.color,
"height": height,
"isStreet": 0,
'isBuilding': 0,
"sysname": curFeatprops.sysname
}
for (var l1 = 0; l1 < pwLen; l1++) {
var curptwithin = pW.features[l1];
var bufFeat = turf.buffer(curptwithin, 0.0075, {
units: 'kilometers'
});
bufFeat.properties = prop;
policyFeats.push(bufFeat);
}
return policyFeats;
}
function generateCenter(constraintedModelDesigns) {
var centerPt = turf.center(constraintedModelDesigns);
var lat = centerPt.geometry.coordinates[1];
var lng = centerPt.geometry.coordinates[0];
return [lat, lng];
}
function generateFinal3DGeoms(currentFeature, genstreets) {
const elevationoffset = 1;
var genstreets = (genstreets === 'false') ? false : true;
var whiteListedSysName = ['HIGH-H', 'LOW-H', 'HDH', 'LDH', 'COM', 'COMIND', 'HSG', 'HSNG', 'MXD', 'MIX'];
var curGJFeats = [];
    var curFeat = currentFeature;
    var curFeatSys = curFeat.properties.sysname;
    // add description
const diagramdesc = curFeat.properties.description;
// if it is a line then simply buffer it and paint it black with a small height
if (curFeat.geometry.type === "LineString") {
        var f = turf.buffer(curFeat, 0.001, {
units: 'kilometers'
});
if (f['type'] === "Feature") {
f = {
"type": "FeatureCollection",
"features": [f]
}
}
var linefeats = f.features;
var linefeatlen = linefeats.length;
for (var x1 = 0; x1 < linefeatlen; x1++) {
            var curlineFeat = linefeats[x1];
            var height = elevationoffset + 0.5;
            curlineFeat.properties = {
                "color": curFeat.properties.color,
                "description": diagramdesc,
                "roofColor": curFeat.properties.color,
                "isStreet": 0,
                'isBuilding': 0,
                "sysname": curFeat.properties.sysname,
                "height": height
            };
curGJFeats.push(curlineFeat);
}
} else if (curFeat.geometry.type === "Polygon") {
var featProps = curFeat.properties;
if (whiteListedSysName.indexOf(curFeatSys) >= 0) { // system is whitelisted
if (curFeat.properties.areatype === 'project') {
//100 meter cell width
if ((featProps.sysname === 'HDH') || (featProps.sysname === 'HSNG') || (featProps.sysname === 'HSG') || (featProps.sysname === 'MIX')) {
var hdh = new HDHousing();
var constrainedgrid = hdh.generateSquareGridandConstrain(curFeat);
var bldgs = hdh.generateBuildings(constrainedgrid);
for (var k2 = 0; k2 < bldgs.features.length; k2++) {
bldgs.features[k2].properties.description = diagramdesc;
curGJFeats.push(bldgs.features[k2]);
}
} else if ((featProps.sysname === 'MXD')) {
var mxd = new MXDBuildings();
var mxdgrid = mxd.generateSquareGridandConstrain(curFeat);
var mxdbld = mxd.generateBuildings(mxdgrid);
for (var k3 = 0; k3 < mxdbld.features.length; k3++) {
mxdbld.features[k3].properties.description = diagramdesc;
curGJFeats.push(mxdbld.features[k3]);
}
} else if ((featProps.sysname === 'LDH') || (featProps.sysname === 'LOW-H')) {
var ldh = new LDHousing();
var p = ldh.genGrid(curFeat);
var ptsWithin = p[0];
var featExtent = p[1];
var bldgs = ldh.generateBuildingFootprints(ptsWithin);
var ldhstreets = new StreetsHelper();
var ldhstreetFeatureCollection = ldhstreets.genStreetsGrid(ptsWithin, featExtent);
var ldhfinalFeatures = ldhstreets.filterStreets(ldhstreetFeatureCollection, bldgs);
if (genstreets) {
ldhfinalFeatures.push.apply(ldhfinalFeatures, ldhstreetFeatureCollection.features);
}
for (var k1 = 0; k1 < ldhfinalFeatures.length; k1++) {
ldhfinalFeatures[k1].properties.description = diagramdesc;
curGJFeats.push(ldhfinalFeatures[k1]);
}
} else if ((featProps.sysname === 'COM') || (featProps.sysname === 'COMIND')) {
var com = new COMBuilding();
var comp = com.genGrid(curFeat);
var comptsWithin = comp[0];
var comfeatExtent = comp[1];
var combldgs = com.generateBuildingFootprints(comptsWithin);
var comstreets = new StreetsHelper();
var comstreetFeatureCollection = comstreets.genStreetsGrid(comptsWithin, comfeatExtent);
var comfinalFeatures = comstreets.filterStreets(comstreetFeatureCollection, combldgs);
if (genstreets) {
comfinalFeatures.push.apply(comfinalFeatures, comstreetFeatureCollection.features);
}
for (var k1 = 0; k1 < comfinalFeatures.length; k1++) {
comfinalFeatures[k1].properties.description = diagramdesc;
curGJFeats.push(comfinalFeatures[k1]);
}
}
} else if (curFeat.properties.areatype === 'policy') { // whitelisted policy
var policyF = generatePolicyFeatures(curFeat);
for (var pf = 0; pf < policyF.length; pf++) {
policyF[pf].properties.description = diagramdesc;
curGJFeats.push(policyF[pf]);
}
}
}
// for non white listed systems that are buildings
else if ((featProps.systag === 'Large buildings, Industry, commerce') && (featProps.areatype === 'project')) { //
var lab = new LABBuildings();
var labgrid = lab.genGrid(curFeat);
var labptsWithin = labgrid[0];
var labfeatExtent = labgrid[1];
var labbldgs = lab.generateBuildingFootprints(labptsWithin);
var labstreets = new StreetsHelper();
var labStreetsFC = labstreets.genStreetsGrid(labptsWithin, labfeatExtent);
var labFinalFeatures = labstreets.filterStreets(labStreetsFC, labbldgs);
if (genstreets) {
labFinalFeatures.push.apply(labFinalFeatures, labStreetsFC.features);
}
for (var k6 = 0; k6 < labFinalFeatures.length; k6++) {
labFinalFeatures[k6].properties.description = diagramdesc;
curGJFeats.push(labFinalFeatures[k6]);
}
} else if ((featProps.systag === 'Small buildings, low density housing') && (featProps.areatype === 'project')) {
var smb = new SMBBuildings();
var smbgrid = smb.genGrid(curFeat);
var smbptsWithin = smbgrid[0];
var smbfeatExtent = smbgrid[1];
var smbbldgs = smb.generateBuildingFootprints(smbptsWithin);
var smbStreets = new StreetsHelper();
var smbStreetFeat = smbStreets.genStreetsGrid(smbptsWithin, smbfeatExtent);
var smbFinalFeatures = smbStreets.filterStreets(smbStreetFeat, smbbldgs);
if (genstreets) {
smbFinalFeatures.push.apply(smbFinalFeatures, smbStreetFeat.features);
}
for (var k1 = 0; k1 < smbFinalFeatures.length; k1++) {
smbFinalFeatures[k1].properties.description = diagramdesc;
curGJFeats.push(smbFinalFeatures[k1]);
}
        } else { // all systems that are not buildings
if (curFeat.properties.areatype === 'project') {
var height = elevationoffset + 0.01;
                var prop = {
                    "description": curFeat.properties.description,
                    "roofColor": curFeat.properties.color,
                    "isStreet": 0,
                    'isBuilding': 0,
                    "height": height,
                    "sysname": curFeat.properties.sysname
                }
curFeat.properties = prop;
curGJFeats.push.apply(curGJFeats, [curFeat]);
}
// else if (curFeat.properties.areatype === 'policy') {
// var policyF = generatePolicyFeatures(curFeat);
// for (var pf = 0; pf < policyF.length; pf++) {
// curGJFeats.push(policyF[pf]);
// }
// }
}
}
return curGJFeats;
}
module.exports = {
// constrainFeatures: constrainFeatures,
generateCenter: generateCenter,
generateFinal3DGeoms: generateFinal3DGeoms,
// bufferExistingRoads: bufferExistingRoads
// generate3DGeoms: generate3DGeoms
};
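// Consumer-side sketch (illustrative; the require path, the loader and the input
// shape are assumptions, not part of this module): a caller passes each
// Geodesignhub diagram feature through generateFinal3DGeoms and collects the
// extruded GeoJSON features for rendering on the globe.
//
// var tools = require('./tools.js');
// var diagrams = readDiagramFeatureCollection(); // hypothetical loader returning a FeatureCollection
// var center = tools.generateCenter(diagrams);   // [lat, lng] used to position the camera
// var scene = [];
// diagrams.features.forEach(function (feat) {
//     // pass 'false' as the second argument to skip the generated street grids
//     var feats3d = tools.generateFinal3DGeoms(feat, 'true');
//     scene.push.apply(scene, feats3d);
// });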
|
{
"content_hash": "5b4fab0bea7cefda18bb3aa0755e2c9c",
"timestamp": "",
"source": "github",
"line_count": 1193,
"max_line_length": 152,
"avg_line_length": 39.22212908633696,
"alnum_prop": 0.49192169601641306,
"repo_name": "geodesignhub/Globe3DViewer",
"id": "bd00754db3ec71e25389eda99954be0362c44949",
"size": "46792",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "3Dprocessor/tools.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "34370"
},
{
"name": "EJS",
"bytes": "25888"
},
{
"name": "JavaScript",
"bytes": "88029"
},
{
"name": "Procfile",
"bytes": "19"
}
],
"symlink_target": ""
}
|
package net.sf.appia.core;
import java.util.concurrent.ThreadFactory;
import net.sf.appia.core.events.channel.ChannelEvent;
import net.sf.appia.core.events.channel.PeriodicTimer;
import net.sf.appia.core.events.channel.Timer;
/** <I>Appia</I> timers manager.
* This is an independent <I>thread</I> that gathers all timers
* requested by layers and resends them back when the timer expires.
*
* @author Alexandre Pinto
* @version 1.0
* @see net.sf.appia.core.events.channel.Timer
* @see net.sf.appia.core.events.channel.PeriodicTimer
*/
public class TimerManager implements Runnable, TimeProvider {
private static final long MICROS = 1000;
/**
* This class defines a MyTimer
*
* @version 1.0
*/
private class MyTimer {
/**
* The ID of the timer.
*/
public String id;
public long time;
public long period;
public ChannelEvent event;
public MyTimer next;
public MyTimer(String timerID, long time, long period, ChannelEvent event) {
this.id=timerID;
this.time=time;
this.period=period;
this.event=event;
}
}
private boolean alive=false;
private Thread thread;
private MyTimer next=null;
/**
* This class defines a MyClock
*
* @version 1.0
*/
private class MyClock {
public final long syncTime=1000;
private long lastSync=0;
private long now=0;
public MyClock() {
sync();
}
public synchronized void sync() {
now=currentTimeMillis();
lastSync=now;
}
public synchronized void update(long elapsed) {
now+=elapsed;
}
public synchronized long read() {
return now;
}
public synchronized void conditionalSync() {
if (now > lastSync + syncTime) {
now=currentTimeMillis();
lastSync=now;
}
}
}
private MyClock clock=new MyClock();
private synchronized void insert(MyTimer timer) {
MyTimer prev=null;
MyTimer t=next;
while ( (t != null) && (t.time <= timer.time) ) {
prev=t;
t=t.next;
}
if (prev == null) {
timer.next=next;
next=timer;
} else {
timer.next=t;
prev.next=timer;
}
}
private synchronized void remove(String timerID) {
MyTimer prev=null;
MyTimer t=next;
while ( (t != null) && (!t.id.equals(timerID)) ) {
prev=t;
t=t.next;
}
if (t != null) {
if (prev == null) {
next=t.next;
} else {
prev.next=t.next;
}
}
}
private synchronized MyTimer getNextTimer(long now) {
if ( (next != null) && (next.time <= now) ) {
final MyTimer t=next;
next=next.next;
return t;
} else
return null;
}
private synchronized void goToSleep(long now) {
long sleep;
if ( next != null ) {
sleep=next.time-now;
} else {
sleep=clock.syncTime;
}
if ( sleep > 0 ) {
try {
this.wait(sleep);
clock.update(sleep);
} catch (InterruptedException e) {
clock.sync();
}
}
}
private synchronized void setAlive(boolean alive) {
this.alive=alive;
}
private synchronized boolean isAlive() {
return alive;
}
/** Creates a new TimerManager
*/
public TimerManager(ThreadFactory thf) {
thread = thf.newThread(this);
thread.setName("Appia Timer Manager");
thread.setDaemon(true);
}
//////////////////////////////////////////////
  /** Returns the time interval until the next timer expires,
   * in milliseconds.
   * <B>Currently not implemented: always returns -1.</B>
   * @return The time until the next timer, in milliseconds.
   */
  public long nextTimerEvent() { return -1; }
/** Receives a timer to manage.
* @param timer The timer to manage.
* @see net.sf.appia.core.events.channel.Timer
*/
public void handleTimerRequest(Timer timer) {
final int q=timer.getQualifierMode();
if ( q != EventQualifier.NOTIFY ) {
if ( q == EventQualifier.ON ) {
insert(new MyTimer(timer.timerID,currentTimeMillis()+timer.getTimeout(),0,timer));
} else {
remove(timer.timerID);
}
thread.interrupt();
}
}
/**
* Processes the next timer to expire.<BR>
* <B>Currently it does nothing.</B>
*/
public void consumeTimerEvent() {}
/** Receives a periodic timer to manage.
* @param timer The periodic timer to manage.
* @see net.sf.appia.core.events.channel.PeriodicTimer
*/
public void handlePeriodicTimer(PeriodicTimer timer) {
final int q=timer.getQualifierMode();
if ( q != EventQualifier.NOTIFY ) {
if ( q == EventQualifier.ON ) {
final long period=timer.getPeriod();
clock.sync();
insert(new MyTimer(timer.timerID,currentTimeMillis()+period,period,timer));
} else {
remove(timer.timerID);
}
thread.interrupt();
}
}
/** Start execution of the manager thread.
* @see java.lang.Thread#start
*/
public void start() {
setAlive(true);
thread.start();
}
/** Stops execution of the manager thread.
* @see java.lang.Thread#stop
*/
public void stop() {
setAlive(false);
thread.interrupt();
}
/**
* Current time in milliseconds. <br>
* Uses {@linkplain System#currentTimeMillis()}.
*
* @see System#currentTimeMillis()
*/
public long currentTimeMillis(){
return System.currentTimeMillis();
}
  /**
   * Current time in microseconds. <br>
   * Uses {@linkplain System#nanoTime()} scaled down to microseconds.
   */
public long currentTimeMicros() {
return System.nanoTime()/MICROS;
}
  /**
   * Current time in nanoseconds. <br>
   * Uses {@linkplain System#nanoTime()}.
   */
public long nanoTime() {
return System.nanoTime();
}
////////////////////////////////////////////
/** The code executed by the manager thread.
* @see java.lang.Thread#run
*/
public void run() {
while (isAlive()) {
clock.conditionalSync();
final long now=clock.read();
MyTimer timer;
while ( (timer=getNextTimer(now)) != null ) {
ChannelEvent event;
try {
if (timer.period > 0) {
event=(PeriodicTimer)timer.event.cloneEvent();
} else {
event=timer.event;
}
event.setQualifierMode(EventQualifier.NOTIFY);
event.asyncGo(event.getChannel(),Direction.invert(event.getDir()));
if (timer.period > 0) {
timer.time=now+timer.period;
insert(timer);
}
}
catch (AppiaEventException e) {
//e.printStackTrace();
}
catch (CloneNotSupportedException e) {
throw new AppiaError("TimerManager: CloneNotSupportedException ");
}
}
goToSleep(now);
}
}
}
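/*
 * Usage sketch (illustrative only, not part of Appia): how a runtime could create
 * and drive a TimerManager. The plain defaultThreadFactory() and the sleep-based
 * lifecycle below are assumptions made for the example; the real framework supplies
 * its own ThreadFactory and sends Timer/PeriodicTimer events with qualifier mode ON.
 */
class TimerManagerUsageSketch {
  public static void main(String[] args) throws InterruptedException {
    TimerManager manager = new TimerManager(java.util.concurrent.Executors.defaultThreadFactory());
    manager.start();
    // Layers would now call manager.handleTimerRequest(timer) or
    // manager.handlePeriodicTimer(periodicTimer); expired timers are sent back
    // asynchronously on their channel with qualifier mode NOTIFY.
    Thread.sleep(1000);
    manager.stop();
  }
}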
|
{
"content_hash": "52063a2bcad1980b5e0ba38e36a586cd",
"timestamp": "",
"source": "github",
"line_count": 311,
"max_line_length": 93,
"avg_line_length": 22.688102893890676,
"alnum_prop": 0.5783730158730159,
"repo_name": "p158276/vanilladb-comm",
"id": "dad098f8ec798022607118b342eeafc5120843b1",
"size": "7858",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "src/core/net/sf/appia/core/TimerManager.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "670096"
}
],
"symlink_target": ""
}
|
{% extends "dataviewer/base.html" %}
{% block page %}
{% set map_config = h.get_common_map_config_geoviews() %}
{% set ol_config = h.get_openlayers_viewer_config() %}
<div id="data-preview">
<div id="map-container"
data-module="olpreview"
data-module-gapi_key="{{ gapi_key }}"
data-module-proxy_url="{{ proxy_url }}"
data-module-proxy_service_url="{{ proxy_service_url }}"
data-module-site_url="{{ h.dump_json(h.url_for('/', locale='default', qualified=true)) }}"
data-module-map_config="{{ h.dump_json(map_config) }}"
data-module-ol_config="{{ h.dump_json(ol_config) }}"
{% if resource_view_json %} data-module-resource-view = "{{ h.dump_json(resource_view_json) }}" {% endif %}
{% if basemapsConfig %} data-module-basemaps-config = "{{ h.dump_json(basemapsConfig) }}" {% endif %}
>
<h4 class="loading-dialog">
<div class="loading-spinner"></div>
<div class="left">{{ _('Loading...') }}</div>
</h4>
</div>
<div class="layerswitcher olControlLayerSwitcher"></div>
</div>
{% set type = 'asset' if h.ckan_version().split('.')[1] | int >= 9 else 'resource' %}
{% include 'geoview/snippets/openlayers_' ~ type ~ '.html' %}
{% endblock %}
|
{
"content_hash": "e311cb3f02eeb0a54a5fed68a57b0ba2",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 120,
"avg_line_length": 45.6,
"alnum_prop": 0.5394736842105263,
"repo_name": "ckan/ckanext-geoview",
"id": "12cfc7e2ceacebae15148ace348d6ab4f07d3303",
"size": "1368",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ckanext/geoview/templates/dataviewer/openlayers.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5941"
},
{
"name": "HTML",
"bytes": "5938"
},
{
"name": "JavaScript",
"bytes": "31776"
},
{
"name": "Python",
"bytes": "22709"
}
],
"symlink_target": ""
}
|
"""Collection of functions to compute properties of forwards."""
import tensorflow.compat.v2 as tf
from tf_quant_finance.math import segment_ops
def forward_rates(df_start_dates,
df_end_dates,
daycount_fractions,
dtype=None,
name=None):
"""Computes forward rates from daycount fractions and discount factors.
#### Example
```python
# Discount factors at start dates
df_start_dates = [[0.95, 0.9, 0.75], [0.95, 0.99, 0.85]]
# Discount factors at end dates
df_end_dates = [[0.8, 0.6, 0.5], [0.8, 0.9, 0.5]]
# Daycount fractions between the dates
daycount_fractions = [[0.5, 1.0, 2], [0.6, 0.4, 4.0]]
# Expected:
# [[0.375 , 0.5 , 0.25 ],
# [0.3125, 0.25 , 0.175 ]]
forward_rates(df_start_dates, df_end_dates, daycount_fractions,
dtype=tf.float64)
```
Args:
df_start_dates: A real `Tensor` representing discount factors at the start
dates.
df_end_dates: A real `Tensor` representing discount factors at the end
dates.
daycount_fractions: A real `Tensor` representing year fractions for the
coupon accrual.
dtype: `tf.Dtype`. If supplied the dtype for the input and output `Tensor`s.
Default value: None which maps to the default dtype inferred from
`df_start_dates`.
name: Python str. The name to give to the ops created by this function.
Default value: None which maps to 'forward_rates'.
  Returns:
    A real `Tensor` of the same shape as the broadcast inputs containing the
    simple forward rates implied by the discount factors, computed as
    `(df_start_dates / df_end_dates - 1) / daycount_fractions`.
  """
name = name or 'forward_rates'
with tf.name_scope(name):
df_start_dates = tf.convert_to_tensor(
df_start_dates, dtype, name='df_start_dates')
dtype = dtype or df_start_dates.dtype
df_end_dates = tf.convert_to_tensor(
df_end_dates, dtype, name='df_end_dates')
daycount_fractions = tf.convert_to_tensor(
daycount_fractions, dtype, name='daycount_fractions')
return tf.math.divide_no_nan(
tf.math.divide_no_nan(df_start_dates, df_end_dates) - 1,
daycount_fractions)
def forward_rates_from_yields(yields,
times,
groups=None,
dtype=None,
name=None):
"""Computes forward rates given a set of zero rates.
Denote the price of a zero coupon bond maturing at time `t` by `Z(t)`. Then
the zero rate to time `t` is defined as
```None
r(t) = - ln(Z(t)) / t (1)
```
This is the (continuously compounded) interest rate that applies between time
`0` and time `t` as seen at time `0`. The forward rate between times `t1` and
`t2` is defined as the interest rate that applies to the period `[t1, t2]`
as seen from today. It is related to the zero coupon bond prices by
```None
exp(-f(t1, t2)(t2-t1)) = Z(t2) / Z(t1) (2)
f(t1, t2) = - (ln Z(t2) - ln Z(t1)) / (t2 - t1) (3)
f(t1, t2) = (t2 * r(t2) - t1 * r(t1)) / (t2 - t1) (4)
```
Given a sequence of increasing times `[t1, t2, ... tn]` and the zero rates
for those times, this function computes the forward rates that apply to the
consecutive time intervals i.e. `[0, t1], [t1, t2], ... [t_{n-1}, tn]` using
Eq. (4) above. Note that for the interval `[0, t1]` the forward rate is the
same as the zero rate.
Additionally, this function supports this computation for a batch of such
rates. Batching is made slightly complicated by the fact that different
zero curves may have different numbers of tenors (the parameter `n` above).
Instead of a batch as an extra dimension, we support the concept of groups
(also see documentation for `tf.segment_sum` which uses the same concept).
#### Example
The following example illustrates this method along with the concept of
groups. Assuming there are two sets of zero rates (e.g. for different
currencies) whose implied forward rates are needed. The first set has a total
of three marked tenors at `[0.25, 0.5, 1.0]`. The second set
has four marked tenors at `[0.25, 0.5, 1.0, 1.5]`.
Suppose, the zero rates for the first set are:
`[0.04, 0.041, 0.044]` and the second are `[0.022, 0.025, 0.028, 0.036]`.
Then this data is batched together as follows:
  Groups: [0, 0, 0, 1, 1, 1, 1]
First three times for group 0, next four for group 1.
Times: [0.25, 0.5, 1.0, 0.25, 0.5, 1.0, 1.5]
First three rates for group 0, next four for group 1.
Rates: [0.04, 0.041, 0.044, 0.022, 0.025, 0.028, 0.036]
```python
dtype = np.float64
groups = np.array([0, 0, 0, 1, 1, 1, 1])
times = np.array([0.25, 0.5, 1.0, 0.25, 0.5, 1.0, 1.5], dtype=dtype)
rates = np.array([0.04, 0.041, 0.044, 0.022, 0.025, 0.028, 0.036],
dtype=dtype)
forward_rates = forward_rates_from_yields(
rates, times, groups=groups, dtype=dtype)
```
#### References:
[1]: John C. Hull. Options, Futures and Other Derivatives. Ninth Edition.
June 2006.
Args:
yields: Real rank 1 `Tensor` of size `n`. The discount/zero rates.
times: Real positive rank 1 `Tensor` of size `n`. The set of times
corresponding to the supplied zero rates. If no `groups` is supplied, then
the whole array should be sorted in an increasing order. If `groups` are
supplied, then the times within a group should be in an increasing order.
groups: Optional int `Tensor` of size `n` containing values between 0 and
`k-1` where `k` is the number of different curves.
Default value: None. This implies that all the rates are treated as a
single group.
dtype: `tf.Dtype`. If supplied the dtype for the `yields` and `times`.
Default value: None which maps to the default dtype inferred from
`yields`.
name: Python str. The name to give to the ops created by this function.
Default value: None which maps to 'forward_rates_from_yields'.
Returns:
Real rank 1 `Tensor` of size `n` containing the forward rate that applies
for each successive time interval (within each group if groups are
specified).
"""
with tf.compat.v1.name_scope(
name,
default_name='forward_rates_from_yields',
values=[yields, times, groups]):
yields = tf.convert_to_tensor(yields, dtype=dtype, name='yields')
dtype = dtype or yields.dtype
times = tf.convert_to_tensor(times, dtype=dtype, name='times')
if groups is not None:
groups = tf.convert_to_tensor(groups, name='groups')
# (t2 * r(t2) - t1 * r(t1)) / (t2 - t1)
rate_times = yields * times
diff_rate_times = segment_ops.segment_diff(
rate_times, order=1, exclusive=False, segment_ids=groups)
diff_times = segment_ops.segment_diff(
times, order=1, exclusive=False, segment_ids=groups)
return diff_rate_times / diff_times
def yields_from_forward_rates(discrete_forwards,
times,
groups=None,
dtype=None,
name=None):
"""Computes yield rates from discrete forward rates.
Denote the price of a zero coupon bond maturing at time `t` by `Z(t)`. Then
the zero rate to time `t` is defined as
```None
r(t) = - ln(Z(t)) / t (1)
```
This is the (continuously compounded) interest rate that applies between time
`0` and time `t` as seen at time `0`. The forward rate between times `t1` and
`t2` is defined as the interest rate that applies to the period `[t1, t2]`
as seen from today. It is related to the zero coupon bond prices by
```None
exp(-f(t1, t2)(t2-t1)) = Z(t2) / Z(t1) (2)
f(t1, t2) = - (ln Z(t2) - ln Z(t1)) / (t2 - t1) (3)
f(t1, t2) = (t2 * r(t2) - t1 * r(t1)) / (t2 - t1) (4)
```
Given a sequence of increasing times `[t1, t2, ... tn]` and the forward rates
for the consecutive time intervals, i.e. `[0, t1]`, `[t1, t2]` to
`[t_{n-1}, tn]`, this function computes the yields to maturity for maturities
`[t1, t2, ... tn]` using Eq. (4) above.
Additionally, this function supports this computation for a batch of such
forward rates. Batching is made slightly complicated by the fact that
different zero curves may have different numbers of tenors (the parameter `n`
above). Instead of a batch as an extra dimension, we support the concept of
groups (also see documentation for `tf.segment_sum` which uses the same
concept).
#### Example
The following example illustrates this method along with the concept of
groups. Assuming there are two sets of zero rates (e.g. for different
currencies) whose implied forward rates are needed. The first set has a total
of three marked tenors at `[0.25, 0.5, 1.0]`. The second set
has four marked tenors at `[0.25, 0.5, 1.0, 1.5]`.
Suppose, the forward rates for the first set are:
`[0.04, 0.041, 0.044]` and the second are `[0.022, 0.025, 0.028, 0.036]`.
Then this data is batched together as follows:
  Groups: [0, 0, 0, 1, 1, 1, 1]
First three times for group 0, next four for group 1.
Times: [0.25, 0.5, 1.0, 0.25, 0.5, 1.0, 1.5]
First three discrete forwards for group 0, next four for group 1.
Forwards: [0.04, 0.042, 0.047, 0.022, 0.028, 0.031, 0.052]
```python
dtype = np.float64
groups = np.array([0, 0, 0, 1, 1, 1, 1])
times = np.array([0.25, 0.5, 1.0, 0.25, 0.5, 1.0, 1.5], dtype=dtype)
discrete_forwards = np.array(
[0.04, 0.042, 0.047, 0.022, 0.028, 0.031, 0.052], dtype=dtype)
yields = yields_from_forward_rates(discrete_forwards, times,
groups=groups, dtype=dtype)
# Produces: [0.04, 0.041, 0.044, 0.022, 0.025, 0.028, 0.036]
```
#### References:
[1]: John C. Hull. Options, Futures and Other Derivatives. Ninth Edition.
June 2006.
Args:
discrete_forwards: Real rank 1 `Tensor` of size `n`. The forward rates for
the time periods specified. Note that the first value applies between `0`
and time `times[0]`.
times: Real positive rank 1 `Tensor` of size `n`. The set of times
corresponding to the supplied zero rates. If no `groups` is supplied, then
the whole array should be sorted in an increasing order. If `groups` are
supplied, then the times within a group should be in an increasing order.
groups: Optional int `Tensor` of size `n` containing values between 0 and
`k-1` where `k` is the number of different curves.
Default value: None. This implies that all the rates are treated as a
single group.
dtype: `tf.Dtype`. If supplied the dtype for the `discrete_forwards` and
`times`.
Default value: None which maps to the default dtype inferred from
`discrete_forwards`.
name: Python str. The name to give to the ops created by this function.
Default value: None which maps to 'yields_from_forward_rates'.
Returns:
yields: Real rank 1 `Tensor` of size `n` containing the zero coupon yields
for the supplied maturities (within each group if `groups` is
specified).
"""
with tf.compat.v1.name_scope(
name,
default_name='yields_from_forward_rates',
values=[discrete_forwards, times, groups]):
discrete_forwards = tf.convert_to_tensor(discrete_forwards, dtype=dtype,
name='discrete_forwards')
dtype = dtype or discrete_forwards.dtype
times = tf.convert_to_tensor(times, dtype=dtype, name='times')
if groups is not None:
groups = tf.convert_to_tensor(groups, name='groups')
# Strategy for solving this equation without loops.
# Define x_i = f_i (t_i - t_{i-1}) where f are the forward rates and
# t_{-1}=0. Also define y_i = r_i t_i
# Then the relationship between the forward rate and the yield can be
# written as: x_i = y_i - y_{i-1} which we need to solve for y.
# Hence, y_i = x_0 + x_1 + ... x_i.
intervals = segment_ops.segment_diff(
times, order=1, exclusive=False, segment_ids=groups)
x = intervals * discrete_forwards
y = segment_ops.segment_cumsum(x, exclusive=False, segment_ids=groups)
return tf.math.divide_no_nan(y, times)
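# --- Illustrative reference (added for exposition; not part of the original module).
# A minimal single-curve NumPy sketch of the cumulative-sum identity used in the
# comments above: with x_i = f_i * (t_i - t_{i-1}) and y_i = r_i * t_i, the relation
# x_i = y_i - y_{i-1} telescopes to y_i = x_0 + ... + x_i, so r_i = cumsum(x)_i / t_i.
def _reference_yields_from_forwards_single_curve(forwards, times):
  """Sketch of Eq. (4) for a single curve; assumes `times` is sorted and positive."""
  import numpy as np  # Local import so the sketch stays self-contained.
  times = np.asarray(times, dtype=float)
  intervals = np.diff(times, prepend=0.0)             # t_i - t_{i-1}, with t_{-1} = 0.
  x = np.asarray(forwards, dtype=float) * intervals   # x_i = f_i * (t_i - t_{i-1}).
  return np.cumsum(x) / times                         # r_i = (x_0 + ... + x_i) / t_i.
# Example: _reference_yields_from_forwards_single_curve([0.04, 0.042, 0.047],
# [0.25, 0.5, 1.0]) is approximately [0.04, 0.041, 0.044], matching the first group
# of the docstring example above.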
__all__ = ['forward_rates_from_yields',
'yields_from_forward_rates',
'forward_rates']
|
{
"content_hash": "02435760fad991148d265bbbf11863e7",
"timestamp": "",
"source": "github",
"line_count": 288,
"max_line_length": 80,
"avg_line_length": 42.666666666666664,
"alnum_prop": 0.6338704427083334,
"repo_name": "google/tf-quant-finance",
"id": "69eb8d61de8372b5a38f31115733edfbddc4965b",
"size": "12863",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tf_quant_finance/rates/analytics/forwards.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "5759"
},
{
"name": "Jupyter Notebook",
"bytes": "1634001"
},
{
"name": "Python",
"bytes": "3661863"
},
{
"name": "Shell",
"bytes": "2338"
},
{
"name": "Starlark",
"bytes": "109192"
}
],
"symlink_target": ""
}
|
const DNSDomainName = require('./DNSDomainName.js');
const DNSMessageResource = function(name, type, rclass, ttl, rdata) {
this.name = name || new DNSDomainName();
this.type = type || 1;
this.rclass = rclass || 1;
this.ttl = ttl || 300;
this.rdlength = rdata ? rdata.length : 0;
this.rdata = rdata || Buffer.alloc(0);
};
// This function ignores this.rdlength and uses this.rdata.length instead
DNSMessageResource.prototype.encode = function() {
var name = this.name.encode();
var buffer = Buffer.alloc(name.length + this.rdata.length + 10);
name.copy(buffer, 0);
buffer.writeUInt16BE(this.type, name.length);
buffer.writeUInt16BE(this.rclass, name.length + 2);
buffer.writeUInt32BE(this.ttl, name.length + 4);
buffer.writeUInt16BE(this.rdata.length, name.length + 8);
this.rdata.copy(buffer, name.length + 10);
return buffer;
};
DNSMessageResource.prototype.toString = function() {
return '<DNSMessageResource>';
};
module.exports = DNSMessageResource;
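// Usage sketch (illustrative only, not part of the original module): encoding an A
// record with a 60 second TTL. The DNSDomainName constructor arguments are assumed
// here purely for illustration; only its no-argument form is exercised above.
//
//   const rdata = Buffer.from([192, 0, 2, 1]);  // 192.0.2.1
//   const rr = new DNSMessageResource(new DNSDomainName(), 1 /* A */, 1 /* IN */, 60, rdata);
//   const wire = rr.encode();  // name, then TYPE, CLASS, TTL, RDLENGTH, RDATA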
|
{
"content_hash": "4f5578e2649f49d3f5aee8922d7b4e6c",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 73,
"avg_line_length": 30.5,
"alnum_prop": 0.7161885245901639,
"repo_name": "williamb96/nodedns",
"id": "26d5618c837b93927c999e5e100783a65dfec704",
"size": "976",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DNSMessageResource.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "8293"
}
],
"symlink_target": ""
}
|
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<mapper namespace="org.apache.guacamole.auth.jdbc.permission.UserPermissionMapper" >
<!-- Result mapper for user permissions -->
<resultMap id="UserPermissionResultMap" type="org.apache.guacamole.auth.jdbc.permission.ObjectPermissionModel">
<result column="user_id" property="userID" jdbcType="INTEGER"/>
<result column="username" property="username" jdbcType="VARCHAR"/>
<result column="permission" property="type" jdbcType="VARCHAR"
javaType="org.apache.guacamole.net.auth.permission.ObjectPermission$Type"/>
<result column="affected_username" property="objectIdentifier" jdbcType="INTEGER"/>
</resultMap>
<!-- Select all permissions for a given user -->
<select id="select" resultMap="UserPermissionResultMap">
SELECT
[guacamole_user_permission].user_id,
[guacamole_user].username,
permission,
affected.username AS affected_username
FROM [guacamole_user_permission]
JOIN [guacamole_user] ON [guacamole_user_permission].user_id = [guacamole_user].user_id
JOIN [guacamole_user] affected ON [guacamole_user_permission].affected_user_id = affected.user_id
WHERE [guacamole_user_permission].user_id = #{user.objectID,jdbcType=INTEGER}
</select>
<!-- Select the single permission matching the given criteria -->
<select id="selectOne" resultMap="UserPermissionResultMap">
SELECT
[guacamole_user_permission].user_id,
[guacamole_user].username,
permission,
affected.username AS affected_username
FROM [guacamole_user_permission]
JOIN [guacamole_user] ON [guacamole_user_permission].user_id = [guacamole_user].user_id
JOIN [guacamole_user] affected ON [guacamole_user_permission].affected_user_id = affected.user_id
WHERE
[guacamole_user_permission].user_id = #{user.objectID,jdbcType=INTEGER}
AND permission = #{type,jdbcType=VARCHAR}
AND affected.username = #{identifier,jdbcType=VARCHAR}
</select>
<!-- Select identifiers accessible by the given user for the given permissions -->
<select id="selectAccessibleIdentifiers" resultType="string">
SELECT DISTINCT username
FROM [guacamole_user_permission]
JOIN [guacamole_user] ON [guacamole_user_permission].affected_user_id = [guacamole_user].user_id
WHERE
[guacamole_user_permission].user_id = #{user.objectID,jdbcType=INTEGER}
AND username IN
<foreach collection="identifiers" item="identifier"
open="(" separator="," close=")">
#{identifier,jdbcType=VARCHAR}
</foreach>
AND permission IN
<foreach collection="permissions" item="permission"
open="(" separator="," close=")">
#{permission,jdbcType=VARCHAR}
</foreach>
</select>
<!-- Delete all given permissions -->
<delete id="delete" parameterType="org.apache.guacamole.auth.jdbc.permission.ObjectPermissionModel">
DELETE FROM [guacamole_user_permission]
USING [guacamole_user] affected
WHERE
[guacamole_user_permission].affected_user_id = affected.user_id
AND ([guacamole_user_permission].user_id, permission, affected.username) IN
<foreach collection="permissions" item="permission"
open="(" separator="," close=")">
(#{permission.userID,jdbcType=INTEGER},
#{permission.type,jdbcType=VARCHAR},
#{permission.objectIdentifier,jdbcType=VARCHAR})
</foreach>
</delete>
<!-- Insert all given permissions -->
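<!-- The INSERT below expands the supplied permission models into an inline derived
     table (one SELECT per model combined with UNION ALL), joins it against
     guacamole_user to resolve each affected username to its user_id, and relies on
     NOT EXISTS to skip rows that already exist, keeping the insert idempotent. -->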
<insert id="insert" parameterType="org.apache.guacamole.auth.jdbc.permission.ObjectPermissionModel">
INSERT INTO [guacamole_user_permission] (
user_id,
permission,
affected_user_id
)
SELECT DISTINCT
permissions.user_id,
permissions.permission,
[guacamole_user].user_id
FROM
<foreach collection="permissions" item="permission"
open="(" separator="UNION ALL" close=")">
SELECT #{permission.userID,jdbcType=INTEGER} AS user_id,
#{permission.type,jdbcType=VARCHAR} AS permission,
#{permission.objectIdentifier,jdbcType=VARCHAR} AS username
</foreach>
AS permissions
JOIN [guacamole_user] ON [guacamole_user].username = permissions.username
WHERE NOT EXISTS (SELECT 1 FROM [guacamole_user_permission]
WHERE [guacamole_user_permission].user_id = permissions.user_id
AND [guacamole_user_permission].permission = permissions.permission
AND [guacamole_user_permission].affected_user_id = [guacamole_user].user_id
);
</insert>
</mapper>
|
{
"content_hash": "4af714b1c763e27b69d9f936a2e33aa7",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 115,
"avg_line_length": 45.407407407407405,
"alnum_prop": 0.6285481239804241,
"repo_name": "lato333/guacamole-client",
"id": "453777d0c2b3a00065cb1ca5565fed9ec2c955cc",
"size": "6130",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "extensions/guacamole-auth-jdbc/modules/guacamole-auth-jdbc-sqlserver/src/main/resources/org/apache/guacamole/auth/jdbc/permission/UserPermissionMapper.xml",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "112391"
},
{
"name": "HTML",
"bytes": "64819"
},
{
"name": "Java",
"bytes": "2233955"
},
{
"name": "JavaScript",
"bytes": "1350413"
},
{
"name": "PLSQL",
"bytes": "3281"
},
{
"name": "Roff",
"bytes": "4868"
},
{
"name": "SQLPL",
"bytes": "1187"
},
{
"name": "Shell",
"bytes": "20170"
}
],
"symlink_target": ""
}
|
<?php
namespace NS\SentinelBundle\Form\IBD\Types;
use JMS\TranslationBundle\Translation\TranslationContainerInterface;
use NS\UtilBundle\Form\Types\TranslatableArrayChoice;
class CultureResult extends TranslatableArrayChoice implements TranslationContainerInterface
{
public const
NEGATIVE = 0,
SPN = 1,
HI = 2,
NM = 3,
OTHER = 4,
CONTAMINANT = 5,
UNKNOWN = 99;
protected $values = [
self::NEGATIVE => 'Negative',
self::SPN => 'Spn',
self::HI => 'Hi',
self::NM => 'Nm',
self::OTHER => 'Other',
self::CONTAMINANT => 'Contaminant',
self::UNKNOWN => 'Unknown',
];
}
|
{
"content_hash": "aad9c03d93f99bddb60c9dbaf2a41ccd",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 92,
"avg_line_length": 26.607142857142858,
"alnum_prop": 0.5543624161073826,
"repo_name": "IBVPD/Nuvi",
"id": "f36dee25ea0f5c0efef86bdf6fe02c99ec3ef53e",
"size": "745",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/NS/SentinelBundle/Form/IBD/Types/CultureResult.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3810"
},
{
"name": "HTML",
"bytes": "534456"
},
{
"name": "JavaScript",
"bytes": "8914"
},
{
"name": "PHP",
"bytes": "2146082"
},
{
"name": "PostScript",
"bytes": "402419"
},
{
"name": "Shell",
"bytes": "1503"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import apps.submission.models
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('viewflow', '0006_i18n'),
]
operations = [
migrations.CreateModel(
name='SubmissionProcess',
fields=[
('process_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='viewflow.Process')),
('label', models.CharField(help_text="Give this new submission a label so that you'll recover it easily", max_length=150, verbose_name='Label')),
('archive', models.FileField(upload_to=apps.submission.models.SubmissionProcess.archive_upload_to, verbose_name='Pixels submitted archive')),
('downloaded', models.BooleanField(default=False)),
('uploaded', models.BooleanField(default=False)),
('validated', models.BooleanField(default=False)),
('imported', models.BooleanField(default=False)),
],
options={
'abstract': False,
},
bases=('viewflow.process',),
),
]
|
{
"content_hash": "45e4ae481e5363bc48778855a3002972",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 194,
"avg_line_length": 39.60606060606061,
"alnum_prop": 0.6182096403978576,
"repo_name": "Candihub/pixel",
"id": "e52eaeebe093dbfe4bd019649c180f7fa44ebe19",
"size": "1380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/submission/migrations/0001_initial.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "15017"
},
{
"name": "Dockerfile",
"bytes": "1819"
},
{
"name": "HTML",
"bytes": "58864"
},
{
"name": "JavaScript",
"bytes": "1180"
},
{
"name": "Makefile",
"bytes": "4184"
},
{
"name": "Python",
"bytes": "414705"
},
{
"name": "R",
"bytes": "3817"
},
{
"name": "Shell",
"bytes": "2928"
}
],
"symlink_target": ""
}
|
package softuni.dto.Export;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
public class PhotographerLandscaperExportDto {
@Expose
@SerializedName(value="FirstName")
private String firstName;
@Expose
@SerializedName(value="LastName")
private String lastName;
@Expose
@SerializedName(value="CameraMake")
private String primaryCameraMake;
@Expose
@SerializedName(value="LensesCount")
private Integer lensesCount;
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public String getPrimaryCameraMake() {
return primaryCameraMake;
}
public void setPrimaryCameraMake(String primaryCameraMake) {
this.primaryCameraMake = primaryCameraMake;
}
public Integer getLensesCount() {
return lensesCount;
}
public void setLensesCount(Integer lensesCount) {
this.lensesCount = lensesCount;
}
}
|
{
"content_hash": "6a4fdca20d2c95ba8773d081bcceab41",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 64,
"avg_line_length": 21.87719298245614,
"alnum_prop": 0.6928628708901363,
"repo_name": "yangra/SoftUni",
"id": "64a0d1d345f4f4d1369122324b6e8c645bcc36a9",
"size": "1247",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Java-DB-Fundamentals/DatabasesFrameworks-Hibernate&SpringData/EXAMS/photography_workshops_exam/src/main/java/softuni/dto/Export/PhotographerLandscaperExportDto.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "301"
},
{
"name": "Batchfile",
"bytes": "64953"
},
{
"name": "C#",
"bytes": "1237474"
},
{
"name": "CSS",
"bytes": "474654"
},
{
"name": "HTML",
"bytes": "187923"
},
{
"name": "Java",
"bytes": "2815020"
},
{
"name": "JavaScript",
"bytes": "636500"
},
{
"name": "PHP",
"bytes": "72489"
},
{
"name": "SQLPL",
"bytes": "27954"
},
{
"name": "Shell",
"bytes": "84674"
}
],
"symlink_target": ""
}
|
<div class="row">
<div class="col-md-12">
<form action="" method="POST" role="form">
<ul class="timeline">
<li class="time-label">
<span class="bg-grey">Entry Visi</span>
</li>
<li>
<i class="fa fa-arrow-down"></i>
<div class="timeline-item">
<div class="timeline-body">
<?php if( $visi == FALSE) : ?>
<div class="form-group">
<label>Visi</label>
<textarea name="visi" class="form-control" rows="3"><?php echo set_value('visi') ?></textarea>
<p class="help-block"><?php echo form_error('visi', '<small class="text-red">', '</small>'); ?></p>
</div>
<div class="form-group">
<label>Penjabaran Visi</label>
<p class="help-block"><?php echo form_error('penjabaran', '<small class="text-red">', '</small>'); ?></p>
<textarea name="penjabaran" class="form-control summernote"><?php echo set_value('penjabaran') ?></textarea>
</div>
<?php else : ?>
<div class="form-group">
<label>Visi</label>
<textarea name="visi" class="form-control" rows="3"><?php echo (set_value('visi')) ? set_value('visi') : $visi->deskripsi ?></textarea>
<p class="help-block"><?php echo form_error('visi', '<small class="text-red">', '</small>'); ?></p>
</div>
<div class="form-group">
<label>Penjabaran Visi</label>
<p class="help-block"><?php echo form_error('penjabaran', '<small class="text-red">', '</small>'); ?></p>
<textarea name="penjabaran" class="form-control summernote"><?php echo (set_value('penjabaran')) ? set_value('penjabaran') : $visi->penjabaran ?></textarea>
</div>
<?php endif; ?>
<div class="form-group">
<div class="col-md-6 col-md-offset-3">
<button class="btn btn-app pull-right"><i class="fa fa-save"></i> Simpan</button>
</div>
<div class="clearfix"></div>
</div>
</div>
</div>
</li>
</ul>
</form>
</div>
</div>
|
{
"content_hash": "e01548108cde75dfc42f29f2a345016a",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 184,
"avg_line_length": 56.46808510638298,
"alnum_prop": 0.41333835719668427,
"repo_name": "teitra-mega/e-sakip",
"id": "13e02fb71802583ced3700540bb45804fe574c06",
"size": "2654",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "application/views/skpd/vVisi.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "747527"
},
{
"name": "HTML",
"bytes": "3026025"
},
{
"name": "JavaScript",
"bytes": "7506763"
},
{
"name": "PHP",
"bytes": "2582907"
},
{
"name": "Python",
"bytes": "32324"
}
],
"symlink_target": ""
}
|
from server_process_base import BaseXmlRpcProcess
import subprocess
SERVICE_NAME = 'xml_rpc_server'
def service_installed():
exit_code = subprocess.call(['supervisorctl', 'status', SERVICE_NAME])
return exit_code == 0
class XmlRpcProcess(BaseXmlRpcProcess):
    def is_running(self):
        running = False
        cmd = 'supervisorctl status {}'.format(SERVICE_NAME)
        output = subprocess.check_output(cmd.split())
        if isinstance(output, bytes):
            output = output.decode('utf-8', 'replace')
        for line in output.splitlines():
            # Expected format: "<name>   RUNNING   pid 123, uptime 0:05:21"
            tmp = line.strip().split()
            if tmp and tmp[0] == SERVICE_NAME:
                running = len(tmp) > 1 and tmp[1] == 'RUNNING'
        return running
def start(self):
if self.is_running():
self.logger.warning('service already running')
return False
cmd = 'supervisorctl start {}'.format(SERVICE_NAME)
exit_code = subprocess.call(cmd.split())
if exit_code == 0:
self.logger.info('server is running')
else:
self.logger.error('unable to run server')
return exit_code == 0
    def stop(self):
        if not self.is_running():
            self.logger.warning('service already stopped')
            return False
        cmd = 'supervisorctl stop {}'.format(SERVICE_NAME)
        exit_code = subprocess.call(cmd.split())
        if exit_code == 0:
            self.logger.info('server was stopped')
        else:
            self.logger.warning('unable to stop server')
        return exit_code == 0
    def restart(self):
        cmd = 'supervisorctl restart {}'.format(SERVICE_NAME)
        exit_code = subprocess.call(cmd.split())
        if exit_code == 0:
            self.logger.info('server was restarted')
        else:
            self.logger.warning('unable to restart server')
        return exit_code == 0
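# Usage sketch (illustrative only; the BaseXmlRpcProcess constructor arguments and
# its `logger` attribute are assumed from their use above, not documented here):
#
#   proc = XmlRpcProcess()
#   if not proc.is_running():
#       proc.start()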
|
{
"content_hash": "4fe8f45759312f6386b05b73ccea6dca",
"timestamp": "",
"source": "github",
"line_count": 58,
"max_line_length": 73,
"avg_line_length": 31.844827586206897,
"alnum_prop": 0.57877639415268,
"repo_name": "davidvoler/ate_meteor",
"id": "c3cec510624a296d13c50f4ea1c5f8914858b55e",
"size": "1847",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xmlrpc/server_process_linux.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4889"
},
{
"name": "JavaScript",
"bytes": "16340"
},
{
"name": "Python",
"bytes": "50989"
}
],
"symlink_target": ""
}
|
package pageblob_test
import (
"bytes"
"context"
"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/sas"
"io"
"testing"
"time"
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
"github.com/Azure/azure-sdk-for-go/sdk/azcore/streaming"
"github.com/Azure/azure-sdk-for-go/sdk/azcore/to"
"github.com/Azure/azure-sdk-for-go/sdk/internal/recording"
"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/blob"
"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/bloberror"
"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/container"
"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/internal/testcommon"
"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/pageblob"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
)
func Test(t *testing.T) {
recordMode := recording.GetRecordMode()
t.Logf("Running pageblob Tests in %s mode\n", recordMode)
if recordMode == recording.LiveMode {
suite.Run(t, &PageBlobRecordedTestsSuite{})
suite.Run(t, &PageBlobUnrecordedTestsSuite{})
} else if recordMode == recording.PlaybackMode {
suite.Run(t, &PageBlobRecordedTestsSuite{})
} else if recordMode == recording.RecordingMode {
suite.Run(t, &PageBlobRecordedTestsSuite{})
}
}
func (s *PageBlobRecordedTestsSuite) BeforeTest(suite string, test string) {
testcommon.BeforeTest(s.T(), suite, test)
}
func (s *PageBlobRecordedTestsSuite) AfterTest(suite string, test string) {
testcommon.AfterTest(s.T(), suite, test)
}
func (s *PageBlobUnrecordedTestsSuite) BeforeTest(suite string, test string) {
}
func (s *PageBlobUnrecordedTestsSuite) AfterTest(suite string, test string) {
}
type PageBlobRecordedTestsSuite struct {
suite.Suite
}
type PageBlobUnrecordedTestsSuite struct {
suite.Suite
}
func getPageBlobClient(pageBlobName string, containerClient *container.Client) *pageblob.Client {
return containerClient.NewPageBlobClient(pageBlobName)
}
func createNewPageBlob(ctx context.Context, _require *require.Assertions, pageBlobName string, containerClient *container.Client) *pageblob.Client {
return createNewPageBlobWithSize(ctx, _require, pageBlobName, containerClient, pageblob.PageBytes*10)
}
func createNewPageBlobWithSize(ctx context.Context, _require *require.Assertions, pageBlobName string, containerClient *container.Client, sizeInBytes int64) *pageblob.Client {
pbClient := getPageBlobClient(pageBlobName, containerClient)
_, err := pbClient.Create(ctx, sizeInBytes, nil)
_require.Nil(err)
//_require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
return pbClient
}
func createNewPageBlobWithCPK(ctx context.Context, _require *require.Assertions, pageBlobName string, container *container.Client, sizeInBytes int64, cpkInfo *blob.CpkInfo, cpkScopeInfo *blob.CpkScopeInfo) (pbClient *pageblob.Client) {
pbClient = getPageBlobClient(pageBlobName, container)
_, err := pbClient.Create(ctx, sizeInBytes, &pageblob.CreateOptions{
CpkInfo: cpkInfo,
CpkScopeInfo: cpkScopeInfo,
})
_require.Nil(err)
// _require.Equal(resp.RawResponse.StatusCode, 201)
return
}
func (s *PageBlobRecordedTestsSuite) TestPutGetPages() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
offset, count := int64(0), int64(1024)
reader, _ := testcommon.GenerateData(1024)
putResp, err := pbClient.UploadPages(context.Background(), reader, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: count,
},
})
_require.Nil(err)
_require.NotNil(putResp.LastModified)
_require.Equal((*putResp.LastModified).IsZero(), false)
_require.NotNil(putResp.ETag)
_require.Nil(putResp.ContentMD5)
_require.Equal(*putResp.BlobSequenceNumber, int64(0))
_require.NotNil(*putResp.RequestID)
_require.NotNil(*putResp.Version)
_require.NotNil(putResp.Date)
_require.Equal((*putResp.Date).IsZero(), false)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{
Range: blob.HTTPRange{
Count: 1023,
},
})
for pager.More() {
pageListResp, err := pager.NextPage(context.Background())
_require.Nil(err)
_require.NotNil(pageListResp.LastModified)
_require.Equal((*pageListResp.LastModified).IsZero(), false)
_require.NotNil(pageListResp.ETag)
_require.Equal(*pageListResp.BlobContentLength, int64(512*10))
_require.NotNil(*pageListResp.RequestID)
_require.NotNil(*pageListResp.Version)
_require.NotNil(pageListResp.Date)
_require.Equal((*pageListResp.Date).IsZero(), false)
_require.NotNil(pageListResp.PageList)
pageRangeResp := pageListResp.PageList.PageRange
_require.Len(pageRangeResp, 1)
rawStart, rawEnd := rawPageRange((pageRangeResp)[0])
_require.Equal(rawStart, offset)
_require.Equal(rawEnd, count-1)
if err != nil {
break
}
}
}
//func (s *PageBlobUnrecordedTestsSuite) TestUploadPagesFromURL() {
// _require := require.New(s.T())
// testName := s.T().Name()
// svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
// if err != nil {
// _require.Fail("Unable to fetch service client because " + err.Error())
// }
//
// containerName := testcommon.GenerateContainerName(testName)
// containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// contentSize := 4 * 1024 * 1024 // 4MB
// r, sourceData := getRandomDataAndReader(contentSize)
// srcBlob := createNewPageBlobWithSize(_require, "srcblob", containerClient, int64(contentSize))
// destBlob := createNewPageBlobWithSize(_require, "dstblob", containerClient, int64(contentSize))
//
// offset, _, count := int64(0), int64(contentSize-1), int64(contentSize)
// uploadSrcResp1, err := srcBlob.UploadPages(context.Background(), streaming.NopCloser(r), &pageblob.UploadPagesOptions{
// Offset: to.Ptr(offset),
// Count: to.Ptr(count),
// }
// _require.Nil(err)
// _require.NotNil(uploadSrcResp1.LastModified)
// _require.Equal((*uploadSrcResp1.LastModified).IsZero(), false)
// _require.NotNil(uploadSrcResp1.ETag)
// _require.Nil(uploadSrcResp1.ContentMD5)
// _require.Equal(*uploadSrcResp1.BlobSequenceNumber, int64(0))
// _require.NotNil(*uploadSrcResp1.RequestID)
// _require.NotNil(*uploadSrcResp1.Version)
// _require.NotNil(uploadSrcResp1.Date)
// _require.Equal((*uploadSrcResp1.Date).IsZero(), false)
//
// // Get source pbClient URL with SAS for UploadPagesFromURL.
// credential, err := getGenericCredential(nil, testcommon.TestAccountDefault)
// _require.Nil(err)
// srcBlobParts, _ := NewBlobURLParts(srcBlob.URL())
//
// srcBlobParts.SAS, err = BlobSASSignatureValues{
// Protocol: SASProtocolHTTPS, // Users MUST use HTTPS (not HTTP)
// ExpiryTime: time.Now().UTC().Add(48 * time.Hour), // 48-hours before expiration
// ContainerName: srcBlobParts.ContainerName,
// BlobName: srcBlobParts.BlobName,
// Permissions: BlobSASPermissions{Read: true}.String(),
// }.Sign(credential)
// if err != nil {
// _require.Error(err)
// }
//
// srcBlobURLWithSAS := srcBlobParts.URL()
//
// // Upload page from URL.
// pResp1, err := destBlob.UploadPagesFromURL(ctx, srcBlobURLWithSAS, 0, 0, int64(contentSize), nil)
// _require.Nil(err)
// // _require.Equal(pResp1.RawResponse.StatusCode, 201)
// _require.NotNil(pResp1.ETag)
// _require.NotNil(pResp1.LastModified)
// _require.NotNil(pResp1.ContentMD5)
// _require.NotNil(pResp1.RequestID)
// _require.NotNil(pResp1.Version)
// _require.NotNil(pResp1.Date)
// _require.Equal((*pResp1.Date).IsZero(), false)
//
// // Check data integrity through downloading.
// downloadResp, err := destBlob.Download(ctx, nil)
// _require.Nil(err)
// destData, err := io.ReadAll(downloadResp.BodyReader(&blob.RetryReaderOptions{}))
// _require.Nil(err)
// _require.EqualValues(destData, sourceData)
//}
//
//func (s *PageBlobUnrecordedTestsSuite) TestUploadPagesFromURLWithMD5() {
// _require := require.New(s.T())
// testName := s.T().Name()
// svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
// if err != nil {
// _require.Fail("Unable to fetch service client because " + err.Error())
// }
//
// containerName := testcommon.GenerateContainerName(testName)
// containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// contentSize := 4 * 1024 * 1024 // 4MB
// r, sourceData := getRandomDataAndReader(contentSize)
// md5Value := md5.Sum(sourceData)
// contentMD5 := md5Value[:]
// ctx := ctx // Use default Background context
// srcBlob := createNewPageBlobWithSize(_require, "srcblob", containerClient, int64(contentSize))
// destBlob := createNewPageBlobWithSize(_require, "dstblob", containerClient, int64(contentSize))
//
// // Prepare source pbClient for copy.
// offset, _, count := int64(0), int64(contentSize-1), int64(contentSize)
// uploadPagesOptions := pageblob.UploadPagesOptions{Offset: to.Ptr(int64(offset)), Count: to.Ptr(int64(count)),}
// _, err = srcBlob.UploadPages(context.Background(), streaming.NopCloser(r), &uploadPagesOptions)
// _require.Nil(err)
// // _require.Equal(uploadSrcResp1.RawResponse.StatusCode, 201)
//
// // Get source pbClient URL with SAS for UploadPagesFromURL.
// credential, err := getGenericCredential(nil, testcommon.TestAccountDefault)
// _require.Nil(err)
// srcBlobParts, _ := NewBlobURLParts(srcBlob.URL())
//
// srcBlobParts.SAS, err = azblob.BlobSASSignatureValues{
// Protocol: SASProtocolHTTPS, // Users MUST use HTTPS (not HTTP)
// ExpiryTime: time.Now().UTC().Add(48 * time.Hour), // 48-hours before expiration
// ContainerName: srcBlobParts.ContainerName,
// BlobName: srcBlobParts.BlobName,
// Permissions: BlobSASPermissions{Read: true}.String(),
// }.Sign(credential)
// if err != nil {
// _require.Error(err)
// }
//
// srcBlobURLWithSAS := srcBlobParts.URL()
//
// // Upload page from URL with MD5.
// uploadPagesFromURLOptions := pageblob.UploadPagesFromURLOptions{
// SourceContentMD5: contentMD5,
// }
// pResp1, err := destBlob.UploadPagesFromURL(ctx, srcBlobURLWithSAS, 0, 0, int64(contentSize), &uploadPagesFromURLOptions)
// _require.Nil(err)
// // _require.Equal(pResp1.RawResponse.StatusCode, 201)
// _require.NotNil(pResp1.ETag)
// _require.NotNil(pResp1.LastModified)
// _require.NotNil(pResp1.ContentMD5)
// _require.EqualValues(pResp1.ContentMD5, contentMD5)
// _require.NotNil(pResp1.RequestID)
// _require.NotNil(pResp1.Version)
// _require.NotNil(pResp1.Date)
// _require.Equal((*pResp1.Date).IsZero(), false)
// _require.Equal(*pResp1.BlobSequenceNumber, int64(0))
//
// // Check data integrity through downloading.
// downloadResp, err := destBlob.Download(ctx, nil)
// _require.Nil(err)
// destData, err := io.ReadAll(downloadResp.BodyReader(&blob.RetryReaderOptions{}))
// _require.Nil(err)
// _require.EqualValues(destData, sourceData)
//
// // Upload page from URL with bad MD5
// _, badMD5 := getRandomDataAndReader(16)
// badContentMD5 := badMD5[:]
// uploadPagesFromURLOptions = pageblob.UploadPagesFromURLOptions{
// SourceContentMD5: badContentMD5,
// }
// _, err = destBlob.UploadPagesFromURL(ctx, srcBlobURLWithSAS, 0, 0, int64(contentSize), &uploadPagesFromURLOptions)
// _require.NotNil(err)
//
// testcommon.ValidateBlobErrorCode(_require, err, bloberror.MD5Mismatch)
//}
func (s *PageBlobUnrecordedTestsSuite) TestClearDiffPages() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
contentSize := 2 * 1024
r := testcommon.GetReaderToGeneratedBytes(contentSize)
_, err = pbClient.UploadPages(context.Background(), r, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: int64(contentSize),
},
})
_require.Nil(err)
snapshotResp, err := pbClient.CreateSnapshot(context.Background(), nil)
_require.Nil(err)
r1 := testcommon.GetReaderToGeneratedBytes(contentSize)
_, err = pbClient.UploadPages(context.Background(), r1, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Offset: int64(contentSize),
Count: int64(contentSize),
},
})
_require.Nil(err)
pager := pbClient.NewGetPageRangesDiffPager(&pageblob.GetPageRangesDiffOptions{
Range: blob.HTTPRange{
Count: 4096,
},
PrevSnapshot: snapshotResp.Snapshot,
})
for pager.More() {
pageListResp, err := pager.NextPage(context.Background())
_require.Nil(err)
pageRangeResp := pageListResp.PageList.PageRange
_require.NotNil(pageRangeResp)
_require.Len(pageRangeResp, 1)
rawStart, rawEnd := rawPageRange((pageRangeResp)[0])
_require.Equal(rawStart, int64(2048))
_require.Equal(rawEnd, int64(4095))
if err != nil {
break
}
}
_, err = pbClient.ClearPages(context.Background(), blob.HTTPRange{Offset: 2048, Count: 2048}, nil)
_require.Nil(err)
pager = pbClient.NewGetPageRangesDiffPager(&pageblob.GetPageRangesDiffOptions{
Range: blob.HTTPRange{
Count: 4096,
},
PrevSnapshot: snapshotResp.Snapshot,
})
for pager.More() {
pageListResp, err := pager.NextPage(context.Background())
_require.Nil(err)
pageRangeResp := pageListResp.PageList.PageRange
_require.Len(pageRangeResp, 0)
if err != nil {
break
}
}
}
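// waitForIncrementalCopy polls GetProperties on the destination blob until the
// incremental copy reports CopyStatusTypeSuccess, failing the test if it has not
// completed within one minute, and returns the destination snapshot created by the copy.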
func waitForIncrementalCopy(_require *require.Assertions, copyBlobClient *pageblob.Client, blobCopyResponse *pageblob.CopyIncrementalResponse) *string {
status := *blobCopyResponse.CopyStatus
var getPropertiesAndMetadataResult blob.GetPropertiesResponse
// Wait for the copy to finish
start := time.Now()
for status != blob.CopyStatusTypeSuccess {
getPropertiesAndMetadataResult, _ = copyBlobClient.GetProperties(context.Background(), nil)
status = *getPropertiesAndMetadataResult.CopyStatus
currentTime := time.Now()
if currentTime.Sub(start) >= time.Minute {
_require.Fail("")
}
}
return getPropertiesAndMetadataResult.DestinationSnapshot
}
func (s *PageBlobUnrecordedTestsSuite) TestIncrementalCopy() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
_, err = containerClient.SetAccessPolicy(context.Background(), nil, &container.SetAccessPolicyOptions{Access: to.Ptr(container.PublicAccessTypeBlob)})
_require.Nil(err)
srcBlob := createNewPageBlob(context.Background(), _require, "src"+testcommon.GenerateBlobName(testName), containerClient)
contentSize := 1024
r := testcommon.GetReaderToGeneratedBytes(contentSize)
offset, count := int64(0), int64(contentSize)
_, err = srcBlob.UploadPages(context.Background(), r, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Offset: offset,
Count: count,
},
})
_require.Nil(err)
snapshotResp, err := srcBlob.CreateSnapshot(context.Background(), nil)
_require.Nil(err)
dstBlob := containerClient.NewPageBlobClient("dst" + testcommon.GenerateBlobName(testName))
resp, err := dstBlob.StartCopyIncremental(context.Background(), srcBlob.URL(), *snapshotResp.Snapshot, nil)
_require.Nil(err)
_require.NotNil(resp.LastModified)
_require.Equal((*resp.LastModified).IsZero(), false)
_require.NotNil(resp.ETag)
_require.NotEqual(*resp.RequestID, "")
_require.NotEqual(*resp.Version, "")
_require.NotNil(resp.Date)
_require.Equal((*resp.Date).IsZero(), false)
_require.NotEqual(*resp.CopyID, "")
_require.Equal(*resp.CopyStatus, blob.CopyStatusTypePending)
waitForIncrementalCopy(_require, dstBlob, &resp)
}
func (s *PageBlobRecordedTestsSuite) TestResizePageBlob() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
_, err = pbClient.Resize(context.Background(), 2048, nil)
_require.Nil(err)
_, err = pbClient.Resize(context.Background(), 8192, nil)
_require.Nil(err)
resp2, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
_require.Equal(*resp2.ContentLength, int64(8192))
}
func (s *PageBlobRecordedTestsSuite) TestPageSequenceNumbers() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
sequenceNumber := int64(0)
actionType := pageblob.SequenceNumberActionTypeIncrement
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
SequenceNumber: &sequenceNumber,
ActionType: &actionType,
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
sequenceNumber = int64(7)
actionType = pageblob.SequenceNumberActionTypeMax
updateSequenceNumberPageBlob = pageblob.UpdateSequenceNumberOptions{
SequenceNumber: &sequenceNumber,
ActionType: &actionType,
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
updateSequenceNumberPageBlob = pageblob.UpdateSequenceNumberOptions{
SequenceNumber: to.Ptr(int64(11)),
ActionType: to.Ptr(pageblob.SequenceNumberActionTypeUpdate),
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
}
//func (s *PageBlobUnrecordedTestsSuite) TestPutPagesWithMD5() {
// _require := require.New(s.T())
// testName := s.T().Name()
// svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
// if err != nil {
// _require.Fail("Unable to fetch service client because " + err.Error())
// }
//
// containerName := testcommon.GenerateContainerName(testName)
// containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// blobName := testcommon.GenerateBlobName(testName)
// pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
//
// // put page with valid MD5
// contentSize := 1024
// readerToBody, body := getRandomDataAndReader(contentSize)
// offset, _, count := int64(0), int64(0)+int64(contentSize-1), int64(contentSize)
// md5Value := md5.Sum(body)
// _ = body
// contentMD5 := md5Value[:]
//
// putResp, err := pbClient.UploadPages(context.Background(), streaming.NopCloser(readerToBody), &pageblob.UploadPagesOptions{
// Offset: to.Ptr(offset),
// Count: to.Ptr(count),
// TransactionalContentMD5: contentMD5,
// })
// _require.Nil(err)
// // _require.Equal(putResp.RawResponse.StatusCode, 201)
// _require.NotNil(putResp.LastModified)
// _require.Equal((*putResp.LastModified).IsZero(), false)
// _require.NotNil(putResp.ETag)
// _require.NotNil(putResp.ContentMD5)
// _require.EqualValues(putResp.ContentMD5, contentMD5)
// _require.Equal(*putResp.BlobSequenceNumber, int64(0))
// _require.NotNil(*putResp.RequestID)
// _require.NotNil(*putResp.Version)
// _require.NotNil(putResp.Date)
// _require.Equal((*putResp.Date).IsZero(), false)
//
// // put page with bad MD5
// readerToBody, _ = getRandomDataAndReader(1024)
// _, badMD5 := getRandomDataAndReader(16)
// basContentMD5 := badMD5[:]
// putResp, err = pbClient.UploadPages(context.Background(), streaming.NopCloser(readerToBody), &pageblob.UploadPagesOptions{
// Offset: to.Ptr(offset),
// Count: to.Ptr(count),
// TransactionalContentMD5: basContentMD5,
// })
// _require.NotNil(err)
//
// testcommon.ValidateBlobErrorCode(_require, err, bloberror.MD5Mismatch)
//}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageSizeInvalid() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
sequenceNumber := int64(0)
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
}
_, err = pbClient.Create(context.Background(), 1, &createPageBlobOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.InvalidHeaderValue)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageSequenceInvalid() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
sequenceNumber := int64(-1)
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.NotNil(err)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageMetadataNonEmpty() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
sequenceNumber := int64(0)
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
Metadata: testcommon.BasicMetadata,
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.Nil(err)
resp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
_require.NotNil(resp.Metadata)
_require.EqualValues(resp.Metadata, testcommon.BasicMetadata)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageMetadataEmpty() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
sequenceNumber := int64(0)
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
Metadata: map[string]string{},
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.Nil(err)
resp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
_require.Nil(resp.Metadata)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageMetadataInvalid() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
sequenceNumber := int64(0)
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
Metadata: map[string]string{"In valid1": "bar"},
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.NotNil(err)
_require.Contains(err.Error(), testcommon.InvalidHeaderErrorSubstring)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageHTTPHeaders() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
sequenceNumber := int64(0)
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
HTTPHeaders: &testcommon.BasicHeaders,
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.Nil(err)
resp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
h := blob.ParseHTTPHeaders(resp)
_require.EqualValues(h, testcommon.BasicHeaders)
}
func validatePageBlobPut(_require *require.Assertions, pbClient *pageblob.Client) {
resp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
_require.NotNil(resp.Metadata)
_require.EqualValues(resp.Metadata, testcommon.BasicMetadata)
_require.EqualValues(blob.ParseHTTPHeaders(resp), testcommon.BasicHeaders)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageIfModifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResp, err := pbClient.Create(context.Background(), pageblob.PageBytes, nil)
_require.Nil(err)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResp.Date, -10)
sequenceNumber := int64(0)
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
Metadata: testcommon.BasicMetadata,
HTTPHeaders: &testcommon.BasicHeaders,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: ¤tTime,
},
},
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.Nil(err)
validatePageBlobPut(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageIfModifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResp, err := pbClient.Create(context.Background(), pageblob.PageBytes, nil)
_require.Nil(err)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResp.Date, 10)
sequenceNumber := int64(0)
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
Metadata: testcommon.BasicMetadata,
HTTPHeaders: &testcommon.BasicHeaders,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: ¤tTime,
},
},
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageIfUnmodifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResp, err := pbClient.Create(context.Background(), pageblob.PageBytes, nil)
_require.Nil(err)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResp.Date, 10)
sequenceNumber := int64(0)
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
Metadata: testcommon.BasicMetadata,
HTTPHeaders: &testcommon.BasicHeaders,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfUnmodifiedSince: ¤tTime,
},
},
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.Nil(err)
validatePageBlobPut(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageIfUnmodifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResp, err := pbClient.Create(context.Background(), pageblob.PageBytes, nil)
_require.Nil(err)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResp.Date, -10)
sequenceNumber := int64(0)
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
Metadata: testcommon.BasicMetadata,
HTTPHeaders: &testcommon.BasicHeaders,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfUnmodifiedSince: ¤tTime,
},
},
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageIfMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
resp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
sequenceNumber := int64(0)
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
Metadata: testcommon.BasicMetadata,
HTTPHeaders: &testcommon.BasicHeaders,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: resp.ETag,
},
},
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.Nil(err)
validatePageBlobPut(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageIfMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
sequenceNumber := int64(0)
eTag := azcore.ETag("garbage")
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
Metadata: testcommon.BasicMetadata,
HTTPHeaders: &testcommon.BasicHeaders,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: &eTag,
},
},
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageIfNoneMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
sequenceNumber := int64(0)
eTag := azcore.ETag("garbage")
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
Metadata: testcommon.BasicMetadata,
HTTPHeaders: &testcommon.BasicHeaders,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: &eTag,
},
},
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.Nil(err)
validatePageBlobPut(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobCreatePageIfNoneMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
resp, _ := pbClient.GetProperties(context.Background(), nil)
sequenceNumber := int64(0)
createPageBlobOptions := pageblob.CreateOptions{
SequenceNumber: &sequenceNumber,
Metadata: testcommon.BasicMetadata,
HTTPHeaders: &testcommon.BasicHeaders,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: resp.ETag,
},
},
}
_, err = pbClient.Create(context.Background(), pageblob.PageBytes, &createPageBlobOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobUnrecordedTestsSuite) TestBlobPutPagesInvalidRange() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
contentSize := 1024
r := testcommon.GetReaderToGeneratedBytes(contentSize)
uploadPagesOptions := pageblob.UploadPagesOptions{Range: blob.HTTPRange{Count: int64(contentSize / 2)}}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.NotNil(err)
}
//// Body cannot be nil check already added in the request preparer
////func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesNilBody() {
//// svcClient := testcommon.GetServiceClient()
//// containerClient, _ := createNewContainer(c, svcClient)
//// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//// pbClient, _ := createNewPageBlob(c, containerClient)
////
//// _, err := pbClient.UploadPages(context.Background(), nil, nil)
//// _require.NotNil(err)
////}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesEmptyBody() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
r := bytes.NewReader([]byte{})
_, err = pbClient.UploadPages(context.Background(), streaming.NopCloser(r), nil)
_require.NotNil(err)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesNonExistentBlob() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
uploadPagesOptions := pageblob.UploadPagesOptions{Range: blob.HTTPRange{Count: pageblob.PageBytes}}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.BlobNotFound)
}
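// validateUploadPages checks that the blob reports a single page range
// covering bytes 0 through pageblob.PageBytes-1 after an upload.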
func validateUploadPages(_require *require.Assertions, pbClient *pageblob.Client) {
// This will only validate a single put page at 0 to pageblob.PageBytes-1
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{})
for pager.More() {
pageListResp, err := pager.NextPage(context.Background())
_require.Nil(err)
start, end := int64(0), int64(pageblob.PageBytes-1)
rawStart, rawEnd := *(pageListResp.PageList.PageRange[0].Start), *(pageListResp.PageList.PageRange[0].End)
_require.Equal(rawStart, start)
_require.Equal(rawEnd, end)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfModifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResponse.Date, -10)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
_, err = pbClient.UploadPages(context.Background(), r, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: &currentTime,
},
},
})
_require.Nil(err)
validateUploadPages(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfModifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResponse.Date, 10)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
_, err = pbClient.UploadPages(context.Background(), r, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: &currentTime,
},
},
})
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfUnmodifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResponse.Date, 10)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
_, err = pbClient.UploadPages(context.Background(), r, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfUnmodifiedSince: &currentTime,
},
},
})
_require.Nil(err)
validateUploadPages(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfUnmodifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResponse.Date, -10)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
_, err = pbClient.UploadPages(context.Background(), r, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfUnmodifiedSince: &currentTime,
},
},
})
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
resp, _ := pbClient.GetProperties(context.Background(), nil)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
_, err = pbClient.UploadPages(context.Background(), r, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: resp.ETag,
},
},
})
_require.Nil(err)
validateUploadPages(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
eTag := azcore.ETag("garbage")
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: &eTag,
},
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfNoneMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
eTag := azcore.ETag("garbage")
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: &eTag,
},
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.Nil(err)
validateUploadPages(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfNoneMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
resp, _ := pbClient.GetProperties(context.Background(), nil)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: resp.ETag,
},
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfSequenceNumberLessThanTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
ifSequenceNumberLessThan := int64(10)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberLessThan: &ifSequenceNumberLessThan,
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.Nil(err)
validateUploadPages(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfSequenceNumberLessThanFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
sequenceNumber := int64(10)
actionType := pageblob.SequenceNumberActionTypeUpdate
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
SequenceNumber: &sequenceNumber,
ActionType: &actionType,
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
ifSequenceNumberLessThan := int64(1)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberLessThan: &ifSequenceNumberLessThan,
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.SequenceNumberConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfSequenceNumberLessThanNegOne() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
ifSequenceNumberLessThanOrEqualTo := int64(-1)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberLessThanOrEqualTo: &ifSequenceNumberLessThanOrEqualTo,
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.InvalidInput)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfSequenceNumberLTETrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
sequenceNumber := int64(1)
actionType := pageblob.SequenceNumberActionTypeUpdate
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
SequenceNumber: &sequenceNumber,
ActionType: &actionType,
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
ifSequenceNumberLessThanOrEqualTo := int64(1)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberLessThanOrEqualTo: &ifSequenceNumberLessThanOrEqualTo,
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.Nil(err)
validateUploadPages(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfSequenceNumberLTEqualFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
sequenceNumber := int64(10)
actionType := pageblob.SequenceNumberActionTypeUpdate
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
SequenceNumber: &sequenceNumber,
ActionType: &actionType,
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
ifSequenceNumberLessThanOrEqualTo := int64(1)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberLessThanOrEqualTo: &ifSequenceNumberLessThanOrEqualTo,
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.SequenceNumberConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfSequenceNumberLTENegOne() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
ifSequenceNumberLessThanOrEqualTo := int64(-1)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberLessThanOrEqualTo: &ifSequenceNumberLessThanOrEqualTo,
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.NotNil(err)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfSequenceNumberEqualTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
sequenceNumber := int64(1)
actionType := pageblob.SequenceNumberActionTypeUpdate
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
SequenceNumber: &sequenceNumber,
ActionType: &actionType,
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
ifSequenceNumberEqualTo := int64(1)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberEqualTo: &ifSequenceNumberEqualTo,
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.Nil(err)
validateUploadPages(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfSequenceNumberEqualFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
ifSequenceNumberEqualTo := int64(1)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberEqualTo: &ifSequenceNumberEqualTo,
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.SequenceNumberConditionNotMet)
}
//func (s *PageBlobRecordedTestsSuite) TestBlobPutPagesIfSequenceNumberEqualNegOne() {
// _require := require.New(s.T())
// testName := s.T().Name()
//// svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
// if err != nil {
// _require.Fail("Unable to fetch service client because " + err.Error())
// }
//
// containerName := testcommon.GenerateContainerName(testName)
// containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// blobName := testcommon.GenerateBlobName(testName)
// pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
//
// r, _ := testcommon.GenerateData(pageblob.PageBytes)
// offset, count := int64(0), int64(pageblob.PageBytes)
// ifSequenceNumberEqualTo := int64(-1)
// uploadPagesOptions := pageblob.UploadPagesOptions{
// Offset: to.Ptr(int64(offset)), Count: to.Ptr(int64(count)),
// SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
// IfSequenceNumberEqualTo: &ifSequenceNumberEqualTo,
// },
// }
// _, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions) // This will cause the library to set the value of the header to 0
// _require.Nil(err)
//}
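// setupClearPagesTest creates a container and a page blob and uploads one page
// of data, returning both clients so ClearPages tests start from a known state.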
func setupClearPagesTest(t *testing.T, _require *require.Assertions, testName string) (*container.Client, *pageblob.Client) {
svcClient, err := testcommon.GetServiceClient(t, testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.Nil(err)
return containerClient, pbClient
}
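// validateClearPagesTest verifies that no page ranges remain after the pages
// have been cleared.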
func validateClearPagesTest(_require *require.Assertions, pbClient *pageblob.Client) {
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{})
for pager.More() {
pageListResp, err := pager.NextPage(context.Background())
_require.Nil(err)
_require.Nil(pageListResp.PageRange)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesInvalidRange() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
_, err := pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes + 1}, nil)
_require.NotNil(err)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfModifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
getPropertiesResp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
currentTime := testcommon.GetRelativeTimeFromAnchor(getPropertiesResp.Date, -10)
_, err = pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &pageblob.ClearPagesOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: &currentTime,
},
}})
_require.Nil(err)
validateClearPagesTest(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfModifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
getPropertiesResp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
currentTime := testcommon.GetRelativeTimeFromAnchor(getPropertiesResp.Date, 10)
_, err = pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &pageblob.ClearPagesOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: &currentTime,
},
},
})
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfUnmodifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
getPropertiesResp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
currentTime := testcommon.GetRelativeTimeFromAnchor(getPropertiesResp.Date, 10)
_, err = pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &pageblob.ClearPagesOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfUnmodifiedSince: &currentTime,
},
},
})
_require.Nil(err)
validateClearPagesTest(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfUnmodifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
getPropertiesResp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
currentTime := testcommon.GetRelativeTimeFromAnchor(getPropertiesResp.Date, -10)
_, err = pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &pageblob.ClearPagesOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfUnmodifiedSince: &currentTime,
},
},
})
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
getPropertiesResp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
clearPageOptions := pageblob.ClearPagesOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: getPropertiesResp.ETag,
},
},
}
_, err = pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions)
_require.Nil(err)
validateClearPagesTest(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
eTag := azcore.ETag("garbage")
clearPageOptions := pageblob.ClearPagesOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: &eTag,
},
},
}
_, err := pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfNoneMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
eTag := azcore.ETag("garbage")
clearPageOptions := pageblob.ClearPagesOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: &eTag,
},
},
}
_, err := pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions)
_require.Nil(err)
validateClearPagesTest(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfNoneMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
resp, _ := pbClient.GetProperties(context.Background(), nil)
clearPageOptions := pageblob.ClearPagesOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: resp.ETag,
},
},
}
_, err := pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfSequenceNumberLessThanTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
ifSequenceNumberLessThan := int64(10)
clearPageOptions := pageblob.ClearPagesOptions{
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberLessThan: &ifSequenceNumberLessThan,
},
}
_, err := pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions)
_require.Nil(err)
validateClearPagesTest(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfSequenceNumberLessThanFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
sequenceNumber := int64(10)
actionType := pageblob.SequenceNumberActionTypeUpdate
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
SequenceNumber: &sequenceNumber,
ActionType: &actionType,
}
_, err := pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
ifSequenceNumberLessThan := int64(1)
clearPageOptions := pageblob.ClearPagesOptions{
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberLessThan: &ifSequenceNumberLessThan,
},
}
_, err = pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.SequenceNumberConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfSequenceNumberLessThanNegOne() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
ifSequenceNumberLessThan := int64(-1)
clearPageOptions := pageblob.ClearPagesOptions{
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberLessThan: &ifSequenceNumberLessThan,
},
}
_, err := pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.InvalidInput)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfSequenceNumberLTETrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
ifSequenceNumberLessThanOrEqualTo := int64(10)
clearPageOptions := pageblob.ClearPagesOptions{
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberLessThanOrEqualTo: &ifSequenceNumberLessThanOrEqualTo,
},
}
_, err := pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions)
_require.Nil(err)
validateClearPagesTest(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfSequenceNumberLTEFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
sequenceNumber := int64(10)
actionType := pageblob.SequenceNumberActionTypeUpdate
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
SequenceNumber: &sequenceNumber,
ActionType: &actionType,
}
_, err := pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
ifSequenceNumberLessThanOrEqualTo := int64(1)
clearPageOptions := pageblob.ClearPagesOptions{
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberLessThanOrEqualTo: &ifSequenceNumberLessThanOrEqualTo,
},
}
_, err = pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.SequenceNumberConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfSequenceNumberLTENegOne() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
ifSequenceNumberLessThanOrEqualTo := int64(-1)
clearPageOptions := pageblob.ClearPagesOptions{
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberLessThanOrEqualTo: &ifSequenceNumberLessThanOrEqualTo,
},
}
_, err := pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions) // This will cause the library to set the value of the header to 0
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.InvalidInput)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfSequenceNumberEqualTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
sequenceNumber := int64(10)
actionType := pageblob.SequenceNumberActionTypeUpdate
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
SequenceNumber: &sequenceNumber,
ActionType: &actionType,
}
_, err := pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
ifSequenceNumberEqualTo := int64(10)
clearPageOptions := pageblob.ClearPagesOptions{
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberEqualTo: &ifSequenceNumberEqualTo,
},
}
_, err = pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions)
_require.Nil(err)
validateClearPagesTest(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfSequenceNumberEqualFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
sequenceNumber := int64(10)
actionType := pageblob.SequenceNumberActionTypeUpdate
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
SequenceNumber: &sequenceNumber,
ActionType: &actionType,
}
_, err := pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
ifSequenceNumberEqualTo := int64(1)
clearPageOptions := pageblob.ClearPagesOptions{
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberEqualTo: &ifSequenceNumberEqualTo,
},
}
_, err = pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.SequenceNumberConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobClearPagesIfSequenceNumberEqualNegOne() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupClearPagesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
ifSequenceNumberEqualTo := int64(-1)
clearPageOptions := pageblob.ClearPagesOptions{
SequenceNumberAccessConditions: &pageblob.SequenceNumberAccessConditions{
IfSequenceNumberEqualTo: &ifSequenceNumberEqualTo,
},
}
_, err := pbClient.ClearPages(context.Background(), blob.HTTPRange{Count: pageblob.PageBytes}, &clearPageOptions) // This will cause the library to set the value of the header to 0
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.InvalidInput)
}
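// setupGetPageRangesTest creates a container and a page blob and uploads a
// single page so GetPageRanges tests have one known range to inspect.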
func setupGetPageRangesTest(t *testing.T, _require *require.Assertions, testName string) (containerClient *container.Client, pbClient *pageblob.Client) {
svcClient, err := testcommon.GetServiceClient(t, testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient = testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient = createNewPageBlob(context.Background(), _require, blobName, containerClient)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.Nil(err)
return
}
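// validateBasicGetPageRanges asserts that exactly one page range exists and
// that it spans bytes 0 through pageblob.PageBytes-1.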
func validateBasicGetPageRanges(_require *require.Assertions, resp pageblob.PageList, err error) {
_require.Nil(err)
_require.NotNil(resp.PageRange)
_require.Len(resp.PageRange, 1)
start, end := int64(0), int64(pageblob.PageBytes-1)
rawStart, rawEnd := rawPageRange((resp.PageRange)[0])
_require.Equal(rawStart, start)
_require.Equal(rawEnd, end)
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesEmptyBlob() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{})
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
_require.Nil(resp.PageRange)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesEmptyRange() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{})
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
validateBasicGetPageRanges(_require, resp.PageList, err)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesInvalidRange() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{
Range: blob.HTTPRange{
Offset: -2,
Count: 500,
},
})
for pager.More() {
_, err := pager.NextPage(context.Background())
_require.Nil(err)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesNonContiguousRanges() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
offset, count := int64(2*pageblob.PageBytes), int64(pageblob.PageBytes)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Offset: offset,
Count: count,
},
}
_, err := pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.Nil(err)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{})
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
pageListResp := resp.PageList.PageRange
_require.NotNil(pageListResp)
_require.Len(pageListResp, 2)
start, end := int64(0), int64(pageblob.PageBytes-1)
rawStart, rawEnd := rawPageRange(pageListResp[0])
_require.Equal(rawStart, start)
_require.Equal(rawEnd, end)
start, end = int64(pageblob.PageBytes*2), int64((pageblob.PageBytes*3)-1)
rawStart, rawEnd = rawPageRange(pageListResp[1])
_require.Equal(rawStart, start)
_require.Equal(rawEnd, end)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesNotPageAligned() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{
Range: blob.HTTPRange{
Count: 2000,
},
})
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
validateBasicGetPageRanges(_require, resp.PageList, err)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesSnapshot() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
resp, err := pbClient.CreateSnapshot(context.Background(), nil)
_require.Nil(err)
_require.NotNil(resp.Snapshot)
snapshotURL, _ := pbClient.WithSnapshot(*resp.Snapshot)
pager := snapshotURL.NewGetPageRangesPager(nil)
for pager.More() {
resp2, err := pager.NextPage(context.Background())
_require.Nil(err)
validateBasicGetPageRanges(_require, resp2.PageList, err)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesIfModifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
getPropertiesResp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
currentTime := testcommon.GetRelativeTimeFromAnchor(getPropertiesResp.Date, -10)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: &currentTime,
},
}})
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
validateBasicGetPageRanges(_require, resp.PageList, err)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesIfModifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
getPropertiesResp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
currentTime := testcommon.GetRelativeTimeFromAnchor(getPropertiesResp.Date, 10)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: &currentTime,
},
}})
for pager.More() {
_, err := pager.NextPage(context.Background())
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesIfUnmodifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
getPropertiesResp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
currentTime := testcommon.GetRelativeTimeFromAnchor(getPropertiesResp.Date, 10)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfUnmodifiedSince: &currentTime,
},
}})
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
validateBasicGetPageRanges(_require, resp.PageList, err)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesIfUnmodifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
getPropertiesResp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
currentTime := testcommon.GetRelativeTimeFromAnchor(getPropertiesResp.Date, -10)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfUnmodifiedSince: &currentTime,
},
}})
for pager.More() {
_, err := pager.NextPage(context.Background())
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesIfMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
resp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: resp.ETag,
},
}})
for pager.More() {
resp2, err := pager.NextPage(context.Background())
_require.Nil(err)
validateBasicGetPageRanges(_require, resp2.PageList, err)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesIfMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: to.Ptr(azcore.ETag("garbage")),
},
}})
for pager.More() {
_, err := pager.NextPage(context.Background())
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesIfNoneMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: to.Ptr(azcore.ETag("garbage")),
},
}})
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
validateBasicGetPageRanges(_require, resp.PageList, err)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobGetPageRangesIfNoneMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient := setupGetPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
resp, _ := pbClient.GetProperties(context.Background(), nil)
pager := pbClient.NewGetPageRangesPager(&pageblob.GetPageRangesOptions{AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: resp.ETag,
},
}})
for pager.More() {
_, err := pager.NextPage(context.Background())
_require.NotNil(err)
if err != nil {
break
}
}
//serr := err.(StorageError)
//_require.(serr.RawResponse.StatusCode, chk.Equals, 304) // Service Code not returned in the body for a HEAD
}
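// setupDiffPageRangesTest uploads a page, takes a snapshot, and uploads the
// page again so GetPageRangesDiff tests have a base snapshot to diff against.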
func setupDiffPageRangesTest(t *testing.T, _require *require.Assertions, testName string) (containerClient *container.Client, pbClient *pageblob.Client, snapshot string) {
svcClient, err := testcommon.GetServiceClient(t, testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient = testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient = createNewPageBlob(context.Background(), _require, blobName, containerClient)
r := testcommon.GetReaderToGeneratedBytes(pageblob.PageBytes)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.Nil(err)
resp, err := pbClient.CreateSnapshot(context.Background(), nil)
_require.Nil(err)
snapshot = *resp.Snapshot
r = testcommon.GetReaderToGeneratedBytes(pageblob.PageBytes)
uploadPagesOptions = pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
}
_, err = pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.Nil(err)
return
}
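// rawPageRange dereferences the optional Start/End pointers of a PageRange,
// returning zero for any nil field.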
func rawPageRange(pr *pageblob.PageRange) (start, end int64) {
if pr.Start != nil {
start = *pr.Start
}
if pr.End != nil {
end = *pr.End
}
return
}
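// validateDiffPageRanges asserts that the diff contains exactly one page range
// spanning bytes 0 through pageblob.PageBytes-1.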
func validateDiffPageRanges(_require *require.Assertions, resp pageblob.PageList, err error) {
_require.Nil(err)
_require.NotNil(resp.PageRange)
_require.Len(resp.PageRange, 1)
rawStart, rawEnd := rawPageRange(resp.PageRange[0])
_require.EqualValues(rawStart, int64(0))
_require.EqualValues(rawEnd, int64(pageblob.PageBytes-1))
}
func (s *PageBlobUnrecordedTestsSuite) TestBlobDiffPageRangesNonExistentSnapshot() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient, snapshot := setupDiffPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
snapshotTime, _ := time.Parse(blob.SnapshotTimeFormat, snapshot)
snapshotTime = snapshotTime.Add(time.Minute)
pager := pbClient.NewGetPageRangesDiffPager(&pageblob.GetPageRangesDiffOptions{
PrevSnapshot: to.Ptr(snapshotTime.Format(blob.SnapshotTimeFormat))})
for pager.More() {
_, err := pager.NextPage(context.Background())
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.PreviousSnapshotNotFound)
if err != nil {
break
}
}
}
func (s *PageBlobUnrecordedTestsSuite) TestBlobDiffPageRangeInvalidRange() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient, snapshot := setupDiffPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pager := pbClient.NewGetPageRangesDiffPager(&pageblob.GetPageRangesDiffOptions{
Range: blob.HTTPRange{
Count: 14,
Offset: -22,
},
Snapshot: &snapshot,
})
for pager.More() {
_, err := pager.NextPage(context.Background())
_require.Nil(err)
if err != nil {
break
}
}
}
func (s *PageBlobUnrecordedTestsSuite) TestBlobDiffPageRangeIfModifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient, snapshot := setupDiffPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
currentTime := testcommon.GetRelativeTimeGMT(-10)
pager := pbClient.NewGetPageRangesDiffPager(&pageblob.GetPageRangesDiffOptions{
Snapshot: to.Ptr(snapshot),
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{IfModifiedSince: &currentTime}},
})
for pager.More() {
resp2, err := pager.NextPage(context.Background())
_require.Nil(err)
validateDiffPageRanges(_require, resp2.PageList, err)
if err != nil {
break
}
}
}
func (s *PageBlobUnrecordedTestsSuite) TestBlobDiffPageRangeIfModifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient, snapshot := setupDiffPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
currentTime := testcommon.GetRelativeTimeGMT(10)
pager := pbClient.NewGetPageRangesDiffPager(&pageblob.GetPageRangesDiffOptions{
Snapshot: to.Ptr(snapshot),
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: &currentTime,
},
},
})
for pager.More() {
_, err := pager.NextPage(context.Background())
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
if err != nil {
break
}
}
}
func (s *PageBlobUnrecordedTestsSuite) TestBlobDiffPageRangeIfUnmodifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient, snapshot := setupDiffPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
currentTime := testcommon.GetRelativeTimeGMT(10)
pager := pbClient.NewGetPageRangesDiffPager(&pageblob.GetPageRangesDiffOptions{
Snapshot: to.Ptr(snapshot),
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{IfUnmodifiedSince: &currentTime},
},
})
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
validateDiffPageRanges(_require, resp.PageList, err)
if err != nil {
break
}
}
}
func (s *PageBlobUnrecordedTestsSuite) TestBlobDiffPageRangeIfUnmodifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient, snapshot := setupDiffPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
currentTime := testcommon.GetRelativeTimeGMT(-10)
pager := pbClient.NewGetPageRangesDiffPager(&pageblob.GetPageRangesDiffOptions{
Snapshot: to.Ptr(snapshot),
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{IfUnmodifiedSince: &currentTime},
},
})
for pager.More() {
_, err := pager.NextPage(context.Background())
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
if err != nil {
break
}
}
}
//func (s *PageBlobUnrecordedTestsSuite) TestBlobDiffPageRangeIfMatchTrue() {
// _require := require.New(s.T())
// testName := s.T().Name()
// containerClient, pbClient, snapshot := setupDiffPageRangesTest(s.T(), _require, testName)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// resp, err := pbClient.GetProperties(context.Background(), nil)
// _require.Nil(err)
//
// pager := pbClient.NewGetPageRangesDiffPager(&pageblob.GetPageRangesDiffOptions{
// Snapshot: to.Ptr(snapshot),
// AccessConditions: &blob.AccessConditions{
// ModifiedAccessConditions: &blob.ModifiedAccessConditions{
// IfMatch: resp.ETag,
// },
// },
// })
// for pager.More() {
// resp2, err := pager.NextPage(context.Background())
// _require.Nil(err)
// validateDiffPageRanges(_require, resp2.PageList, err)
// if err != nil {
// break
// }
// }
//}
func (s *PageBlobUnrecordedTestsSuite) TestBlobDiffPageRangeIfMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient, snapshotStr := setupDiffPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pager := pbClient.NewGetPageRangesDiffPager(&pageblob.GetPageRangesDiffOptions{
Snapshot: to.Ptr(snapshotStr),
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: to.Ptr(azcore.ETag("garbage")),
},
}})
for pager.More() {
_, err := pager.NextPage(context.Background())
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
if err != nil {
break
}
}
}
func (s *PageBlobUnrecordedTestsSuite) TestBlobDiffPageRangeIfNoneMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient, snapshotStr := setupDiffPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pager := pbClient.NewGetPageRangesDiffPager(&pageblob.GetPageRangesDiffOptions{
PrevSnapshot: to.Ptr(snapshotStr),
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: to.Ptr(azcore.ETag("garbage")),
},
}})
for pager.More() {
resp2, err := pager.NextPage(context.Background())
_require.Nil(err)
validateDiffPageRanges(_require, resp2.PageList, err)
if err != nil {
break
}
}
}
func (s *PageBlobUnrecordedTestsSuite) TestBlobDiffPageRangeIfNoneMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
containerClient, pbClient, snapshot := setupDiffPageRangesTest(s.T(), _require, testName)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
resp, _ := pbClient.GetProperties(context.Background(), nil)
pager := pbClient.NewGetPageRangesDiffPager(&pageblob.GetPageRangesDiffOptions{
PrevSnapshot: to.Ptr(snapshot),
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{IfNoneMatch: resp.ETag},
},
})
for pager.More() {
_, err := pager.NextPage(context.Background())
_require.NotNil(err)
if err != nil {
break
}
}
}
func (s *PageBlobRecordedTestsSuite) TestBlobResizeZero() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
// The default pbClient is created with size > 0, so this should actually update
_, err = pbClient.Resize(context.Background(), 0, nil)
_require.Nil(err)
resp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
_require.Equal(*resp.ContentLength, int64(0))
}
func (s *PageBlobRecordedTestsSuite) TestBlobResizeInvalidSizeNegative() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
_, err = pbClient.Resize(context.Background(), -4, nil)
_require.NotNil(err)
}
func (s *PageBlobRecordedTestsSuite) TestBlobResizeInvalidSizeMisaligned() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
_, err = pbClient.Resize(context.Background(), 12, nil)
_require.NotNil(err)
}
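// validateResize confirms via GetProperties that the blob's content length now equals a single page (pageblob.PageBytes).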
func validateResize(_require *require.Assertions, pbClient *pageblob.Client) {
resp, _ := pbClient.GetProperties(context.Background(), nil)
_require.Equal(*resp.ContentLength, int64(pageblob.PageBytes))
}
func (s *PageBlobRecordedTestsSuite) TestBlobResizeIfModifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResponse.Date, -10)
resizePageBlobOptions := pageblob.ResizeOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: &currentTime,
},
},
}
_, err = pbClient.Resize(context.Background(), pageblob.PageBytes, &resizePageBlobOptions)
_require.Nil(err)
validateResize(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobResizeIfModifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResponse.Date, 10)
resizePageBlobOptions := pageblob.ResizeOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: &currentTime,
},
},
}
_, err = pbClient.Resize(context.Background(), pageblob.PageBytes, &resizePageBlobOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobResizeIfUnmodifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResponse.Date, 10)
resizePageBlobOptions := pageblob.ResizeOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfUnmodifiedSince: &currentTime,
},
},
}
_, err = pbClient.Resize(context.Background(), pageblob.PageBytes, &resizePageBlobOptions)
_require.Nil(err)
validateResize(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobResizeIfUnmodifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResponse.Date, -10)
resizePageBlobOptions := pageblob.ResizeOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfUnmodifiedSince: &currentTime,
},
},
}
_, err = pbClient.Resize(context.Background(), pageblob.PageBytes, &resizePageBlobOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobResizeIfMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
resp, _ := pbClient.GetProperties(context.Background(), nil)
resizePageBlobOptions := pageblob.ResizeOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: resp.ETag,
},
},
}
_, err = pbClient.Resize(context.Background(), pageblob.PageBytes, &resizePageBlobOptions)
_require.Nil(err)
validateResize(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobResizeIfMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
eTag := azcore.ETag("garbage")
resizePageBlobOptions := pageblob.ResizeOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: &eTag,
},
},
}
_, err = pbClient.Resize(context.Background(), pageblob.PageBytes, &resizePageBlobOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobResizeIfNoneMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
eTag := azcore.ETag("garbage")
resizePageBlobOptions := pageblob.ResizeOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: &eTag,
},
},
}
_, err = pbClient.Resize(context.Background(), pageblob.PageBytes, &resizePageBlobOptions)
_require.Nil(err)
validateResize(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobResizeIfNoneMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
resp, _ := pbClient.GetProperties(context.Background(), nil)
resizePageBlobOptions := pageblob.ResizeOptions{
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: resp.ETag,
},
},
}
_, err = pbClient.Resize(context.Background(), pageblob.PageBytes, &resizePageBlobOptions)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobSetSequenceNumberActionTypeInvalid() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
sequenceNumber := int64(1)
actionType := pageblob.SequenceNumberActionType("garbage")
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
SequenceNumber: &sequenceNumber,
ActionType: &actionType,
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.InvalidHeaderValue)
}
func (s *PageBlobRecordedTestsSuite) TestBlobSetSequenceNumberSequenceNumberInvalid() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
defer func() { // Invalid sequence number should panic
_ = recover()
}()
sequenceNumber := int64(-1)
actionType := pageblob.SequenceNumberActionTypeUpdate
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
SequenceNumber: &sequenceNumber,
ActionType: &actionType,
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.InvalidHeaderValue)
}
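// validateSequenceNumberSet checks that the blob's sequence number was incremented to 1.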
func validateSequenceNumberSet(_require *require.Assertions, pbClient *pageblob.Client) {
resp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
_require.Equal(*resp.BlobSequenceNumber, int64(1))
}
func (s *PageBlobRecordedTestsSuite) TestBlobSetSequenceNumberIfModifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResponse.Date, -10)
actionType := pageblob.SequenceNumberActionTypeIncrement
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
ActionType: &actionType,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: &currentTime,
},
},
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
validateSequenceNumberSet(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobSetSequenceNumberIfModifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResponse.Date, 10)
actionType := pageblob.SequenceNumberActionTypeIncrement
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
ActionType: &actionType,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfModifiedSince: &currentTime,
},
},
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobSetSequenceNumberIfUnmodifiedSinceTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResponse.Date, 10)
actionType := pageblob.SequenceNumberActionTypeIncrement
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
ActionType: &actionType,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfUnmodifiedSince: &currentTime,
},
},
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
validateSequenceNumberSet(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobSetSequenceNumberIfUnmodifiedSinceFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := getPageBlobClient(blobName, containerClient)
pageBlobCreateResponse, err := pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
// _require.Equal(pageBlobCreateResponse.RawResponse.StatusCode, 201)
_require.NotNil(pageBlobCreateResponse.Date)
currentTime := testcommon.GetRelativeTimeFromAnchor(pageBlobCreateResponse.Date, -10)
actionType := pageblob.SequenceNumberActionTypeIncrement
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
ActionType: &actionType,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfUnmodifiedSince: &currentTime,
},
},
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobSetSequenceNumberIfMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
resp, _ := pbClient.GetProperties(context.Background(), nil)
actionType := pageblob.SequenceNumberActionTypeIncrement
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
ActionType: &actionType,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: resp.ETag,
},
},
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
validateSequenceNumberSet(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestPageSetImmutabilityPolicy() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountImmutable, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
currentTime, err := time.Parse(time.UnixDate, "Fri Jun 11 20:00:00 GMT 2049")
_require.Nil(err)
policy := blob.ImmutabilityPolicySetting(blob.ImmutabilityPolicySettingUnlocked)
_require.Nil(err)
setImmutabilityPolicyOptions := &blob.SetImmutabilityPolicyOptions{
Mode: &policy,
ModifiedAccessConditions: nil,
}
_, err = pbClient.SetImmutabilityPolicy(context.Background(), currentTime, setImmutabilityPolicyOptions)
_require.Nil(err)
_, err = pbClient.SetLegalHold(context.Background(), false, nil)
_require.Nil(err)
_, err = pbClient.Delete(context.Background(), nil)
_require.NotNil(err)
_, err = pbClient.DeleteImmutabilityPolicy(context.Background(), nil)
_require.Nil(err)
_, err = pbClient.Delete(context.Background(), nil)
_require.Nil(err)
}
func (s *PageBlobRecordedTestsSuite) TestPageDeleteImmutabilityPolicy() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountImmutable, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
currentTime, err := time.Parse(time.UnixDate, "Fri Jun 11 20:00:00 GMT 2049")
_require.Nil(err)
policy := blob.ImmutabilityPolicySetting(blob.ImmutabilityPolicySettingUnlocked)
_require.Nil(err)
setImmutabilityPolicyOptions := &blob.SetImmutabilityPolicyOptions{
Mode: &policy,
ModifiedAccessConditions: nil,
}
_, err = pbClient.SetImmutabilityPolicy(context.Background(), currentTime, setImmutabilityPolicyOptions)
_require.Nil(err)
_, err = pbClient.DeleteImmutabilityPolicy(context.Background(), nil)
_require.Nil(err)
_, err = pbClient.Delete(context.Background(), nil)
_require.Nil(err)
}
func (s *PageBlobRecordedTestsSuite) TestPageSetLegalHold() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountImmutable, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
_, err = pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
_, err = pbClient.SetLegalHold(context.Background(), true, nil)
_require.Nil(err)
// should fail since time has not passed yet
_, err = pbClient.Delete(context.Background(), nil)
_require.NotNil(err)
_, err = pbClient.SetLegalHold(context.Background(), false, nil)
_require.Nil(err)
_, err = pbClient.Delete(context.Background(), nil)
_require.Nil(err)
}
func (s *PageBlobRecordedTestsSuite) TestBlobSetSequenceNumberIfMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
eTag := azcore.ETag("garbage")
actionType := pageblob.SequenceNumberActionTypeIncrement
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
ActionType: &actionType,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfMatch: &eTag,
},
},
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
func (s *PageBlobRecordedTestsSuite) TestBlobSetSequenceNumberIfNoneMatchTrue() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, "src"+blobName, containerClient)
eTag := azcore.ETag("garbage")
actionType := pageblob.SequenceNumberActionTypeIncrement
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
ActionType: &actionType,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: &eTag,
},
},
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.Nil(err)
validateSequenceNumberSet(_require, pbClient)
}
func (s *PageBlobRecordedTestsSuite) TestBlobSetSequenceNumberIfNoneMatchFalse() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, "src"+blobName, containerClient)
resp, _ := pbClient.GetProperties(context.Background(), nil)
actionType := pageblob.SequenceNumberActionTypeIncrement
updateSequenceNumberPageBlob := pageblob.UpdateSequenceNumberOptions{
ActionType: &actionType,
AccessConditions: &blob.AccessConditions{
ModifiedAccessConditions: &blob.ModifiedAccessConditions{
IfNoneMatch: resp.ETag,
},
},
}
_, err = pbClient.UpdateSequenceNumber(context.Background(), &updateSequenceNumberPageBlob)
_require.NotNil(err)
testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
}
//func setupStartIncrementalCopyTest(_require *require.Assertions, testName string) (containerClient *container.Client,
// pbClient *pageblob.Client, copyPBClient *pageblob.Client, snapshot string) {
//// var recording *testframework.Recording
// if _context != nil {
// recording = _context.recording
// }
// svcClient, err := testcommon.GetServiceClient(recording, testcommon.TestAccountDefault, nil)
// if err != nil {
// _require.Fail("Unable to fetch service client because " + err.Error())
// }
//
// containerName := testcommon.GenerateContainerName(testName)
// containerClient = testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// accessType := container.PublicAccessTypeBlob
// setAccessPolicyOptions := container.SetAccessPolicyOptions{
// Access: &accessType,
// }
// _, err = containerClient.SetAccessPolicy(context.Background(), &setAccessPolicyOptions)
// _require.Nil(err)
//
// pbClient = createNewPageBlob(context.Background(), _require, testcommon.GenerateBlobName(testName), containerClient)
// resp, _ := pbClient.CreateSnapshot(context.Background(), nil)
//
// copyPBClient = getPageBlobClient("copy"+testcommon.GenerateBlobName(testName), containerClient)
//
// // Must create the incremental copy pbClient so that the access conditions work on it
// resp2, err := copyPBClient.StartCopyIncremental(context.Background(), pbClient.URL(), *resp.Snapshot, nil)
// _require.Nil(err)
// waitForIncrementalCopy(_require, copyPBClient, &resp2)
//
// resp, _ = pbClient.CreateSnapshot(context.Background(), nil) // Take a new snapshot so the next copy will succeed
// snapshot = *resp.Snapshot
// return
//}
//func validateIncrementalCopy(_require *require.Assertions, copyPBClient *pageblob.Client, resp *pageblob.CopyIncrementalResponse) {
// t := waitForIncrementalCopy(_require, copyPBClient, resp)
//
// // If we can access the snapshot without error, we are satisfied that it was created as a result of the copy
// copySnapshotURL, err := copyPBClient.WithSnapshot(*t)
// _require.Nil(err)
// _, err = copySnapshotURL.GetProperties(context.Background(), nil)
// _require.Nil(err)
//}
//func (s *PageBlobRecordedTestsSuite) TestBlobStartIncrementalCopySnapshotNotExist() {
// _require := require.New(s.T())
// testName := s.T().Name()
//// svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
// if err != nil {
// _require.Fail("Unable to fetch service client because " + err.Error())
// }
//
// containerName := testcommon.GenerateContainerName(testName)
// containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// blobName := testcommon.GenerateBlobName(testName)
// pbClient := createNewPageBlob(context.Background(), _require, "src"+blobName, containerClient)
// copyPBClient := getPageBlobClient("dst"+blobName, containerClient)
//
// snapshot := time.Now().UTC().Format(blob.SnapshotTimeFormat)
// _, err = copyPBClient.StartCopyIncremental(context.Background(), pbClient.URL(), snapshot, nil)
// _require.NotNil(err)
//
// testcommon.ValidateBlobErrorCode(_require, err, bloberror.CannotVerifyCopySource)
//}
//func (s *PageBlobRecordedTestsSuite) TestBlobStartIncrementalCopyIfModifiedSinceTrue() {
// _require := require.New(s.T())
// testName := s.T().Name()
// containerClient, pbClient, copyPBClient, snapshot := setupStartIncrementalCopyTest(_require, testName)
//
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// currentTime := testcommon.GetRelativeTimeGMT(-20)
//
// copyIncrementalPageBlobOptions := pageblob.CopyIncrementalOptions{
// ModifiedAccessConditions: &blob.ModifiedAccessConditions{
// IfModifiedSince: &currentTime,
// },
// }
// resp, err := copyPBClient.StartCopyIncremental(context.Background(), pbClient.URL(), snapshot, &copyIncrementalPageBlobOptions)
// _require.Nil(err)
//
// validateIncrementalCopy(_require, copyPBClient, &resp)
//}
//
//func (s *PageBlobRecordedTestsSuite) TestBlobStartIncrementalCopyIfModifiedSinceFalse() {
// _require := require.New(s.T())
// testName := s.T().Name()
// containerClient, pbClient, copyPBClient, snapshot := setupStartIncrementalCopyTest(_require, testName)
//
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// currentTime := testcommon.GetRelativeTimeGMT(20)
//
// copyIncrementalPageBlobOptions := pageblob.CopyIncrementalOptions{
// ModifiedAccessConditions: &blob.ModifiedAccessConditions{
// IfModifiedSince: &currentTime,
// },
// }
// _, err := copyPBClient.StartCopyIncremental(context.Background(), pbClient.URL(), snapshot, &copyIncrementalPageBlobOptions)
// _require.NotNil(err)
//
// testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
//}
//
//func (s *PageBlobRecordedTestsSuite) TestBlobStartIncrementalCopyIfUnmodifiedSinceTrue() {
// _require := require.New(s.T())
// testName := s.T().Name()
// containerClient, pbClient, copyPBClient, snapshot := setupStartIncrementalCopyTest(_require, testName)
//
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// currentTime := testcommon.GetRelativeTimeGMT(20)
//
// copyIncrementalPageBlobOptions := pageblob.CopyIncrementalOptions{
// ModifiedAccessConditions: &blob.ModifiedAccessConditions{
// IfUnmodifiedSince: &currentTime,
// },
// }
// resp, err := copyPBClient.StartCopyIncremental(context.Background(), pbClient.URL(), snapshot, &copyIncrementalPageBlobOptions)
// _require.Nil(err)
//
// validateIncrementalCopy(_require, copyPBClient, &resp)
//}
//
//func (s *PageBlobRecordedTestsSuite) TestBlobStartIncrementalCopyIfUnmodifiedSinceFalse() {
// _require := require.New(s.T())
// testName := s.T().Name()
// containerClient, pbClient, copyPBClient, snapshot := setupStartIncrementalCopyTest(_require, testName)
//
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// currentTime := testcommon.GetRelativeTimeGMT(-20)
//
// copyIncrementalPageBlobOptions := pageblob.CopyIncrementalOptions{
// ModifiedAccessConditions: &blob.ModifiedAccessConditions{
// IfUnmodifiedSince: &currentTime,
// },
// }
// _, err := copyPBClient.StartCopyIncremental(context.Background(), pbClient.URL(), snapshot, &copyIncrementalPageBlobOptions)
// _require.NotNil(err)
//
// testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
//}
//
//func (s *PageBlobUnrecordedTestsSuite) TestBlobStartIncrementalCopyIfMatchTrue() {
// _require := require.New(s.T())
// testName := s.T().Name()
// containerClient, pbClient, copyPBClient, snapshot := setupStartIncrementalCopyTest(_require, testName)
// resp, _ := copyPBClient.GetProperties(context.Background(), nil)
//
// copyIncrementalPageBlobOptions := pageblob.CopyIncrementalOptions{
// ModifiedAccessConditions: &blob.ModifiedAccessConditions{
// IfMatch: resp.ETag,
// },
// }
// resp2, err := copyPBClient.StartCopyIncremental(context.Background(), pbClient.URL(), snapshot, &copyIncrementalPageBlobOptions)
// _require.Nil(err)
//
// validateIncrementalCopy(_require, copyPBClient, &resp2)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//}
//
//func (s *PageBlobUnrecordedTestsSuite) TestBlobStartIncrementalCopyIfMatchFalse() {
// _require := require.New(s.T())
// testName := s.T().Name()
// containerClient, pbClient, copyPBClient, snapshot := setupStartIncrementalCopyTest(_require, testName)
//
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// eTag := "garbage"
// copyIncrementalPageBlobOptions := pageblob.CopyIncrementalOptions{
// ModifiedAccessConditions: &blob.ModifiedAccessConditions{
// IfMatch: &eTag,
// },
// }
// _, err := copyPBClient.StartCopyIncremental(context.Background(), pbClient.URL(), snapshot, &copyIncrementalPageBlobOptions)
// _require.NotNil(err)
//
// testcommon.ValidateBlobErrorCode(_require, err, bloberror.TargetConditionNotMet)
//}
//func (s *PageBlobUnrecordedTestsSuite) TestBlobStartIncrementalCopyIfNoneMatchTrue() {
// _require := require.New(s.T())
// testName := s.T().Name()
// containerClient, pbClient, copyPBClient, snapshot := setupStartIncrementalCopyTest(_require, testName)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// eTag := "garbage"
// copyIncrementalPageBlobOptions := pageblob.CopyIncrementalOptions{
// ModifiedAccessConditions: &blob.ModifiedAccessConditions{
// IfNoneMatch: &eTag,
// },
// }
// resp, err := copyPBClient.StartCopyIncremental(context.Background(), pbClient.URL(), snapshot, &copyIncrementalPageBlobOptions)
// _require.Nil(err)
//
// validateIncrementalCopy(_require, copyPBClient, &resp)
//}
//func (s *PageBlobUnrecordedTestsSuite) TestBlobStartIncrementalCopyIfNoneMatchFalse() {
// _require := require.New(s.T())
// testName := s.T().Name()
// containerClient, pbClient, copyPBClient, snapshot := setupStartIncrementalCopyTest(_require, testName)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// resp, _ := copyPBClient.GetProperties(context.Background(), nil)
//
// copyIncrementalPageBlobOptions := pageblob.CopyIncrementalOptions{
// ModifiedAccessConditions: &blob.ModifiedAccessConditions{
// IfNoneMatch: resp.ETag,
// },
// }
// _, err := copyPBClient.StartCopyIncremental(context.Background(), pbClient.URL(), snapshot, &copyIncrementalPageBlobOptions)
// _require.NotNil(err)
//
// testcommon.ValidateBlobErrorCode(_require, err, bloberror.ConditionNotMet)
//}
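// setAndCheckPageBlobTier sets the requested access tier on the page blob and verifies
// that GetProperties reports the same tier.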
func setAndCheckPageBlobTier(_require *require.Assertions, pbClient *pageblob.Client, tier blob.AccessTier) {
_, err := pbClient.SetTier(context.Background(), tier, nil)
_require.Nil(err)
resp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
_require.Equal(*resp.AccessTier, string(tier))
}
func (s *PageBlobRecordedTestsSuite) TestBlobSetTierAllTiersOnPageBlob() {
_require := require.New(s.T())
testName := s.T().Name()
premiumServiceClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountPremium, nil)
_require.NoError(err)
premContainerName := "prem" + testcommon.GenerateContainerName(testName)
premContainerClient := testcommon.CreateNewContainer(context.Background(), _require, premContainerName, premiumServiceClient)
defer testcommon.DeleteContainer(context.Background(), _require, premContainerClient)
pbName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, pbName, premContainerClient)
possibleTiers := []blob.AccessTier{
blob.AccessTierP4,
blob.AccessTierP6,
blob.AccessTierP10,
blob.AccessTierP20,
blob.AccessTierP30,
blob.AccessTierP40,
blob.AccessTierP50,
}
for _, possibleTier := range possibleTiers {
setAndCheckPageBlobTier(_require, pbClient, possibleTier)
}
}
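// TestPageBlockWithCPK uploads pages using a customer-provided encryption key (CPK) and verifies
// that downloads without the key, or with a mismatched key, fail, while a download with the
// matching key returns the original data.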
func (s *PageBlobUnrecordedTestsSuite) TestPageBlockWithCPK() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, testcommon.GenerateContainerName(testName), svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
contentSize := 4 * 1024 * 1024 // 4MB
r, srcData := testcommon.GenerateData(contentSize)
pbName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlobWithCPK(context.Background(), _require, pbName, containerClient, int64(contentSize), &testcommon.TestCPKByValue, nil)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: int64(contentSize),
},
CpkInfo: &testcommon.TestCPKByValue,
}
uploadResp, err := pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.Nil(err)
// _require.Equal(uploadResp.RawResponse.StatusCode, 201)
_require.EqualValues(uploadResp.EncryptionKeySHA256, testcommon.TestCPKByValue.EncryptionKeySHA256)
pager := pbClient.NewGetPageRangesPager(nil)
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
pageListResp := resp.PageList.PageRange
start, end := int64(0), int64(contentSize-1)
rawStart, rawEnd := rawPageRange(pageListResp[0])
_require.Equal(rawStart, start)
_require.Equal(rawEnd, end)
if err != nil {
break
}
}
// Get blob content without encryption key should fail the request.
_, err = pbClient.DownloadStream(context.Background(), nil)
_require.NotNil(err)
downloadBlobOptions := blob.DownloadStreamOptions{
CpkInfo: &testcommon.TestInvalidCPKByValue,
}
_, err = pbClient.DownloadStream(context.Background(), &downloadBlobOptions)
_require.NotNil(err)
// Download blob to do data integrity check.
downloadBlobOptions = blob.DownloadStreamOptions{
CpkInfo: &testcommon.TestCPKByValue,
}
downloadResp, err := pbClient.DownloadStream(context.Background(), &downloadBlobOptions)
_require.Nil(err)
destData, err := io.ReadAll(downloadResp.Body)
_require.Nil(err)
_require.EqualValues(destData, srcData)
_require.EqualValues(*downloadResp.EncryptionKeySHA256, *testcommon.TestCPKByValue.EncryptionKeySHA256)
}
func (s *PageBlobUnrecordedTestsSuite) TestPageBlockWithCPKScope() {
_require := require.New(s.T())
testName := s.T().Name()
encryptionScope := testcommon.GetCPKScopeInfo(s.T())
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, testcommon.GenerateContainerName(testName)+"01", svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
contentSize := 4 * 1024 * 1024 // 4MB
r, srcData := testcommon.GenerateData(contentSize)
pbName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlobWithCPK(context.Background(), _require, pbName, containerClient, int64(contentSize), nil, &encryptionScope)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: int64(contentSize),
},
CpkScopeInfo: &encryptionScope,
}
uploadResp, err := pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.Nil(err)
// _require.Equal(uploadResp.RawResponse.StatusCode, 201)
_require.EqualValues(*encryptionScope.EncryptionScope, *uploadResp.EncryptionScope)
pager := pbClient.NewGetPageRangesPager(nil)
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
pageListResp := resp.PageList.PageRange
start, end := int64(0), int64(contentSize-1)
rawStart, rawEnd := rawPageRange(pageListResp[0])
_require.Equal(rawStart, start)
_require.Equal(rawEnd, end)
if err != nil {
break
}
}
// Download blob to do data integrity check.
downloadBlobOptions := blob.DownloadStreamOptions{
CpkScopeInfo: &encryptionScope,
}
downloadResp, err := pbClient.DownloadStream(context.Background(), &downloadBlobOptions)
_require.Nil(err)
destData, err := io.ReadAll(downloadResp.Body)
_require.Nil(err)
_require.EqualValues(destData, srcData)
_require.EqualValues(*downloadResp.EncryptionScope, *encryptionScope.EncryptionScope)
}
func (s *PageBlobUnrecordedTestsSuite) TestCreatePageBlobWithTags() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, testcommon.GenerateContainerName(testName), svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pbClient := createNewPageBlob(context.Background(), _require, "src"+testcommon.GenerateBlobName(testName), containerClient)
putResp, err := pbClient.UploadPages(context.Background(), testcommon.GetReaderToGeneratedBytes(1024), &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: 1024,
},
})
_require.Nil(err)
//_require.Equal(putResp.RawResponse.StatusCode, 201)
_require.Equal(putResp.LastModified.IsZero(), false)
_require.NotEqual(putResp.ETag, "")
_require.NotEqual(putResp.Version, "")
_, err = pbClient.SetTags(context.Background(), testcommon.BasicBlobTagsMap, nil)
_require.Nil(err)
//_require.Equal(setTagResp.RawResponse.StatusCode, 204)
gpResp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
_require.NotNil(gpResp)
_require.Equal(*gpResp.TagCount, int64(len(testcommon.BasicBlobTagsMap)))
blobGetTagsResponse, err := pbClient.GetTags(context.Background(), nil)
_require.Nil(err)
// _require.Equal(blobGetTagsResponse.RawResponse.StatusCode, 200)
blobTagsSet := blobGetTagsResponse.BlobTagSet
_require.NotNil(blobTagsSet)
_require.Len(blobTagsSet, len(testcommon.BasicBlobTagsMap))
for _, blobTag := range blobTagsSet {
_require.Equal(testcommon.BasicBlobTagsMap[*blobTag.Key], *blobTag.Value)
}
modifiedBlobTags := map[string]string{
"a0z1u2r3e4": "b0l1o2b3",
"b0l1o2b3": "s0d1k2",
}
_, err = pbClient.SetTags(context.Background(), modifiedBlobTags, nil)
_require.Nil(err)
//_require.Equal(setTagResp.RawResponse.StatusCode, 204)
gpResp, err = pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
_require.NotNil(gpResp)
_require.Equal(*gpResp.TagCount, int64(len(modifiedBlobTags)))
blobGetTagsResponse, err = pbClient.GetTags(context.Background(), nil)
_require.Nil(err)
// _require.Equal(blobGetTagsResponse.RawResponse.StatusCode, 200)
blobTagsSet = blobGetTagsResponse.BlobTagSet
_require.NotNil(blobTagsSet)
_require.Len(blobTagsSet, len(modifiedBlobTags))
for _, blobTag := range blobTagsSet {
_require.Equal(modifiedBlobTags[*blobTag.Key], *blobTag.Value)
}
}
func (s *PageBlobUnrecordedTestsSuite) TestPageBlobSetBlobTagForSnapshot() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, testcommon.GenerateContainerName(testName), svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pbClient := createNewPageBlob(context.Background(), _require, testcommon.GenerateBlobName(testName), containerClient)
_, err = pbClient.SetTags(context.Background(), testcommon.SpecialCharBlobTagsMap, nil)
_require.Nil(err)
resp, err := pbClient.CreateSnapshot(context.Background(), nil)
_require.Nil(err)
snapshotURL, _ := pbClient.WithSnapshot(*resp.Snapshot)
resp2, err := snapshotURL.GetProperties(context.Background(), nil)
_require.Nil(err)
_require.Equal(*resp2.TagCount, int64(len(testcommon.SpecialCharBlobTagsMap)))
blobGetTagsResponse, err := pbClient.GetTags(context.Background(), nil)
_require.Nil(err)
// _require.Equal(blobGetTagsResponse.RawResponse.StatusCode, 200)
blobTagsSet := blobGetTagsResponse.BlobTagSet
_require.NotNil(blobTagsSet)
_require.Len(blobTagsSet, len(testcommon.SpecialCharBlobTagsMap))
for _, blobTag := range blobTagsSet {
_require.Equal(testcommon.SpecialCharBlobTagsMap[*blobTag.Key], *blobTag.Value)
}
}
func (s *PageBlobRecordedTestsSuite) TestCreatePageBlobReturnsVID() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pbClient := createNewPageBlob(context.Background(), _require, testcommon.GenerateBlobName(testName), containerClient)
const contentSize = 1 * 1024
r, _ := testcommon.GenerateData(contentSize)
uploadPagesOptions := pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: contentSize,
},
}
putResp, err := pbClient.UploadPages(context.Background(), r, &uploadPagesOptions)
_require.Nil(err)
//_require.Equal(putResp.RawResponse.StatusCode, 201)
_require.Equal(putResp.LastModified.IsZero(), false)
_require.NotNil(putResp.ETag)
_require.NotEqual(*putResp.Version, "")
gpResp, err := pbClient.GetProperties(context.Background(), nil)
_require.Nil(err)
_require.NotNil(gpResp)
}
func (s *PageBlobRecordedTestsSuite) TestBlobResizeWithCPK() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.NoError(err)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, testcommon.GenerateContainerName(testName)+"01", svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pbName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlobWithCPK(context.Background(), _require, pbName, containerClient, pageblob.PageBytes*10, &testcommon.TestCPKByValue, nil)
resizePageBlobOptions := pageblob.ResizeOptions{
CpkInfo: &testcommon.TestCPKByValue,
}
_, err = pbClient.Resize(context.Background(), pageblob.PageBytes, &resizePageBlobOptions)
_require.Nil(err)
getBlobPropertiesOptions := blob.GetPropertiesOptions{
CpkInfo: &testcommon.TestCPKByValue,
}
resp, _ := pbClient.GetProperties(context.Background(), &getBlobPropertiesOptions)
_require.Equal(*resp.ContentLength, int64(pageblob.PageBytes))
}
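// TestPageBlockPermanentDelete creates a snapshot, soft-deletes it, and then removes it permanently
// with blob.DeleteTypePermanent, checking the container listing after each step.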
func (s *PageBlobRecordedTestsSuite) TestPageBlockPermanentDelete() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountSoftDelete, nil)
_require.Nil(err)
// Create container and blob, upload blob to container
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
count := int64(1024)
reader, _ := testcommon.GenerateData(1024)
_, err = pbClient.UploadPages(context.Background(), reader, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: count,
},
})
_require.Nil(err)
parts, err := sas.ParseURL(pbClient.URL()) // Get parts for BlobURL
_require.Nil(err)
credential, err := testcommon.GetGenericCredential(testcommon.TestAccountDefault)
_require.Nil(err)
// Set Account SAS and set Permanent Delete to true
parts.SAS, err = sas.AccountSignatureValues{
Protocol: sas.ProtocolHTTPS, // Users MUST use HTTPS (not HTTP)
ExpiryTime: time.Now().UTC().Add(48 * time.Hour), // SAS expires 48 hours from now
Permissions: to.Ptr(sas.AccountPermissions{Read: true, List: true, PermanentDelete: true}).String(),
Services: to.Ptr(sas.AccountServices{Blob: true}).String(),
ResourceTypes: to.Ptr(sas.AccountResourceTypes{Container: true, Object: true}).String(),
}.SignWithSharedKey(credential)
_require.Nil(err)
// Create snapshot of Blob and get snapshot URL
resp, err := pbClient.CreateSnapshot(context.Background(), &blob.CreateSnapshotOptions{})
_require.Nil(err)
snapshotURL, _ := pbClient.WithSnapshot(*resp.Snapshot)
// Check that there are two items in the container: one snapshot, one blob
pager := containerClient.NewListBlobsFlatPager(&container.ListBlobsFlatOptions{Include: container.ListBlobsInclude{Snapshots: true}})
found := make([]*container.BlobItem, 0)
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
found = append(found, resp.Segment.BlobItems...)
if err != nil {
break
}
}
_require.Len(found, 2)
// Delete snapshot (snapshot will be soft deleted)
deleteSnapshotsOnly := blob.DeleteSnapshotsOptionTypeOnly
_, err = pbClient.Delete(context.Background(), &blob.DeleteOptions{DeleteSnapshots: &deleteSnapshotsOnly})
_require.Nil(err)
// Check that only blob exists (snapshot is soft-deleted)
pager = containerClient.NewListBlobsFlatPager(&container.ListBlobsFlatOptions{
Include: container.ListBlobsInclude{Snapshots: true},
})
found = make([]*container.BlobItem, 0)
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
found = append(found, resp.Segment.BlobItems...)
if err != nil {
break
}
}
_require.Len(found, 1)
// Check that soft-deleted snapshot exists by including deleted items
pager = containerClient.NewListBlobsFlatPager(&container.ListBlobsFlatOptions{
Include: container.ListBlobsInclude{Snapshots: true, Deleted: true},
})
found = make([]*container.BlobItem, 0)
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
found = append(found, resp.Segment.BlobItems...)
if err != nil {
break
}
}
_require.Len(found, 2)
// Options for PermanentDeleteOptions
perm := blob.DeleteTypePermanent
deleteBlobOptions := blob.DeleteOptions{
BlobDeleteType: &perm,
}
time.Sleep(time.Second * 30)
// Execute Delete with DeleteTypePermanent
pdResp, err := snapshotURL.Delete(context.Background(), &deleteBlobOptions)
_require.Nil(err)
_require.NotNil(pdResp)
// Check that only blob exists even after including snapshots and deleted items
pager = containerClient.NewListBlobsFlatPager(&container.ListBlobsFlatOptions{
Include: container.ListBlobsInclude{Snapshots: true, Deleted: true}})
found = make([]*container.BlobItem, 0)
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
found = append(found, resp.Segment.BlobItems...)
if err != nil {
break
}
}
_require.Len(found, 1)
}
func (s *PageBlobRecordedTestsSuite) TestPageBlockPermanentDeleteWithoutPermission() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
_require.Nil(err)
// Create container and blob, upload blob to container
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
blobName := testcommon.GenerateBlobName(testName)
pbClient := createNewPageBlob(context.Background(), _require, blobName, containerClient)
count := int64(1024)
reader, _ := testcommon.GenerateData(1024)
_, err = pbClient.UploadPages(context.Background(), reader, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: count,
},
})
_require.Nil(err)
parts, err := sas.ParseURL(pbClient.URL()) // Get parts for BlobURL
_require.Nil(err)
credential, err := testcommon.GetGenericCredential(testcommon.TestAccountDefault)
_require.Nil(err)
// Set Account SAS
parts.SAS, err = sas.AccountSignatureValues{
Protocol: sas.ProtocolHTTPS, // Users MUST use HTTPS (not HTTP)
ExpiryTime: time.Now().UTC().Add(48 * time.Hour), // 48-hours before expiration
Permissions: to.Ptr(sas.AccountPermissions{Read: true, List: true}).String(),
Services: to.Ptr(sas.AccountServices{Blob: true}).String(),
ResourceTypes: to.Ptr(sas.AccountResourceTypes{Container: true, Object: true}).String(),
}.SignWithSharedKey(credential)
_require.Nil(err)
// Create snapshot of Blob and get snapshot URL
resp, err := pbClient.CreateSnapshot(context.Background(), &blob.CreateSnapshotOptions{})
_require.Nil(err)
snapshotURL, _ := pbClient.WithSnapshot(*resp.Snapshot)
// Check that there are two items in the container: one snapshot, one blob
pager := containerClient.NewListBlobsFlatPager(&container.ListBlobsFlatOptions{Include: container.ListBlobsInclude{Snapshots: true}})
found := make([]*container.BlobItem, 0)
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
found = append(found, resp.Segment.BlobItems...)
if err != nil {
break
}
}
_require.Len(found, 2)
// Delete snapshot
deleteSnapshotsOnly := blob.DeleteSnapshotsOptionTypeOnly
_, err = pbClient.Delete(context.Background(), &blob.DeleteOptions{DeleteSnapshots: &deleteSnapshotsOnly})
_require.Nil(err)
// Check that only blob exists
pager = containerClient.NewListBlobsFlatPager(&container.ListBlobsFlatOptions{
Include: container.ListBlobsInclude{Snapshots: true},
})
found = make([]*container.BlobItem, 0)
for pager.More() {
resp, err := pager.NextPage(context.Background())
_require.Nil(err)
found = append(found, resp.Segment.BlobItems...)
if err != nil {
break
}
}
_require.Len(found, 1)
// Options for PermanentDeleteOptions
perm := blob.DeleteTypePermanent
deleteBlobOptions := blob.DeleteOptions{
BlobDeleteType: &perm,
}
// Execute Delete with DeleteTypePermanent, should fail because permissions are not set and snapshot is not soft-deleted
_, err = snapshotURL.Delete(context.Background(), &deleteBlobOptions)
_require.NotNil(err)
}
//func (s *AZBlobUnrecordedTestsSuite) TestPageBlockFromURLWithCPK() {
// _require := require.New(s.T())
// testName := s.T().Name()
// svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
// if err != nil {
// s.Fail("Unable to fetch service client because " + err.Error())
// }
// containerClient := testcommon.CreateNewContainer(context.Background(), _require, testcommon.GenerateContainerName(testName)+"01", svcClient)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// contentSize := 8 * 1024 // 8KB
// r, srcData := getRandomDataAndReader(contentSize)
// md5Sum := md5.Sum(srcData)
// contentMD5 := md5Sum[:]
// ctx := ctx // Use default Background context
// srcPBName := "src" + testcommon.GenerateBlobName(testName)
// bbClient := createNewPageBlobWithSize(_require, srcPBName, containerClient, int64(contentSize))
// dstPBName := "dst" + testcommon.GenerateBlobName(testName)
// destBlob := createNewPageBlobWithCPK(_require, dstPBName, containerClient, int64(contentSize), &testcommon.TestCPKByValue, nil)
//
// offset, count := int64(0), int64(contentSize)
// uploadPagesOptions := pageblob.UploadPagesOptions{
// Offset: to.Ptr(offset), Count: to.Ptr(count),
// }
// _, err = bbClient.UploadPages(ctx, streaming.NopCloser(r), &uploadPagesOptions)
// _require.Nil(err)
// // _require.Equal(uploadResp.RawResponse.StatusCode, 201)
// srcBlobParts, _ := NewBlobURLParts(bbClient.URL())
//
// credential, err := getGenericCredential(nil, testcommon.TestAccountDefault)
// _require.Nil(err)
// srcBlobParts.SAS, err = BlobSASSignatureValues{
// Protocol: SASProtocolHTTPS,
// ExpiryTime: time.Now().UTC().Add(1 * time.Hour),
// ContainerName: srcBlobParts.ContainerName,
// BlobName: srcBlobParts.BlobName,
// Permissions: BlobSASPermissions{Read: true}.String(),
// }.Sign(credential)
// if err != nil {
// s.T().Fatal(err)
// }
//
// srcBlobURLWithSAS := srcBlobParts.URL()
// uploadPagesFromURLOptions := pageblob.UploadPagesFromURLOptions{
// SourceContentMD5: contentMD5,
// CpkInfo: &testcommon.TestCPKByValue,
// }
// resp, err := destBlob.UploadPagesFromURL(ctx, srcBlobURLWithSAS, 0, 0, int64(contentSize), &uploadPagesFromURLOptions)
// _require.Nil(err)
// // _require.Equal(resp.RawResponse.StatusCode, 201)
// _require.NotNil(resp.ETag)
// _require.NotNil(resp.LastModified)
// _require.NotNil(resp.ContentMD5)
// _require.EqualValues(resp.ContentMD5, contentMD5)
// _require.NotNil(resp.RequestID)
// _require.NotNil(resp.Version)
// _require.NotNil(resp.Date)
// _require.Equal((*resp.Date).IsZero(), false)
// _require.Equal(*resp.BlobSequenceNumber, int64(0))
// _require.Equal(*resp.IsServerEncrypted, true)
// _require.EqualValues(resp.EncryptionKeySHA256, testcommon.TestCPKByValue.EncryptionKeySHA256)
//
// _, err = destBlob.DownloadStream(ctx, nil)
// _require.NotNil(err)
//
// downloadBlobOptions := blob.downloadWriterAtOptions{
// CpkInfo: &testcommon.TestInvalidCPKByValue,
// }
// _, err = destBlob.DownloadStream(ctx, &downloadBlobOptions)
// _require.NotNil(err)
//
// // Download blob to do data integrity check.
// downloadBlobOptions = blob.downloadWriterAtOptions{
// CpkInfo: &testcommon.TestCPKByValue,
// }
// downloadResp, err := destBlob.DownloadStream(ctx, &downloadBlobOptions)
// _require.Nil(err)
// _require.EqualValues(*downloadResp.EncryptionKeySHA256, *testcommon.TestCPKByValue.EncryptionKeySHA256)
//
// destData, err := io.ReadAll(downloadResp.BodyReader(&blob.RetryReaderOptions{CpkInfo: &testcommon.TestCPKByValue}))
// _require.Nil(err)
// _require.EqualValues(destData, srcData)
//}
//func (s *AZBlobUnrecordedTestsSuite) TestPageBlockFromURLWithCPKScope() {
// _require := require.New(s.T())
// testName := s.T().Name()
// svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
// if err != nil {
// s.Fail("Unable to fetch service client because " + err.Error())
// }
// containerClient := testcommon.CreateNewContainer(context.Background(), _require, testcommon.GenerateContainerName(testName)+"01", svcClient)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// contentSize := 8 * 1024 // 8KB
// r, srcData := getRandomDataAndReader(contentSize)
// md5Sum := md5.Sum(srcData)
// contentMD5 := md5Sum[:]
// ctx := ctx // Use default Background context
// srcPBName := "src" + testcommon.GenerateBlobName(testName)
// srcPBClient := createNewPageBlobWithSize(_require, srcPBName, containerClient, int64(contentSize))
// dstPBName := "dst" + testcommon.GenerateBlobName(testName)
// dstPBBlob := createNewPageBlobWithCPK(_require, dstPBName, containerClient, int64(contentSize), nil, &testcommon.TestCPKByScope)
//
// offset, count := int64(0), int64(contentSize)
// uploadPagesOptions := pageblob.UploadPagesOptions{
// Offset: to.Ptr(offset), Count: to.Ptr(count),
// }
// _, err = srcPBClient.UploadPages(ctx, streaming.NopCloser(r), &uploadPagesOptions)
// _require.Nil(err)
// // _require.Equal(uploadResp.RawResponse.StatusCode, 201)
// srcBlobParts, _ := NewBlobURLParts(srcPBClient.URL())
//
// credential, err := getGenericCredential(nil, testcommon.TestAccountDefault)
// _require.Nil(err)
// srcBlobParts.SAS, err = BlobSASSignatureValues{
// Protocol: SASProtocolHTTPS,
// ExpiryTime: time.Now().UTC().Add(1 * time.Hour),
// ContainerName: srcBlobParts.ContainerName,
// BlobName: srcBlobParts.BlobName,
// Permissions: BlobSASPermissions{Read: true}.String(),
// }.Sign(credential)
// if err != nil {
// s.T().Fatal(err)
// }
//
// srcBlobURLWithSAS := srcBlobParts.URL()
// uploadPagesFromURLOptions := pageblob.UploadPagesFromURLOptions{
// SourceContentMD5: contentMD5,
// CpkScopeInfo: &testcommon.TestCPKByScope,
// }
// resp, err := dstPBBlob.UploadPagesFromURL(ctx, srcBlobURLWithSAS, 0, 0, int64(contentSize), &uploadPagesFromURLOptions)
// _require.Nil(err)
// // _require.Equal(resp.RawResponse.StatusCode, 201)
// _require.NotNil(resp.ETag)
// _require.NotNil(resp.LastModified)
// _require.NotNil(resp.ContentMD5)
// _require.EqualValues(resp.ContentMD5, contentMD5)
// _require.NotNil(resp.RequestID)
// _require.NotNil(resp.Version)
// _require.NotNil(resp.Date)
// _require.Equal((*resp.Date).IsZero(), false)
// _require.Equal(*resp.BlobSequenceNumber, int64(0))
// _require.Equal(*resp.IsServerEncrypted, true)
// _require.EqualValues(resp.EncryptionScope, testcommon.TestCPKByScope.EncryptionScope)
//
// // Download blob to do data integrity check.
// downloadBlobOptions := blob.downloadWriterAtOptions{
// CpkScopeInfo: &testcommon.TestCPKByScope,
// }
// downloadResp, err := dstPBBlob.DownloadStream(ctx, &downloadBlobOptions)
// _require.Nil(err)
// _require.EqualValues(*downloadResp.EncryptionScope, *testcommon.TestCPKByScope.EncryptionScope)
//
// destData, err := io.ReadAll(downloadResp.BodyReader(&blob.RetryReaderOptions{CpkInfo: &testcommon.TestCPKByValue}))
// _require.Nil(err)
// _require.EqualValues(destData, srcData)
//}
//func (s *AZBlobUnrecordedTestsSuite) TestUploadPagesFromURLWithMD5WithCPK() {
// _require := require.New(s.T())
// testName := s.T().Name()
// svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
// if err != nil {
// s.Fail("Unable to fetch service client because " + err.Error())
// }
// containerClient := testcommon.CreateNewContainer(context.Background(), _require, testcommon.GenerateContainerName(testName)+"01", svcClient)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// contentSize := 8 * 1024
// r, srcData := getRandomDataAndReader(contentSize)
// md5Sum := md5.Sum(srcData)
// contentMD5 := md5Sum[:]
// srcPBName := "src" + testcommon.GenerateBlobName(testName)
// srcBlob := createNewPageBlobWithSize(_require, srcPBName, containerClient, int64(contentSize))
//
// offset, count := int64(0), int64(contentSize)
// uploadPagesOptions := pageblob.UploadPagesOptions{
// Offset: to.Ptr(offset), Count: to.Ptr(count),
// }
// _, err = srcBlob.UploadPages(ctx, streaming.NopCloser(r), &uploadPagesOptions)
// _require.Nil(err)
// // _require.Equal(uploadResp.RawResponse.StatusCode, 201)
//
// srcBlobParts, _ := NewBlobURLParts(srcBlob.URL())
//
// credential, err := getGenericCredential(nil, testcommon.TestAccountDefault)
// _require.Nil(err)
// srcBlobParts.SAS, err = BlobSASSignatureValues{
// Protocol: SASProtocolHTTPS,
// ExpiryTime: time.Now().UTC().Add(1 * time.Hour),
// ContainerName: srcBlobParts.ContainerName,
// BlobName: srcBlobParts.BlobName,
// Permissions: BlobSASPermissions{Read: true}.String(),
// }.Sign(credential)
// if err != nil {
// s.T().Fatal(err)
// }
//
// srcBlobURLWithSAS := srcBlobParts.URL()
// dstPBName := "dst" + testcommon.GenerateBlobName(testName)
// destPBClient := createNewPageBlobWithCPK(_require, dstPBName, containerClient, int64(contentSize), &testcommon.TestCPKByValue, nil)
// uploadPagesFromURLOptions := pageblob.UploadPagesFromURLOptions{
// SourceContentMD5: contentMD5,
// CpkInfo: &testcommon.TestCPKByValue,
// }
// resp, err := destPBClient.UploadPagesFromURL(ctx, srcBlobURLWithSAS, 0, 0, int64(contentSize), &uploadPagesFromURLOptions)
// _require.Nil(err)
// // _require.Equal(resp.RawResponse.StatusCode, 201)
// _require.NotNil(resp.ETag)
// _require.NotNil(resp.LastModified)
// _require.NotNil(resp.ContentMD5)
// _require.EqualValues(resp.ContentMD5, contentMD5)
// _require.NotNil(resp.RequestID)
// _require.NotNil(resp.Version)
// _require.NotNil(resp.Date)
// _require.Equal((*resp.Date).IsZero(), false)
// _require.Equal(*resp.BlobSequenceNumber, int64(0))
// _require.Equal(*resp.IsServerEncrypted, true)
// _require.EqualValues(resp.EncryptionKeySHA256, testcommon.TestCPKByValue.EncryptionKeySHA256)
//
// _, err = destPBClient.DownloadStream(ctx, nil)
// _require.NotNil(err)
//
// downloadBlobOptions := blob.downloadWriterAtOptions{
// CpkInfo: &testcommon.TestInvalidCPKByValue,
// }
// _, err = destPBClient.DownloadStream(ctx, &downloadBlobOptions)
// _require.NotNil(err)
//
// // Download blob to do data integrity check.
// downloadBlobOptions = blob.downloadWriterAtOptions{
// CpkInfo: &testcommon.TestCPKByValue,
// }
// downloadResp, err := destPBClient.DownloadStream(ctx, &downloadBlobOptions)
// _require.Nil(err)
// _require.EqualValues(*downloadResp.EncryptionKeySHA256, *testcommon.TestCPKByValue.EncryptionKeySHA256)
//
// destData, err := io.ReadAll(downloadResp.BodyReader(&blob.RetryReaderOptions{CpkInfo: &testcommon.TestCPKByValue}))
// _require.Nil(err)
// _require.EqualValues(destData, srcData)
//
// _, badMD5 := getRandomDataAndReader(16)
// badContentMD5 := badMD5[:]
// uploadPagesFromURLOptions1 := pageblob.UploadPagesFromURLOptions{
// SourceContentMD5: badContentMD5,
// }
// _, err = destPBClient.UploadPagesFromURL(ctx, srcBlobURLWithSAS, 0, 0, int64(contentSize), &uploadPagesFromURLOptions1)
// _require.NotNil(err)
//
// testcommon.ValidateBlobErrorCode(_require, err, StorageErrorCodeMD5Mismatch)
//}
//func (s *AZBlobRecordedTestsSuite) TestClearDiffPagesWithCPK() {
// _require := require.New(s.T())
// testName := s.T().Name()
// svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountDefault, nil)
// if err != nil {
// s.Fail("Unable to fetch service client because " + err.Error())
// }
// containerClient := testcommon.CreateNewContainer(context.Background(), _require, testcommon.GenerateContainerName(testName)+"01", svcClient)
// defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
//
// pbName := testcommon.GenerateBlobName(testName)
// pbClient := createNewPageBlobWithCPK(_require, pbName, containerClient, pageblob.PageBytes*10, &testcommon.TestCPKByValue, nil)
//
// contentSize := 2 * 1024
// r := getReaderToGeneratedBytes(contentSize)
// offset, _, count := int64(0), int64(contentSize-1), int64(contentSize)
// uploadPagesOptions := pageblob.UploadPagesOptions{Range: &HttpRange{offset, count}, CpkInfo: &testcommon.TestCPKByValue}
// _, err = pbClient.UploadPages(ctx, r, &uploadPagesOptions)
// _require.Nil(err)
//
// createBlobSnapshotOptions := blob.CreateSnapshotOptions{
// CpkInfo: &testcommon.TestCPKByValue,
// }
// snapshotResp, err := pbClient.CreateSnapshot(ctx, &createBlobSnapshotOptions)
// _require.Nil(err)
//
// offset1, end1, count1 := int64(contentSize), int64(2*contentSize-1), int64(contentSize)
// uploadPagesOptions1 := pageblob.UploadPagesOptions{Range: &HttpRange{offset1, count1}, CpkInfo: &testcommon.TestCPKByValue}
// _, err = pbClient.UploadPages(ctx, getReaderToGeneratedBytes(2048), &uploadPagesOptions1)
// _require.Nil(err)
//
// pageListResp, err := pbClient.NewGetPageRangesDiffPager(ctx, HttpRange{0, 4096}, *snapshotResp.Snapshot, nil)
// _require.Nil(err)
// pageRangeResp := pageListResp.PageList.Range
// _require.NotNil(pageRangeResp)
// _require.Len(pageRangeResp, 1)
// rawStart, rawEnd := pageRangeResp[0].Raw()
// _require.Equal(rawStart, offset1)
// _require.Equal(rawEnd, end1)
//
// clearPagesOptions := PageBlobClearPagesOptions{
// CpkInfo: &testcommon.TestCPKByValue,
// }
// clearResp, err := pbClient.ClearPages(ctx, HttpRange{2048, 2048}, &clearPagesOptions)
// _require.Nil(err)
// _require.Equal(clearResp.RawResponse.StatusCode, 201)
//
// pageListResp, err = pbClient.NewGetPageRangesDiffPager(ctx, HttpRange{0, 4095}, *snapshotResp.Snapshot, nil)
// _require.Nil(err)
// _require.Nil(pageListResp.PageList.Range)
//}
func (s *PageBlobRecordedTestsSuite) TestUndeletePageBlobVersion() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountSoftDelete, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pbClient := getPageBlobClient(testcommon.GenerateBlobName(testName), containerClient)
_, err = pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
_, err = pbClient.UploadPages(context.Background(), r, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
})
_require.Nil(err)
versions := make([]string, 0)
for i := 0; i < 5; i++ {
resp, err := pbClient.CreateSnapshot(context.Background(), nil)
_require.Nil(err)
_require.NotNil(resp.VersionID)
versions = append(versions, *resp.VersionID)
}
listPager := containerClient.NewListBlobsFlatPager(&container.ListBlobsFlatOptions{
Include: container.ListBlobsInclude{Versions: true},
})
testcommon.ListBlobsCount(context.Background(), _require, listPager, 6)
// Deleting the 1st, 2nd and 3rd versions
for i := 0; i < 3; i++ {
pbClientWithVersionID, err := pbClient.WithVersionID(versions[i])
_require.Nil(err)
_, err = pbClientWithVersionID.Delete(context.Background(), nil)
_require.Nil(err)
}
// adding wait after delete
time.Sleep(time.Second * 10)
listPager = containerClient.NewListBlobsFlatPager(&container.ListBlobsFlatOptions{
Include: container.ListBlobsInclude{Versions: true},
})
testcommon.ListBlobsCount(context.Background(), _require, listPager, 3)
_, err = pbClient.Undelete(context.Background(), nil)
_require.Nil(err)
// adding wait after undelete
time.Sleep(time.Second * 10)
listPager = containerClient.NewListBlobsFlatPager(&container.ListBlobsFlatOptions{
Include: container.ListBlobsInclude{Versions: true},
})
testcommon.ListBlobsCount(context.Background(), _require, listPager, 6)
}
func (s *PageBlobRecordedTestsSuite) TestUndeletePageBlobSnapshot() {
_require := require.New(s.T())
testName := s.T().Name()
svcClient, err := testcommon.GetServiceClient(s.T(), testcommon.TestAccountSoftDelete, nil)
_require.NoError(err)
containerName := testcommon.GenerateContainerName(testName)
containerClient := testcommon.CreateNewContainer(context.Background(), _require, containerName, svcClient)
defer testcommon.DeleteContainer(context.Background(), _require, containerClient)
pbClient := getPageBlobClient(testcommon.GenerateBlobName(testName), containerClient)
_, err = pbClient.Create(context.Background(), pageblob.PageBytes*10, nil)
_require.Nil(err)
r, _ := testcommon.GenerateData(pageblob.PageBytes)
_, err = pbClient.UploadPages(context.Background(), r, &pageblob.UploadPagesOptions{
Range: blob.HTTPRange{
Count: pageblob.PageBytes,
},
})
_require.Nil(err)
snapshots := make([]string, 0)
for i := 0; i < 5; i++ {
resp, err := pbClient.CreateSnapshot(context.Background(), nil)
_require.Nil(err)
_require.NotNil(resp.Snapshot)
snapshots = append(snapshots, *resp.Snapshot)
}
listPager := containerClient.NewListBlobsFlatPager(&container.ListBlobsFlatOptions{
Include: container.ListBlobsInclude{Snapshots: true},
})
testcommon.ListBlobsCount(context.Background(), _require, listPager, 6) // 5 snapshots and 1 current version
// Deleting the 1st, 2nd and 3rd snapshots
for i := 0; i < 3; i++ {
pbClientWithSnapshot, err := pbClient.WithSnapshot(snapshots[i])
_require.Nil(err)
_, err = pbClientWithSnapshot.Delete(context.Background(), nil)
_require.Nil(err)
}
// adding wait after delete
time.Sleep(time.Second * 10)
listPager = containerClient.NewListBlobsFlatPager(&container.ListBlobsFlatOptions{
Include: container.ListBlobsInclude{Snapshots: true},
})
testcommon.ListBlobsCount(context.Background(), _require, listPager, 3) // 2 snapshots and 1 current version
_, err = pbClient.Undelete(context.Background(), nil)
_require.Nil(err)
// adding wait after undelete
time.Sleep(time.Second * 10)
listPager = containerClient.NewListBlobsFlatPager(&container.ListBlobsFlatOptions{
Include: container.ListBlobsInclude{Snapshots: true},
})
testcommon.ListBlobsCount(context.Background(), _require, listPager, 6) // 5 snapshots and 1 current version
}
|
{
"content_hash": "9820e0a08a95ff1424018da3fa461fe8",
"timestamp": "",
"source": "github",
"line_count": 4620,
"max_line_length": 235,
"avg_line_length": 38.74155844155844,
"alnum_prop": 0.7648195948286458,
"repo_name": "Azure/azure-sdk-for-go",
"id": "394020b9103a406224734bbbe1b314544b4963ce",
"size": "179180",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/storage/azblob/pageblob/client_test.go",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1629"
},
{
"name": "Bicep",
"bytes": "8394"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "1435"
},
{
"name": "Go",
"bytes": "5463500"
},
{
"name": "HTML",
"bytes": "8933"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "PowerShell",
"bytes": "504494"
},
{
"name": "Shell",
"bytes": "3893"
},
{
"name": "Smarty",
"bytes": "1723"
}
],
"symlink_target": ""
}
|
'use strict';
/**
* Properties describing any presence of blur within the image.
*
*/
class BlurProperties {
/**
* Create a BlurProperties.
* @member {string} [blurLevel] An enum value indicating level of blurriness.
* Possible values include: 'Low', 'Medium', 'High'
* @member {number} [value] A number indicating level of blurriness ranging
* from 0 to 1.
*/
constructor() {
}
/**
* Defines the metadata of BlurProperties
*
* @returns {object} metadata of BlurProperties
*
*/
mapper() {
return {
required: false,
serializedName: 'BlurProperties',
type: {
name: 'Composite',
className: 'BlurProperties',
modelProperties: {
blurLevel: {
required: false,
serializedName: 'blurLevel',
type: {
name: 'Enum',
allowedValues: [ 'Low', 'Medium', 'High' ]
}
},
value: {
required: false,
serializedName: 'value',
constraints: {
InclusiveMaximum: 1,
InclusiveMinimum: 0
},
type: {
name: 'Number'
}
}
}
}
};
}
}
module.exports = BlurProperties;
|
{
"content_hash": "fd193d3d1c55bf9cde28a502b5b4f063",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 79,
"avg_line_length": 21.71186440677966,
"alnum_prop": 0.5113192818110851,
"repo_name": "amarzavery/azure-sdk-for-node",
"id": "4f23a08e84065f94f1d6bc473c8f5445e85f7ea2",
"size": "1598",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/services/face/lib/models/blurProperties.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "661"
},
{
"name": "JavaScript",
"bytes": "85615658"
},
{
"name": "Shell",
"bytes": "437"
}
],
"symlink_target": ""
}
|
package org.assertj.core.api.charsequence;
import static org.assertj.core.api.Assertions.contentOf;
import static org.mockito.Mockito.verify;
import java.io.File;
import org.assertj.core.api.CharSequenceAssert;
import org.assertj.core.api.CharSequenceAssertBaseTest;
/**
* Tests for <code>{@link org.assertj.core.api.CharSequenceAssert#isXmlEqualToContentOf(java.io.File)}</code>.
*
* @author Joel Costigliola
*/
public class CharSequenceAssert_isXmlEqualToContentOf_Test extends CharSequenceAssertBaseTest {
private File xmlFile = new File("src/test/resources/expected.xml");
@Override
protected CharSequenceAssert invoke_api_method() {
return assertions.isXmlEqualToContentOf(xmlFile);
}
@Override
protected void verify_internal_effects() {
verify(strings).assertXmlEqualsTo(getInfo(assertions), getActual(assertions), contentOf(xmlFile));
}
}
|
{
"content_hash": "2b96b879b90f81a7c43818e415621657",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 110,
"avg_line_length": 29.4,
"alnum_prop": 0.7811791383219955,
"repo_name": "yurloc/assertj-core",
"id": "0d78ab19dd6805ec6a93aef3228fd959d52e9c67",
"size": "1523",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/java/org/assertj/core/api/charsequence/CharSequenceAssert_isXmlEqualToContentOf_Test.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
<?php
/**
* Simple links list block
*
* @category Mage
* @package Mage_Core
* @author Magento Core Team <core@magentocommerce.com>
*/
class Mage_Page_Block_Template_Links_Block extends Mage_Core_Block_Template
{
/**
* First link flag
*
* @var bool
*/
protected $_isFirst = false;
/**
* Last link flag
*
* @var bool
*/
protected $_isLast = false;
/**
* Link label
*
* @var string
*/
protected $_label = null;
/**
* Link url
*
* @var string
*/
protected $_url = null;
/**
* Link title
*
* @var string
*/
protected $_title = null;
/**
* Li element params
*
* @var string
*/
protected $_liPparams = null;
/**
* A element params
*
* @var string
*/
protected $_aPparams = null;
/**
* Message before link text
*
* @var string
*/
protected $_beforeText = null;
/**
* Message after link text
*
* @var string
*/
protected $_afterText = null;
/**
* Position in link list
* @var int
*/
protected $_position = 0;
/**
* Set default template
*
*/
protected function _construct()
{
$this->setTemplate('page/template/linksblock.phtml');
}
/**
* Return link position in link list
*
* @return int
*/
public function getPosition()
{
return $this->_position;
}
/**
* Return first position flag
*
* @return bool
*/
public function getIsFirst()
{
return $this->_isFirst;
}
/**
* Set first list flag
*
* @param bool $value
* @return Mage_Page_Block_Template_Links_Block
*/
public function setIsFirst($value)
{
$this->_isFirst = (bool)$value;
return $this;
}
/**
* Return last position flag
*
* @return bool
*/
public function getIsLast()
{
return $this->_isLast;
}
/**
* Set last list flag
*
* @param bool $value
* @return Mage_Page_Block_Template_Links_Block
*/
public function setIsLast($value)
{
$this->_isLast = (bool)$value;
return $this;
}
/**
* Return link label
*
* @return string
*/
public function getLabel()
{
return $this->_label;
}
/**
* Return link title
*
* @return string
*/
public function getTitle()
{
return $this->_title;
}
/**
* Return link url
*
* @return string
*/
public function getLinkUrl()
{
return $this->_url;
}
}
|
{
"content_hash": "887dc1bb5dcb40d8ed3b638a83397fe3",
"timestamp": "",
"source": "github",
"line_count": 178,
"max_line_length": 75,
"avg_line_length": 15.426966292134832,
"alnum_prop": 0.48215586307356156,
"repo_name": "fabiensebban/magento",
"id": "71b5a9be49cd07685ae7f76108d89ff8dd1cfc0f",
"size": "3697",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "app/code/core/Mage/Page/Block/Template/Links/Block.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "20063"
},
{
"name": "ApacheConf",
"bytes": "8150"
},
{
"name": "Batchfile",
"bytes": "1036"
},
{
"name": "CSS",
"bytes": "1781578"
},
{
"name": "HTML",
"bytes": "5498183"
},
{
"name": "JavaScript",
"bytes": "1292137"
},
{
"name": "PHP",
"bytes": "48139973"
},
{
"name": "PowerShell",
"bytes": "1028"
},
{
"name": "Ruby",
"bytes": "288"
},
{
"name": "Shell",
"bytes": "3879"
},
{
"name": "XSLT",
"bytes": "2135"
}
],
"symlink_target": ""
}
|
from setuptools import setup, find_packages
import sys, os
version = '0.0.1'
setup(name='mysql_demo',
version=version,
description="mysql tools demo",
long_description="""\
mysql tools demo""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='python',
author='Eric Xiao',
author_email='ericssonxiao@gmail.com',
url='http://ericssonxiao.github.io/blog/',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
{
"content_hash": "cded0f2238b217cf3f8e0d1cf2f00f19",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 95,
"avg_line_length": 28.846153846153847,
"alnum_prop": 0.5973333333333334,
"repo_name": "ericssonxiao/myCookBook",
"id": "f2f7af67674e3c98b3cf3f0843d7bf03d4253c2a",
"size": "750",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/mysql_demo/mysql_demo/setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8035"
},
{
"name": "Groovy",
"bytes": "156"
},
{
"name": "Io",
"bytes": "0"
},
{
"name": "Java",
"bytes": "182888"
},
{
"name": "JavaScript",
"bytes": "2831"
},
{
"name": "Perl",
"bytes": "13532"
},
{
"name": "Python",
"bytes": "1080"
},
{
"name": "VimL",
"bytes": "2918"
}
],
"symlink_target": ""
}
|
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="{{ site.description }}">
<title>{% if page.title %}{{ page.title }} - {{ site.title }}{% else %}{{ site.title }}{% endif %}</title>
<link rel="canonical" href="{{ page.url | replace:'index.html','' | prepend: site.baseurl | prepend: site.url }}">
<link rel="stylesheet" href="{{ "/assets/css/main.min.css"}}">
<link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/highlight.js/9.1.0/styles/default.min.css">
<link href='//fonts.googleapis.com/css?family=Lora:400,700,400italic,700italic' rel='stylesheet' type='text/css'>
<link href='//fonts.googleapis.com/css?family=Open+Sans:300italic,400italic,600italic,700italic,800italic,400,300,600,700,800' rel='stylesheet' type='text/css'>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script>
<script src="https://oss.maxcdn.com/libs/respond.js/1.4.2/respond.min.js"></script>
<![endif]-->
{% seo %}
</head>
|
{
"content_hash": "cc47d074a9a96250e816e7f5a7c63339",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 164,
"avg_line_length": 53.12,
"alnum_prop": 0.651355421686747,
"repo_name": "loql/loql.github.io",
"id": "f2a761a09d64a01d8c616a83e8448f07273b23ec",
"size": "1328",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_includes/head.html",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16741"
},
{
"name": "HTML",
"bytes": "11820"
},
{
"name": "JavaScript",
"bytes": "54883"
},
{
"name": "Ruby",
"bytes": "3220"
}
],
"symlink_target": ""
}
|
<md-card>
<md-card-content>
<md-card>
<span x-large>Your Content Here</span>
<form (ngSubmit)="submitState(localState.value)" autocomplete="off">
<md-input
placeholder="Submit Local State to App State"
[value]="localState.value"
(input)="localState.value = $event.target.value"
autofocus>
</md-input>
<button md-raised-button color="primary">Submit Value</button>
</form>
<!--
<input type="text" [value]="localState.value" (input)="localState.value = $event.target.value" autofocus>
Rather than wiring up two-way data-binding ourselves with [value] and (input)
we can use Angular's [(ngModel)] syntax
<input type="text" [(ngModel)]="localState.value" autofocus>
-->
<md-card>
For hot module reloading run
<pre>npm run start:hmr</pre>
</md-card>
<hr>
<pre>this.localState = {{ localState | json }}</pre>
</md-card>
</md-card-content>
</md-card>
|
{
"content_hash": "1d550de8949a38b99e627288458c962d",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 115,
"avg_line_length": 31.875,
"alnum_prop": 0.5931372549019608,
"repo_name": "pedger/typegame2",
"id": "40b044b8bc5b65ccac5d5792996a3deb4cbfe00f",
"size": "1020",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/app/home/home.html",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "700"
},
{
"name": "HTML",
"bytes": "8304"
},
{
"name": "JavaScript",
"bytes": "30182"
},
{
"name": "TypeScript",
"bytes": "24567"
}
],
"symlink_target": ""
}
|
<!--
********************************************************************************
WARNING:
DO NOT EDIT "groovy/README.md"
IT IS AUTO-GENERATED
(from the other files in "groovy/" combined with a set of templates)
********************************************************************************
-->
# Quick reference
- **Maintained by**:
[the Apache Groovy project](https://github.com/groovy/docker-groovy)
- **Where to get help**:
[the Docker Community Forums](https://forums.docker.com/), [the Docker Community Slack](https://dockr.ly/slack), or [Stack Overflow](https://stackoverflow.com/search?tab=newest&q=docker)
# Supported tags and respective `Dockerfile` links
- [`3.0.6-jdk8`, `3.0-jdk8`, `3.0.6-jdk`, `3.0-jdk`, `jdk8`, `jdk`](https://github.com/groovy/docker-groovy/blob/a48a3fed614ad8a42f555cf6a5a8299ec47b658e/jdk8/Dockerfile)
- [`3.0.6-jre8`, `3.0-jre8`, `3.0.6-jre`, `3.0-jre`, `3.0.6`, `3.0`, `jre8`, `jre`, `latest`](https://github.com/groovy/docker-groovy/blob/a48a3fed614ad8a42f555cf6a5a8299ec47b658e/jre8/Dockerfile)
- [`3.0.6-jdk11`, `3.0-jdk11`, `jdk11`](https://github.com/groovy/docker-groovy/blob/a48a3fed614ad8a42f555cf6a5a8299ec47b658e/jdk11/Dockerfile)
- [`3.0.6-jre11`, `3.0-jre11`, `jre11`](https://github.com/groovy/docker-groovy/blob/a48a3fed614ad8a42f555cf6a5a8299ec47b658e/jre11/Dockerfile)
- [`3.0.6-jdk15`, `3.0-jdk15`, `jdk15`](https://github.com/groovy/docker-groovy/blob/a48a3fed614ad8a42f555cf6a5a8299ec47b658e/jdk15/Dockerfile)
- [`3.0.6-jre15`, `3.0-jre15`, `jre15`](https://github.com/groovy/docker-groovy/blob/a48a3fed614ad8a42f555cf6a5a8299ec47b658e/jre15/Dockerfile)
- [`4.0.0-alpha-2-jdk8`, `4.0-jdk8`, `4.0.0-alpha-2-jdk`, `4.0-jdk`](https://github.com/groovy/docker-groovy/blob/a5bc6d96b7e8a8bd82439fdaa048c222229d82ec/jdk8/Dockerfile)
- [`4.0.0-alpha-2-jre8`, `4.0-jre8`, `4.0.0-alpha-2-jre`, `4.0-jre`, `4.0.0-alpha-2`, `4.0`](https://github.com/groovy/docker-groovy/blob/a5bc6d96b7e8a8bd82439fdaa048c222229d82ec/jre8/Dockerfile)
- [`4.0.0-alpha-2-jdk11`, `4.0-jdk11`](https://github.com/groovy/docker-groovy/blob/a5bc6d96b7e8a8bd82439fdaa048c222229d82ec/jdk11/Dockerfile)
- [`4.0.0-alpha-2-jre11`, `4.0-jre11`](https://github.com/groovy/docker-groovy/blob/a5bc6d96b7e8a8bd82439fdaa048c222229d82ec/jre11/Dockerfile)
- [`4.0.0-alpha-2-jdk15`, `4.0-jdk15`](https://github.com/groovy/docker-groovy/blob/a5bc6d96b7e8a8bd82439fdaa048c222229d82ec/jdk15/Dockerfile)
- [`4.0.0-alpha-2-jre15`, `4.0-jre15`](https://github.com/groovy/docker-groovy/blob/a5bc6d96b7e8a8bd82439fdaa048c222229d82ec/jre15/Dockerfile)
# Quick reference (cont.)
- **Where to file issues**:
[https://github.com/groovy/docker-groovy/issues](https://github.com/groovy/docker-groovy/issues)
- **Supported architectures**: ([more info](https://github.com/docker-library/official-images#architectures-other-than-amd64))
[`amd64`](https://hub.docker.com/r/amd64/groovy/), [`arm32v7`](https://hub.docker.com/r/arm32v7/groovy/), [`arm64v8`](https://hub.docker.com/r/arm64v8/groovy/), [`ppc64le`](https://hub.docker.com/r/ppc64le/groovy/), [`s390x`](https://hub.docker.com/r/s390x/groovy/)
- **Published image artifact details**:
[repo-info repo's `repos/groovy/` directory](https://github.com/docker-library/repo-info/blob/master/repos/groovy) ([history](https://github.com/docker-library/repo-info/commits/master/repos/groovy))
(image metadata, transfer size, etc)
- **Image updates**:
[official-images repo's `library/groovy` label](https://github.com/docker-library/official-images/issues?q=label%3Alibrary%2Fgroovy)
[official-images repo's `library/groovy` file](https://github.com/docker-library/official-images/blob/master/library/groovy) ([history](https://github.com/docker-library/official-images/commits/master/library/groovy))
- **Source of this description**:
[docs repo's `groovy/` directory](https://github.com/docker-library/docs/tree/master/groovy) ([history](https://github.com/docker-library/docs/commits/master/groovy))
# What is Groovy?
[Apache Groovy](http://groovy-lang.org/) is a powerful, optionally typed and dynamic language, with static-typing and static compilation capabilities, for the Java platform aimed at improving developer productivity thanks to a concise, familiar and easy to learn syntax. It integrates smoothly with any Java program, and immediately delivers to your application powerful features, including scripting capabilities, Domain-Specific Language authoring, runtime and compile-time meta-programming and functional programming.

# How to use this image
Note that if you are mounting a volume and the uid running Docker is not `1000`, you should run as user `root` (`-u root`).
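For example, a minimal sketch combining a volume mount with `-u root` (the script name `myscript.groovy` is just a placeholder):
```console
docker run --rm -u root -v "$PWD":/home/groovy/scripts -w /home/groovy/scripts groovy groovy myscript.groovy
```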
## Starting Groovysh
`docker run -it --rm groovy`
## Running a Groovy script
`docker run --rm -v "$PWD":/home/groovy/scripts -w /home/groovy/scripts groovy groovy <script> <script-args>`
## Reusing the Grapes cache
The local Grapes cache can be reused across containers by creating a volume and mounting it in `/home/groovy/.groovy/grapes`.
```console
docker volume create --name grapes-cache
docker run --rm -it -v grapes-cache:/home/groovy/.groovy/grapes groovy
```
# License
View [license information](http://www.apache.org/licenses/LICENSE-2.0.html) for the software contained in this image.
As with all Docker images, these likely also contain other software which may be under other licenses (such as Bash, etc from the base distribution, along with any direct or indirect dependencies of the primary software being contained).
Some additional license information which was able to be auto-detected might be found in [the `repo-info` repository's `groovy/` directory](https://github.com/docker-library/repo-info/tree/master/repos/groovy).
As for any pre-built image usage, it is the image user's responsibility to ensure that any use of this image complies with any relevant licenses for all software contained within.
|
{
"content_hash": "d33691ce52d257b56ef9cd1b14023449",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 520,
"avg_line_length": 64.42553191489361,
"alnum_prop": 0.7329920739762219,
"repo_name": "avoinea/docs",
"id": "ce4282795b1b9c17834aed43b3b3e2866fcfc727",
"size": "6056",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "groovy/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "1674"
},
{
"name": "Perl",
"bytes": "6540"
},
{
"name": "Shell",
"bytes": "22370"
}
],
"symlink_target": ""
}
|
import { PureComponent } from 'react';
import { connect } from 'react-redux';
import PropTypes from 'prop-types';
import { withRouter } from 'next/router';
// actions
import {
getDatasetsByTab, setPaginationPage, setPaginationTotal, resetDatasets,
} from 'redactions/admin/datasets';
// components
import DatasetList from './dataset-list-component';
class DatasetListContainer extends PureComponent {
static propTypes = {
pathname: PropTypes.string.isRequired,
tab: PropTypes.string.isRequired,
subtab: PropTypes.string.isRequired,
orderDirection: PropTypes.string.isRequired,
pagination: PropTypes.object.isRequired,
getDatasetsByTab: PropTypes.func.isRequired,
setPaginationPage: PropTypes.func.isRequired,
setPaginationTotal: PropTypes.func.isRequired,
resetDatasets: PropTypes.func.isRequired,
router: PropTypes.shape({
asPath: PropTypes.string.isRequired,
}).isRequired,
}
UNSAFE_componentWillMount() {
this.props.getDatasetsByTab(this.props.subtab);
}
UNSAFE_componentWillReceiveProps(nextProps) {
const {
subtab,
orderDirection,
pagination,
router: {
asPath,
},
} = this.props;
const { page } = pagination;
const isMyRW = asPath.startsWith('/myrw');
const tabChanged = subtab !== nextProps.subtab;
const paginationPageChanged = page !== nextProps.pagination.page;
const orderDirectionChanged = orderDirection !== nextProps.orderDirection;
if (tabChanged || paginationPageChanged || orderDirectionChanged) {
if (isMyRW && nextProps.tab === 'datasets' && nextProps.subtab !== 'edit') {
this.props.getDatasetsByTab(nextProps.subtab);
}
}
}
componentWillUnmount() {
this.props.resetDatasets();
}
render() {
return (<DatasetList {...this.props} />);
}
}
const mapStateToProps = (state) => ({
datasets: state.datasets.datasets.list,
filters: state.datasets.datasets.filters,
loading: state.datasets.datasets.loading,
orderDirection: state.datasets.datasets.orderDirection,
pagination: state.datasets.datasets.pagination,
user: state.user,
locale: state.common.locale,
});
const mapDispatchToProps = {
getDatasetsByTab,
setPaginationPage,
setPaginationTotal,
resetDatasets,
};
export default connect(mapStateToProps, mapDispatchToProps)(withRouter(DatasetListContainer));
|
{
"content_hash": "cc9005864b733c65353b7bcf2a45c763",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 94,
"avg_line_length": 29.0609756097561,
"alnum_prop": 0.7184221569450273,
"repo_name": "resource-watch/resource-watch",
"id": "928e397b11bd8283f8e6bce795727452c1f4a321",
"size": "2383",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "components/app/myrw/datasets/pages/my-rw-datasets/dataset-list/index.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1093"
},
{
"name": "Dockerfile",
"bytes": "2884"
},
{
"name": "HTML",
"bytes": "35114"
},
{
"name": "JavaScript",
"bytes": "1907141"
},
{
"name": "SCSS",
"bytes": "265182"
},
{
"name": "Shell",
"bytes": "1960"
},
{
"name": "TypeScript",
"bytes": "339456"
}
],
"symlink_target": ""
}
|
<!doctype html>
<html class="no-js" lang="en">
<head>
<meta charset="utf-8" />
<meta http-equiv="x-ua-compatible" content="ie=edge" />
<title>flickr Stream</title>
<meta name="description" content="">
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" src="//normalize-css.googlecode.com/svn/trunk/normalize.min.css" />
<link rel="stylesheet" href="build/css/main.min.css" />
</head>
<body>
<header>
<h1>flickr Stream</h1>
</header>
<footer>
<input name="search" placeholder="Search photo titles" id="search-input" />
</footer>
<script src="build/js/main.min.js"></script>
</body>
</html>
|
{
"content_hash": "a2d6009b6780efc906c4a38f2df92211",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 94,
"avg_line_length": 27.03846153846154,
"alnum_prop": 0.6230440967283073,
"repo_name": "benhunsaker/fedexcercise",
"id": "7f111c21ac0797e9f0fb4b754b67a1b80879b87b",
"size": "703",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "5604"
},
{
"name": "HTML",
"bytes": "703"
},
{
"name": "JavaScript",
"bytes": "5938"
}
],
"symlink_target": ""
}
|
package de.isse.wcsp;
import java.util.Arrays;
public class Tuple {
int[] tupleValues;
int cost;
public Tuple(int[] nextTuple, int cost) {
tupleValues = Arrays.copyOf(nextTuple, nextTuple.length);
this.cost = cost;
}
public int[] getValues() {
return tupleValues;
}
public int getCost() {
return cost;
}
}
|
{
"content_hash": "b725454dabaca04a5bfd87c1066ee127",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 59,
"avg_line_length": 14.954545454545455,
"alnum_prop": 0.6808510638297872,
"repo_name": "nevik/constraint-relationships-csemiring",
"id": "b8a92a4b26d55f86dfc6c43d044ceb17f386d1de",
"size": "329",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/de/isse/wcsp/Tuple.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "271444"
}
],
"symlink_target": ""
}
|